/* SCC value numbering for trees
- Copyright (C) 2006, 2007, 2008
+ Copyright (C) 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
Contributed by Daniel Berlin <dan@dberlin.org>
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
-#include "diagnostic.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
-#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;
-static bool may_insert;
-
DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
{
vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
SSA_NAME_VERSION (name));
- gcc_assert (res);
+ gcc_checking_assert (res);
return res;
}
switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
{
case tcc_reference:
- if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
- || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
- || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
+ if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
+ || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
+ || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
+ && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
gimple_expr_type (def_stmt),
TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
+ if (vc1->hashcode != vc2->hashcode)
+ return false;
+
return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}
/* Hash table hash function for vn_constant_t. */
-
+
static hashval_t
vn_constant_hash (const void *p1)
{
get_or_alloc_constant_value_id (tree constant)
{
void **slot;
- vn_constant_t vc = XNEW (struct vn_constant_s);
-
- vc->hashcode = vn_hash_constant_with_type (constant);
- vc->constant = constant;
- slot = htab_find_slot_with_hash (constant_to_value_id, vc,
- vc->hashcode, INSERT);
+ struct vn_constant_s vc;
+ vn_constant_t vcp;
+
+ vc.hashcode = vn_hash_constant_with_type (constant);
+ vc.constant = constant;
+ slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
+ vc.hashcode, INSERT);
if (*slot)
- {
- free (vc);
- return ((vn_constant_t)*slot)->value_id;
- }
- vc->value_id = get_next_value_id ();
- *slot = vc;
- bitmap_set_bit (constant_value_ids, vc->value_id);
- return vc->value_id;
+ return ((vn_constant_t)*slot)->value_id;
+
+ vcp = XNEW (struct vn_constant_s);
+ vcp->hashcode = vc.hashcode;
+ vcp->constant = constant;
+ vcp->value_id = get_next_value_id ();
+ *slot = (void *) vcp;
+ bitmap_set_bit (constant_value_ids, vcp->value_id);
+ return vcp->value_id;
}
/* Return true if V is a value id for a constant. */
bool
value_id_constant_p (unsigned int v)
{
- return bitmap_bit_p (constant_value_ids, v);
+ return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality. Return true if
{
const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
+
return vro1->opcode == vro2->opcode
&& types_compatible_p (vro1->type, vro2->type)
&& expressions_equal_p (vro1->op0, vro2->op0)
/* Compute the hash for a reference operand VRO1. */
static hashval_t
-vn_reference_op_compute_hash (const vn_reference_op_t vro1)
-{
- return iterative_hash_expr (vro1->op0, vro1->opcode)
- + iterative_hash_expr (vro1->op1, vro1->opcode)
- + iterative_hash_expr (vro1->op2, vro1->opcode);
+vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
+{
+ result = iterative_hash_hashval_t (vro1->opcode, result);
+ if (vro1->op0)
+ result = iterative_hash_expr (vro1->op0, result);
+ if (vro1->op1)
+ result = iterative_hash_expr (vro1->op1, result);
+ if (vro1->op2)
+ result = iterative_hash_expr (vro1->op2, result);
+ return result;
}
/* Return the hashcode for a given reference operation P1. */
vn_reference_compute_hash (const vn_reference_t vr1)
{
hashval_t result = 0;
- tree v;
int i;
vn_reference_op_t vro;
+ HOST_WIDE_INT off = -1;
+ bool deref = false;
- for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
- result += iterative_hash_expr (v, 0);
for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
- result += vn_reference_op_compute_hash (vro);
+ {
+ if (vro->opcode == MEM_REF)
+ deref = true;
+ else if (vro->opcode != ADDR_EXPR)
+ deref = false;
+ if (vro->off != -1)
+ {
+ if (off == -1)
+ off = 0;
+ off += vro->off;
+ }
+ else
+ {
+ if (off != -1
+ && off != 0)
+ result = iterative_hash_hashval_t (off, result);
+ off = -1;
+ if (deref
+ && vro->opcode == ADDR_EXPR)
+ {
+ if (vro->op0)
+ {
+ tree op = TREE_OPERAND (vro->op0, 0);
+ result = iterative_hash_hashval_t (TREE_CODE (op), result);
+ result = iterative_hash_expr (op, result);
+ }
+ }
+ else
+ result = vn_reference_op_compute_hash (vro, result);
+ }
+ }
+ if (vr1->vuse)
+ result += SSA_NAME_VERSION (vr1->vuse);
return result;
}
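+/* For example, the equivalent references a.y and MEM[&a + 4] (with
+   field y at byte offset 4) hash identically: constant operand
+   offsets are accumulated and mixed in as a single value, and an
+   ADDR_EXPR operand below a dereference is hashed through the
+   object it takes the address of rather than the address itself.  */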
int
vn_reference_eq (const void *p1, const void *p2)
{
- tree v;
- int i;
- vn_reference_op_t vro;
+ unsigned i, j;
const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
- if (vr1->vuses == vr2->vuses
- && vr1->operands == vr2->operands)
- return true;
+ /* Early out if this is not a hash collision. */
+ if (vr1->hashcode != vr2->hashcode)
+ return false;
- /* Impossible for them to be equivalent if they have different
- number of vuses. */
- if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
+ /* The VOP needs to be the same. */
+ if (vr1->vuse != vr2->vuse)
return false;
- /* We require that address operands be canonicalized in a way that
- two memory references will have the same operands if they are
- equivalent. */
- if (VEC_length (vn_reference_op_s, vr1->operands)
- != VEC_length (vn_reference_op_s, vr2->operands))
+ /* If the operands are the same we are done. */
+ if (vr1->operands == vr2->operands)
+ return true;
+
+ if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
return false;
- /* The memory state is more often different than the address of the
- store/load, so check it first. */
- for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
+ if (INTEGRAL_TYPE_P (vr1->type)
+ && INTEGRAL_TYPE_P (vr2->type))
{
- if (VEC_index (tree, vr2->vuses, i) != v)
+ if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
return false;
}
+ else if (INTEGRAL_TYPE_P (vr1->type)
+ && (TYPE_PRECISION (vr1->type)
+ != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
+ return false;
+ else if (INTEGRAL_TYPE_P (vr2->type)
+ && (TYPE_PRECISION (vr2->type)
+ != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
+ return false;
- for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
+ i = 0;
+ j = 0;
+ do
{
- if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
- vro))
+ HOST_WIDE_INT off1 = 0, off2 = 0;
+ vn_reference_op_t vro1, vro2;
+ vn_reference_op_s tem1, tem2;
+ bool deref1 = false, deref2 = false;
+ for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
+ {
+ if (vro1->opcode == MEM_REF)
+ deref1 = true;
+ if (vro1->off == -1)
+ break;
+ off1 += vro1->off;
+ }
+ for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
+ {
+ if (vro2->opcode == MEM_REF)
+ deref2 = true;
+ if (vro2->off == -1)
+ break;
+ off2 += vro2->off;
+ }
+ if (off1 != off2)
return false;
+ if (deref1 && vro1->opcode == ADDR_EXPR)
+ {
+ memset (&tem1, 0, sizeof (tem1));
+ tem1.op0 = TREE_OPERAND (vro1->op0, 0);
+ tem1.type = TREE_TYPE (tem1.op0);
+ tem1.opcode = TREE_CODE (tem1.op0);
+ vro1 = &tem1;
+ }
+ if (deref2 && vro2->opcode == ADDR_EXPR)
+ {
+ memset (&tem2, 0, sizeof (tem2));
+ tem2.op0 = TREE_OPERAND (vro2->op0, 0);
+ tem2.type = TREE_TYPE (tem2.op0);
+ tem2.opcode = TREE_CODE (tem2.op0);
+ vro2 = &tem2;
+ }
+ if (!vn_reference_op_eq (vro1, vro2))
+ return false;
+ ++j;
+ ++i;
}
- return true;
-}
-
-/* Place the vuses from STMT into *result. */
+ while (VEC_length (vn_reference_op_s, vr1->operands) != i
+ || VEC_length (vn_reference_op_s, vr2->operands) != j);
-static inline void
-vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
-{
- ssa_op_iter iter;
- tree vuse;
-
- if (!stmt)
- return;
-
- VEC_reserve_exact (tree, gc, *result,
- num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
-
- FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
- VEC_quick_push (tree, *result, vuse);
-}
-
-
-/* Copy the VUSE names in STMT into a vector, and return
- the vector. */
-
-static VEC (tree, gc) *
-copy_vuses_from_stmt (gimple stmt)
-{
- VEC (tree, gc) *vuses = NULL;
-
- vuses_to_vec (stmt, &vuses);
-
- return vuses;
-}
-
-/* Place the vdefs from STMT into *result. */
-
-static inline void
-vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
-{
- ssa_op_iter iter;
- tree vdef;
-
- if (!stmt)
- return;
-
- *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
-
- FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
- VEC_quick_push (tree, *result, vdef);
-}
-
-/* Copy the names of vdef results in STMT into a vector, and return
- the vector. */
-
-static VEC (tree, gc) *
-copy_vdefs_from_stmt (gimple stmt)
-{
- VEC (tree, gc) *vdefs = NULL;
-
- vdefs_to_vec (stmt, &vdefs);
-
- return vdefs;
-}
-
-/* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
-static VEC (tree, gc) *shared_lookup_vops;
-
-/* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
- This function will overwrite the current SHARED_LOOKUP_VOPS
- variable. */
-
-VEC (tree, gc) *
-shared_vuses_from_stmt (gimple stmt)
-{
- VEC_truncate (tree, shared_lookup_vops, 0);
- vuses_to_vec (stmt, &shared_lookup_vops);
-
- return shared_lookup_vops;
+ return true;
}
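+/* The comparison loop above mirrors the hashing: operand runs with
+   known constant offsets only have to agree on their accumulated
+   offset, and an ADDR_EXPR operand below a dereference is replaced
+   by a temporary operand for the underlying object before it is
+   compared.  */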
/* Copy the operations present in load/store REF into RESULT, a vector of
if (TREE_CODE (ref) == TARGET_MEM_REF)
{
vn_reference_op_s temp;
+ tree base;
+
+ base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
+ if (!base)
+ base = null_pointer_node;
memset (&temp, 0, sizeof (temp));
/* We do not care for spurious type qualifications. */
temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
temp.opcode = TREE_CODE (ref);
- temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
- temp.op1 = TMR_INDEX (ref);
+ temp.op0 = TMR_INDEX (ref);
+ temp.op1 = TMR_STEP (ref);
+ temp.op2 = TMR_OFFSET (ref);
+ temp.off = -1;
VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
memset (&temp, 0, sizeof (temp));
temp.type = NULL_TREE;
- temp.opcode = TREE_CODE (ref);
- temp.op0 = TMR_STEP (ref);
- temp.op1 = TMR_OFFSET (ref);
+ temp.opcode = TREE_CODE (base);
+ temp.op0 = base;
+ temp.op1 = TMR_ORIGINAL (ref);
+ temp.off = -1;
VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
return;
}
/* We do not care for spurious type qualifications. */
temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
temp.opcode = TREE_CODE (ref);
+ temp.off = -1;
switch (temp.opcode)
{
- case ALIGN_INDIRECT_REF:
- case INDIRECT_REF:
- /* The only operand is the address, which gets its own
- vn_reference_op_s structure. */
- break;
case MISALIGNED_INDIRECT_REF:
temp.op0 = TREE_OPERAND (ref, 1);
break;
+ case MEM_REF:
+ /* The base address gets its own vn_reference_op_s structure. */
+ temp.op0 = TREE_OPERAND (ref, 1);
+ if (host_integerp (TREE_OPERAND (ref, 1), 0))
+ temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
+ break;
case BIT_FIELD_REF:
/* Record bits and position. */
temp.op0 = TREE_OPERAND (ref, 1);
a matching type is not necessary and a mismatching type
is always a spurious difference. */
temp.type = NULL_TREE;
- /* If this is a reference to a union member, record the union
- member size as operand. Do so only if we are doing
- expression insertion (during FRE), as PRE currently gets
- confused with this. */
- if (may_insert
- && TREE_OPERAND (ref, 2) == NULL_TREE
- && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
- && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
- && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
- temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
- else
- {
- /* Record field as operand. */
- temp.op0 = TREE_OPERAND (ref, 1);
- temp.op1 = TREE_OPERAND (ref, 2);
- }
+ temp.op0 = TREE_OPERAND (ref, 1);
+ temp.op1 = TREE_OPERAND (ref, 2);
+ {
+ tree this_offset = component_ref_field_offset (ref);
+ if (this_offset
+ && TREE_CODE (this_offset) == INTEGER_CST)
+ {
+ tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
+ if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
+ {
+ double_int off
+ = double_int_add (tree_to_double_int (this_offset),
+ double_int_sdiv
+ (tree_to_double_int (bit_offset),
+ uhwi_to_double_int (BITS_PER_UNIT),
+ TRUNC_DIV_EXPR));
+ if (double_int_fits_in_shwi_p (off))
+ temp.off = off.low;
+ }
+ }
+ }
break;
case ARRAY_RANGE_REF:
case ARRAY_REF:
/* Record index as operand. */
temp.op0 = TREE_OPERAND (ref, 1);
- temp.op1 = TREE_OPERAND (ref, 2);
- temp.op2 = TREE_OPERAND (ref, 3);
+ /* Always record lower bounds and element size. */
+ temp.op1 = array_ref_low_bound (ref);
+ temp.op2 = array_ref_element_size (ref);
+ if (TREE_CODE (temp.op0) == INTEGER_CST
+ && TREE_CODE (temp.op1) == INTEGER_CST
+ && TREE_CODE (temp.op2) == INTEGER_CST)
+ {
+ double_int off = tree_to_double_int (temp.op0);
+ off = double_int_add (off,
+ double_int_neg
+ (tree_to_double_int (temp.op1)));
+ off = double_int_mul (off, tree_to_double_int (temp.op2));
+ if (double_int_fits_in_shwi_p (off))
+ temp.off = off.low;
+ }
break;
case STRING_CST:
case INTEGER_CST:
ref in the chain of references (IE they require an
operand), so we don't have to put anything
for op* as it will be handled by the iteration */
- case IMAGPART_EXPR:
case REALPART_EXPR:
case VIEW_CONVERT_EXPR:
+ temp.off = 0;
+ break;
+ case IMAGPART_EXPR:
+ /* This is only interesting for its constant offset. */
+ temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
break;
default:
gcc_unreachable ();
}
}
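+/* For example, a.b[i] is recorded outermost-first as
+     { ARRAY_REF <i, low bound, element size>,
+       COMPONENT_REF <field b, offset of b>,
+       VAR_DECL <a> }
+   where each operand's OFF field is its constant byte offset, or -1
+   if the offset is not constant (as for the variable index i).  */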
-/* Re-create a reference tree from the reference ops OPS.
- Returns NULL_TREE if the ops were not handled.
- This routine needs to be kept in sync with copy_reference_ops_from_ref. */
+/* Build an alias-oracle reference abstraction in *REF from the vn_reference
+ operands in *OPS, the reference alias set SET and the reference type TYPE.
+ Return true if something useful was produced. */
-static tree
-get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
+bool
+ao_ref_init_from_vn_reference (ao_ref *ref,
+ alias_set_type set, tree type,
+ VEC (vn_reference_op_s, heap) *ops)
{
vn_reference_op_t op;
unsigned i;
- tree ref, *op0_p = &ref;
+ tree base = NULL_TREE;
+ tree *op0_p = &base;
+ HOST_WIDE_INT offset = 0;
+ HOST_WIDE_INT max_size;
+ HOST_WIDE_INT size = -1;
+ tree size_tree = NULL_TREE;
+ alias_set_type base_alias_set = -1;
+
+ /* First get the final access size from just the outermost expression. */
+ op = VEC_index (vn_reference_op_s, ops, 0);
+ if (op->opcode == COMPONENT_REF)
+ size_tree = DECL_SIZE (op->op0);
+ else if (op->opcode == BIT_FIELD_REF)
+ size_tree = op->op0;
+ else
+ {
+ enum machine_mode mode = TYPE_MODE (type);
+ if (mode == BLKmode)
+ size_tree = TYPE_SIZE (type);
+ else
+ size = GET_MODE_BITSIZE (mode);
+ }
+ if (size_tree != NULL_TREE)
+ {
+ if (!host_integerp (size_tree, 1))
+ size = -1;
+ else
+ size = TREE_INT_CST_LOW (size_tree);
+ }
+
+ /* Initially, maxsize is the same as the accessed element size.
+ In the following it will only grow (or become -1). */
+ max_size = size;
+ /* Compute cumulative bit-offset for nested component-refs and array-refs,
+ and find the ultimate containing object. */
for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
{
switch (op->opcode)
{
+ /* These may be in the reference ops, but we cannot do anything
+ sensible with them here. */
+ case ADDR_EXPR:
+ /* Apart from ADDR_EXPR arguments to MEM_REF. */
+ if (base != NULL_TREE
+ && TREE_CODE (base) == MEM_REF
+ && op->op0
+ && DECL_P (TREE_OPERAND (op->op0, 0)))
+ {
+ vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
+ base = TREE_OPERAND (op->op0, 0);
+ if (pop->off == -1)
+ {
+ max_size = -1;
+ offset = 0;
+ }
+ else
+ offset += pop->off * BITS_PER_UNIT;
+ op0_p = NULL;
+ break;
+ }
+ /* Fallthru. */
case CALL_EXPR:
- return NULL_TREE;
+ return false;
- case ALIGN_INDIRECT_REF:
- case INDIRECT_REF:
- *op0_p = build1 (op->opcode, op->type, NULL_TREE);
+ /* Record the base objects. */
+ case MISALIGNED_INDIRECT_REF:
+ *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
+ NULL_TREE, op->op0);
op0_p = &TREE_OPERAND (*op0_p, 0);
break;
- case MISALIGNED_INDIRECT_REF:
- *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
+ case MEM_REF:
+ base_alias_set = get_deref_alias_set (op->op0);
+ *op0_p = build2 (MEM_REF, op->type,
NULL_TREE, op->op0);
op0_p = &TREE_OPERAND (*op0_p, 0);
break;
+ case VAR_DECL:
+ case PARM_DECL:
+ case RESULT_DECL:
+ case SSA_NAME:
+ *op0_p = op->op0;
+ op0_p = NULL;
+ break;
+
+ /* And now the usual component-reference style ops. */
case BIT_FIELD_REF:
- *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
- op->op0, op->op1);
- op0_p = &TREE_OPERAND (*op0_p, 0);
+ offset += tree_low_cst (op->op1, 0);
break;
case COMPONENT_REF:
- *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
- op->op0, op->op1);
- op0_p = &TREE_OPERAND (*op0_p, 0);
- break;
+ {
+ tree field = op->op0;
+ /* We do not have a complete COMPONENT_REF tree here so we
+ cannot use component_ref_field_offset. Do the interesting
+ parts manually. */
+
+ if (op->op1
+ || !host_integerp (DECL_FIELD_OFFSET (field), 1))
+ max_size = -1;
+ else
+ {
+ offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
+ * BITS_PER_UNIT);
+ offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
+ }
+ break;
+ }
case ARRAY_RANGE_REF:
case ARRAY_REF:
- *op0_p = build4 (op->opcode, op->type, NULL_TREE,
- op->op0, op->op1, op->op2);
- op0_p = &TREE_OPERAND (*op0_p, 0);
+ /* We recorded the lower bound and the element size. */
+ if (!host_integerp (op->op0, 0)
+ || !host_integerp (op->op1, 0)
+ || !host_integerp (op->op2, 0))
+ max_size = -1;
+ else
+ {
+ HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
+ hindex -= TREE_INT_CST_LOW (op->op1);
+ hindex *= TREE_INT_CST_LOW (op->op2);
+ hindex *= BITS_PER_UNIT;
+ offset += hindex;
+ }
+ break;
+
+ case REALPART_EXPR:
+ break;
+
+ case IMAGPART_EXPR:
+ offset += size;
+ break;
+
+ case VIEW_CONVERT_EXPR:
break;
case STRING_CST:
case VECTOR_CST:
case REAL_CST:
case CONSTRUCTOR:
- case VAR_DECL:
- case PARM_DECL:
case CONST_DECL:
- case RESULT_DECL:
- case SSA_NAME:
- *op0_p = op->op0;
- break;
-
- case ADDR_EXPR:
- if (op->op0 != NULL_TREE)
- {
- gcc_assert (is_gimple_min_invariant (op->op0));
- *op0_p = op->op0;
- break;
- }
- /* Fallthrough. */
- case IMAGPART_EXPR:
- case REALPART_EXPR:
- case VIEW_CONVERT_EXPR:
- *op0_p = build1 (op->opcode, op->type, NULL_TREE);
- op0_p = &TREE_OPERAND (*op0_p, 0);
- break;
+ return false;
default:
- return NULL_TREE;
+ return false;
}
}
- return ref;
+ if (base == NULL_TREE)
+ return false;
+
+ ref->ref = NULL_TREE;
+ ref->base = base;
+ ref->offset = offset;
+ ref->size = size;
+ ref->max_size = max_size;
+ ref->ref_alias_set = set;
+ if (base_alias_set != -1)
+ ref->base_alias_set = base_alias_set;
+ else
+ ref->base_alias_set = get_alias_set (base);
+
+ return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
temp.opcode = CALL_EXPR;
temp.op0 = gimple_call_fn (call);
temp.op1 = gimple_call_chain (call);
+ temp.off = -1;
VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Copy the call arguments. As they can be references as well,
return result;
}
-static VEC(vn_reference_op_s, heap) *shared_lookup_references;
-
-/* Create a vector of vn_reference_op_s structures from REF, a
- REFERENCE_CLASS_P tree. The vector is shared among all callers of
- this function. */
+/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
+ *I_P to point to the last element of the replacement. */
+void
+vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
+ unsigned int *i_p)
+{
+ unsigned int i = *i_p;
+ vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
+ vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
+ tree addr_base;
+ HOST_WIDE_INT addr_offset;
+
+  /* The only thing we have to do is add the offset of .foo.bar within
+     OBJ to the preceding MEM_REF offset and replace the address with
+     &OBJ.  */
+ addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
+ &addr_offset);
+ gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
+ if (addr_base != op->op0)
+ {
+ double_int off = tree_to_double_int (mem_op->op0);
+ off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
+ off = double_int_add (off, shwi_to_double_int (addr_offset));
+ mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
+ op->op0 = build_fold_addr_expr (addr_base);
+ if (host_integerp (mem_op->op0, 0))
+ mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
+ else
+ mem_op->off = -1;
+ }
+}
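+/* For example, with field b at byte offset 8 in a, the operands for
+   MEM[&a.b + 4] are rewritten to those for MEM[&a + 12]: the unit
+   offset of .b is folded into the MEM_REF offset (and its OFF
+   field), and the address is replaced by &a.  */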
-static VEC(vn_reference_op_s, heap) *
-shared_reference_ops_from_ref (tree ref)
+/* Optimize the MEM_REF at position *I_P in a vn_reference_op_s
+   vector *OPS by looking through the definition of its SSA_NAME
+   address.  Updates *I_P to point to the last element of the
+   replacement.  */
+static void
+vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
+ unsigned int *i_p)
{
- if (!ref)
- return NULL;
- VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
- copy_reference_ops_from_ref (ref, &shared_lookup_references);
- return shared_lookup_references;
+ unsigned int i = *i_p;
+ vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
+ vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
+ gimple def_stmt;
+ enum tree_code code;
+ double_int off;
+
+ def_stmt = SSA_NAME_DEF_STMT (op->op0);
+ if (!is_gimple_assign (def_stmt))
+ return;
+
+ code = gimple_assign_rhs_code (def_stmt);
+ if (code != ADDR_EXPR
+ && code != POINTER_PLUS_EXPR)
+ return;
+
+ off = tree_to_double_int (mem_op->op0);
+ off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
+
+  /* The only thing we have to do is add the offset of .foo.bar within
+     OBJ to the preceding MEM_REF offset and replace the address with
+     &OBJ.  */
+ if (code == ADDR_EXPR)
+ {
+ tree addr, addr_base;
+ HOST_WIDE_INT addr_offset;
+
+ addr = gimple_assign_rhs1 (def_stmt);
+ addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
+ &addr_offset);
+ if (!addr_base
+ || TREE_CODE (addr_base) != MEM_REF)
+ return;
+
+ off = double_int_add (off, shwi_to_double_int (addr_offset));
+ off = double_int_add (off, mem_ref_offset (addr_base));
+ op->op0 = TREE_OPERAND (addr_base, 0);
+ }
+ else
+ {
+ tree ptr, ptroff;
+ ptr = gimple_assign_rhs1 (def_stmt);
+ ptroff = gimple_assign_rhs2 (def_stmt);
+ if (TREE_CODE (ptr) != SSA_NAME
+ || TREE_CODE (ptroff) != INTEGER_CST)
+ return;
+
+ off = double_int_add (off, tree_to_double_int (ptroff));
+ op->op0 = ptr;
+ }
+
+ mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
+ if (host_integerp (mem_op->op0, 0))
+ mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
+ else
+ mem_op->off = -1;
+ if (TREE_CODE (op->op0) == SSA_NAME)
+ {
+ op->op0 = SSA_VAL (op->op0);
+ if (TREE_CODE (op->op0) != SSA_NAME)
+ op->opcode = TREE_CODE (op->op0);
+ }
+
+ /* And recurse. */
+ if (TREE_CODE (op->op0) == SSA_NAME)
+ vn_reference_maybe_forwprop_address (ops, i_p);
+ else if (TREE_CODE (op->op0) == ADDR_EXPR)
+ vn_reference_fold_indirect (ops, i_p);
}
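+/* For example, if p_1 is defined as p_1 = q_2 + 8, a reference
+   MEM[p_1 + 4] is rewritten into MEM[q_2 + 12]; if q_2 is in turn
+   value-numbered to &a, the recursion above folds this further
+   into MEM[&a + 12].  */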
-/* Create a vector of vn_reference_op_s structures from CALL, a
- call statement. The vector is shared among all callers of
- this function. */
+/* Optimize the reference REF to a constant if possible or return
+ NULL_TREE if not. */
-static VEC(vn_reference_op_s, heap) *
-shared_reference_ops_from_call (gimple call)
+tree
+fully_constant_vn_reference_p (vn_reference_t ref)
{
- if (!call)
- return NULL;
- VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
- copy_reference_ops_from_call (call, &shared_lookup_references);
- return shared_lookup_references;
-}
+ VEC (vn_reference_op_s, heap) *operands = ref->operands;
+ vn_reference_op_t op;
+
+ /* Try to simplify the translated expression if it is
+ a call to a builtin function with at most two arguments. */
+ op = VEC_index (vn_reference_op_s, operands, 0);
+ if (op->opcode == CALL_EXPR
+ && TREE_CODE (op->op0) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
+ && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
+ && VEC_length (vn_reference_op_s, operands) >= 2
+ && VEC_length (vn_reference_op_s, operands) <= 3)
+ {
+ vn_reference_op_t arg0, arg1 = NULL;
+ bool anyconst = false;
+ arg0 = VEC_index (vn_reference_op_s, operands, 1);
+ if (VEC_length (vn_reference_op_s, operands) > 2)
+ arg1 = VEC_index (vn_reference_op_s, operands, 2);
+ if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
+ || (arg0->opcode == ADDR_EXPR
+ && is_gimple_min_invariant (arg0->op0)))
+ anyconst = true;
+ if (arg1
+ && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
+ || (arg1->opcode == ADDR_EXPR
+ && is_gimple_min_invariant (arg1->op0))))
+ anyconst = true;
+ if (anyconst)
+ {
+ tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
+ arg1 ? 2 : 1,
+ arg0->op0,
+ arg1 ? arg1->op0 : NULL);
+ if (folded
+ && TREE_CODE (folded) == NOP_EXPR)
+ folded = TREE_OPERAND (folded, 0);
+ if (folded
+ && is_gimple_min_invariant (folded))
+ return folded;
+ }
+ }
+
+ /* Simplify reads from constant strings. */
+ else if (op->opcode == ARRAY_REF
+ && TREE_CODE (op->op0) == INTEGER_CST
+ && integer_zerop (op->op1)
+ && VEC_length (vn_reference_op_s, operands) == 2)
+ {
+ vn_reference_op_t arg0;
+ arg0 = VEC_index (vn_reference_op_s, operands, 1);
+ if (arg0->opcode == STRING_CST
+ && (TYPE_MODE (op->type)
+ == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
+ && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
+ && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
+ && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
+ return build_int_cst_type (op->type,
+ (TREE_STRING_POINTER (arg0->op0)
+ [TREE_INT_CST_LOW (op->op0)]));
+ }
+ return NULL_TREE;
+}
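+/* For example, a char load from "abc"[1] simplifies to 'b', and a
+   call such as __builtin_strlen ("abc") with its constant argument
+   folds via build_call_expr to 3.  */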
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
structures into their value numbers. This is done in-place, and
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
vn_reference_op_t vro;
- int i;
+ unsigned int i;
for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
{
if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
vro->opcode = TREE_CODE (vro->op0);
}
- /* TODO: Do we want to valueize op2 and op1 of
- ARRAY_REF/COMPONENT_REF for Ada */
-
- }
-
- return orig;
-}
-
-/* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
- their value numbers. This is done in-place, and the vector passed
- in is returned. */
-
-static VEC (tree, gc) *
-valueize_vuses (VEC (tree, gc) *orig)
-{
- bool made_replacement = false;
- tree vuse;
- int i;
-
- for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
- {
- if (vuse != SSA_VAL (vuse))
+ if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
+ vro->op1 = SSA_VAL (vro->op1);
+ if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
+ vro->op2 = SSA_VAL (vro->op2);
+ /* If it transforms from an SSA_NAME to an address, fold with
+ a preceding indirect reference. */
+ if (i > 0
+ && vro->op0
+ && TREE_CODE (vro->op0) == ADDR_EXPR
+ && VEC_index (vn_reference_op_s,
+ orig, i - 1)->opcode == MEM_REF)
+ vn_reference_fold_indirect (&orig, &i);
+ else if (i > 0
+ && vro->opcode == SSA_NAME
+ && VEC_index (vn_reference_op_s,
+ orig, i - 1)->opcode == MEM_REF)
+ vn_reference_maybe_forwprop_address (&orig, &i);
+ /* If it transforms a non-constant ARRAY_REF into a constant
+ one, adjust the constant offset. */
+ else if (vro->opcode == ARRAY_REF
+ && vro->off == -1
+ && TREE_CODE (vro->op0) == INTEGER_CST
+ && TREE_CODE (vro->op1) == INTEGER_CST
+ && TREE_CODE (vro->op2) == INTEGER_CST)
{
- made_replacement = true;
- VEC_replace (tree, orig, i, SSA_VAL (vuse));
+ double_int off = tree_to_double_int (vro->op0);
+ off = double_int_add (off,
+ double_int_neg
+ (tree_to_double_int (vro->op1)));
+ off = double_int_mul (off, tree_to_double_int (vro->op2));
+ if (double_int_fits_in_shwi_p (off))
+ vro->off = off.low;
}
}
- if (made_replacement && VEC_length (tree, orig) > 1)
- sort_vuses (orig);
-
return orig;
}
-/* Return the single reference statement defining all virtual uses
- in VUSES or NULL_TREE, if there are multiple defining statements.
- Take into account only definitions that alias REF if following
- back-edges. */
-
-static gimple
-get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
-{
- gimple def_stmt;
- tree vuse;
- unsigned int i;
-
- gcc_assert (VEC_length (tree, vuses) >= 1);
+static VEC(vn_reference_op_s, heap) *shared_lookup_references;
- def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
- if (gimple_code (def_stmt) == GIMPLE_PHI)
- {
- /* We can only handle lookups over PHI nodes for a single
- virtual operand. */
- if (VEC_length (tree, vuses) == 1)
- {
- def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
- goto cont;
- }
- else
- return NULL;
- }
+/* Create a vector of vn_reference_op_s structures from REF, a
+ REFERENCE_CLASS_P tree. The vector is shared among all callers of
+ this function. */
- /* Verify each VUSE reaches the same defining stmt. */
- for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
- {
- gimple tmp = SSA_NAME_DEF_STMT (vuse);
- if (tmp != def_stmt)
- return NULL;
- }
+static VEC(vn_reference_op_s, heap) *
+valueize_shared_reference_ops_from_ref (tree ref)
+{
+ if (!ref)
+ return NULL;
+ VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+ copy_reference_ops_from_ref (ref, &shared_lookup_references);
+ shared_lookup_references = valueize_refs (shared_lookup_references);
+ return shared_lookup_references;
+}
- /* Now see if the definition aliases ref, and loop until it does. */
-cont:
- while (def_stmt
- && is_gimple_assign (def_stmt)
- && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
- def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);
+/* Create a vector of vn_reference_op_s structures from CALL, a
+ call statement. The vector is shared among all callers of
+ this function. */
- return def_stmt;
+static VEC(vn_reference_op_s, heap) *
+valueize_shared_reference_ops_from_call (gimple call)
+{
+ if (!call)
+ return NULL;
+ VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+ copy_reference_ops_from_call (call, &shared_lookup_references);
+ shared_lookup_references = valueize_refs (shared_lookup_references);
+ return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
*vnresult = (vn_reference_t)*slot;
return ((vn_reference_t)*slot)->result;
}
-
+
return NULL_TREE;
}
+static tree *last_vuse_ptr;
+
+/* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
+ with the current VUSE and performs the expression lookup. */
+
+static void *
+vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
+{
+ vn_reference_t vr = (vn_reference_t)vr_;
+ void **slot;
+ hashval_t hash;
+
+ if (last_vuse_ptr)
+ *last_vuse_ptr = vuse;
+
+  /* Fixup vuse and hash.  The hash adds in SSA_NAME_VERSION of the
+     VUSE last, so it can be adjusted by subtracting the old version
+     and adding the valueized one.  */
+ if (vr->vuse)
+ vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
+ vr->vuse = SSA_VAL (vuse);
+ if (vr->vuse)
+ vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
+
+ hash = vr->hashcode;
+ slot = htab_find_slot_with_hash (current_info->references, vr,
+ hash, NO_INSERT);
+ if (!slot && current_info == optimistic_info)
+ slot = htab_find_slot_with_hash (valid_info->references, vr,
+ hash, NO_INSERT);
+ if (slot)
+ return *slot;
+
+ return NULL;
+}
+
+/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
+ from the statement defining VUSE and if not successful tries to
+   translate *REF and VR_ through an aggregate copy at the definition
+ of VUSE. */
+
+static void *
+vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
+{
+ vn_reference_t vr = (vn_reference_t)vr_;
+ gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
+ tree fndecl;
+ tree base;
+ HOST_WIDE_INT offset, maxsize;
+
+  /* First try to disambiguate after value-replacing in the definition's LHS.  */
+ if (is_gimple_assign (def_stmt))
+ {
+ tree lhs = gimple_assign_lhs (def_stmt);
+ ao_ref ref1;
+ VEC (vn_reference_op_s, heap) *operands = NULL;
+ bool res = true;
+ copy_reference_ops_from_ref (lhs, &operands);
+ operands = valueize_refs (operands);
+ if (ao_ref_init_from_vn_reference (&ref1, get_alias_set (lhs),
+ TREE_TYPE (lhs), operands))
+ res = refs_may_alias_p_1 (ref, &ref1, true);
+ VEC_free (vn_reference_op_s, heap, operands);
+ if (!res)
+ return NULL;
+ }
+
+ base = ao_ref_base (ref);
+ offset = ref->offset;
+ maxsize = ref->max_size;
+
+ /* If we cannot constrain the size of the reference we cannot
+ test if anything kills it. */
+ if (maxsize == -1)
+ return (void *)-1;
+
+ /* def_stmt may-defs *ref. See if we can derive a value for *ref
+     from that definition.
+ 1) Memset. */
+ if (is_gimple_reg_type (vr->type)
+ && is_gimple_call (def_stmt)
+ && (fndecl = gimple_call_fndecl (def_stmt))
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
+ && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
+ && integer_zerop (gimple_call_arg (def_stmt, 1))
+ && host_integerp (gimple_call_arg (def_stmt, 2), 1)
+ && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
+ {
+ tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
+ tree base2;
+ HOST_WIDE_INT offset2, size2, maxsize2;
+ base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
+ size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
+ if ((unsigned HOST_WIDE_INT)size2 / 8
+ == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
+ && operand_equal_p (base, base2, 0)
+ && offset2 <= offset
+ && offset2 + size2 >= offset + maxsize)
+ {
+ tree val = fold_convert (vr->type, integer_zero_node);
+ unsigned int value_id = get_or_alloc_constant_value_id (val);
+ return vn_reference_insert_pieces (vuse, vr->set, vr->type,
+ VEC_copy (vn_reference_op_s,
+ heap, vr->operands),
+ val, value_id);
+ }
+ }
+
+ /* 2) Assignment from an empty CONSTRUCTOR. */
+ else if (is_gimple_reg_type (vr->type)
+ && gimple_assign_single_p (def_stmt)
+ && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
+ && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
+ {
+ tree base2;
+ HOST_WIDE_INT offset2, size2, maxsize2;
+ base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
+ &offset2, &size2, &maxsize2);
+ if (operand_equal_p (base, base2, 0)
+ && offset2 <= offset
+ && offset2 + size2 >= offset + maxsize)
+ {
+ tree val = fold_convert (vr->type, integer_zero_node);
+ unsigned int value_id = get_or_alloc_constant_value_id (val);
+ return vn_reference_insert_pieces (vuse, vr->set, vr->type,
+ VEC_copy (vn_reference_op_s,
+ heap, vr->operands),
+ val, value_id);
+ }
+ }
+
+ /* For aggregate copies translate the reference through them if
+ the copy kills ref. */
+ else if (gimple_assign_single_p (def_stmt)
+ && (DECL_P (gimple_assign_rhs1 (def_stmt))
+ || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
+ || handled_component_p (gimple_assign_rhs1 (def_stmt))))
+ {
+ tree base2;
+ HOST_WIDE_INT offset2, size2, maxsize2;
+ int i, j;
+ VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
+ vn_reference_op_t vro;
+ ao_ref r;
+
+ /* See if the assignment kills REF. */
+ base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
+ &offset2, &size2, &maxsize2);
+ if (!operand_equal_p (base, base2, 0)
+ || offset2 > offset
+ || offset2 + size2 < offset + maxsize)
+ return (void *)-1;
+
+ /* Find the common base of ref and the lhs. */
+ copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
+ i = VEC_length (vn_reference_op_s, vr->operands) - 1;
+ j = VEC_length (vn_reference_op_s, lhs) - 1;
+ while (j >= 0 && i >= 0
+ && vn_reference_op_eq (VEC_index (vn_reference_op_s,
+ vr->operands, i),
+ VEC_index (vn_reference_op_s, lhs, j)))
+ {
+ i--;
+ j--;
+ }
+
+ VEC_free (vn_reference_op_s, heap, lhs);
+ /* i now points to the first additional op.
+ ??? LHS may not be completely contained in VR, one or more
+ VIEW_CONVERT_EXPRs could be in its way. We could at least
+ try handling outermost VIEW_CONVERT_EXPRs. */
+ if (j != -1)
+ return (void *)-1;
+
+ /* Now re-write REF to be based on the rhs of the assignment. */
+ copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
+ /* We need to pre-pend vr->operands[0..i] to rhs. */
+ if (i + 1 + VEC_length (vn_reference_op_s, rhs)
+ > VEC_length (vn_reference_op_s, vr->operands))
+ {
+ VEC (vn_reference_op_s, heap) *old = vr->operands;
+ VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
+ i + 1 + VEC_length (vn_reference_op_s, rhs));
+ if (old == shared_lookup_references
+ && vr->operands != old)
+ shared_lookup_references = NULL;
+ }
+ else
+ VEC_truncate (vn_reference_op_s, vr->operands,
+ i + 1 + VEC_length (vn_reference_op_s, rhs));
+ for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
+ VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
+ VEC_free (vn_reference_op_s, heap, rhs);
+ vr->hashcode = vn_reference_compute_hash (vr);
+
+ /* Adjust *ref from the new operands. */
+ if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
+ return (void *)-1;
+ /* This can happen with bitfields. */
+ if (ref->size != r.size)
+ return (void *)-1;
+ *ref = r;
+
+ /* Do not update last seen VUSE after translating. */
+ last_vuse_ptr = NULL;
+
+ /* Keep looking for the adjusted *REF / VR pair. */
+ return NULL;
+ }
+
+ /* Bail out and stop walking. */
+ return (void *)-1;
+}
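+/* For example, a load of a.x after memset (&a, 0, sizeof (a)) is
+   value-numbered to zero by case 1), and a load of a.x after the
+   aggregate copy a = b is translated into a lookup of b.x which the
+   VUSE walk then continues with.  */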
/* Lookup a reference operation by its parts, in the current hash table.
Returns the resulting value number if it exists in the hash table,
vn_reference_t stored in the hashtable if something is found. */
tree
-vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
+vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
VEC (vn_reference_op_s, heap) *operands,
vn_reference_t *vnresult, bool maywalk)
{
struct vn_reference_s vr1;
- tree result;
- if (vnresult)
- *vnresult = NULL;
-
- vr1.vuses = valueize_vuses (vuses);
- vr1.operands = valueize_refs (operands);
+ vn_reference_t tmp;
+ tree cst;
+
+ if (!vnresult)
+ vnresult = &tmp;
+ *vnresult = NULL;
+
+ vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
+ VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+ VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
+ VEC_length (vn_reference_op_s, operands));
+ memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
+ VEC_address (vn_reference_op_s, operands),
+ sizeof (vn_reference_op_s)
+ * VEC_length (vn_reference_op_s, operands));
+ vr1.operands = operands = shared_lookup_references
+ = valueize_refs (shared_lookup_references);
+ vr1.type = type;
+ vr1.set = set;
vr1.hashcode = vn_reference_compute_hash (&vr1);
- result = vn_reference_lookup_1 (&vr1, vnresult);
+ if ((cst = fully_constant_vn_reference_p (&vr1)))
+ return cst;
- /* If there is a single defining statement for all virtual uses, we can
- use that, following virtual use-def chains. */
- if (!result
+ vn_reference_lookup_1 (&vr1, vnresult);
+ if (!*vnresult
&& maywalk
- && vr1.vuses
- && VEC_length (tree, vr1.vuses) >= 1)
- {
- tree ref = get_ref_from_reference_ops (operands);
- gimple def_stmt;
- if (ref
- && (def_stmt = get_def_ref_stmt_vuses (ref, vr1.vuses))
- && is_gimple_assign (def_stmt))
- {
- /* We are now at an aliasing definition for the vuses we want to
- look up. Re-do the lookup with the vdefs for this stmt. */
- vdefs_to_vec (def_stmt, &vuses);
- vr1.vuses = valueize_vuses (vuses);
- vr1.hashcode = vn_reference_compute_hash (&vr1);
- result = vn_reference_lookup_1 (&vr1, vnresult);
- }
+ && vr1.vuse)
+ {
+ ao_ref r;
+ if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
+ *vnresult =
+ (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
+ vn_reference_lookup_2,
+ vn_reference_lookup_3, &vr1);
+ if (vr1.operands != operands)
+ VEC_free (vn_reference_op_s, heap, vr1.operands);
}
- return result;
+ if (*vnresult)
+ return (*vnresult)->result;
+
+ return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
stored in the hashtable if one exists. */
tree
-vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
+vn_reference_lookup (tree op, tree vuse, bool maywalk,
vn_reference_t *vnresult)
{
+ VEC (vn_reference_op_s, heap) *operands;
struct vn_reference_s vr1;
- tree result;
- gimple def_stmt;
+ tree cst;
+
if (vnresult)
*vnresult = NULL;
- vr1.vuses = valueize_vuses (vuses);
- vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
+ vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
+ vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
+ vr1.type = TREE_TYPE (op);
+ vr1.set = get_alias_set (op);
vr1.hashcode = vn_reference_compute_hash (&vr1);
- result = vn_reference_lookup_1 (&vr1, vnresult);
+ if ((cst = fully_constant_vn_reference_p (&vr1)))
+ return cst;
+
+ if (maywalk
+ && vr1.vuse)
+ {
+ vn_reference_t wvnresult;
+ ao_ref r;
+ ao_ref_init (&r, op);
+ wvnresult =
+ (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
+ vn_reference_lookup_2,
+ vn_reference_lookup_3, &vr1);
+ if (vr1.operands != operands)
+ VEC_free (vn_reference_op_s, heap, vr1.operands);
+ if (wvnresult)
+ {
+ if (vnresult)
+ *vnresult = wvnresult;
+ return wvnresult->result;
+ }
- /* If there is a single defining statement for all virtual uses, we can
- use that, following virtual use-def chains. */
- if (!result
- && maywalk
- && vr1.vuses
- && VEC_length (tree, vr1.vuses) >= 1
- && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
- && is_gimple_assign (def_stmt))
- {
- /* We are now at an aliasing definition for the vuses we want to
- look up. Re-do the lookup with the vdefs for this stmt. */
- vdefs_to_vec (def_stmt, &vuses);
- vr1.vuses = valueize_vuses (vuses);
- vr1.hashcode = vn_reference_compute_hash (&vr1);
- result = vn_reference_lookup_1 (&vr1, vnresult);
+ return NULL_TREE;
}
- return result;
+ return vn_reference_lookup_1 (&vr1, vnresult);
}
RESULT, and return the resulting reference structure we created. */
vn_reference_t
-vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
+vn_reference_insert (tree op, tree result, tree vuse)
{
void **slot;
vn_reference_t vr1;
vr1->value_id = VN_INFO (result)->value_id;
else
vr1->value_id = get_or_alloc_constant_value_id (result);
- vr1->vuses = valueize_vuses (vuses);
+ vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
+ vr1->type = TREE_TYPE (op);
+ vr1->set = get_alias_set (op);
vr1->hashcode = vn_reference_compute_hash (vr1);
vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
structure we created. */
vn_reference_t
-vn_reference_insert_pieces (VEC (tree, gc) *vuses,
+vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
VEC (vn_reference_op_s, heap) *operands,
tree result, unsigned int value_id)
vn_reference_t vr1;
vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
- vr1->value_id = value_id;
- vr1->vuses = valueize_vuses (vuses);
+ vr1->value_id = value_id;
+ vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
vr1->operands = valueize_refs (operands);
+ vr1->type = type;
+ vr1->set = set;
vr1->hashcode = vn_reference_compute_hash (vr1);
if (result && TREE_CODE (result) == SSA_NAME)
result = SSA_VAL (result);
slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
INSERT);
-
+
/* At this point we should have all the things inserted that we have
- seen before, and we should never try inserting something that
- already exists. */
+ seen before, and we should never try inserting something that
+ already exists. */
gcc_assert (!*slot);
if (*slot)
free_reference (*slot);
/* Compute and return the hash value for nary operation VBO1. */
-inline hashval_t
+hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
- hashval_t hash = 0;
+ hashval_t hash;
unsigned i;
for (i = 0; i < vno1->length; ++i)
vno1->op[1] = temp;
}
+ hash = iterative_hash_hashval_t (vno1->opcode, 0);
for (i = 0; i < vno1->length; ++i)
- hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
+ hash = iterative_hash_expr (vno1->op[i], hash);
return hash;
}
const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
unsigned i;
+ if (vno1->hashcode != vno2->hashcode)
+ return false;
+
if (vno1->opcode != vno2->opcode
|| !types_compatible_p (vno1->type, vno2->type))
return false;
tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
tree type, tree op0, tree op1, tree op2,
- tree op3, vn_nary_op_t *vnresult)
+ tree op3, vn_nary_op_t *vnresult)
{
void **slot;
struct vn_nary_op_s vno1;
*vnresult = NULL;
vno1.opcode = gimple_assign_rhs_code (stmt);
vno1.length = gimple_num_ops (stmt) - 1;
- vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
+ vno1.type = gimple_expr_type (stmt);
for (i = 0; i < vno1.length; ++i)
vno1.op[i] = gimple_op (stmt, i + 1);
if (vno1.opcode == REALPART_EXPR
tree type, tree op0,
tree op1, tree op2, tree op3,
tree result,
- unsigned int value_id)
+ unsigned int value_id)
{
void **slot;
vn_nary_op_t vno1;
*slot = vno1;
return vno1;
-
+
}
/* Insert OP into the current hash table with a value number of
vno1->value_id = VN_INFO (result)->value_id;
vno1->opcode = gimple_assign_rhs_code (stmt);
vno1->length = length;
- vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
+ vno1->type = gimple_expr_type (stmt);
for (i = 0; i < vno1->length; ++i)
vno1->op[i] = gimple_op (stmt, i + 1);
if (vno1->opcode == REALPART_EXPR
static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
- hashval_t result = 0;
+ hashval_t result;
int i;
tree phi1op;
tree type;
{
if (phi1op == VN_TOP)
continue;
- result += iterative_hash_expr (phi1op, result);
+ result = iterative_hash_expr (phi1op, result);
}
return result;
const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
+ if (vp1->hashcode != vp2->hashcode)
+ return false;
+
if (vp1->block == vp2->block)
{
int i;
value number if it exists in the hash table. Return NULL_TREE if
it does not exist in the hash table. */
-tree
+static tree
vn_phi_lookup (gimple phi)
{
void **slot;
if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
{
- SSA_VAL (from) = to;
+ VN_INFO (from)->valnum = to;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " (changed)\n");
return true;
bool changed = false;
struct vn_reference_s vr1;
tree result;
+ tree vuse = gimple_vuse (stmt);
- vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt));
- vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt));
+ vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
+ vr1.operands = valueize_shared_reference_ops_from_call (stmt);
+ vr1.type = gimple_expr_type (stmt);
+ vr1.set = 0;
vr1.hashcode = vn_reference_compute_hash (&vr1);
result = vn_reference_lookup_1 (&vr1, NULL);
if (result)
vn_reference_t vr2;
changed = set_ssa_val_to (lhs, lhs);
vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
- vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt));
+ vr2->vuse = vr1.vuse;
vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
+ vr2->type = vr1.type;
+ vr2->set = vr1.set;
vr2->hashcode = vr1.hashcode;
vr2->result = lhs;
slot = htab_find_slot_with_hash (current_info->references,
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
bool changed = false;
- tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
- NULL);
+ tree last_vuse;
+ tree result;
+
+ last_vuse = gimple_vuse (stmt);
+ last_vuse_ptr = &last_vuse;
+ result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
+ last_vuse_ptr = NULL;
+
+ /* If we have a VCE, try looking up its operand as it might be stored in
+ a different type. */
+ if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
+ result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
+ true, NULL);
/* We handle type-punning through unions by value-numbering based
on offset and size of the access. Be prepared to handle a
tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
if ((CONVERT_EXPR_P (tem)
|| TREE_CODE (tem) == VIEW_CONVERT_EXPR)
- && (tem = fold_unary (TREE_CODE (val), TREE_TYPE (val), tem)))
+ && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
+ TREE_TYPE (val), tem)))
val = tem;
}
result = val;
result = vn_nary_op_lookup (val, NULL);
/* If the expression is not yet available, value-number lhs to
a new SSA_NAME we create. */
- if (!result && may_insert)
+ if (!result)
{
- result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
+ result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
/* Initialize value-number information properly. */
VN_INFO_GET (result)->valnum = result;
VN_INFO (result)->value_id = get_next_value_id ();
else
{
changed = set_ssa_val_to (lhs, lhs);
- vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
+ vn_reference_insert (op, lhs, last_vuse);
}
return changed;
Otherwise, the vdefs for the store are used when inserting into
the table, since the store generates a new memory state. */
- result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false,
- NULL);
+ result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
if (result)
{
if (!result || !resultsame)
{
- VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
- int i;
tree vdef;
if (dump_file && (dump_flags & TDF_DETAILS))
}
/* Have to set value numbers before insert, since insert is
going to valueize the references in-place. */
- for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
+ if ((vdef = gimple_vdef (stmt)))
{
VN_INFO (vdef)->use_processed = true;
changed |= set_ssa_val_to (vdef, vdef);
/* Do not insert structure copies into the tables. */
if (is_gimple_min_invariant (op)
|| is_gimple_reg (op))
- vn_reference_insert (lhs, op, vdefs);
+ vn_reference_insert (lhs, op, vdef);
}
else
{
- /* We had a match, so value number the vdefs to have the value
- number of the vuses they came from. */
- ssa_op_iter op_iter;
- def_operand_p var;
- vuse_vec_p vv;
+ /* We had a match, so value number the vdef to have the value
+ number of the vuse it came from. */
+ tree def, use;
if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");
- FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
- {
- tree def = DEF_FROM_PTR (var);
- tree use;
-
- /* Uh, if the vuse is a multiuse, we can't really do much
- here, sadly, since we don't know which value number of
- which vuse to use. */
- if (VUSE_VECT_NUM_ELEM (*vv) != 1)
- use = def;
- else
- use = VUSE_ELEMENT_VAR (*vv, 0);
+ def = gimple_vdef (stmt);
+ use = gimple_vuse (stmt);
- VN_INFO (def)->use_processed = true;
- changed |= set_ssa_val_to (def, SSA_VAL (use));
- }
+ VN_INFO (def)->use_processed = true;
+ changed |= set_ssa_val_to (def, SSA_VAL (use));
}
return changed;
case GIMPLE_BINARY_RHS:
return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
|| is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
+ case GIMPLE_TERNARY_RHS:
+ return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
+ || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
+ || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
case GIMPLE_SINGLE_RHS:
/* Constants inside reference ops are rarely interesting, but
it can take a lot of looking to find them. */
fold_defer_overflow_warnings ();
result = fold_binary (gimple_assign_rhs_code (stmt),
- TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
+ gimple_expr_type (stmt), op0, op1);
if (result)
STRIP_USELESS_TYPE_CONVERSION (result);
if (op0 == orig_op0)
return NULL_TREE;
- result = fold_unary (gimple_assign_rhs_code (stmt),
- gimple_expr_type (stmt), op0);
+ result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
+ gimple_expr_type (stmt), op0);
if (result)
{
STRIP_USELESS_TYPE_CONVERSION (result);
VN_INFO (lhs)->expr = NULL_TREE;
}
- if (TREE_CODE (lhs) == SSA_NAME
- /* We can substitute SSA_NAMEs that are live over
- abnormal edges with their constant value. */
- && !(gimple_assign_copy_p (stmt)
- && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
- && !(simplified
- && is_gimple_min_invariant (simplified))
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
+ if ((TREE_CODE (lhs) == SSA_NAME
+ /* We can substitute SSA_NAMEs that are live over
+ abnormal edges with their constant value. */
+ && !(gimple_assign_copy_p (stmt)
+ && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
+ && !(simplified
+ && is_gimple_min_invariant (simplified))
+ && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
+ /* Stores or copies from SSA_NAMEs that are live over
+ abnormal edges are a problem. */
+ || (gimple_assign_single_p (stmt)
+ && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+ && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
changed = defs_to_varying (stmt);
else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
{
basic_block bbb;
if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
- return 0;
+ return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
else if (gimple_nop_p (opstmta))
return -1;
else if (gimple_nop_p (opstmtb))
bbb = gimple_bb (opstmtb);
if (!bba && !bbb)
- return 0;
+ return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
else if (!bba)
return -1;
else if (!bbb)
{
if (gimple_code (opstmta) == GIMPLE_PHI
&& gimple_code (opstmtb) == GIMPLE_PHI)
- return 0;
+ return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
else if (gimple_code (opstmta) == GIMPLE_PHI)
return -1;
else if (gimple_code (opstmtb) == GIMPLE_PHI)
return 1;
- return gimple_uid (opstmta) - gimple_uid (opstmtb);
+ else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
+ return gimple_uid (opstmta) - gimple_uid (opstmtb);
+ else
+ return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
}
return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
compare_ops);
}
+/* Insert the no longer used nary ONARY into the hash tables INFO.  */
+
+static void
+copy_nary (vn_nary_op_t onary, vn_tables_t info)
+{
+ size_t size = (sizeof (struct vn_nary_op_s)
+ - sizeof (tree) * (4 - onary->length));
+ vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&info->nary_obstack, size);
+ void **slot;
+ memcpy (nary, onary, size);
+ slot = htab_find_slot_with_hash (info->nary, nary, nary->hashcode, INSERT);
+ gcc_assert (!*slot);
+ *slot = nary;
+}
+
+/* Insert the no longer used phi OPHI into the hash tables INFO.  */
+
+static void
+copy_phi (vn_phi_t ophi, vn_tables_t info)
+{
+ vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
+ void **slot;
+ memcpy (phi, ophi, sizeof (*phi));
+ ophi->phiargs = NULL;
+ slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
+ gcc_assert (!*slot);
+ *slot = phi;
+}
+
+/* Insert the no longer used reference OREF into the hash tables INFO.  */
+
+static void
+copy_reference (vn_reference_t oref, vn_tables_t info)
+{
+ vn_reference_t ref;
+ void **slot;
+ ref = (vn_reference_t) pool_alloc (info->references_pool);
+ memcpy (ref, oref, sizeof (*ref));
+ oref->operands = NULL;
+ slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
+ INSERT);
+ if (*slot)
+ free_reference (*slot);
+ *slot = ref;
+}
+
/* Process a strongly connected component in the SSA graph. */
static void
process_scc (VEC (tree, heap) *scc)
{
- /* If the SCC has a single member, just visit it. */
+ tree var;
+ unsigned int i;
+ unsigned int iterations = 0;
+ bool changed = true;
+ htab_iterator hi;
+ vn_nary_op_t nary;
+ vn_phi_t phi;
+ vn_reference_t ref;
+ /* If the SCC has a single member, just visit it. */
if (VEC_length (tree, scc) == 1)
{
tree use = VEC_index (tree, scc, 0);
if (!VN_INFO (use)->use_processed)
visit_use (use);
+ return;
}
- else
+
+ /* Iterate over the SCC with the optimistic table until it stops
+ changing. */
+ current_info = optimistic_info;
+ while (changed)
{
- tree var;
- unsigned int i;
- unsigned int iterations = 0;
- bool changed = true;
+ changed = false;
+ iterations++;
+ /* As we are value-numbering optimistically we have to
+ clear the expression tables and the simplified expressions
+ in each iteration until we converge. */
+ htab_empty (optimistic_info->nary);
+ htab_empty (optimistic_info->phis);
+ htab_empty (optimistic_info->references);
+ obstack_free (&optimistic_info->nary_obstack, NULL);
+ gcc_obstack_init (&optimistic_info->nary_obstack);
+ empty_alloc_pool (optimistic_info->phis_pool);
+ empty_alloc_pool (optimistic_info->references_pool);
+ for (i = 0; VEC_iterate (tree, scc, i, var); i++)
+ VN_INFO (var)->expr = NULL_TREE;
+ for (i = 0; VEC_iterate (tree, scc, i, var); i++)
+ changed |= visit_use (var);
+ }
- /* Iterate over the SCC with the optimistic table until it stops
- changing. */
- current_info = optimistic_info;
- while (changed)
- {
- changed = false;
- iterations++;
- /* As we are value-numbering optimistically we have to
- clear the expression tables and the simplified expressions
- in each iteration until we converge. */
- htab_empty (optimistic_info->nary);
- htab_empty (optimistic_info->phis);
- htab_empty (optimistic_info->references);
- obstack_free (&optimistic_info->nary_obstack, NULL);
- gcc_obstack_init (&optimistic_info->nary_obstack);
- empty_alloc_pool (optimistic_info->phis_pool);
- empty_alloc_pool (optimistic_info->references_pool);
- for (i = 0; VEC_iterate (tree, scc, i, var); i++)
- VN_INFO (var)->expr = NULL_TREE;
- for (i = 0; VEC_iterate (tree, scc, i, var); i++)
- changed |= visit_use (var);
- }
+ statistics_histogram_event (cfun, "SCC iterations", iterations);
- statistics_histogram_event (cfun, "SCC iterations", iterations);
+ /* Finally, copy the contents of the no longer used optimistic
+ table to the valid table. */
+ FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
+ copy_nary (nary, valid_info);
+ FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
+ copy_phi (phi, valid_info);
+ FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
+ copy_reference (ref, valid_info);
- /* Finally, visit the SCC once using the valid table. */
- current_info = valid_info;
- for (i = 0; VEC_iterate (tree, scc, i, var); i++)
- visit_use (var);
- }
+ current_info = valid_info;
}
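+/* The optimistic iteration is what makes facts from back-edges
+   usable; e.g. for the cycle
+     i_1 = PHI <0, i_2>
+     i_2 = i_1 & 0
+   the first iteration optimistically values i_1 as 0 (the VN_TOP
+   argument from the back-edge is ignored), which simplifies i_2 to
+   0 as well, and the second iteration verifies the fixpoint.  */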
DEF_VEC_O(ssa_op_iter);
usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
}
else
- iter.done = true;
+ clear_and_done_ssa_iter (&iter);
while (1)
{
sccstack = NULL;
constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
free);
-
+
constant_value_ids = BITMAP_ALLOC (NULL);
-
+
next_dfs_num = 1;
next_value_id = 1;
-
+
vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
/* VEC_alloc doesn't actually grow it to the right size, it just
preallocates the space to do so. */
gcc_obstack_init (&vn_ssa_aux_obstack);
shared_lookup_phiargs = NULL;
- shared_lookup_vops = NULL;
shared_lookup_references = NULL;
rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
htab_delete (constant_to_value_id);
BITMAP_FREE (constant_value_ids);
VEC_free (tree, heap, shared_lookup_phiargs);
- VEC_free (tree, gc, shared_lookup_vops);
VEC_free (vn_reference_op_s, heap, shared_lookup_references);
XDELETEVEC (rpo_numbers);
table. */
FOR_EACH_HTAB_ELEMENT (valid_info->nary,
- vno, vn_nary_op_t, hi)
+ vno, vn_nary_op_t, hi)
{
if (vno->result)
{
}
FOR_EACH_HTAB_ELEMENT (valid_info->phis,
- vp, vn_phi_t, hi)
+ vp, vn_phi_t, hi)
{
if (vp->result)
{
}
FOR_EACH_HTAB_ELEMENT (valid_info->references,
- vr, vn_reference_t, hi)
+ vr, vn_reference_t, hi)
{
if (vr->result)
{
due to resource constraints. */
bool
-run_scc_vn (bool may_insert_arg)
+run_scc_vn (void)
{
size_t i;
tree param;
bool changed = true;
-
- may_insert = may_insert_arg;
init_scc_vn ();
current_info = valid_info;
for (param = DECL_ARGUMENTS (current_function_decl);
param;
- param = TREE_CHAIN (param))
+ param = DECL_CHAIN (param))
{
if (gimple_default_def (cfun, param) != NULL)
{
tree def = gimple_default_def (cfun, param);
- SSA_VAL (def) = def;
+ VN_INFO (def)->valnum = def;
}
}
if (!DFS (name))
{
free_scc_vn ();
- may_insert = false;
return false;
}
}
/* Initialize the value ids. */
-
+
for (i = 1; i < num_ssa_names; ++i)
{
tree name = ssa_name (i);
if (!name)
continue;
info = VN_INFO (name);
- if (info->valnum == name)
+ if (info->valnum == name
+ || info->valnum == VN_TOP)
info->value_id = get_next_value_id ();
else if (is_gimple_min_invariant (info->valnum))
info->value_id = get_or_alloc_constant_value_id (info->valnum);
}
-
+
/* Propagate until they stop changing. */
while (changed)
{
}
}
}
-
+
set_hashtable_value_ids ();
-
+
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Value numbers:\n");
}
}
- may_insert = false;
return true;
}
/* Return the maximum value id we have ever seen. */
unsigned int
-get_max_value_id (void)
+get_max_value_id (void)
{
return next_value_id;
}
if (!e1 || !e2)
return false;
- /* Recurse on elements of lists. */
- if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
- {
- tree lop1 = e1;
- tree lop2 = e2;
- for (lop1 = e1, lop2 = e2;
- lop1 || lop2;
- lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
- {
- if (!lop1 || !lop2)
- return false;
- if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
- return false;
- }
- return true;
- }
-
/* Now perform the actual comparison. */
if (TREE_CODE (e1) == TREE_CODE (e2)
&& operand_equal_p (e1, e2, OEP_PURE_SAME))
return false;
}
-/* Sort the VUSE array so that we can do equality comparisons
- quicker on two vuse vecs. */
-void
-sort_vuses (VEC (tree,gc) *vuses)
+/* Return true if the nary operation NARY may trap. This is a copy
+ of stmt_could_throw_1_p adjusted to the SCCVN IL. */
+
+bool
+vn_nary_may_trap (vn_nary_op_t nary)
{
- if (VEC_length (tree, vuses) > 1)
- qsort (VEC_address (tree, vuses),
- VEC_length (tree, vuses),
- sizeof (tree),
- operand_build_cmp);
-}
+ tree type;
+ tree rhs2 = NULL_TREE;
+ bool honor_nans = false;
+ bool honor_snans = false;
+ bool fp_operation = false;
+ bool honor_trapv = false;
+ bool handled, ret;
+ unsigned i;
-/* Sort the VUSE array so that we can do equality comparisons
- quicker on two vuse vecs. */
+ if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
+ || TREE_CODE_CLASS (nary->opcode) == tcc_unary
+ || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
+ {
+ type = nary->type;
+ fp_operation = FLOAT_TYPE_P (type);
+ if (fp_operation)
+ {
+ honor_nans = flag_trapping_math && !flag_finite_math_only;
+ honor_snans = flag_signaling_nans != 0;
+ }
+ else if (INTEGRAL_TYPE_P (type)
+ && TYPE_OVERFLOW_TRAPS (type))
+ honor_trapv = true;
+ }
+ if (nary->length >= 2)
+ rhs2 = nary->op[1];
+ ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
+ honor_trapv,
+ honor_nans, honor_snans, rhs2,
+ &handled);
+ if (handled
+ && ret)
+ return true;
-void
-sort_vuses_heap (VEC (tree,heap) *vuses)
-{
- if (VEC_length (tree, vuses) > 1)
- qsort (VEC_address (tree, vuses),
- VEC_length (tree, vuses),
- sizeof (tree),
- operand_build_cmp);
+ for (i = 0; i < nary->length; ++i)
+ if (tree_could_trap_p (nary->op[i]))
+ return true;
+
+ return false;
}
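+/* For instance, a division nary may trap unless its divisor is a
+   known nonzero constant, and a float comparison may trap only when
+   signaling NaNs are honored; callers such as PRE use this to avoid
+   inserting potentially trapping expressions.  */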