#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
+#include "gimple-fold.h"
/* This algorithm is based on the SCC algorithm presented by Keith
Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
vn_ssa_aux_t vn = VN_INFO (name);
gimple def_stmt;
tree expr = NULL_TREE;
+ enum tree_code code;
if (vn->valnum == VN_TOP)
return name;
/* Otherwise use the defining statement to build the expression. */
def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
- /* If the value number is a default-definition or a PHI result
- use it directly. */
- if (gimple_nop_p (def_stmt)
- || gimple_code (def_stmt) == GIMPLE_PHI)
- return vn->valnum;
-
+ /* If the value number is not defined by an assignment use it directly. */
if (!is_gimple_assign (def_stmt))
return vn->valnum;
/* FIXME tuples. This is incomplete and likely will miss some
simplifications. */
- switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
+ code = gimple_assign_rhs_code (def_stmt);
+ switch (TREE_CODE_CLASS (code))
{
case tcc_reference:
- if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
- || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
- || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
- && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
- expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
+ if ((code == REALPART_EXPR
+ || code == IMAGPART_EXPR
+ || code == VIEW_CONVERT_EXPR)
+ && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
+ 0)) == SSA_NAME)
+ expr = fold_build1 (code,
gimple_expr_type (def_stmt),
TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
break;
case tcc_unary:
- expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
+ expr = fold_build1 (code,
gimple_expr_type (def_stmt),
gimple_assign_rhs1 (def_stmt));
break;
case tcc_binary:
- expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
+ expr = fold_build2 (code,
gimple_expr_type (def_stmt),
gimple_assign_rhs1 (def_stmt),
gimple_assign_rhs2 (def_stmt));
const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
- return vro1->opcode == vro2->opcode
- && types_compatible_p (vro1->type, vro2->type)
- && expressions_equal_p (vro1->op0, vro2->op0)
- && expressions_equal_p (vro1->op1, vro2->op1)
- && expressions_equal_p (vro1->op2, vro2->op2);
+ return (vro1->opcode == vro2->opcode
+ /* We do not care for differences in type qualification. */
+ && (vro1->type == vro2->type
+ || (vro1->type && vro2->type
+ && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
+ TYPE_MAIN_VARIANT (vro2->type))))
+ && expressions_equal_p (vro1->op0, vro2->op0)
+ && expressions_equal_p (vro1->op1, vro2->op1)
+ && expressions_equal_p (vro1->op2, vro2->op2));
}
/* Compute the hash for a reference operand VRO1. */
HOST_WIDE_INT off = -1;
bool deref = false;
- for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
+ FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
{
if (vro->opcode == MEM_REF)
deref = true;
if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
return false;
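+ /* Reads of integral types with padding bits cannot be unified with
+ differently represented reads of the same size, e.g. a _Bool has
+ 8-bit TYPE_SIZE but precision 1 and its padding bits are undefined. */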
+ if (INTEGRAL_TYPE_P (vr1->type)
+ && INTEGRAL_TYPE_P (vr2->type))
+ {
+ if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
+ return false;
+ }
+ else if (INTEGRAL_TYPE_P (vr1->type)
+ && (TYPE_PRECISION (vr1->type)
+ != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
+ return false;
+ else if (INTEGRAL_TYPE_P (vr2->type)
+ && (TYPE_PRECISION (vr2->type)
+ != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
+ return false;
+
i = 0;
j = 0;
do
if (TREE_CODE (ref) == TARGET_MEM_REF)
{
vn_reference_op_s temp;
- tree base;
-
- base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
- if (!base)
- base = null_pointer_node;
memset (&temp, 0, sizeof (temp));
- /* We do not care for spurious type qualifications. */
- temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
+ temp.type = TREE_TYPE (ref);
temp.opcode = TREE_CODE (ref);
temp.op0 = TMR_INDEX (ref);
temp.op1 = TMR_STEP (ref);
memset (&temp, 0, sizeof (temp));
temp.type = NULL_TREE;
- temp.opcode = TREE_CODE (base);
- temp.op0 = base;
- temp.op1 = TMR_ORIGINAL (ref);
+ temp.opcode = ERROR_MARK;
+ temp.op0 = TMR_INDEX2 (ref);
+ temp.off = -1;
+ VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+
+ memset (&temp, 0, sizeof (temp));
+ temp.type = NULL_TREE;
+ temp.opcode = TREE_CODE (TMR_BASE (ref));
+ temp.op0 = TMR_BASE (ref);
temp.off = -1;
VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
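+ /* A TARGET_MEM_REF is thus decomposed into three operand records:
+ the TMR itself with its index and step, one record for the second
+ index and a final record for the base address. */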
return;
vn_reference_op_s temp;
memset (&temp, 0, sizeof (temp));
- /* We do not care for spurious type qualifications. */
- temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
+ temp.type = TREE_TYPE (ref);
temp.opcode = TREE_CODE (ref);
temp.off = -1;
switch (temp.opcode)
{
- case MISALIGNED_INDIRECT_REF:
- temp.op0 = TREE_OPERAND (ref, 1);
- break;
case MEM_REF:
/* The base address gets its own vn_reference_op_s structure. */
temp.op0 = TREE_OPERAND (ref, 1);
{
double_int off
= double_int_add (tree_to_double_int (this_offset),
- double_int_sdiv
+ double_int_rshift
(tree_to_double_int (bit_offset),
- uhwi_to_double_int (BITS_PER_UNIT),
- TRUNC_DIV_EXPR));
+ BITS_PER_UNIT == 8
+ ? 3 : exact_log2 (BITS_PER_UNIT),
+ HOST_BITS_PER_DOUBLE_INT, true));
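+	      /* E.g. a this_offset of 4 bytes and a bit_offset of 16 bits
+		 yield off = 4 + (16 >> 3) = 6 bytes for BITS_PER_UNIT == 8. */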
if (double_int_fits_in_shwi_p (off))
temp.off = off.low;
}
temp.off = off.low;
}
break;
+ case VAR_DECL:
+ if (DECL_HARD_REGISTER (ref))
+ {
+ temp.op0 = ref;
+ break;
+ }
+ /* Fallthru. */
+ case PARM_DECL:
+ case CONST_DECL:
+ case RESULT_DECL:
+ /* Canonicalize decls to MEM[&decl] which is what we end up with
+ when valueizing MEM[ptr] with ptr = &decl. */
+ temp.opcode = MEM_REF;
+ temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
+ temp.off = 0;
+ VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+ temp.opcode = ADDR_EXPR;
+ temp.op0 = build_fold_addr_expr (ref);
+ temp.type = TREE_TYPE (temp.op0);
+ temp.off = -1;
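+ /* A load from plain 'a' thus becomes the operand sequence
+ { MEM_REF <(T *)0, off 0>, ADDR_EXPR <&a, off -1> }, exactly what
+ MEM[p_1] produces once p_1 valueizes to &a. */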
+ break;
case STRING_CST:
case INTEGER_CST:
case COMPLEX_CST:
case VECTOR_CST:
case REAL_CST:
+ case FIXED_CST:
case CONSTRUCTOR:
- case VAR_DECL:
- case PARM_DECL:
- case CONST_DECL:
- case RESULT_DECL:
case SSA_NAME:
temp.op0 = ref;
break;
/* Compute cumulative bit-offset for nested component-refs and array-refs,
and find the ultimate containing object. */
- for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
+ FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
{
switch (op->opcode)
{
return false;
/* Record the base objects. */
- case MISALIGNED_INDIRECT_REF:
- *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
- NULL_TREE, op->op0);
- op0_p = &TREE_OPERAND (*op0_p, 0);
- break;
-
case MEM_REF:
base_alias_set = get_deref_alias_set (op->op0);
*op0_p = build2 (MEM_REF, op->type,
else
mem_op->off = -1;
if (TREE_CODE (op->op0) == SSA_NAME)
- {
- op->op0 = SSA_VAL (op->op0);
- if (TREE_CODE (op->op0) != SSA_NAME)
- op->opcode = TREE_CODE (op->op0);
- }
+ op->op0 = SSA_VAL (op->op0);
+ if (TREE_CODE (op->op0) != SSA_NAME)
+ op->opcode = TREE_CODE (op->op0);
/* And recurse. */
if (TREE_CODE (op->op0) == SSA_NAME)
/* Transform any SSA_NAMEs in a vector of vn_reference_op_s
structures into their value numbers. This is done in-place, and
- the vector passed in is returned. */
+ the vector passed in is returned. *VALUEIZED_ANYTHING will specify
+ whether any operands were valueized. */
static VEC (vn_reference_op_s, heap) *
-valueize_refs (VEC (vn_reference_op_s, heap) *orig)
+valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
{
vn_reference_op_t vro;
unsigned int i;
- for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
+ *valueized_anything = false;
+
+ FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
{
if (vro->opcode == SSA_NAME
|| (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
{
- vro->op0 = SSA_VAL (vro->op0);
+ tree tem = SSA_VAL (vro->op0);
+ if (tem != vro->op0)
+ {
+ *valueized_anything = true;
+ vro->op0 = tem;
+ }
/* If it transforms from an SSA_NAME to a constant, update
the opcode. */
if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
vro->opcode = TREE_CODE (vro->op0);
}
if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
- vro->op1 = SSA_VAL (vro->op1);
+ {
+ tree tem = SSA_VAL (vro->op1);
+ if (tem != vro->op1)
+ {
+ *valueized_anything = true;
+ vro->op1 = tem;
+ }
+ }
if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
- vro->op2 = SSA_VAL (vro->op2);
+ {
+ tree tem = SSA_VAL (vro->op2);
+ if (tem != vro->op2)
+ {
+ *valueized_anything = true;
+ vro->op2 = tem;
+ }
+ }
/* If it transforms from an SSA_NAME to an address, fold with
a preceding indirect reference. */
if (i > 0
return orig;
}
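+
+/* Wrapper around valueize_refs_1 for callers that do not care
+ whether any operand was actually valueized. */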
+static VEC (vn_reference_op_s, heap) *
+valueize_refs (VEC (vn_reference_op_s, heap) *orig)
+{
+ bool tem;
+ return valueize_refs_1 (orig, &tem);
+}
+
static VEC(vn_reference_op_s, heap) *shared_lookup_references;
/* Create a vector of vn_reference_op_s structures from REF, a
REFERENCE_CLASS_P tree. The vector is shared among all callers of
- this function. */
+ this function. *VALUEIZED_ANYTHING will specify whether any
+ operands were valueized. */
static VEC(vn_reference_op_s, heap) *
-valueize_shared_reference_ops_from_ref (tree ref)
+valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
if (!ref)
return NULL;
VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
copy_reference_ops_from_ref (ref, &shared_lookup_references);
- shared_lookup_references = valueize_refs (shared_lookup_references);
+ shared_lookup_references = valueize_refs_1 (shared_lookup_references,
+ valueized_anything);
return shared_lookup_references;
}
}
static tree *last_vuse_ptr;
+static vn_lookup_kind vn_walk_kind;
+static vn_lookup_kind default_vn_walk_kind;
/* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
with the current VUSE and performs the expression lookup. */
{
vn_reference_t vr = (vn_reference_t)vr_;
gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
- tree fndecl;
tree base;
HOST_WIDE_INT offset, maxsize;
+ static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
+ ao_ref lhs_ref;
+ bool lhs_ref_ok = false;
/* First try to disambiguate after value-replacing in the definition's LHS. */
if (is_gimple_assign (def_stmt))
{
+ VEC (vn_reference_op_s, heap) *tem;
tree lhs = gimple_assign_lhs (def_stmt);
- ao_ref ref1;
- VEC (vn_reference_op_s, heap) *operands = NULL;
- bool res = true;
- copy_reference_ops_from_ref (lhs, &operands);
- operands = valueize_refs (operands);
- if (ao_ref_init_from_vn_reference (&ref1, get_alias_set (lhs),
- TREE_TYPE (lhs), operands))
- res = refs_may_alias_p_1 (ref, &ref1, true);
- VEC_free (vn_reference_op_s, heap, operands);
- if (!res)
- return NULL;
+ bool valueized_anything = false;
+ /* Avoid re-allocation overhead. */
+ VEC_truncate (vn_reference_op_s, lhs_ops, 0);
+ copy_reference_ops_from_ref (lhs, &lhs_ops);
+ tem = lhs_ops;
+ lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
+ gcc_assert (lhs_ops == tem);
+ if (valueized_anything)
+ {
+ lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
+ get_alias_set (lhs),
+ TREE_TYPE (lhs), lhs_ops);
+ if (lhs_ref_ok
+ && !refs_may_alias_p_1 (ref, &lhs_ref, true))
+ return NULL;
+ }
+ else
+ {
+ ao_ref_init (&lhs_ref, lhs);
+ lhs_ref_ok = true;
+ }
}
base = ao_ref_base (ref);
from that definition.
1) Memset. */
if (is_gimple_reg_type (vr->type)
- && is_gimple_call (def_stmt)
- && (fndecl = gimple_call_fndecl (def_stmt))
- && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
+ && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
&& integer_zerop (gimple_call_arg (def_stmt, 1))
&& host_integerp (gimple_call_arg (def_stmt, 2), 1)
&& TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
if ((unsigned HOST_WIDE_INT)size2 / 8
== TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
+ && maxsize2 != -1
&& operand_equal_p (base, base2, 0)
&& offset2 <= offset
&& offset2 + size2 >= offset + maxsize)
{
- tree val = fold_convert (vr->type, integer_zero_node);
+ tree val = build_zero_cst (vr->type);
unsigned int value_id = get_or_alloc_constant_value_id (val);
return vn_reference_insert_pieces (vuse, vr->set, vr->type,
VEC_copy (vn_reference_op_s,
HOST_WIDE_INT offset2, size2, maxsize2;
base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
&offset2, &size2, &maxsize2);
- if (operand_equal_p (base, base2, 0)
+ if (maxsize2 != -1
+ && operand_equal_p (base, base2, 0)
&& offset2 <= offset
&& offset2 + size2 >= offset + maxsize)
{
- tree val = fold_convert (vr->type, integer_zero_node);
+ tree val = build_zero_cst (vr->type);
unsigned int value_id = get_or_alloc_constant_value_id (val);
return vn_reference_insert_pieces (vuse, vr->set, vr->type,
VEC_copy (vn_reference_op_s,
}
}
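+ /* In both cases a read fully covered by the zeroing store thus
+ yields the constant zero of the looked-up type, e.g. a.f reads as
+ zero after 'memset (&a, 0, sizeof (a));'. */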
- /* For aggregate copies translate the reference through them if
+ /* 3) For aggregate copies translate the reference through them if
the copy kills ref. */
- else if (gimple_assign_single_p (def_stmt)
+ else if (vn_walk_kind == VN_WALKREWRITE
+ && gimple_assign_single_p (def_stmt)
&& (DECL_P (gimple_assign_rhs1 (def_stmt))
|| TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
|| handled_component_p (gimple_assign_rhs1 (def_stmt))))
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
int i, j;
- VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
+ VEC (vn_reference_op_s, heap) *rhs = NULL;
vn_reference_op_t vro;
ao_ref r;
+ if (!lhs_ref_ok)
+ return (void *)-1;
+
/* See if the assignment kills REF. */
- base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
- &offset2, &size2, &maxsize2);
- if (!operand_equal_p (base, base2, 0)
+ base2 = ao_ref_base (&lhs_ref);
+ offset2 = lhs_ref.offset;
+ size2 = lhs_ref.size;
+ maxsize2 = lhs_ref.max_size;
+ if (maxsize2 == -1
+ || (base != base2 && !operand_equal_p (base, base2, 0))
|| offset2 > offset
|| offset2 + size2 < offset + maxsize)
return (void *)-1;
- /* Find the common base of ref and the lhs. */
- copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
+ /* Find the common base of ref and the lhs. lhs_ops already
+ contains valueized operands for the lhs. */
i = VEC_length (vn_reference_op_s, vr->operands) - 1;
- j = VEC_length (vn_reference_op_s, lhs) - 1;
+ j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
while (j >= 0 && i >= 0
&& vn_reference_op_eq (VEC_index (vn_reference_op_s,
vr->operands, i),
- VEC_index (vn_reference_op_s, lhs, j)))
+ VEC_index (vn_reference_op_s, lhs_ops, j)))
{
i--;
j--;
}
- VEC_free (vn_reference_op_s, heap, lhs);
+ /* ??? The innermost op should always be a MEM_REF and we already
+ checked that the assignment to the lhs kills vr. Thus for
+ aggregate copies using char[] types the vn_reference_op_eq
+ may fail when comparing types for compatibility. But we really
+ don't care here - further lookups with the rewritten operands
+ will simply fail if we messed up types too badly. */
+ if (j == 0 && i >= 0
+ && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
+ && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
+ && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
+ == VEC_index (vn_reference_op_s, vr->operands, i)->off))
+ i--, j--;
+
/* i now points to the first additional op.
??? LHS may not be completely contained in VR, one or more
VIEW_CONVERT_EXPRs could be in its way. We could at least
else
VEC_truncate (vn_reference_op_s, vr->operands,
i + 1 + VEC_length (vn_reference_op_s, rhs));
- for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
+ FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
VEC_free (vn_reference_op_s, heap, rhs);
vr->hashcode = vn_reference_compute_hash (vr);
return NULL;
}
+ /* 4) For memcpy copies translate the reference through them if
+ the copy kills ref. */
+ else if (vn_walk_kind == VN_WALKREWRITE
+ && is_gimple_reg_type (vr->type)
+ /* ??? Handle BCOPY as well. */
+ && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
+ || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
+ || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
+ && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
+ || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
+ && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
+ || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
+ && host_integerp (gimple_call_arg (def_stmt, 2), 1))
+ {
+ tree lhs, rhs;
+ ao_ref r;
+ HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
+ vn_reference_op_s op;
+ HOST_WIDE_INT at;
+
+ /* Only handle non-variable, addressable refs. */
+ if (ref->size != maxsize
+ || offset % BITS_PER_UNIT != 0
+ || ref->size % BITS_PER_UNIT != 0)
+ return (void *)-1;
+
+ /* Extract a pointer base and an offset for the destination. */
+ lhs = gimple_call_arg (def_stmt, 0);
+ lhs_offset = 0;
+ if (TREE_CODE (lhs) == SSA_NAME)
+ lhs = SSA_VAL (lhs);
+ if (TREE_CODE (lhs) == ADDR_EXPR)
+ {
+ tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
+ &lhs_offset);
+ if (!tem)
+ return (void *)-1;
+ if (TREE_CODE (tem) == MEM_REF
+ && host_integerp (TREE_OPERAND (tem, 1), 1))
+ {
+ lhs = TREE_OPERAND (tem, 0);
+ lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
+ }
+ else if (DECL_P (tem))
+ lhs = build_fold_addr_expr (tem);
+ else
+ return (void *)-1;
+ }
+ if (TREE_CODE (lhs) != SSA_NAME
+ && TREE_CODE (lhs) != ADDR_EXPR)
+ return (void *)-1;
+
+ /* Extract a pointer base and an offset for the source. */
+ rhs = gimple_call_arg (def_stmt, 1);
+ rhs_offset = 0;
+ if (TREE_CODE (rhs) == SSA_NAME)
+ rhs = SSA_VAL (rhs);
+ if (TREE_CODE (rhs) == ADDR_EXPR)
+ {
+ tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
+ &rhs_offset);
+ if (!tem)
+ return (void *)-1;
+ if (TREE_CODE (tem) == MEM_REF
+ && host_integerp (TREE_OPERAND (tem, 1), 1))
+ {
+ rhs = TREE_OPERAND (tem, 0);
+ rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
+ }
+ else if (DECL_P (tem))
+ rhs = build_fold_addr_expr (tem);
+ else
+ return (void *)-1;
+ }
+ if (TREE_CODE (rhs) != SSA_NAME
+ && TREE_CODE (rhs) != ADDR_EXPR)
+ return (void *)-1;
+
+ copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
+
+ /* The bases of the destination and the reference have to agree. */
+ if ((TREE_CODE (base) != MEM_REF
+ && !DECL_P (base))
+ || (TREE_CODE (base) == MEM_REF
+ && (TREE_OPERAND (base, 0) != lhs
+ || !host_integerp (TREE_OPERAND (base, 1), 1)))
+ || (DECL_P (base)
+ && (TREE_CODE (lhs) != ADDR_EXPR
+ || TREE_OPERAND (lhs, 0) != base)))
+ return (void *)-1;
+
+ /* And the access has to be contained within the memcpy destination. */
+ at = offset / BITS_PER_UNIT;
+ if (TREE_CODE (base) == MEM_REF)
+ at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
+ if (lhs_offset > at
+ || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
+ return (void *)-1;
+
+ /* Make room for 2 operands in the new reference. */
+ if (VEC_length (vn_reference_op_s, vr->operands) < 2)
+ {
+ VEC (vn_reference_op_s, heap) *old = vr->operands;
+ VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
+ if (old == shared_lookup_references
+ && vr->operands != old)
+ shared_lookup_references = NULL;
+ }
+ else
+ VEC_truncate (vn_reference_op_s, vr->operands, 2);
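+ /* Growing may have re-allocated the operand vector; the check above
+ clears shared_lookup_references when its storage was re-used so no
+ stale pointer survives for later lookups. */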
+
+ /* The looked-through reference is a simple MEM_REF. */
+ memset (&op, 0, sizeof (op));
+ op.type = vr->type;
+ op.opcode = MEM_REF;
+ op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
+ op.off = at - lhs_offset + rhs_offset;
+ VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
+ op.type = TREE_TYPE (rhs);
+ op.opcode = TREE_CODE (rhs);
+ op.op0 = rhs;
+ op.off = -1;
+ VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
+ vr->hashcode = vn_reference_compute_hash (vr);
+
+ /* Adjust *ref from the new operands. */
+ if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
+ return (void *)-1;
+ /* This can happen with bitfields. */
+ if (ref->size != r.size)
+ return (void *)-1;
+ *ref = r;
+
+ /* Do not update last seen VUSE after translating. */
+ last_vuse_ptr = NULL;
+
+ /* Keep looking for the adjusted *REF / VR pair. */
+ return NULL;
+ }
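+ /* E.g. given 'memcpy (&a, &b, 16);' a later load of a.f at byte
+ offset 4 is rewritten above to MEM[(char *)&b + 4] (at == 4,
+ lhs_offset == rhs_offset == 0) and the walk continues with the
+ rewritten reference. */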
+
/* Bail out and stop walking. */
return (void *)-1;
}
tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
VEC (vn_reference_op_s, heap) *operands,
- vn_reference_t *vnresult, bool maywalk)
+ vn_reference_t *vnresult, vn_lookup_kind kind)
{
struct vn_reference_s vr1;
vn_reference_t tmp;
vn_reference_lookup_1 (&vr1, vnresult);
if (!*vnresult
- && maywalk
+ && kind != VN_NOWALK
&& vr1.vuse)
{
ao_ref r;
+ vn_walk_kind = kind;
if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
*vnresult =
(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
stored in the hashtable if one exists. */
tree
-vn_reference_lookup (tree op, tree vuse, bool maywalk,
+vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
vn_reference_t *vnresult)
{
VEC (vn_reference_op_s, heap) *operands;
struct vn_reference_s vr1;
tree cst;
+ bool valueized_anything;
if (vnresult)
*vnresult = NULL;
vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
- vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
+ vr1.operands = operands
+ = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
vr1.type = TREE_TYPE (op);
vr1.set = get_alias_set (op);
vr1.hashcode = vn_reference_compute_hash (&vr1);
if ((cst = fully_constant_vn_reference_p (&vr1)))
return cst;
- if (maywalk
+ if (kind != VN_NOWALK
&& vr1.vuse)
{
vn_reference_t wvnresult;
ao_ref r;
- ao_ref_init (&r, op);
+ /* Make sure to use a valueized reference if we valueized anything.
+ Otherwise preserve the full reference for advanced TBAA. */
+ if (!valueized_anything
+ || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
+ vr1.operands))
+ ao_ref_init (&r, op);
+ vn_walk_kind = kind;
wvnresult =
(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
vn_reference_lookup_2,
if (vno1->hashcode != vno2->hashcode)
return false;
+ if (vno1->length != vno2->length)
+ return false;
+
if (vno1->opcode != vno2->opcode
|| !types_compatible_p (vno1->type, vno2->type))
return false;
return true;
}
-/* Lookup a n-ary operation by its pieces and return the resulting value
- number if it exists in the hash table. Return NULL_TREE if it does
- not exist in the hash table or if the result field of the operation
- is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
- if it exists. */
+/* Initialize VNO from the pieces provided. */
-tree
-vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
- tree type, tree op0, tree op1, tree op2,
- tree op3, vn_nary_op_t *vnresult)
+static void
+init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
+ enum tree_code code, tree type, tree *ops)
+{
+ vno->opcode = code;
+ vno->length = length;
+ vno->type = type;
+ memcpy (&vno->op[0], ops, sizeof (tree) * length);
+}
+
+/* Initialize VNO from OP. */
+
+static void
+init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
+{
+ unsigned i;
+
+ vno->opcode = TREE_CODE (op);
+ vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
+ vno->type = TREE_TYPE (op);
+ for (i = 0; i < vno->length; ++i)
+ vno->op[i] = TREE_OPERAND (op, i);
+}
+
+/* Return the number of operands for a vn_nary ops structure from STMT. */
+
+static unsigned int
+vn_nary_length_from_stmt (gimple stmt)
+{
+ switch (gimple_assign_rhs_code (stmt))
+ {
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
+ case VIEW_CONVERT_EXPR:
+ return 1;
+
+ case CONSTRUCTOR:
+ return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
+
+ default:
+ return gimple_num_ops (stmt) - 1;
+ }
+}
+
+/* Initialize VNO from STMT. */
+
+static void
+init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
+{
+ unsigned i;
+
+ vno->opcode = gimple_assign_rhs_code (stmt);
+ vno->type = gimple_expr_type (stmt);
+ switch (vno->opcode)
+ {
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
+ case VIEW_CONVERT_EXPR:
+ vno->length = 1;
+ vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
+ break;
+
+ case CONSTRUCTOR:
+ vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
+ for (i = 0; i < vno->length; ++i)
+ vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
+ break;
+
+ default:
+ vno->length = gimple_num_ops (stmt) - 1;
+ for (i = 0; i < vno->length; ++i)
+ vno->op[i] = gimple_op (stmt, i + 1);
+ }
+}
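+/* E.g. 'x_3 = {a_1, b_2}' (a two-element CONSTRUCTOR) is keyed as
+ opcode CONSTRUCTOR, length 2 and ops {a_1, b_2}, so constructors of
+ the same elements share one value number. */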
+
+/* Compute the hashcode for VNO and look for it in the hash table;
+ return the resulting value number if it exists in the hash table.
+ Return NULL_TREE if it does not exist in the hash table or if the
+ result field of the operation is NULL. VNRESULT will contain the
+ vn_nary_op_t from the hashtable if it exists. */
+
+static tree
+vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
void **slot;
- struct vn_nary_op_s vno1;
+
if (vnresult)
*vnresult = NULL;
- vno1.opcode = code;
- vno1.length = length;
- vno1.type = type;
- vno1.op[0] = op0;
- vno1.op[1] = op1;
- vno1.op[2] = op2;
- vno1.op[3] = op3;
- vno1.hashcode = vn_nary_op_compute_hash (&vno1);
- slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
+
+ vno->hashcode = vn_nary_op_compute_hash (vno);
+ slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
NO_INSERT);
if (!slot && current_info == optimistic_info)
- slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
+ slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
NO_INSERT);
if (!slot)
return NULL_TREE;
return ((vn_nary_op_t)*slot)->result;
}
+/* Lookup an n-ary operation by its pieces and return the resulting value
+ number if it exists in the hash table. Return NULL_TREE if it does
+ not exist in the hash table or if the result field of the operation
+ is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
+ if it exists. */
+
+tree
+vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
+ tree type, tree *ops, vn_nary_op_t *vnresult)
+{
+ vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
+ sizeof_vn_nary_op (length));
+ init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
+ return vn_nary_op_lookup_1 (vno1, vnresult);
+}
+
/* Lookup OP in the current hash table, and return the resulting value
number if it exists in the hash table. Return NULL_TREE if it does
not exist in the hash table or if the result field of the operation
tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
- void **slot;
- struct vn_nary_op_s vno1;
- unsigned i;
-
- if (vnresult)
- *vnresult = NULL;
- vno1.opcode = TREE_CODE (op);
- vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
- vno1.type = TREE_TYPE (op);
- for (i = 0; i < vno1.length; ++i)
- vno1.op[i] = TREE_OPERAND (op, i);
- vno1.hashcode = vn_nary_op_compute_hash (&vno1);
- slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
- NO_INSERT);
- if (!slot && current_info == optimistic_info)
- slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
- NO_INSERT);
- if (!slot)
- return NULL_TREE;
- if (vnresult)
- *vnresult = (vn_nary_op_t)*slot;
- return ((vn_nary_op_t)*slot)->result;
+ vn_nary_op_t vno1
+ = XALLOCAVAR (struct vn_nary_op_s,
+ sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
+ init_vn_nary_op_from_op (vno1, op);
+ return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
- void **slot;
- struct vn_nary_op_s vno1;
- unsigned i;
+ vn_nary_op_t vno1
+ = XALLOCAVAR (struct vn_nary_op_s,
+ sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
+ init_vn_nary_op_from_stmt (vno1, stmt);
+ return vn_nary_op_lookup_1 (vno1, vnresult);
+}
- if (vnresult)
- *vnresult = NULL;
- vno1.opcode = gimple_assign_rhs_code (stmt);
- vno1.length = gimple_num_ops (stmt) - 1;
- vno1.type = gimple_expr_type (stmt);
- for (i = 0; i < vno1.length; ++i)
- vno1.op[i] = gimple_op (stmt, i + 1);
- if (vno1.opcode == REALPART_EXPR
- || vno1.opcode == IMAGPART_EXPR
- || vno1.opcode == VIEW_CONVERT_EXPR)
- vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
- vno1.hashcode = vn_nary_op_compute_hash (&vno1);
- slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
- NO_INSERT);
- if (!slot && current_info == optimistic_info)
- slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
- NO_INSERT);
- if (!slot)
- return NULL_TREE;
- if (vnresult)
- *vnresult = (vn_nary_op_t)*slot;
- return ((vn_nary_op_t)*slot)->result;
+/* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
+
+static vn_nary_op_t
+alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
+{
+ return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}
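+
+/* sizeof_vn_nary_op, defined next to struct vn_nary_op_s in
+ tree-ssa-sccvn.h, sizes the node for a trailing array of LENGTH
+ operand trees, so nodes of any arity come out of the obstack in a
+ single allocation. */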
-/* Insert a n-ary operation into the current hash table using it's
- pieces. Return the vn_nary_op_t structure we created and put in
- the hashtable. */
+/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
+ obstack. */
-vn_nary_op_t
-vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
- tree type, tree op0,
- tree op1, tree op2, tree op3,
- tree result,
- unsigned int value_id)
+static vn_nary_op_t
+alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
- void **slot;
- vn_nary_op_t vno1;
+ vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
+ &current_info->nary_obstack);
- vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
- (sizeof (struct vn_nary_op_s)
- - sizeof (tree) * (4 - length)));
vno1->value_id = value_id;
- vno1->opcode = code;
vno1->length = length;
- vno1->type = type;
- if (length >= 1)
- vno1->op[0] = op0;
- if (length >= 2)
- vno1->op[1] = op1;
- if (length >= 3)
- vno1->op[2] = op2;
- if (length >= 4)
- vno1->op[3] = op3;
vno1->result = result;
- vno1->hashcode = vn_nary_op_compute_hash (vno1);
- slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
- INSERT);
- gcc_assert (!*slot);
- *slot = vno1;
return vno1;
+}
+
+/* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
+ VNO->HASHCODE first. */
+
+static vn_nary_op_t
+vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
+{
+ void **slot;
+
+ if (compute_hash)
+ vno->hashcode = vn_nary_op_compute_hash (vno);
+ slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
+ gcc_assert (!*slot);
+
+ *slot = vno;
+ return vno;
+}
+
+/* Insert an n-ary operation into the current hash table using its
+ pieces. Return the vn_nary_op_t structure we created and put in
+ the hashtable. */
+
+vn_nary_op_t
+vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
+ tree type, tree *ops,
+ tree result, unsigned int value_id)
+{
+ vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
+ init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
+ return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
/* Insert OP into the current hash table with a value number of
vn_nary_op_insert (tree op, tree result)
{
unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
- void **slot;
vn_nary_op_t vno1;
- unsigned i;
-
- vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
- (sizeof (struct vn_nary_op_s)
- - sizeof (tree) * (4 - length)));
- vno1->value_id = VN_INFO (result)->value_id;
- vno1->opcode = TREE_CODE (op);
- vno1->length = length;
- vno1->type = TREE_TYPE (op);
- for (i = 0; i < vno1->length; ++i)
- vno1->op[i] = TREE_OPERAND (op, i);
- vno1->result = result;
- vno1->hashcode = vn_nary_op_compute_hash (vno1);
- slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
- INSERT);
- gcc_assert (!*slot);
- *slot = vno1;
- return vno1;
+ vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
+ init_vn_nary_op_from_op (vno1, op);
+ return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
/* Insert the rhs of STMT into the current hash table with a value number of
vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
- unsigned length = gimple_num_ops (stmt) - 1;
- void **slot;
- vn_nary_op_t vno1;
- unsigned i;
-
- vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
- (sizeof (struct vn_nary_op_s)
- - sizeof (tree) * (4 - length)));
- vno1->value_id = VN_INFO (result)->value_id;
- vno1->opcode = gimple_assign_rhs_code (stmt);
- vno1->length = length;
- vno1->type = gimple_expr_type (stmt);
- for (i = 0; i < vno1->length; ++i)
- vno1->op[i] = gimple_op (stmt, i + 1);
- if (vno1->opcode == REALPART_EXPR
- || vno1->opcode == IMAGPART_EXPR
- || vno1->opcode == VIEW_CONVERT_EXPR)
- vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
- vno1->result = result;
- vno1->hashcode = vn_nary_op_compute_hash (vno1);
- slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
- INSERT);
- gcc_assert (!*slot);
-
- *slot = vno1;
- return vno1;
+ vn_nary_op_t vno1
+ = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
+ result, VN_INFO (result)->value_id);
+ init_vn_nary_op_from_stmt (vno1, stmt);
+ return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
/* Compute a hashcode for PHI operation VP1 and return it. */
+ (INTEGRAL_TYPE_P (type)
? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
- for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
+ FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
{
if (phi1op == VN_TOP)
continue;
/* Any phi in the same block will have its arguments in the
same edge order, because of how we store phi nodes. */
- for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
+ FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
{
tree phi2op = VEC_index (tree, vp2->phiargs, i);
if (phi1op == VN_TOP || phi2op == VN_TOP)
unsigned int i;
fprintf (out, "SCC consists of: ");
- for (i = 0; VEC_iterate (tree, scc, i, var); i++)
+ FOR_EACH_VEC_ELT (tree, scc, i, var)
{
print_generic_expr (out, var, 0);
fprintf (out, " ");
static inline bool
set_ssa_val_to (tree from, tree to)
{
- tree currval;
+ tree currval = SSA_VAL (from);
- if (from != to
- && TREE_CODE (to) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
- to = from;
+ if (from != to)
+ {
+ if (currval == from)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Not changing value number of ");
+ print_generic_expr (dump_file, from, 0);
+ fprintf (dump_file, " from VARYING to ");
+ print_generic_expr (dump_file, to, 0);
+ fprintf (dump_file, "\n");
+ }
+ return false;
+ }
+ else if (TREE_CODE (to) == SSA_NAME
+ && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
+ to = from;
+ }
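+
+ /* The lattice thus only moves VN_TOP -> constant/copy -> VARYING,
+ where VARYING is represented by a name valuing to itself; refusing
+ to leave VARYING keeps the iteration monotonic. */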
/* The only things we allow as value numbers are VN_TOP, ssa_names
and invariants. So assert that here. */
print_generic_expr (dump_file, to, 0);
}
- currval = SSA_VAL (from);
-
if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
{
VN_INFO (from)->valnum = to;
return set_ssa_val_to (lhs, rhs);
}
-/* Visit a unary operator RHS, value number it, and return true if the
+/* Visit an n-ary operator RHS, value number it, and return true if the
value number of LHS has changed as a result. */
static bool
-visit_unary_op (tree lhs, gimple stmt)
+visit_nary_op (tree lhs, gimple stmt)
{
bool changed = false;
tree result = vn_nary_op_lookup_stmt (stmt, NULL);
if (result)
- {
- changed = set_ssa_val_to (lhs, result);
- }
- else
- {
- changed = set_ssa_val_to (lhs, lhs);
- vn_nary_op_insert_stmt (stmt, lhs);
- }
-
- return changed;
-}
-
-/* Visit a binary operator RHS, value number it, and return true if the
- value number of LHS has changed as a result. */
-
-static bool
-visit_binary_op (tree lhs, gimple stmt)
-{
- bool changed = false;
- tree result = vn_nary_op_lookup_stmt (stmt, NULL);
-
- if (result)
- {
- changed = set_ssa_val_to (lhs, result);
- }
+ changed = set_ssa_val_to (lhs, result);
else
{
changed = set_ssa_val_to (lhs, lhs);
last_vuse = gimple_vuse (stmt);
last_vuse_ptr = &last_vuse;
- result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
+ result = vn_reference_lookup (op, gimple_vuse (stmt),
+ default_vn_walk_kind, NULL);
last_vuse_ptr = NULL;
/* If we have a VCE, try looking up its operand as it might be stored in
a different type. */
if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
- true, NULL);
+ default_vn_walk_kind, NULL);
/* We handle type-punning through unions by value-numbering based
on offset and size of the access. Be prepared to handle a
Otherwise, the vdefs for the store are used when inserting into
the table, since the store generates a new memory state. */
- result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
+ result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
if (result)
{
{
switch (TREE_CODE_CLASS (TREE_CODE (expr)))
{
- case tcc_unary:
- if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
- && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
- TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
- break;
case tcc_binary:
- if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
- && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
- TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
- if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
- && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
- TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
- break;
- default:
+ TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
+ /* Fallthru. */
+ case tcc_unary:
+ TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
break;
+ default:;
}
return expr;
}
tree result = NULL_TREE;
tree op0 = gimple_assign_rhs1 (stmt);
tree op1 = gimple_assign_rhs2 (stmt);
+ enum tree_code code = gimple_assign_rhs_code (stmt);
/* This will not catch every single case we could combine, but will
catch those with constants. The goal here is to simultaneously
if (TREE_CODE (op0) == SSA_NAME)
{
if (VN_INFO (op0)->has_constants
- || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
+ || TREE_CODE_CLASS (code) == tcc_comparison
+ || code == COMPLEX_EXPR)
op0 = valueize_expr (vn_get_expr_for (op0));
- else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
- op0 = SSA_VAL (op0);
+ else
+ op0 = vn_valueize (op0);
}
if (TREE_CODE (op1) == SSA_NAME)
{
- if (VN_INFO (op1)->has_constants)
+ if (VN_INFO (op1)->has_constants
+ || code == COMPLEX_EXPR)
op1 = valueize_expr (vn_get_expr_for (op1));
- else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
- op1 = SSA_VAL (op1);
+ else
+ op1 = vn_valueize (op1);
}
+ /* A pointer plus a constant can be represented as an invariant
+ address. Do so to allow further propagation, see also tree forwprop. */
+ if (code == POINTER_PLUS_EXPR
+ && host_integerp (op1, 1)
+ && TREE_CODE (op0) == ADDR_EXPR
+ && is_gimple_min_invariant (op0))
+ return build_invariant_address (TREE_TYPE (op0),
+ TREE_OPERAND (op0, 0),
+ TREE_INT_CST_LOW (op1));
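+ /* E.g. op0 == &a and op1 == 4 become the invariant address
+ &MEM[&a + 4], which can propagate into dereferences. */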
+
/* Avoid folding if nothing changed. */
if (op0 == gimple_assign_rhs1 (stmt)
&& op1 == gimple_assign_rhs2 (stmt))
fold_defer_overflow_warnings ();
- result = fold_binary (gimple_assign_rhs_code (stmt),
- gimple_expr_type (stmt), op0, op1);
+ result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
if (result)
STRIP_USELESS_TYPE_CONVERSION (result);
{
tree result = NULL_TREE;
tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
+ enum tree_code code = gimple_assign_rhs_code (stmt);
/* We handle some tcc_reference codes here that are all
GIMPLE_ASSIGN_SINGLE codes. */
- if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
- || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
- || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
+ if (code == REALPART_EXPR
+ || code == IMAGPART_EXPR
+ || code == VIEW_CONVERT_EXPR)
op0 = TREE_OPERAND (op0, 0);
if (TREE_CODE (op0) != SSA_NAME)
orig_op0 = op0;
if (VN_INFO (op0)->has_constants)
op0 = valueize_expr (vn_get_expr_for (op0));
- else if (gimple_assign_cast_p (stmt)
- || gimple_assign_rhs_code (stmt) == REALPART_EXPR
- || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
- || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
+ else if (CONVERT_EXPR_CODE_P (code)
+ || code == REALPART_EXPR
+ || code == IMAGPART_EXPR
+ || code == VIEW_CONVERT_EXPR)
{
/* We want to do tree-combining on conversion-like expressions.
Make sure we feed only SSA_NAMEs or constants to fold though. */
if (op0 == orig_op0)
return NULL_TREE;
- result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
- gimple_expr_type (stmt), op0);
+ result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
if (result)
{
STRIP_USELESS_TYPE_CONVERSION (result);
&& TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
return NULL_TREE;
+ /* First try constant folding based on our current lattice. */
+ tem = gimple_fold_stmt_to_constant (stmt, vn_valueize);
+ if (tem)
+ return tem;
+
+ /* If that didn't work try combining multiple statements. */
switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
{
- case tcc_declaration:
- tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
- if (tem)
- return tem;
- break;
-
case tcc_reference:
- /* Do not do full-blown reference lookup here, but simplify
- reads from constant aggregates. */
- tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
- if (tem)
- return tem;
-
/* Fallthrough for some codes that can operate on registers. */
if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
|| TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
into binary ops, but it's debatable whether it is worth it. */
case tcc_unary:
return simplify_unary_expression (stmt);
- break;
+
case tcc_comparison:
case tcc_binary:
return simplify_binary_expression (stmt);
- break;
+
default:
break;
}
changed = defs_to_varying (stmt);
else if (is_gimple_assign (stmt))
{
+ enum tree_code code = gimple_assign_rhs_code (stmt);
tree lhs = gimple_assign_lhs (stmt);
+ tree rhs1 = gimple_assign_rhs1 (stmt);
tree simplified;
/* Shortcut for copies. Simplifying copies is pointless,
since we copy the expression and value they represent. */
- if (gimple_assign_copy_p (stmt)
- && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+ if (code == SSA_NAME
&& TREE_CODE (lhs) == SSA_NAME)
{
- changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
+ changed = visit_copy (lhs, rhs1);
goto done;
}
simplified = try_to_simplify (stmt);
/* We can substitute SSA_NAMEs that are live over
abnormal edges with their constant value. */
&& !(gimple_assign_copy_p (stmt)
- && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
+ && is_gimple_min_invariant (rhs1))
&& !(simplified
&& is_gimple_min_invariant (simplified))
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
/* Stores or copies from SSA_NAMEs that are live over
abnormal edges are a problem. */
- || (gimple_assign_single_p (stmt)
- && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
+ || (code == SSA_NAME
+ && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
changed = defs_to_varying (stmt);
- else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
- {
- changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
- }
+ else if (REFERENCE_CLASS_P (lhs)
+ || DECL_P (lhs))
+ changed = visit_reference_op_store (lhs, rhs1, stmt);
else if (TREE_CODE (lhs) == SSA_NAME)
{
if ((gimple_assign_copy_p (stmt)
- && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
+ && is_gimple_min_invariant (rhs1))
|| (simplified
&& is_gimple_min_invariant (simplified)))
{
if (simplified)
changed = set_ssa_val_to (lhs, simplified);
else
- changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
+ changed = set_ssa_val_to (lhs, rhs1);
}
else
{
- switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
+ switch (get_gimple_rhs_class (code))
{
case GIMPLE_UNARY_RHS:
- changed = visit_unary_op (lhs, stmt);
- break;
case GIMPLE_BINARY_RHS:
- changed = visit_binary_op (lhs, stmt);
+ case GIMPLE_TERNARY_RHS:
+ changed = visit_nary_op (lhs, stmt);
break;
case GIMPLE_SINGLE_RHS:
- switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
+ switch (TREE_CODE_CLASS (code))
{
case tcc_reference:
/* VOP-less references can go through unary case. */
- if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
- || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
- || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
- && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
+ if ((code == REALPART_EXPR
+ || code == IMAGPART_EXPR
+ || code == VIEW_CONVERT_EXPR)
+ && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
{
- changed = visit_unary_op (lhs, stmt);
+ changed = visit_nary_op (lhs, stmt);
break;
}
/* Fallthrough. */
case tcc_declaration:
- changed = visit_reference_op_load
- (lhs, gimple_assign_rhs1 (stmt), stmt);
+ changed = visit_reference_op_load (lhs, rhs1, stmt);
break;
- case tcc_expression:
- if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
+ default:
+ if (code == ADDR_EXPR)
{
- changed = visit_unary_op (lhs, stmt);
+ changed = visit_nary_op (lhs, stmt);
+ break;
+ }
+ else if (code == CONSTRUCTOR)
+ {
+ changed = visit_nary_op (lhs, stmt);
break;
}
- /* Fallthrough. */
- default:
changed = defs_to_varying (stmt);
}
break;
/* ??? We should handle stores from calls. */
else if (TREE_CODE (lhs) == SSA_NAME)
{
- if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
+ if (!gimple_call_internal_p (stmt)
+ && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
changed = visit_reference_op_call (lhs, stmt);
else
changed = defs_to_varying (stmt);
static void
sort_scc (VEC (tree, heap) *scc)
{
- qsort (VEC_address (tree, scc),
- VEC_length (tree, scc),
- sizeof (tree),
- compare_ops);
+ VEC_qsort (tree, scc, compare_ops);
}
/* Insert the no longer used nary ONARY into the hash INFO. */
static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
- size_t size = (sizeof (struct vn_nary_op_s)
- - sizeof (tree) * (4 - onary->length));
- vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&info->nary_obstack, size);
- void **slot;
+ size_t size = sizeof_vn_nary_op (onary->length);
+ vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
+ &info->nary_obstack);
memcpy (nary, onary, size);
- slot = htab_find_slot_with_hash (info->nary, nary, nary->hashcode, INSERT);
- gcc_assert (!*slot);
- *slot = nary;
+ vn_nary_op_insert_into (nary, info->nary, false);
}
/* Insert the no longer used phi OPHI into the hash INFO. */
if (VEC_length (tree, scc) == 1)
{
tree use = VEC_index (tree, scc, 0);
- if (!VN_INFO (use)->use_processed)
- visit_use (use);
- return;
+ if (VN_INFO (use)->use_processed)
+ return;
+ /* We need to make sure it doesn't form a cycle itself, which can
+ happen for self-referential PHI nodes. In that case we would
+ end up inserting an expression with VN_TOP operands into the
+ valid table which makes us derive bogus equivalences later.
+ The cheapest way to check this is to assume it for all PHI nodes. */
+ if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
+ /* Fallthru to iteration. */ ;
+ else
+ {
+ visit_use (use);
+ return;
+ }
}
/* Iterate over the SCC with the optimistic table until it stops
{
changed = false;
iterations++;
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Starting iteration %d\n", iterations);
/* As we are value-numbering optimistically we have to
clear the expression tables and the simplified expressions
in each iteration until we converge. */
gcc_obstack_init (&optimistic_info->nary_obstack);
empty_alloc_pool (optimistic_info->phis_pool);
empty_alloc_pool (optimistic_info->references_pool);
- for (i = 0; VEC_iterate (tree, scc, i, var); i++)
+ FOR_EACH_VEC_ELT (tree, scc, i, var)
VN_INFO (var)->expr = NULL_TREE;
- for (i = 0; VEC_iterate (tree, scc, i, var); i++)
+ FOR_EACH_VEC_ELT (tree, scc, i, var)
changed |= visit_use (var);
}
XDELETE (optimistic_info);
}
+/* Set *ID if we computed something useful in RESULT. */
+
+static void
+set_value_id_for_result (tree result, unsigned int *id)
+{
+ if (result)
+ {
+ if (TREE_CODE (result) == SSA_NAME)
+ *id = VN_INFO (result)->value_id;
+ else if (is_gimple_min_invariant (result))
+ *id = get_or_alloc_constant_value_id (result);
+ }
+}
+
/* Set the value ids in the valid hash tables. */
static void
FOR_EACH_HTAB_ELEMENT (valid_info->nary,
vno, vn_nary_op_t, hi)
- {
- if (vno->result)
- {
- if (TREE_CODE (vno->result) == SSA_NAME)
- vno->value_id = VN_INFO (vno->result)->value_id;
- else if (is_gimple_min_invariant (vno->result))
- vno->value_id = get_or_alloc_constant_value_id (vno->result);
- }
- }
+ set_value_id_for_result (vno->result, &vno->value_id);
FOR_EACH_HTAB_ELEMENT (valid_info->phis,
vp, vn_phi_t, hi)
- {
- if (vp->result)
- {
- if (TREE_CODE (vp->result) == SSA_NAME)
- vp->value_id = VN_INFO (vp->result)->value_id;
- else if (is_gimple_min_invariant (vp->result))
- vp->value_id = get_or_alloc_constant_value_id (vp->result);
- }
- }
+ set_value_id_for_result (vp->result, &vp->value_id);
FOR_EACH_HTAB_ELEMENT (valid_info->references,
vr, vn_reference_t, hi)
- {
- if (vr->result)
- {
- if (TREE_CODE (vr->result) == SSA_NAME)
- vr->value_id = VN_INFO (vr->result)->value_id;
- else if (is_gimple_min_invariant (vr->result))
- vr->value_id = get_or_alloc_constant_value_id (vr->result);
- }
- }
+ set_value_id_for_result (vr->result, &vr->value_id);
}
/* Do SCCVN. Returns true if it finished, false if we bailed out
- due to resource constraints. */
+ due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
+ how we use alias-oracle walking during the VN process. */
bool
-run_scc_vn (void)
+run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
size_t i;
tree param;
bool changed = true;
+ default_vn_walk_kind = default_vn_walk_kind_;
+
init_scc_vn ();
current_info = valid_info;