vn_ssa_aux_t vn = VN_INFO (name);
gimple def_stmt;
tree expr = NULL_TREE;
+ enum tree_code code;
if (vn->valnum == VN_TOP)
return name;
/* Otherwise use the defining statement to build the expression. */
def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
- /* If the value number is a default-definition or a PHI result
- use it directly. */
- if (gimple_nop_p (def_stmt)
- || gimple_code (def_stmt) == GIMPLE_PHI)
- return vn->valnum;
-
+  /* If the value number is not an assignment, use it directly.  */
if (!is_gimple_assign (def_stmt))
return vn->valnum;
/* FIXME tuples. This is incomplete and likely will miss some
simplifications. */
- switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
+ code = gimple_assign_rhs_code (def_stmt);
+ switch (TREE_CODE_CLASS (code))
{
case tcc_reference:
- if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
- || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
- || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
- && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
- expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
+ if ((code == REALPART_EXPR
+ || code == IMAGPART_EXPR
+ || code == VIEW_CONVERT_EXPR)
+ && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
+ 0)) == SSA_NAME)
+ expr = fold_build1 (code,
gimple_expr_type (def_stmt),
TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
break;
case tcc_unary:
- expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
+ expr = fold_build1 (code,
gimple_expr_type (def_stmt),
gimple_assign_rhs1 (def_stmt));
break;
case tcc_binary:
- expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
+ expr = fold_build2 (code,
gimple_expr_type (def_stmt),
gimple_assign_rhs1 (def_stmt),
gimple_assign_rhs2 (def_stmt));
break;
+ case tcc_exceptional:
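+      /* A vector CONSTRUCTOR on the RHS can serve as the expression
+	 directly.  */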
+ if (code == CONSTRUCTOR
+ && TREE_CODE
+ (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
+ expr = gimple_assign_rhs1 (def_stmt);
+ break;
+
default:;
}
if (expr == NULL_TREE)
tem1.type = TREE_TYPE (tem1.op0);
tem1.opcode = TREE_CODE (tem1.op0);
vro1 = &tem1;
+ deref1 = false;
}
if (deref2 && vro2->opcode == ADDR_EXPR)
{
tem2.type = TREE_TYPE (tem2.op0);
tem2.opcode = TREE_CODE (tem2.op0);
vro2 = &tem2;
+ deref2 = false;
}
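+  /* If only one side went through a dereference the references
+     cannot be equal.  */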
+ if (deref1 != deref2)
+ return false;
if (!vn_reference_op_eq (vro1, vro2))
return false;
++j;
switch (temp.opcode)
{
+ case WITH_SIZE_EXPR:
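+	  /* Record the size operand; the value itself lives at offset
+	     zero.  */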
+ temp.op0 = TREE_OPERAND (ref, 1);
+ temp.off = 0;
+ break;
case MEM_REF:
/* The base address gets its own vn_reference_op_s structure. */
temp.op0 = TREE_OPERAND (ref, 1);
VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
if (REFERENCE_CLASS_P (ref)
+ || TREE_CODE (ref) == WITH_SIZE_EXPR
|| (TREE_CODE (ref) == ADDR_EXPR
&& !is_gimple_min_invariant (ref)))
ref = TREE_OPERAND (ref, 0);
ref->base_alias_set = base_alias_set;
else
ref->base_alias_set = get_alias_set (base);
+ /* We discount volatiles from value-numbering elsewhere. */
+ ref->volatile_p = false;
return true;
}
return NULL;
}
+/* Look up an existing, or insert a new, vn_reference entry into the
+   value table for the VUSE, SET, TYPE, OPERANDS reference that has
+   the value VALUE, which is either a constant or an SSA name.  */
+
+static vn_reference_t
+vn_reference_lookup_or_insert_for_pieces (tree vuse,
+ alias_set_type set,
+ tree type,
+ VEC (vn_reference_op_s,
+ heap) *operands,
+ tree value)
+{
+ struct vn_reference_s vr1;
+ vn_reference_t result;
+ unsigned value_id;
+ vr1.vuse = vuse;
+ vr1.operands = operands;
+ vr1.type = type;
+ vr1.set = set;
+ vr1.hashcode = vn_reference_compute_hash (&vr1);
+ if (vn_reference_lookup_1 (&vr1, &result))
+ return result;
+ if (TREE_CODE (value) == SSA_NAME)
+ value_id = VN_INFO (value)->value_id;
+ else
+ value_id = get_or_alloc_constant_value_id (value);
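+  /* vn_reference_insert_pieces takes ownership of the operand vector,
+     so pass it a copy.  */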
+ return vn_reference_insert_pieces (vuse, set, type,
+ VEC_copy (vn_reference_op_s, heap,
+ operands), value, value_id);
+}
+
/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
if (maxsize == -1)
return (void *)-1;
+ /* We can't deduce anything useful from clobbers. */
+ if (gimple_clobber_p (def_stmt))
+ return (void *)-1;
+
/* def_stmt may-defs *ref. See if we can derive a value for *ref
- from that defintion.
+ from that definition.
1) Memset. */
if (is_gimple_reg_type (vr->type)
&& gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
&& offset2 + size2 >= offset + maxsize)
{
tree val = build_zero_cst (vr->type);
- unsigned int value_id = get_or_alloc_constant_value_id (val);
- return vn_reference_insert_pieces (vuse, vr->set, vr->type,
- VEC_copy (vn_reference_op_s,
- heap, vr->operands),
- val, value_id);
+ return vn_reference_lookup_or_insert_for_pieces
+ (vuse, vr->set, vr->type, vr->operands, val);
}
}
&& offset2 + size2 >= offset + maxsize)
{
tree val = build_zero_cst (vr->type);
- unsigned int value_id = get_or_alloc_constant_value_id (val);
- return vn_reference_insert_pieces (vuse, vr->set, vr->type,
- VEC_copy (vn_reference_op_s,
- heap, vr->operands),
- val, value_id);
+ return vn_reference_lookup_or_insert_for_pieces
+ (vuse, vr->set, vr->type, vr->operands, val);
}
}
- /* 3) For aggregate copies translate the reference through them if
+  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
+     routines to extract the assigned bits.  */
+ else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
+ && ref->size == maxsize
+ && maxsize % BITS_PER_UNIT == 0
+ && offset % BITS_PER_UNIT == 0
+ && is_gimple_reg_type (vr->type)
+ && gimple_assign_single_p (def_stmt)
+ && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
+ {
+ tree base2;
+ HOST_WIDE_INT offset2, size2, maxsize2;
+ base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
+ &offset2, &size2, &maxsize2);
+ if (maxsize2 != -1
+ && maxsize2 == size2
+ && size2 % BITS_PER_UNIT == 0
+ && offset2 % BITS_PER_UNIT == 0
+ && operand_equal_p (base, base2, 0)
+ && offset2 <= offset
+ && offset2 + size2 >= offset + maxsize)
+ {
+ /* We support up to 512-bit values (for V8DFmode). */
+ unsigned char buffer[64];
+ int len;
+
+ len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
+ buffer, sizeof (buffer));
+ if (len > 0)
+ {
+ tree val = native_interpret_expr (vr->type,
+ buffer
+ + ((offset - offset2)
+ / BITS_PER_UNIT),
+ ref->size / BITS_PER_UNIT);
+ if (val)
+ return vn_reference_lookup_or_insert_for_pieces
+ (vuse, vr->set, vr->type, vr->operands, val);
+ }
+ }
+ }
+
+  /* 4) Assignment from an SSA name whose definition we may be able
+ to access pieces from. */
+ else if (ref->size == maxsize
+ && is_gimple_reg_type (vr->type)
+ && gimple_assign_single_p (def_stmt)
+ && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
+ {
+ tree rhs1 = gimple_assign_rhs1 (def_stmt);
+ gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
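+      /* Pieces can be extracted only from COMPLEX_EXPR or vector
+	 CONSTRUCTOR definitions whose element type is compatible with
+	 the type we look up.  */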
+ if (is_gimple_assign (def_stmt2)
+ && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
+ || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
+ && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
+ {
+ tree base2;
+ HOST_WIDE_INT offset2, size2, maxsize2, off;
+ base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
+ &offset2, &size2, &maxsize2);
+ off = offset - offset2;
+ if (maxsize2 != -1
+ && maxsize2 == size2
+ && operand_equal_p (base, base2, 0)
+ && offset2 <= offset
+ && offset2 + size2 >= offset + maxsize)
+ {
+ tree val = NULL_TREE;
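+	  /* Size in bits of one element of the vector or one part of
+	     the complex value.  */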
+ HOST_WIDE_INT elsz
+ = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
+ if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
+ {
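+	      /* The real part lives at offset zero, the imaginary part
+		 one element further.  */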
+ if (off == 0)
+ val = gimple_assign_rhs1 (def_stmt2);
+ else if (off == elsz)
+ val = gimple_assign_rhs2 (def_stmt2);
+ }
+ else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
+ && off % elsz == 0)
+ {
+ tree ctor = gimple_assign_rhs1 (def_stmt2);
+ unsigned i = off / elsz;
+ if (i < CONSTRUCTOR_NELTS (ctor))
+ {
+ constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
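+		  /* Verify the element at position I really has index I.  */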
+ if (compare_tree_int (elt->index, i) == 0)
+ val = elt->value;
+ }
+ }
+ if (val)
+ return vn_reference_lookup_or_insert_for_pieces
+ (vuse, vr->set, vr->type, vr->operands, val);
+ }
+ }
+ }
+
+ /* 5) For aggregate copies translate the reference through them if
the copy kills ref. */
else if (vn_walk_kind == VN_WALKREWRITE
&& gimple_assign_single_p (def_stmt)
FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
VEC_free (vn_reference_op_s, heap, rhs);
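+  /* The spliced-in RHS operands still need valueization.  */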
+ vr->operands = valueize_refs (vr->operands);
vr->hashcode = vn_reference_compute_hash (vr);
/* Adjust *ref from the new operands. */
return NULL;
}
- /* 4) For memcpy copies translate the reference through them if
+ /* 6) For memcpy copies translate the reference through them if
the copy kills ref. */
else if (vn_walk_kind == VN_WALKREWRITE
&& is_gimple_reg_type (vr->type)
tree var;
unsigned int i;
- fprintf (out, "SCC consists of: ");
+ fprintf (out, "SCC consists of:");
FOR_EACH_VEC_ELT (tree, scc, i, var)
{
- print_generic_expr (out, var, 0);
fprintf (out, " ");
+ print_generic_expr (out, var, 0);
}
fprintf (out, "\n");
}
{
switch (TREE_CODE_CLASS (TREE_CODE (expr)))
{
- case tcc_unary:
- if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
- && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
- TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
- break;
case tcc_binary:
- if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
- && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
- TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
- if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
- && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
- TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
- break;
- default:
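+	/* vn_valueize handles non-SSA operands and VN_TOP-valued names
+	   for us.  */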
+ TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
+ /* Fallthru. */
+ case tcc_unary:
+ TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
break;
+ default:;
}
return expr;
}
tree result = NULL_TREE;
tree op0 = gimple_assign_rhs1 (stmt);
tree op1 = gimple_assign_rhs2 (stmt);
+ enum tree_code code = gimple_assign_rhs_code (stmt);
/* This will not catch every single case we could combine, but will
catch those with constants. The goal here is to simultaneously
if (TREE_CODE (op0) == SSA_NAME)
{
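+      /* Expanding COMPLEX_EXPR operands to the full expression lets fold
+	 recombine matching REALPART_EXPR/IMAGPART_EXPR pairs.  */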
if (VN_INFO (op0)->has_constants
- || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
+ || TREE_CODE_CLASS (code) == tcc_comparison
+ || code == COMPLEX_EXPR)
op0 = valueize_expr (vn_get_expr_for (op0));
- else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
- op0 = SSA_VAL (op0);
+ else
+ op0 = vn_valueize (op0);
}
if (TREE_CODE (op1) == SSA_NAME)
{
- if (VN_INFO (op1)->has_constants)
+ if (VN_INFO (op1)->has_constants
+ || code == COMPLEX_EXPR)
op1 = valueize_expr (vn_get_expr_for (op1));
- else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
- op1 = SSA_VAL (op1);
+ else
+ op1 = vn_valueize (op1);
}
   /* Pointer plus constant can be represented as an invariant address.
      Do so to allow further propagation; see also tree forwprop.  */
- if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
+ if (code == POINTER_PLUS_EXPR
&& host_integerp (op1, 1)
&& TREE_CODE (op0) == ADDR_EXPR
&& is_gimple_min_invariant (op0))
fold_defer_overflow_warnings ();
- result = fold_binary (gimple_assign_rhs_code (stmt),
- gimple_expr_type (stmt), op0, op1);
+ result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
if (result)
STRIP_USELESS_TYPE_CONVERSION (result);
{
tree result = NULL_TREE;
tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
+ enum tree_code code = gimple_assign_rhs_code (stmt);
/* We handle some tcc_reference codes here that are all
GIMPLE_ASSIGN_SINGLE codes. */
- if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
- || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
- || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
+ if (code == REALPART_EXPR
+ || code == IMAGPART_EXPR
+ || code == VIEW_CONVERT_EXPR
+ || code == BIT_FIELD_REF)
op0 = TREE_OPERAND (op0, 0);
if (TREE_CODE (op0) != SSA_NAME)
orig_op0 = op0;
if (VN_INFO (op0)->has_constants)
op0 = valueize_expr (vn_get_expr_for (op0));
- else if (gimple_assign_cast_p (stmt)
- || gimple_assign_rhs_code (stmt) == REALPART_EXPR
- || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
- || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
+ else if (CONVERT_EXPR_CODE_P (code)
+ || code == REALPART_EXPR
+ || code == IMAGPART_EXPR
+ || code == VIEW_CONVERT_EXPR
+ || code == BIT_FIELD_REF)
{
/* We want to do tree-combining on conversion-like expressions.
Make sure we feed only SSA_NAMEs or constants to fold though. */
|| BINARY_CLASS_P (tem)
|| TREE_CODE (tem) == VIEW_CONVERT_EXPR
|| TREE_CODE (tem) == SSA_NAME
+ || TREE_CODE (tem) == CONSTRUCTOR
|| is_gimple_min_invariant (tem))
op0 = tem;
}
if (op0 == orig_op0)
return NULL_TREE;
- result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
- gimple_expr_type (stmt), op0);
+ if (code == BIT_FIELD_REF)
+ {
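+      /* Re-fold the BIT_FIELD_REF with the valueized base but the
+	 original bit position and size operands.  */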
+ tree rhs = gimple_assign_rhs1 (stmt);
+ result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
+ op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
+ }
+ else
+ result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
if (result)
{
STRIP_USELESS_TYPE_CONVERSION (result);
return NULL_TREE;
}
-/* Valueize NAME if it is an SSA name, otherwise just return it. */
-
-static inline tree
-vn_valueize (tree name)
-{
- if (TREE_CODE (name) == SSA_NAME)
- {
- tree tem = SSA_VAL (name);
- return tem == VN_TOP ? name : tem;
- }
- return name;
-}
-
/* Try to simplify RHS using equivalences and constant folding. */
static tree
try_to_simplify (gimple stmt)
{
+ enum tree_code code = gimple_assign_rhs_code (stmt);
tree tem;
   /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
      in this case; there is no point in doing extra work.  */
- if (gimple_assign_copy_p (stmt)
- && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
+ if (code == SSA_NAME)
return NULL_TREE;
/* First try constant folding based on our current lattice. */
- tem = gimple_fold_stmt_to_constant (stmt, vn_valueize);
- if (tem)
+ tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
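+  /* Unlike gimple_fold_stmt_to_constant the _1 variant does not verify
+     the result is a gimple value, so check before accepting it.  */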
+ if (tem
+ && (TREE_CODE (tem) == SSA_NAME
+ || is_gimple_min_invariant (tem)))
return tem;
/* If that didn't work try combining multiple statements. */
- switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
+ switch (TREE_CODE_CLASS (code))
{
case tcc_reference:
- /* Fallthrough for some codes that can operate on registers. */
- if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
- || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
- || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
+ /* Fallthrough for some unary codes that can operate on registers. */
+ if (!(code == REALPART_EXPR
+ || code == IMAGPART_EXPR
+ || code == VIEW_CONVERT_EXPR
+ || code == BIT_FIELD_REF))
break;
/* We could do a little more with unary ops, if they expand
into binary ops, but it's debatable whether it is worth it. */
if (gimple_code (stmt) == GIMPLE_PHI)
changed = visit_phi (stmt);
else if (!gimple_has_lhs (stmt)
- || gimple_has_volatile_ops (stmt)
- || stmt_could_throw_p (stmt))
+ || gimple_has_volatile_ops (stmt))
changed = defs_to_varying (stmt);
else if (is_gimple_assign (stmt))
{
 	  /* VOP-less references can go through the unary case.  */
if ((code == REALPART_EXPR
|| code == IMAGPART_EXPR
- || code == VIEW_CONVERT_EXPR)
+ || code == VIEW_CONVERT_EXPR
+ || code == BIT_FIELD_REF)
&& TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
{
changed = visit_nary_op (lhs, stmt);