+/* When non-NULL, updated with each VUSE visited during a reference
+ lookup walk; cleared once a reference has been translated through
+ a store so the pre-translation VUSE is the one recorded. */
+static tree *last_vuse_ptr;
+/* Lookup kind in effect for the current walk, and the default used
+ when a new lookup is started. */
+static vn_lookup_kind vn_walk_kind;
+static vn_lookup_kind default_vn_walk_kind;
+
+/* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
+ with the current VUSE and performs the expression lookup. */
+
+static void *
+vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
+{
+ vn_reference_t vr = (vn_reference_t)vr_;
+ void **slot;
+ hashval_t hash;
+
+ /* Record the VUSE we are visiting for the caller, if requested. */
+ if (last_vuse_ptr)
+ *last_vuse_ptr = vuse;
+
+ /* Fixup vuse and hash. The hashcode incorporates the SSA name
+ version of the vuse additively, so subtract the old contribution
+ and add the new one instead of rehashing the whole reference. */
+ if (vr->vuse)
+ vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
+ vr->vuse = SSA_VAL (vuse);
+ if (vr->vuse)
+ vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
+
+ /* Look up the adjusted reference; when iterating optimistically
+ fall back to the table of already validated expressions. */
+ hash = vr->hashcode;
+ slot = htab_find_slot_with_hash (current_info->references, vr,
+ hash, NO_INSERT);
+ if (!slot && current_info == optimistic_info)
+ slot = htab_find_slot_with_hash (valid_info->references, vr,
+ hash, NO_INSERT);
+ if (slot)
+ return *slot;
+
+ return NULL;
+}
+
+/* Lookup an existing or insert a new vn_reference entry into the
+ value table for the VUSE, SET, TYPE, OPERANDS reference which
+ has the constant value CST. */
+
+static vn_reference_t
+vn_reference_lookup_or_insert_constant_for_pieces (tree vuse,
+ alias_set_type set,
+ tree type,
+ VEC (vn_reference_op_s,
+ heap) *operands,
+ tree cst)
+{
+ struct vn_reference_s vr1;
+ vn_reference_t result;
+ /* Assemble a temporary reference on the stack for the lookup.
+ The hashcode has to be computed last, after all pieces are set. */
+ vr1.vuse = vuse;
+ vr1.operands = operands;
+ vr1.type = type;
+ vr1.set = set;
+ vr1.hashcode = vn_reference_compute_hash (&vr1);
+ if (vn_reference_lookup_1 (&vr1, &result))
+ return result;
+ /* Not found - insert a fresh entry. OPERANDS is copied, presumably
+ because the inserted entry retains the vector - the caller keeps
+ ownership of the original. CST gets a constant value id. */
+ return vn_reference_insert_pieces (vuse, set, type,
+ VEC_copy (vn_reference_op_s, heap,
+ operands), cst,
+ get_or_alloc_constant_value_id (cst));
+}
+
+/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
+ from the statement defining VUSE and if not successful tries to
+ translate *REFP and VR_ through an aggregate copy at the definition
+ of VUSE. Returns a value table entry on success, NULL to continue
+ walking with a possibly rewritten *REF / VR_, and (void *)-1 to
+ abort the walk. */
+
+static void *
+vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
+{
+ vn_reference_t vr = (vn_reference_t)vr_;
+ gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
+ tree base;
+ HOST_WIDE_INT offset, maxsize;
+ /* Static to avoid re-allocating the operand vector on every call. */
+ static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
+ ao_ref lhs_ref;
+ bool lhs_ref_ok = false;
+
+ /* First try to disambiguate after value-replacing in the definitions LHS. */
+ if (is_gimple_assign (def_stmt))
+ {
+ VEC (vn_reference_op_s, heap) *tem;
+ tree lhs = gimple_assign_lhs (def_stmt);
+ bool valueized_anything = false;
+ /* Avoid re-allocation overhead. */
+ VEC_truncate (vn_reference_op_s, lhs_ops, 0);
+ copy_reference_ops_from_ref (lhs, &lhs_ops);
+ tem = lhs_ops;
+ lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
+ gcc_assert (lhs_ops == tem);
+ if (valueized_anything)
+ {
+ /* Valueization may expose a non-aliasing base - retry the
+ disambiguation with the valueized LHS reference. */
+ lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
+ get_alias_set (lhs),
+ TREE_TYPE (lhs), lhs_ops);
+ if (lhs_ref_ok
+ && !refs_may_alias_p_1 (ref, &lhs_ref, true))
+ return NULL;
+ }
+ else
+ {
+ ao_ref_init (&lhs_ref, lhs);
+ lhs_ref_ok = true;
+ }
+ }
+
+ base = ao_ref_base (ref);
+ offset = ref->offset;
+ maxsize = ref->max_size;
+
+ /* If we cannot constrain the size of the reference we cannot
+ test if anything kills it. */
+ if (maxsize == -1)
+ return (void *)-1;
+
+ /* We can't deduce anything useful from clobbers. */
+ if (gimple_clobber_p (def_stmt))
+ return (void *)-1;
+
+ /* def_stmt may-defs *ref. See if we can derive a value for *ref
+ from that definition.
+ 1) Memset. */
+ if (is_gimple_reg_type (vr->type)
+ && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
+ && integer_zerop (gimple_call_arg (def_stmt, 1))
+ && host_integerp (gimple_call_arg (def_stmt, 2), 1)
+ && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
+ {
+ tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
+ tree base2;
+ HOST_WIDE_INT offset2, size2, maxsize2;
+ base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
+ size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
+ /* The division check guards against overflow of the byte-to-bit
+ conversion above; only proceed if the memset fully covers the
+ looked-up reference. */
+ if ((unsigned HOST_WIDE_INT)size2 / 8
+ == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
+ && maxsize2 != -1
+ && operand_equal_p (base, base2, 0)
+ && offset2 <= offset
+ && offset2 + size2 >= offset + maxsize)
+ {
+ tree val = build_zero_cst (vr->type);
+ return vn_reference_lookup_or_insert_constant_for_pieces
+ (vuse, vr->set, vr->type, vr->operands, val);
+ }
+ }
+
+ /* 2) Assignment from an empty CONSTRUCTOR. */
+ else if (is_gimple_reg_type (vr->type)
+ && gimple_assign_single_p (def_stmt)
+ && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
+ && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
+ {
+ tree base2;
+ HOST_WIDE_INT offset2, size2, maxsize2;
+ base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
+ &offset2, &size2, &maxsize2);
+ /* An empty CONSTRUCTOR zeros the LHS - if it covers the whole
+ reference the looked-up value is zero. */
+ if (maxsize2 != -1
+ && operand_equal_p (base, base2, 0)
+ && offset2 <= offset
+ && offset2 + size2 >= offset + maxsize)
+ {
+ tree val = build_zero_cst (vr->type);
+ return vn_reference_lookup_or_insert_constant_for_pieces
+ (vuse, vr->set, vr->type, vr->operands, val);
+ }
+ }
+
+ /* 3) Assignment from a constant. We can use folds native encode/interpret
+ routines to extract the assigned bits. */
+ else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
+ && ref->size == maxsize
+ && maxsize % BITS_PER_UNIT == 0
+ && offset % BITS_PER_UNIT == 0
+ && is_gimple_reg_type (vr->type)
+ && gimple_assign_single_p (def_stmt)
+ && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
+ {
+ tree base2;
+ HOST_WIDE_INT offset2, size2, maxsize2;
+ base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
+ &offset2, &size2, &maxsize2);
+ if (maxsize2 != -1
+ && maxsize2 == size2
+ && size2 % BITS_PER_UNIT == 0
+ && offset2 % BITS_PER_UNIT == 0
+ && operand_equal_p (base, base2, 0)
+ && offset2 <= offset
+ && offset2 + size2 >= offset + maxsize)
+ {
+ /* We support up to 512-bit values (for V8DFmode). */
+ unsigned char buffer[64];
+ int len;
+
+ len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
+ buffer, sizeof (buffer));
+ if (len > 0)
+ {
+ /* Re-interpret the relevant slice of the encoded constant
+ in the type of the looked-up reference. */
+ tree val = native_interpret_expr (vr->type,
+ buffer
+ + ((offset - offset2)
+ / BITS_PER_UNIT),
+ ref->size / BITS_PER_UNIT);
+ if (val)
+ return vn_reference_lookup_or_insert_constant_for_pieces
+ (vuse, vr->set, vr->type, vr->operands, val);
+ }
+ }
+ }
+
+ /* 4) Assignment from an SSA name which definition we may be able
+ to access pieces from. */
+ else if (ref->size == maxsize
+ && is_gimple_reg_type (vr->type)
+ && gimple_assign_single_p (def_stmt)
+ && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
+ {
+ tree rhs1 = gimple_assign_rhs1 (def_stmt);
+ gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
+ if (is_gimple_assign (def_stmt2)
+ && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
+ || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
+ && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
+ {
+ tree base2;
+ HOST_WIDE_INT offset2, size2, maxsize2, off;
+ base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
+ &offset2, &size2, &maxsize2);
+ off = offset - offset2;
+ if (maxsize2 != -1
+ && maxsize2 == size2
+ && operand_equal_p (base, base2, 0)
+ && offset2 <= offset
+ && offset2 + size2 >= offset + maxsize)
+ {
+ tree val = NULL_TREE;
+ HOST_WIDE_INT elsz
+ = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
+ if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
+ {
+ /* Pick the real or imaginary part by element offset. */
+ if (off == 0)
+ val = gimple_assign_rhs1 (def_stmt2);
+ else if (off == elsz)
+ val = gimple_assign_rhs2 (def_stmt2);
+ }
+ else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
+ && off % elsz == 0)
+ {
+ tree ctor = gimple_assign_rhs1 (def_stmt2);
+ unsigned i = off / elsz;
+ if (i < CONSTRUCTOR_NELTS (ctor))
+ {
+ constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
+ /* NOTE(review): elt->index may be NULL_TREE for
+ vector constructors - verify compare_tree_int
+ copes with that before relying on this path. */
+ if (compare_tree_int (elt->index, i) == 0)
+ val = elt->value;
+ }
+ }
+ if (val)
+ return vn_reference_lookup_or_insert_constant_for_pieces
+ (vuse, vr->set, vr->type, vr->operands, val);
+ }
+ }
+ }
+
+ /* 5) For aggregate copies translate the reference through them if
+ the copy kills ref. */
+ else if (vn_walk_kind == VN_WALKREWRITE
+ && gimple_assign_single_p (def_stmt)
+ && (DECL_P (gimple_assign_rhs1 (def_stmt))
+ || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
+ || handled_component_p (gimple_assign_rhs1 (def_stmt))))
+ {
+ tree base2;
+ HOST_WIDE_INT offset2, size2, maxsize2;
+ int i, j;
+ VEC (vn_reference_op_s, heap) *rhs = NULL;
+ vn_reference_op_t vro;
+ ao_ref r;
+
+ if (!lhs_ref_ok)
+ return (void *)-1;
+
+ /* See if the assignment kills REF. */
+ base2 = ao_ref_base (&lhs_ref);
+ offset2 = lhs_ref.offset;
+ size2 = lhs_ref.size;
+ maxsize2 = lhs_ref.max_size;
+ if (maxsize2 == -1
+ || (base != base2 && !operand_equal_p (base, base2, 0))
+ || offset2 > offset
+ || offset2 + size2 < offset + maxsize)
+ return (void *)-1;
+
+ /* Find the common base of ref and the lhs. lhs_ops already
+ contains valueized operands for the lhs. */
+ i = VEC_length (vn_reference_op_s, vr->operands) - 1;
+ j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
+ while (j >= 0 && i >= 0
+ && vn_reference_op_eq (VEC_index (vn_reference_op_s,
+ vr->operands, i),
+ VEC_index (vn_reference_op_s, lhs_ops, j)))
+ {
+ i--;
+ j--;
+ }
+
+ /* ??? The innermost op should always be a MEM_REF and we already
+ checked that the assignment to the lhs kills vr. Thus for
+ aggregate copies using char[] types the vn_reference_op_eq
+ may fail when comparing types for compatibility. But we really
+ don't care here - further lookups with the rewritten operands
+ will simply fail if we messed up types too badly. */
+ if (j == 0 && i >= 0
+ && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
+ && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
+ && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
+ == VEC_index (vn_reference_op_s, vr->operands, i)->off))
+ i--, j--;
+
+ /* i now points to the first additional op.
+ ??? LHS may not be completely contained in VR, one or more
+ VIEW_CONVERT_EXPRs could be in its way. We could at least
+ try handling outermost VIEW_CONVERT_EXPRs. */
+ if (j != -1)
+ return (void *)-1;
+
+ /* Now re-write REF to be based on the rhs of the assignment. */
+ copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
+ /* We need to pre-pend vr->operands[0..i] to rhs. */
+ if (i + 1 + VEC_length (vn_reference_op_s, rhs)
+ > VEC_length (vn_reference_op_s, vr->operands))
+ {
+ VEC (vn_reference_op_s, heap) *old = vr->operands;
+ VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
+ i + 1 + VEC_length (vn_reference_op_s, rhs));
+ /* Growing may have re-allocated away from the shared vector;
+ drop the stale shared pointer in that case. */
+ if (old == shared_lookup_references
+ && vr->operands != old)
+ shared_lookup_references = NULL;
+ }
+ else
+ VEC_truncate (vn_reference_op_s, vr->operands,
+ i + 1 + VEC_length (vn_reference_op_s, rhs));
+ FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
+ VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
+ VEC_free (vn_reference_op_s, heap, rhs);
+ vr->operands = valueize_refs (vr->operands);
+ vr->hashcode = vn_reference_compute_hash (vr);
+
+ /* Adjust *ref from the new operands. */
+ if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
+ return (void *)-1;
+ /* This can happen with bitfields. */
+ if (ref->size != r.size)
+ return (void *)-1;
+ *ref = r;
+
+ /* Do not update last seen VUSE after translating. */
+ last_vuse_ptr = NULL;
+
+ /* Keep looking for the adjusted *REF / VR pair. */
+ return NULL;
+ }
+
+ /* 6) For memcpy copies translate the reference through them if
+ the copy kills ref. */
+ else if (vn_walk_kind == VN_WALKREWRITE
+ && is_gimple_reg_type (vr->type)
+ /* ??? Handle BCOPY as well. */
+ && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
+ || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
+ || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
+ && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
+ || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
+ && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
+ || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
+ && host_integerp (gimple_call_arg (def_stmt, 2), 1))
+ {
+ tree lhs, rhs;
+ ao_ref r;
+ HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
+ vn_reference_op_s op;
+ HOST_WIDE_INT at;
+
+
+ /* Only handle non-variable, addressable refs. */
+ if (ref->size != maxsize
+ || offset % BITS_PER_UNIT != 0
+ || ref->size % BITS_PER_UNIT != 0)
+ return (void *)-1;
+
+ /* Extract a pointer base and an offset for the destination. */
+ lhs = gimple_call_arg (def_stmt, 0);
+ lhs_offset = 0;
+ if (TREE_CODE (lhs) == SSA_NAME)
+ lhs = SSA_VAL (lhs);
+ if (TREE_CODE (lhs) == ADDR_EXPR)
+ {
+ tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
+ &lhs_offset);
+ if (!tem)
+ return (void *)-1;
+ if (TREE_CODE (tem) == MEM_REF
+ && host_integerp (TREE_OPERAND (tem, 1), 1))
+ {
+ lhs = TREE_OPERAND (tem, 0);
+ lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
+ }
+ else if (DECL_P (tem))
+ lhs = build_fold_addr_expr (tem);
+ else
+ return (void *)-1;
+ }
+ if (TREE_CODE (lhs) != SSA_NAME
+ && TREE_CODE (lhs) != ADDR_EXPR)
+ return (void *)-1;
+
+ /* Extract a pointer base and an offset for the source. */
+ rhs = gimple_call_arg (def_stmt, 1);
+ rhs_offset = 0;
+ if (TREE_CODE (rhs) == SSA_NAME)
+ rhs = SSA_VAL (rhs);
+ if (TREE_CODE (rhs) == ADDR_EXPR)
+ {
+ tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
+ &rhs_offset);
+ if (!tem)
+ return (void *)-1;
+ if (TREE_CODE (tem) == MEM_REF
+ && host_integerp (TREE_OPERAND (tem, 1), 1))
+ {
+ rhs = TREE_OPERAND (tem, 0);
+ rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
+ }
+ else if (DECL_P (tem))
+ rhs = build_fold_addr_expr (tem);
+ else
+ return (void *)-1;
+ }
+ if (TREE_CODE (rhs) != SSA_NAME
+ && TREE_CODE (rhs) != ADDR_EXPR)
+ return (void *)-1;
+
+ copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
+
+ /* The bases of the destination and the references have to agree. */
+ if ((TREE_CODE (base) != MEM_REF
+ && !DECL_P (base))
+ || (TREE_CODE (base) == MEM_REF
+ && (TREE_OPERAND (base, 0) != lhs
+ || !host_integerp (TREE_OPERAND (base, 1), 1)))
+ || (DECL_P (base)
+ && (TREE_CODE (lhs) != ADDR_EXPR
+ || TREE_OPERAND (lhs, 0) != base)))
+ return (void *)-1;
+
+ /* And the access has to be contained within the memcpy destination. */
+ at = offset / BITS_PER_UNIT;
+ if (TREE_CODE (base) == MEM_REF)
+ at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
+ if (lhs_offset > at
+ || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
+ return (void *)-1;
+
+ /* Make room for 2 operands in the new reference. */
+ if (VEC_length (vn_reference_op_s, vr->operands) < 2)
+ {
+ VEC (vn_reference_op_s, heap) *old = vr->operands;
+ VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
+ if (old == shared_lookup_references
+ && vr->operands != old)
+ shared_lookup_references = NULL;
+ }
+ else
+ VEC_truncate (vn_reference_op_s, vr->operands, 2);
+
+ /* The looked-through reference is a simple MEM_REF. */
+ memset (&op, 0, sizeof (op));
+ op.type = vr->type;
+ op.opcode = MEM_REF;
+ /* Byte AT of the destination corresponds to byte
+ AT - LHS_OFFSET + RHS_OFFSET from the source base; the constant
+ MEM_REF operand must encode the same offset as OP.OFF below. */
+ op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
+ op.off = at - lhs_offset + rhs_offset;
+ VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
+ op.type = TREE_TYPE (rhs);
+ op.opcode = TREE_CODE (rhs);
+ op.op0 = rhs;
+ op.off = -1;
+ VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
+ vr->hashcode = vn_reference_compute_hash (vr);
+
+ /* Adjust *ref from the new operands. */
+ if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
+ return (void *)-1;
+ /* This can happen with bitfields. */
+ if (ref->size != r.size)
+ return (void *)-1;
+ *ref = r;
+
+ /* Do not update last seen VUSE after translating. */
+ last_vuse_ptr = NULL;
+
+ /* Keep looking for the adjusted *REF / VR pair. */
+ return NULL;
+ }
+
+ /* Bail out and stop walking. */
+ return (void *)-1;
+}