/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  /* Ultimate containing object of the access, rebuilt from the ops.  */
  tree base = NULL_TREE;
  /* Slot in the base expression the next base-building op fills in.  */
  tree *op0_p = &base;
  /* Cumulative bit offset of the access from BASE.  */
  HOST_WIDE_INT offset = 0;
  /* Maximum extent of the access in bits; -1 if variable/unknown.  */
  HOST_WIDE_INT max_size;
  /* Size of the access in bits; -1 if variable/unknown.  */
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    {
      /* NOTE(review): an INTEGER_CST op0 is presumably the size encoding
	 used by the "union trick" handled below — confirm against the
	 ops builder.  Otherwise op0 is the FIELD_DECL whose DECL_SIZE
	 gives the access size.  */
      if (TREE_CODE (op->op0) == INTEGER_CST)
	size_tree = op->op0;
      else
	size_tree = DECL_SIZE (op->op0);
    }
  else if (op->opcode == BIT_FIELD_REF)
    /* op0 holds the size of the bit-field access.  */
    size_tree = op->op0;
  else
    {
      /* Otherwise derive the size from the access type; non-BLKmode
	 types have a known mode size.  */
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  /* A size expression that is not an unsigned host integer is treated
     as unknown.  */
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
	size = -1;
      else
	size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case CALL_EXPR:
	case ADDR_EXPR:
	  return false;

	/* Record the base objects.  */
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  /* Build a skeleton indirect ref; its operand slot is filled
	     by a later op through OP0_P.  */
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case MISALIGNED_INDIRECT_REF:
	  /* Like the above but carries the misalignment value in
	     operand 1.  */
	  *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	/* Declarations and SSA names terminate the base expression.  */
	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  /* op1 is the recorded bit position of the field.  */
	  offset += tree_low_cst (op->op1, 0);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    /* Our union trick, done for offset zero only.  */
	    if (TREE_CODE (field) == INTEGER_CST)
	      ;
	    /* A variable field offset (op1 set, or a DECL_FIELD_OFFSET
	       that is not an unsigned host integer) makes the extent of
	       the access unknown.  */
	    else if (op->op1
		     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
	      max_size = -1;
	    else
	      {
		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!host_integerp (op->op0, 0)
	      || !host_integerp (op->op1, 0)
	      || !host_integerp (op->op2, 0))
	    max_size = -1;
	  else
	    {
	      /* Bit offset is (index - low-bound) * element-size
		 scaled from units to bits.  */
	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
	      hindex -= TREE_INT_CST_LOW (op->op1);
	      hindex *= TREE_INT_CST_LOW (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  /* The real part is at offset zero; nothing to add.  */
	  break;

	case IMAGPART_EXPR:
	  /* The imaginary part follows the real part of equal size.  */
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  /* Changes neither offset nor size.  */
	  break;

	/* Constants cannot serve as a base object.  */
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  /* Fill in *REF; the tree form and the base alias set are left for
     lazy computation (base_alias_set of -1 marks it uncomputed).  */
  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  ref->base_alias_set = -1;

  return true;
}
+
+/* Copy the operations present in load/store/call REF into RESULT, a vector of
+ vn_reference_op_s's. */
+
+void
+copy_reference_ops_from_call (gimple call,
+ VEC(vn_reference_op_s, heap) **result)
+{
+ vn_reference_op_s temp;
+ unsigned i;
+
+ /* Copy the type, opcode, function being called and static chain. */
+ memset (&temp, 0, sizeof (temp));
+ temp.type = gimple_call_return_type (call);
+ temp.opcode = CALL_EXPR;
+ temp.op0 = gimple_call_fn (call);
+ temp.op1 = gimple_call_chain (call);
+ VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+
+ /* Copy the call arguments. As they can be references as well,
+ just chain them together. */
+ for (i = 0; i < gimple_call_num_args (call); ++i)
+ {
+ tree callarg = gimple_call_arg (call, i);
+ copy_reference_ops_from_ref (callarg, result);
+ }
+}
+
+/* Create a vector of vn_reference_op_s structures from REF, a
+ REFERENCE_CLASS_P tree. The vector is not shared. */
+
+static VEC(vn_reference_op_s, heap) *
+create_reference_ops_from_ref (tree ref)
+{
+ VEC (vn_reference_op_s, heap) *result = NULL;
+
+ copy_reference_ops_from_ref (ref, &result);
+ return result;
+}
+
+/* Create a vector of vn_reference_op_s structures from CALL, a
+ call statement. The vector is not shared. */
+
+static VEC(vn_reference_op_s, heap) *
+create_reference_ops_from_call (gimple call)
+{
+ VEC (vn_reference_op_s, heap) *result = NULL;
+
+ copy_reference_ops_from_call (call, &result);
+ return result;
+}
+
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  /* Ops describing the object the folded reference accesses directly.  */
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* NOTE(review): op->op0 is presumably an ADDR_EXPR whose operand is
     the addressed object — confirm against the callers.  */
  /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      /* The addressed object is an array but the pointer's pointed-to
	 type is not; prepend an ARRAY_REF of the first element.  */
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      /* Index with the array's lower bound if it has one, else zero.  */
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
	  && TYPE_MIN_VALUE (dom))
	aref.op0 = TYPE_MIN_VALUE (dom);
      /* op1 is the lower bound, op2 the element size in units, matching
	 what the ARRAY_REF handling elsewhere expects.  */
      aref.op1 = aref.op0;
      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.
     The two slots I-1 and I of *OPS are overwritten with the ops in
     MEM, growing or shrinking *OPS as needed.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      /* A single replacement op: overwrite slot I-1, remove slot I.  */
      VEC_replace (vn_reference_op_s, *ops, i - 1,
		   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      /* Exactly two: overwrite both slots in place.  */
      VEC_replace (vn_reference_op_s, *ops, i - 1,
		   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
		   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      /* More than two: overwrite both slots, then insert the remaining
	 ops after them, advancing I past each insertion.  */
      VEC_replace (vn_reference_op_s, *ops, i - 1,
		   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
		   VEC_index (vn_reference_op_s, mem, 1));
      /* ??? There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
	VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}
+
+/* Transform any SSA_NAME's in a vector of vn_reference_op_s
+ structures into their value numbers. This is done in-place, and
+ the vector passed in is returned. */