+}
+
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.

   On success *REF has no tree representation (ref->ref is NULL_TREE) but
   carries a base object, a cumulative bit offset, the access size and a
   conservative maximum access size (-1 when unknown/variable).  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  /* Where the next base-building operand will be stored; starts at BASE
     and is advanced into operand 0 of each indirect ref we rebuild.  */
  tree *op0_p = &base;
  /* Cumulative offset of the access from BASE, in bits.  */
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  /* Size of the access in bits, -1 if not constant.  */
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    {
      /* op0 may be an INTEGER_CST recording the size directly (see the
	 union trick below) rather than a FIELD_DECL.  */
      if (TREE_CODE (op->op0) == INTEGER_CST)
	size_tree = op->op0;
      else
	size_tree = DECL_SIZE (op->op0);
    }
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      /* Only accept sizes that fit an unsigned HOST_WIDE_INT; otherwise
	 leave SIZE at -1 (unknown).  */
      if (!host_integerp (size_tree, 1))
	size = -1;
      else
	size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case CALL_EXPR:
	case ADDR_EXPR:
	  return false;

	/* Record the base objects.  */
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  /* Rebuild the indirect ref with a yet-unknown operand and chain
	     OP0_P so the next base op fills it in.  */
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case MISALIGNED_INDIRECT_REF:
	  *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  /* A decl or SSA name terminates the base chain.  */
	  *op0_p = op->op0;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  /* op1 holds the constant bit position of the field.  */
	  offset += tree_low_cst (op->op1, 0);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    /* Our union trick, done for offset zero only.  */
	    if (TREE_CODE (field) == INTEGER_CST)
	      ;
	    else if (op->op1
		     || !host_integerp (DECL_FIELD_OFFSET (field), 1))
	      /* A variable field offset makes the maximum access size
		 unknown; the element size itself stays valid.  */
	      max_size = -1;
	    else
	      {
		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!host_integerp (op->op0, 0)
	      || !host_integerp (op->op1, 0)
	      || !host_integerp (op->op2, 0))
	    max_size = -1;
	  else
	    {
	      /* (index - low_bound) * element_size, in bits.  */
	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
	      hindex -= TREE_INT_CST_LOW (op->op1);
	      hindex *= TREE_INT_CST_LOW (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  /* Real part lives at offset zero of the complex value.  */
	  break;

	case IMAGPART_EXPR:
	  /* Imaginary part follows the real part of the same size.  */
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  /* Lazily computed from BASE by the alias oracle.  */
  ref->base_alias_set = -1;

  return true;
}
+
+/* Copy the operations present in load/store/call REF into RESULT, a vector of
+ vn_reference_op_s's. */
+
+void
+copy_reference_ops_from_call (gimple call,
+ VEC(vn_reference_op_s, heap) **result)
+{
+ vn_reference_op_s temp;
+ unsigned i;
+
+ /* Copy the type, opcode, function being called and static chain. */
+ memset (&temp, 0, sizeof (temp));
+ temp.type = gimple_call_return_type (call);
+ temp.opcode = CALL_EXPR;
+ temp.op0 = gimple_call_fn (call);
+ temp.op1 = gimple_call_chain (call);
+ VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+
+ /* Copy the call arguments. As they can be references as well,
+ just chain them together. */
+ for (i = 0; i < gimple_call_num_args (call); ++i)
+ {
+ tree callarg = gimple_call_arg (call, i);
+ copy_reference_ops_from_ref (callarg, result);
+ }
+}
+
+/* Create a vector of vn_reference_op_s structures from REF, a
+ REFERENCE_CLASS_P tree. The vector is not shared. */
+
+static VEC(vn_reference_op_s, heap) *
+create_reference_ops_from_ref (tree ref)
+{
+ VEC (vn_reference_op_s, heap) *result = NULL;
+
+ copy_reference_ops_from_ref (ref, &result);
+ return result;
+}
+
+/* Create a vector of vn_reference_op_s structures from CALL, a
+ call statement. The vector is not shared. */
+
+static VEC(vn_reference_op_s, heap) *
+create_reference_ops_from_call (gimple call)
+{
+ VEC (vn_reference_op_s, heap) *result = NULL;
+
+ copy_reference_ops_from_call (call, &result);
+ return result;
+}
+
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.

   The caller guarantees that the op at *I_P is an ADDR_EXPR whose
   predecessor at *I_P - 1 is an INDIRECT_REF (see the check in
   valueize_refs); both are replaced by the reference ops of the
   addressed object.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
			    unsigned int *i_p)
{
  /* Scratch vector holding the ops of the addressed object.  */
  VEC(vn_reference_op_s, heap) *mem = NULL;
  vn_reference_op_t op;
  unsigned int i = *i_p;
  unsigned int j;

  /* Get ops for the addressed object.  */
  op = VEC_index (vn_reference_op_s, *ops, i);
  /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
     around it to avoid later ICEs.  */
  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
    {
      /* Synthesize an ARRAY_REF op selecting the first element so the
	 pointed-to type and the addressed object's type line up.  */
      vn_reference_op_s aref;
      tree dom;
      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
      aref.opcode = ARRAY_REF;
      /* Index is the array's lower bound when it has one, else zero.  */
      aref.op0 = integer_zero_node;
      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
	  && TYPE_MIN_VALUE (dom))
	aref.op0 = TYPE_MIN_VALUE (dom);
      aref.op1 = aref.op0;
      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
    }
  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);

  /* Do the replacement - we should have at least one op in mem now.
     The two ops at I - 1 (the INDIRECT_REF) and I (the ADDR_EXPR) are
     overwritten; any further replacement ops are inserted after I.  */
  if (VEC_length (vn_reference_op_s, mem) == 1)
    {
      /* Single replacement op: overwrite I - 1, drop I, and step back.  */
      VEC_replace (vn_reference_op_s, *ops, i - 1,
		   VEC_index (vn_reference_op_s, mem, 0));
      VEC_ordered_remove (vn_reference_op_s, *ops, i);
      i--;
    }
  else if (VEC_length (vn_reference_op_s, mem) == 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
		   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
		   VEC_index (vn_reference_op_s, mem, 1));
    }
  else if (VEC_length (vn_reference_op_s, mem) > 2)
    {
      VEC_replace (vn_reference_op_s, *ops, i - 1,
		   VEC_index (vn_reference_op_s, mem, 0));
      VEC_replace (vn_reference_op_s, *ops, i,
		   VEC_index (vn_reference_op_s, mem, 1));
      /* ??? There is no VEC_splice.  */
      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
	VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
    }
  else
    gcc_unreachable ();

  VEC_free (vn_reference_op_s, heap, mem);
  *i_p = i;
}
+
+/* Optimize the reference REF to a constant if possible or return
+ NULL_TREE if not. */
+
+tree
+fully_constant_vn_reference_p (vn_reference_t ref)
+{
+ VEC (vn_reference_op_s, heap) *operands = ref->operands;
+ vn_reference_op_t op;
+
+ /* Try to simplify the translated expression if it is
+ a call to a builtin function with at most two arguments. */
+ op = VEC_index (vn_reference_op_s, operands, 0);
+ if (op->opcode == CALL_EXPR
+ && TREE_CODE (op->op0) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
+ && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
+ && VEC_length (vn_reference_op_s, operands) >= 2
+ && VEC_length (vn_reference_op_s, operands) <= 3)
+ {
+ vn_reference_op_t arg0, arg1 = NULL;
+ bool anyconst = false;
+ arg0 = VEC_index (vn_reference_op_s, operands, 1);
+ if (VEC_length (vn_reference_op_s, operands) > 2)
+ arg1 = VEC_index (vn_reference_op_s, operands, 2);
+ if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
+ || (arg0->opcode == ADDR_EXPR
+ && is_gimple_min_invariant (arg0->op0)))
+ anyconst = true;
+ if (arg1
+ && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
+ || (arg1->opcode == ADDR_EXPR
+ && is_gimple_min_invariant (arg1->op0))))
+ anyconst = true;
+ if (anyconst)
+ {
+ tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
+ arg1 ? 2 : 1,
+ arg0->op0,
+ arg1 ? arg1->op0 : NULL);
+ if (folded
+ && TREE_CODE (folded) == NOP_EXPR)
+ folded = TREE_OPERAND (folded, 0);
+ if (folded
+ && is_gimple_min_invariant (folded))
+ return folded;
+ }
+ }
+
+ /* Simplify reads from constant strings. */
+ else if (op->opcode == ARRAY_REF
+ && TREE_CODE (op->op0) == INTEGER_CST
+ && integer_zerop (op->op1)
+ && VEC_length (vn_reference_op_s, operands) == 2)
+ {
+ vn_reference_op_t arg0;
+ arg0 = VEC_index (vn_reference_op_s, operands, 1);
+ if (arg0->opcode == STRING_CST
+ && (TYPE_MODE (op->type)
+ == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
+ && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
+ && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
+ && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
+ return build_int_cst_type (op->type,
+ (TREE_STRING_POINTER (arg0->op0)
+ [TREE_INT_CST_LOW (op->op0)]));
+ }
+
+ return NULL_TREE;
+}
+
+/* Transform any SSA_NAME's in a vector of vn_reference_op_s
+ structures into their value numbers. This is done in-place, and
+ the vector passed in is returned. */
+
+static VEC (vn_reference_op_s, heap) *
+valueize_refs (VEC (vn_reference_op_s, heap) *orig)
+{
+ vn_reference_op_t vro;
+ unsigned int i;
+
+ for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
+ {
+ if (vro->opcode == SSA_NAME
+ || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
+ {
+ vro->op0 = SSA_VAL (vro->op0);
+ /* If it transforms from an SSA_NAME to a constant, update
+ the opcode. */
+ if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
+ vro->opcode = TREE_CODE (vro->op0);
+ /* If it transforms from an SSA_NAME to an address, fold with
+ a preceding indirect reference. */
+ if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
+ && VEC_index (vn_reference_op_s,
+ orig, i - 1)->opcode == INDIRECT_REF)
+ {
+ vn_reference_fold_indirect (&orig, &i);
+ continue;
+ }
+ }
+ if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
+ vro->op1 = SSA_VAL (vro->op1);
+ if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
+ vro->op2 = SSA_VAL (vro->op2);
+ }