+/* Construct a MEM_REF that would reference a part of aggregate BASE of type
+ EXP_TYPE at the given OFFSET. If BASE is something for which
+ get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
+ to insert new statements either before or below the current one as specified
+ by INSERT_AFTER. This function is not capable of handling bitfields. */
+
+tree
+build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
+ tree exp_type, gimple_stmt_iterator *gsi,
+ bool insert_after)
+{
+ tree prev_base = base;
+ tree off;
+ HOST_WIDE_INT base_offset;
+ unsigned HOST_WIDE_INT misalign;
+ unsigned int align;
+
+ /* OFFSET is in bits; bitfields are explicitly unsupported, so it must be
+ a whole number of bytes. */
+ gcc_checking_assert (offset % BITS_PER_UNIT == 0);
+
+ base = get_addr_base_and_unit_offset (base, &base_offset);
+
+ /* get_addr_base_and_unit_offset returns NULL for references with a variable
+ offset such as array[var_index]. */
+ if (!base)
+ {
+ gimple stmt;
+ tree tmp, addr;
+
+ /* The base address cannot be expressed as a constant-offset MEM_REF
+ directly; materialize &PREV_BASE into a fresh SSA name and use that
+ as the MEM_REF base pointer instead. */
+ gcc_checking_assert (gsi);
+ tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
+ add_referenced_var (tmp);
+ tmp = make_ssa_name (tmp, NULL);
+ addr = build_fold_addr_expr (unshare_expr (prev_base));
+ STRIP_USELESS_TYPE_CONVERSION (addr);
+ stmt = gimple_build_assign (tmp, addr);
+ gimple_set_location (stmt, loc);
+ SSA_NAME_DEF_STMT (tmp) = stmt;
+ if (insert_after)
+ gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
+ else
+ gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+ update_stmt (stmt);
+
+ off = build_int_cst (reference_alias_ptr_type (prev_base),
+ offset / BITS_PER_UNIT);
+ base = tmp;
+ }
+ else if (TREE_CODE (base) == MEM_REF)
+ {
+ /* Fold the extra byte offset into the MEM_REF's existing constant
+ offset (operand 1) and reuse its base pointer (operand 0). */
+ off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
+ base_offset + offset / BITS_PER_UNIT);
+ off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
+ base = unshare_expr (TREE_OPERAND (base, 0));
+ }
+ else
+ {
+ off = build_int_cst (reference_alias_ptr_type (base),
+ base_offset + offset / BITS_PER_UNIT);
+ base = build_fold_addr_expr (unshare_expr (base));
+ }
+
+ /* If prev_base were always an originally performed access
+ we can extract more optimistic alignment information
+ by looking at the access mode. That would constrain the
+ alignment of base + base_offset which we would need to
+ adjust according to offset.
+ ??? But it is not at all clear that prev_base is an access
+ that was in the IL that way, so be conservative for now. */
+ align = get_pointer_alignment_1 (base, &misalign);
+ /* OFF may be negative; sign-extend it to its type's precision and add
+ its value in bits to the known misalignment of BASE. */
+ misalign += (double_int_sext (tree_to_double_int (off),
+ TYPE_PRECISION (TREE_TYPE (off))).low
+ * BITS_PER_UNIT);
+ misalign = misalign & (align - 1);
+ /* With a nonzero misalignment the best guaranteed alignment is the
+ lowest set bit of the misalignment. */
+ if (misalign != 0)
+ align = (misalign & -misalign);
+ /* Record the (possibly reduced) alignment in the access type so later
+ expansion does not assume the type's natural alignment. */
+ if (align < TYPE_ALIGN (exp_type))
+ exp_type = build_aligned_type (exp_type, align);
+
+ return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
+}
+
+/* Enable stack allocation for VECs of trees; build_ref_for_model below
+ uses such a VEC to hold a chain of COMPONENT_REFs. */
+DEF_VEC_ALLOC_P_STACK (tree);
+#define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)
+
+/* Construct a memory reference to a part of an aggregate BASE at the given
+ OFFSET and of the type of MODEL. In case this is a chain of references
+ to components, the function will replicate the chain of COMPONENT_REFs of
+ the expression of MODEL to access it. GSI and INSERT_AFTER have the same
+ meaning as in build_ref_for_offset. */
+
+static tree
+build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
+ struct access *model, gimple_stmt_iterator *gsi,
+ bool insert_after)
+{
+ tree type = model->type, t;
+ VEC(tree,stack) *cr_stack = NULL;
+
+ if (TREE_CODE (model->expr) == COMPONENT_REF)
+ {
+ tree expr = model->expr;
+
+ /* Create a stack of the COMPONENT_REFs so later we can walk them in
+ order from inner to outer. */
+ cr_stack = VEC_alloc (tree, stack, 6);
+
+ do {
+ tree field = TREE_OPERAND (expr, 1);
+ tree cr_offset = component_ref_field_offset (expr);
+ /* Bit position of FIELD within its record: the byte offset returned
+ by component_ref_field_offset converted to bits, plus the residual
+ bit offset from DECL_FIELD_BIT_OFFSET. */
+ HOST_WIDE_INT bit_pos
+ = tree_low_cst (cr_offset, 1) * BITS_PER_UNIT
+ + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
+
+ /* We can be called with a model different from the one associated
+ with BASE so we need to avoid going up the chain too far. */
+ if (offset - bit_pos < 0)
+ break;
+
+ /* Rebase OFFSET to be relative to the containing object and remember
+ this COMPONENT_REF for the rebuilding pass below. */
+ offset -= bit_pos;
+ VEC_safe_push (tree, stack, cr_stack, expr);
+
+ expr = TREE_OPERAND (expr, 0);
+ type = TREE_TYPE (expr);
+ } while (TREE_CODE (expr) == COMPONENT_REF);
+ }
+
+ /* Build the innermost reference; TYPE and OFFSET now describe the
+ outermost object we peeled back to (or MODEL itself if there was no
+ COMPONENT_REF chain). */
+ t = build_ref_for_offset (loc, base, offset, type, gsi, insert_after);
+
+ if (TREE_CODE (model->expr) == COMPONENT_REF)
+ {
+ unsigned i;
+ tree expr;
+
+ /* Now replicate the chain of COMPONENT_REFs from inner to outer. */
+ FOR_EACH_VEC_ELT_REVERSE (tree, cr_stack, i, expr)
+ {
+ tree field = TREE_OPERAND (expr, 1);
+ /* Operand 2 is carried over from the model's COMPONENT_REF
+ (presumably the field-offset operand — confirm against the
+ GENERIC tree documentation). */
+ t = fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (field), t, field,
+ TREE_OPERAND (expr, 2));
+ }
+
+ VEC_free (tree, stack, cr_stack);
+ }
+
+ return t;
+}
+
+/* Construct a memory reference consisting of component_refs and array_refs to
+ a part of an aggregate *RES (which is of type TYPE). The requested part
+ should have type EXP_TYPE at the given OFFSET. This function might not
+ succeed, it returns true when it does and only then *RES points to something
+ meaningful. This function should be used only to build expressions that we
+ might need to present to user (e.g. in warnings). In all other situations,
+ build_ref_for_model or build_ref_for_offset should be used instead. */