+ if (size_tree != NULL_TREE)
+ {
+ if (! host_integerp (size_tree, 1))
+ bitsize = -1;
+ else
+ bitsize = TREE_INT_CST_LOW (size_tree);
+ }
+
+ /* Initially, maxsize is the same as the accessed element size.
+ In the following it will only grow (or become -1). */
+ maxsize = bitsize;
+
+ /* Compute cumulative bit-offset for nested component-refs and array-refs,
+ and find the ultimate containing object. */
+ while (1)
+ {
+ switch (TREE_CODE (exp))
+ {
+ case BIT_FIELD_REF:
+ bit_offset += tree_low_cst (TREE_OPERAND (exp, 2), 0);
+ break;
+
+ case COMPONENT_REF:
+ {
+ tree field = TREE_OPERAND (exp, 1);
+ tree this_offset = component_ref_field_offset (exp);
+
+ if (this_offset && TREE_CODE (this_offset) == INTEGER_CST)
+ {
+ HOST_WIDE_INT hthis_offset = tree_low_cst (this_offset, 0);
+
+ hthis_offset *= BITS_PER_UNIT;
+ bit_offset += hthis_offset;
+ bit_offset += tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
+ }
+ else
+ {
+ tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ /* We need to adjust maxsize to the whole structure bitsize.
+ But we can subtract any constant offset seen so far,
+ because that would get us out of the structure otherwise. */
+ if (maxsize != -1 && csize && host_integerp (csize, 1))
+ maxsize = TREE_INT_CST_LOW (csize) - bit_offset;
+ else
+ maxsize = -1;
+ }
+ }
+ break;
+
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ {
+ tree index = TREE_OPERAND (exp, 1);
+ tree low_bound = array_ref_low_bound (exp);
+ tree unit_size = array_ref_element_size (exp);
+
+ /* If the resulting bit-offset is constant, track it. */
+ if (host_integerp (index, 0)
+ && host_integerp (low_bound, 0)
+ && host_integerp (unit_size, 1))
+ {
+ HOST_WIDE_INT hindex = tree_low_cst (index, 0);
+
+ hindex -= tree_low_cst (low_bound, 0);
+ hindex *= tree_low_cst (unit_size, 1);
+ hindex *= BITS_PER_UNIT;
+ bit_offset += hindex;
+
+ /* An array ref with a constant index up in the structure
+ hierarchy will constrain the size of any variable array ref
+ lower in the access hierarchy. */
+ seen_variable_array_ref = false;
+ }
+ else
+ {
+ tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ /* We need to adjust maxsize to the whole array bitsize.
+ But we can subtract any constant offset seen so far,
+ because that would get us outside of the array otherwise. */
+ if (maxsize != -1 && asize && host_integerp (asize, 1))
+ maxsize = TREE_INT_CST_LOW (asize) - bit_offset;
+ else
+ maxsize = -1;
+
+ /* Remember that we have seen an array ref with a variable
+ index. */
+ seen_variable_array_ref = true;
+ }
+ }
+ break;
+
+ case REALPART_EXPR:
+ break;
+
+ case IMAGPART_EXPR:
+ bit_offset += bitsize;
+ break;
+
+ case VIEW_CONVERT_EXPR:
+ /* ??? We probably should give up here and bail out. */
+ break;
+
+ default:
+ goto done;
+ }
+
+ exp = TREE_OPERAND (exp, 0);
+ }
+ done:
+
+ /* We need to deal with variable arrays at the end of structures, such as
+ struct { int length; int a[1]; } x; x.a[d]
+ struct { struct { int a; int b; } a[1]; } x; x.a[d].a
+ struct { struct { int a[1]; } a[1]; } x; x.a[0][d], x.a[d][0]
+ where we do not know maxsize for variable index accesses to
+ the array. The simplest way to conservatively deal with this
+ is to punt in the case that offset + maxsize reaches the
+ base type boundary. */
+ if (seen_variable_array_ref
+ && maxsize != -1
+ && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
+ && bit_offset + maxsize
+ == (signed)TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))))
+ maxsize = -1;
+
+ /* ??? Due to negative offsets in ARRAY_REF we can end up with
+ negative bit_offset here. We might want to store a zero offset
+ in this case. */
+ *poffset = bit_offset;
+ *psize = bitsize;
+ *pmax_size = maxsize;
+
+ return exp;
+}
+
+
+/* Return memory reference statistics for variable VAR in function FN.
+ This is computed by alias analysis, but it is not kept
+ incrementally up-to-date. So, these stats are only accurate if
+ pass_may_alias has been run recently. If no alias information
+ exists, this function returns NULL. */
+
+mem_sym_stats_t
+mem_sym_stats (struct function *fn, tree var)
+{
+ void **slot;
+ struct pointer_map_t *stats_map = gimple_mem_ref_stats (fn)->mem_sym_stats;
+
+ if (stats_map == NULL)
+ return NULL;
+
+ slot = pointer_map_contains (stats_map, var);
+ if (slot == NULL)
+ return NULL;
+
+ return (mem_sym_stats_t) *slot;