+/* Gathers the memory reference (if any) in statement STMT of LOOP and
+   records it in the global MEMORY_ACCESSES structure, creating a new
+   mem_ref entry the first time a given reference expression is seen.
+   If STMT accesses memory in a way that cannot be represented as a
+   simple reference, its virtual operand is conservatively marked as
+   clobbered in LOOP instead.  */
+
+static void
+gather_mem_refs_stmt (struct loop *loop, gimple stmt)
+{
+ tree *mem = NULL;
+ hashval_t hash;
+ PTR *slot;
+ mem_ref_p ref;
+ tree vname;
+ bool is_stored;
+ bitmap clvops;
+ unsigned id;
+
+ /* No virtual use means STMT does not access memory at all.  */
+ if (!gimple_vuse (stmt))
+ return;
+
+ /* Try to extract a single analyzable memory reference from STMT;
+    IS_STORED is set when the reference is written to.  */
+ mem = simple_mem_ref_in_stmt (stmt, &is_stored);
+ if (!mem)
+ goto fail;
+
+ /* Look up (or insert) the canonical record for *MEM.  */
+ hash = iterative_hash_expr (*mem, 0);
+ slot = htab_find_slot_with_hash (memory_accesses.refs, *mem, hash, INSERT);
+
+ if (*slot)
+ {
+ ref = (mem_ref_p) *slot;
+ id = ref->id;
+ }
+ else
+ {
+ /* First occurrence of this reference: give it the next free id and
+    register it in the list of all references.  */
+ id = VEC_length (mem_ref_p, memory_accesses.refs_list);
+ ref = mem_ref_alloc (*mem, hash, id);
+ VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
+ *slot = ref;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Memory reference %u: ", id);
+ print_generic_expr (dump_file, ref->mem, TDF_SLIM);
+ fprintf (dump_file, "\n");
+ }
+ }
+ if (is_stored)
+ mark_ref_stored (ref, loop);
+
+ /* Associate the virtual operand of STMT with the reference.  The
+    NULL_TREE check is redundant here (we returned above when there is
+    no VUSE), but kept for symmetry with the failure path below.  */
+ if ((vname = gimple_vuse (stmt)) != NULL_TREE)
+ bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
+ record_mem_ref_loc (ref, loop, stmt, mem);
+ return;
+
+fail:
+ /* Unanalyzable access: mark the virtual operand as clobbered in LOOP.  */
+ clvops = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
+ if ((vname = gimple_vuse (stmt)) != NULL_TREE)
+ bitmap_set_bit (clvops, DECL_UID (SSA_NAME_VAR (vname)));
+}
+
+/* Gathers memory references in all loops, then propagates the
+   per-loop information (clobbered vops, accessed references) from
+   inner loops to the loops enclosing them.  */
+
+static void
+gather_mem_refs_in_loops (void)
+{
+ gimple_stmt_iterator bsi;
+ basic_block bb;
+ struct loop *loop;
+ loop_iterator li;
+ bitmap clvo, clvi;
+ bitmap lrefs, alrefs, alrefso;
+
+ /* Scan every statement of every basic block that lies inside a loop
+    (blocks whose loop father is the dummy root are outside all loops).  */
+ FOR_EACH_BB (bb)
+ {
+ loop = bb->loop_father;
+ if (loop == current_loops->tree_root)
+ continue;
+
+ for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ gather_mem_refs_stmt (loop, gsi_stmt (bsi));
+ }
+
+ /* Propagate the information about clobbered vops and accessed memory
+ references up the loop hierarchy.  Iterating from innermost loops
+ outwards guarantees that a loop's accumulated sets are complete
+ before they are merged into its parent.  */
+ FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
+ {
+ /* ALL_REFS_IN_LOOP also includes the references of LOOP itself.  */
+ lrefs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
+ alrefs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, loop->num);
+ bitmap_ior_into (alrefs, lrefs);
+
+ /* Outermost real loops have nothing to propagate into.  */
+ if (loop_outer (loop) == current_loops->tree_root)
+ continue;
+
+ clvi = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
+ clvo = VEC_index (bitmap, memory_accesses.clobbered_vops,
+ loop_outer (loop)->num);
+ bitmap_ior_into (clvo, clvi);
+
+ alrefso = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
+ loop_outer (loop)->num);
+ bitmap_ior_into (alrefso, alrefs);
+ }
+}
+
+/* Element of the hash table that maps vops to memory references.  */
+
+struct vop_to_refs_elt
+{
+ /* DECL_UID of the vop; also used as the element's hash value.  */
+ unsigned uid;
+
+ /* Ids of all the references that access this vop.  */
+ bitmap refs_all;
+
+ /* Ids of the stored (written) references among REFS_ALL.  */
+ bitmap refs_stored;
+};
+
+/* A hash function for struct vop_to_refs_elt object OBJ.  The DECL_UID
+   stored in the element is used directly as the hash value.  */
+
+static hashval_t
+vtoe_hash (const void *obj)
+{
+ const struct vop_to_refs_elt *const vtoe =
+ (const struct vop_to_refs_elt *) obj;
+
+ return vtoe->uid;
+}
+
+/* An equality function for struct vop_to_refs_elt object OBJ1 with
+   uid of a vop OBJ2.  Note the asymmetry: OBJ2 is a pointer to a bare
+   unsigned uid, not to another hash table element.  */
+
+static int
+vtoe_eq (const void *obj1, const void *obj2)
+{
+ const struct vop_to_refs_elt *const vtoe =
+ (const struct vop_to_refs_elt *) obj1;
+ const unsigned *const uid = (const unsigned *) obj2;
+
+ return vtoe->uid == *uid;
+}
+
+/* A function to free the struct vop_to_refs_elt object OBJ, releasing
+   both of its bitmaps before the element itself.  */
+
+static void
+vtoe_free (void *obj)
+{
+ struct vop_to_refs_elt *const vtoe =
+ (struct vop_to_refs_elt *) obj;
+
+ BITMAP_FREE (vtoe->refs_all);
+ BITMAP_FREE (vtoe->refs_stored);
+ free (vtoe);
+}
+
+/* Records REF to hashtable VOP_TO_REFS for the index VOP.  STORED is true
+   if the reference REF is stored.  The element for VOP is created lazily
+   on first use; VOP itself serves as the hash value (see vtoe_hash).  */
+
+static void
+record_vop_access (htab_t vop_to_refs, unsigned vop, unsigned ref, bool stored)
+{
+ void **slot = htab_find_slot_with_hash (vop_to_refs, &vop, vop, INSERT);
+ struct vop_to_refs_elt *vtoe;
+
+ if (!*slot)
+ {
+ /* First reference touching VOP: allocate its element.  */
+ vtoe = XNEW (struct vop_to_refs_elt);
+ vtoe->uid = vop;
+ vtoe->refs_all = BITMAP_ALLOC (NULL);
+ vtoe->refs_stored = BITMAP_ALLOC (NULL);
+ *slot = vtoe;
+ }
+ else
+ vtoe = (struct vop_to_refs_elt *) *slot;
+
+ bitmap_set_bit (vtoe->refs_all, ref);
+ if (stored)
+ bitmap_set_bit (vtoe->refs_stored, ref);
+}
+
+/* Returns the set of references that access VOP according to the table
+   VOP_TO_REFS.  Assumes an entry for VOP exists -- htab_find_with_hash
+   returning NULL would be dereferenced here.  */
+
+static bitmap
+get_vop_accesses (htab_t vop_to_refs, unsigned vop)
+{
+ struct vop_to_refs_elt *const vtoe =
+ (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop);
+ return vtoe->refs_all;
+}
+
+/* Returns the set of stores that access VOP according to the table
+   VOP_TO_REFS.  Like get_vop_accesses, assumes an entry for VOP
+   exists in the table.  */
+
+static bitmap
+get_vop_stores (htab_t vop_to_refs, unsigned vop)
+{
+ struct vop_to_refs_elt *const vtoe =
+ (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop);
+ return vtoe->refs_stored;
+}
+
+/* Adds REF to mapping from virtual operands to references in LOOP.
+   Vops that are clobbered in LOOP are skipped, since for them the
+   reference sets would not be meaningful.  */
+
+static void
+add_vop_ref_mapping (struct loop *loop, mem_ref_p ref)
+{
+ htab_t map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num);
+ /* Whether REF is written to somewhere in LOOP.  */
+ bool stored = bitmap_bit_p (ref->stored, loop->num);
+ bitmap clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops,
+ loop->num);
+ bitmap_iterator bi;
+ unsigned vop;
+
+ /* Record REF for each of its vops that is not clobbered in LOOP.  */
+ EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, vop, bi)
+ {
+ record_vop_access (map, vop, ref->id, stored);
+ }
+}
+
+/* Create a mapping from virtual operands to references that touch them
+   in LOOP.  Each reference of LOOP is also registered in every loop
+   enclosing LOOP, up to (but not including) the dummy tree root.  */
+
+static void
+create_vop_ref_mapping_loop (struct loop *loop)
+{
+ bitmap refs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
+ struct loop *sloop;
+ bitmap_iterator bi;
+ unsigned i;
+ mem_ref_p ref;
+
+ EXECUTE_IF_SET_IN_BITMAP (refs, 0, i, bi)
+ {
+ ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
+ /* Walk from LOOP outwards so enclosing loops see this reference too.  */
+ for (sloop = loop; sloop != current_loops->tree_root; sloop = loop_outer (sloop))
+ add_vop_ref_mapping (sloop, ref);
+ }
+}
+
+/* For each non-clobbered virtual operand and each loop, record the memory
+   references in this loop that touch the operand.  Simply applies
+   create_vop_ref_mapping_loop to every loop; iteration order does not
+   matter here.  */
+
+static void
+create_vop_ref_mapping (void)
+{
+ loop_iterator li;
+ struct loop *loop;
+
+ FOR_EACH_LOOP (li, loop, 0)
+ {
+ create_vop_ref_mapping_loop (loop);
+ }
+}
+
+/* Gathers information about memory accesses in the loops: initializes
+   the MEMORY_ACCESSES structure (hash table of references, and the
+   per-loop bitmaps and vop-to-reference maps, all indexed by loop
+   number), then fills it in.  */
+
+static void
+analyze_memory_references (void)
+{
+ unsigned i;
+ bitmap empty;
+ htab_t hempty;
+
+ memory_accesses.refs
+ = htab_create (100, memref_hash, memref_eq, memref_free);
+ memory_accesses.refs_list = NULL;
+ memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap,
+ number_of_loops ());
+ memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap,
+ number_of_loops ());
+ memory_accesses.clobbered_vops = VEC_alloc (bitmap, heap,
+ number_of_loops ());
+ memory_accesses.vop_ref_map = VEC_alloc (htab_t, heap,
+ number_of_loops ());
+
+ /* One empty bitmap (resp. hash table) per loop; quick_push is safe
+    because the vectors were allocated with number_of_loops () slots.  */
+ for (i = 0; i < number_of_loops (); i++)
+ {
+ empty = BITMAP_ALLOC (NULL);
+ VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty);
+ empty = BITMAP_ALLOC (NULL);
+ VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty);
+ empty = BITMAP_ALLOC (NULL);
+ VEC_quick_push (bitmap, memory_accesses.clobbered_vops, empty);
+ hempty = htab_create (10, vtoe_hash, vtoe_eq, vtoe_free);
+ VEC_quick_push (htab_t, memory_accesses.vop_ref_map, hempty);
+ }
+
+ memory_accesses.ttae_cache = NULL;
+
+ gather_mem_refs_in_loops ();
+ create_vop_ref_mapping ();
+}
+
+/* Returns true if a region of size SIZE1 at position 0 and a region of
+   size SIZE2 at position DIFF cannot overlap.  Returning false means
+   "may overlap" -- the conservative answer when DIFF is not constant.  */
+
+static bool
+cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
+{
+ double_int d, bound;
+
+ /* Unless the difference is a constant, we fail.  */
+ if (diff->n != 0)
+ return false;
+
+ d = diff->offset;
+ if (double_int_negative_p (d))
+ {
+ /* The second object is before the first one, we succeed if the last
+ element of the second object is before the start of the first one.  */
+ bound = double_int_add (d, double_int_add (size2, double_int_minus_one));
+ return double_int_negative_p (bound);
+ }
+ else
+ {
+ /* We succeed if the second object starts after the first one ends.  */
+ return double_int_scmp (size1, d) <= 0;
+ }
+}
+
+/* Returns true if MEM1 and MEM2 may alias.  TTAE_CACHE is used as a cache in
+   tree_to_aff_combination_expand.  */
+
+static bool
+mem_refs_may_alias_p (tree mem1, tree mem2, struct pointer_map_t **ttae_cache)
+{
+ /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
+ object and their offset differ in such a way that the locations cannot
+ overlap, then they cannot alias.  */
+ double_int size1, size2;
+ aff_tree off1, off2;
+
+ /* Perform basic offset and type-based disambiguation.  */
+ if (!refs_may_alias_p (mem1, mem2))
+ return false;
+
+ /* The expansion of addresses may be a bit expensive, thus we only do
+ the check at -O2 and higher optimization levels.  */
+ if (optimize < 2)
+ return true;
+
+ get_inner_reference_aff (mem1, &off1, &size1);
+ get_inner_reference_aff (mem2, &off2, &size2);
+ aff_combination_expand (&off1, ttae_cache);
+ aff_combination_expand (&off2, ttae_cache);
+ /* Compute OFF2 - OFF1, i.e. the distance between the two references.  */
+ aff_combination_scale (&off1, double_int_minus_one);
+ aff_combination_add (&off2, &off1);
+
+ if (cannot_overlap_p (&off2, size1, size2))
+ return false;
+
+ /* Could not disprove overlap; conservatively assume aliasing.  */
+ return true;
+}
+
+/* Rewrites location LOC by TMP_VAR: the memory reference in LOC's
+   statement is replaced by TMP_VAR, the statement's virtual operands
+   are marked for renaming, and the statement is updated.  */
+
+static void
+rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var)
+{
+ mark_virtual_ops_for_renaming (loc->stmt);
+ *loc->ref = tmp_var;
+ update_stmt (loc->stmt);
+}
+
+/* Adds all locations of REF in LOOP and its subloops to LOCS,
+   recursing over the loop subtree rooted at LOOP.  */
+
+static void
+get_all_locs_in_loop (struct loop *loop, mem_ref_p ref,
+ VEC (mem_ref_loc_p, heap) **locs)
+{
+ mem_ref_locs_p accs;
+ unsigned i;
+ mem_ref_loc_p loc;
+ bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
+ loop->num);
+ struct loop *subloop;
+
+ /* If REF is not accessed anywhere in LOOP (including subloops),
+    there is nothing to collect in this subtree.  */
+ if (!bitmap_bit_p (refs, ref->id))
+ return;
+
+ /* ACCESSES_IN_LOOP is indexed by loop number but may be shorter than
+    LOOP->num + 1; guard the access, and the slot itself may be NULL.  */
+ if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
+ > (unsigned) loop->num)
+ {
+ accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
+ if (accs)
+ {
+ for (i = 0; VEC_iterate (mem_ref_loc_p, accs->locs, i, loc); i++)
+ VEC_safe_push (mem_ref_loc_p, heap, *locs, loc);
+ }
+ }
+
+ /* Collect the locations in the immediate subloops as well.  */
+ for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
+ get_all_locs_in_loop (subloop, ref, locs);
+}
+
+/* Rewrites all references to REF in LOOP (and its subloops) by variable
+   TMP_VAR.  Collects the locations first, rewrites each, then frees
+   the temporary vector.  */
+
+static void
+rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var)
+{
+ unsigned i;
+ mem_ref_loc_p loc;
+ VEC (mem_ref_loc_p, heap) *locs = NULL;
+
+ get_all_locs_in_loop (loop, ref, &locs);
+ for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++)
+ rewrite_mem_ref_loc (loc, tmp_var);
+ VEC_free (mem_ref_loc_p, heap, locs);
+}
+
+/* The name and the length of the currently generated variable
+   for lsm (load/store motion).  */
+#define MAX_LSM_NAME_LENGTH 40
+/* The extra byte holds the terminating NUL.  */
+static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
+static int lsm_tmp_name_length;
+
+/* Adds S to lsm_tmp_name.  If appending S would exceed
+   MAX_LSM_NAME_LENGTH, S is silently dropped -- the generated name is
+   only a debugging aid, so truncation is harmless.  */
+
+static void
+lsm_tmp_name_add (const char *s)
+{
+ int l = strlen (s) + lsm_tmp_name_length;
+ if (l > MAX_LSM_NAME_LENGTH)
+ return;
+
+ /* The buffer has room for the NUL: its size is MAX_LSM_NAME_LENGTH + 1.  */
+ strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
+ lsm_tmp_name_length = l;
+}
+
+/* Stores the name for temporary variable that replaces REF to
+ lsm_tmp_name. */
+
+static void
+gen_lsm_tmp_name (tree ref)
+{
+ const char *name;
+
+ switch (TREE_CODE (ref))
+ {
+ case MISALIGNED_INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case INDIRECT_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_");
+ break;
+
+ case BIT_FIELD_REF:
+ case VIEW_CONVERT_EXPR:
+ case ARRAY_RANGE_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ break;
+
+ case REALPART_EXPR: