analyze_memory_references (void)
{
  unsigned i;
  bitmap empty;
  htab_t hempty;

  /* Canonical table of memory references, keyed/compared by
     memref_hash/memref_eq; memref_free releases entries when the
     table is destroyed.  */
  memory_accesses.refs
    = htab_create (100, memref_hash, memref_eq, memref_free);
  memory_accesses.refs_list = NULL;

  /* Per-loop side tables, indexed by loop->num.  Each VEC_alloc
     reserves number_of_loops () slots up front; the VEC_quick_push
     calls in the loop below rely on that reservation (quick_push
     does not grow the vector).  */
  memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap,
					    number_of_loops ());
  memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap,
						number_of_loops ());
  memory_accesses.clobbered_vops = VEC_alloc (bitmap, heap,
					      number_of_loops ());
  memory_accesses.vop_ref_map = VEC_alloc (htab_t, heap,
					   number_of_loops ());

  /* Seed every per-loop slot with an empty bitmap (or empty hash
     table for the vop -> ref mapping) so later passes can update
     them without NULL checks.  */
  for (i = 0; i < number_of_loops (); i++)
    {
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.clobbered_vops, empty);
      hempty = htab_create (10, vtoe_hash, vtoe_eq, vtoe_free);
      VEC_quick_push (htab_t, memory_accesses.vop_ref_map, hempty);
    }

  /* Cache for tree_to_aff_combination_expand, filled lazily.  */
  memory_accesses.ttae_cache = NULL;

  gather_mem_refs_in_loops ();
  create_vop_ref_mapping ();
}
+
+/* Returns true if a region of size SIZE1 at position 0 and a region of
+ size SIZE2 at position DIFF cannot overlap. */
+
+static bool
+cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
+{
+ double_int d, bound;
+
+ /* Unless the difference is a constant, we fail. */
+ if (diff->n != 0)
+ return false;
+
+ d = diff->offset;
+ if (double_int_negative_p (d))
+ {
+ /* The second object is before the first one, we succeed if the last
+ element of the second object is before the start of the first one. */
+ bound = double_int_add (d, double_int_add (size2, double_int_minus_one));
+ return double_int_negative_p (bound);
+ }
+ else
+ {
+ /* We succeed if the second object starts after the first one ends. */
+ return double_int_scmp (size1, d) <= 0;
+ }
+}
+
+/* Returns true if MEM1 and MEM2 may alias. TTAE_CACHE is used as a cache in
+ tree_to_aff_combination_expand. */
+
static bool
mem_refs_may_alias_p (tree mem1, tree mem2, struct pointer_map_t **ttae_cache)
{
  /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
     object and their offset differ in such a way that the locations cannot
     overlap, then they cannot alias.  */
  double_int size1, size2;
  aff_tree off1, off2;

  /* Perform basic offset and type-based disambiguation.  */
  if (!refs_may_alias_p (mem1, mem2))
    return false;

  /* The expansion of addresses may be a bit expensive, thus we only do
     the check at -O2 and higher optimization levels.  */
  if (optimize < 2)
    return true;

  /* Express each reference as an affine combination plus a size.  */
  get_inner_reference_aff (mem1, &off1, &size1);
  get_inner_reference_aff (mem2, &off2, &size2);
  aff_combination_expand (&off1, ttae_cache);
  aff_combination_expand (&off2, ttae_cache);
  /* Form OFF2 - OFF1 (scale OFF1 by -1 and add): the displacement of
     MEM2 relative to MEM1's start.  */
  aff_combination_scale (&off1, double_int_minus_one);
  aff_combination_add (&off2, &off1);

  /* Conservative answer: may alias unless the displacement proves the
     two regions disjoint.  */
  if (cannot_overlap_p (&off2, size1, size2))
    return false;

  return true;
}
+
+/* Rewrites location LOC by TMP_VAR. */
+
static void
rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var)
{
  /* Replacing the memory reference by a register changes the
     statement's virtual operands, so flag them for renaming first.  */
  mark_virtual_ops_for_renaming (loc->stmt);
  /* LOC->ref points at the operand slot holding the memory reference;
     overwrite it in place with the scalar temporary.  */
  *loc->ref = tmp_var;
  update_stmt (loc->stmt);
}
+
+/* Adds all locations of REF in LOOP and its subloops to LOCS. */
+
static void
get_all_locs_in_loop (struct loop *loop, mem_ref_p ref,
		      VEC (mem_ref_loc_p, heap) **locs)
{
  mem_ref_locs_p accs;
  unsigned i;
  mem_ref_loc_p loc;
  /* Bitmap of reference ids occurring in LOOP (presumably including
     its subloops, per the all_refs_in_loop name -- an unset bit below
     prunes the whole recursion).  */
  bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
			   loop->num);
  struct loop *subloop;

  if (!bitmap_bit_p (refs, ref->id))
    return;

  /* REF->accesses_in_loop is indexed by loop number but may be shorter
     than the total number of loops; guard the lookup.  */
  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
      > (unsigned) loop->num)
    {
      accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
      if (accs)
	{
	  /* Collect the occurrences recorded directly in LOOP.  */
	  for (i = 0; VEC_iterate (mem_ref_loc_p, accs->locs, i, loc); i++)
	    VEC_safe_push (mem_ref_loc_p, heap, *locs, loc);
	}
    }

  /* Recurse into the immediate subloops to pick up their occurrences.  */
  for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
    get_all_locs_in_loop (subloop, ref, locs);
}
+
+/* Rewrites all references to REF in LOOP by variable TMP_VAR. */
+
+static void
+rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var)
+{
+ unsigned i;
+ mem_ref_loc_p loc;
+ VEC (mem_ref_loc_p, heap) *locs = NULL;
+
+ get_all_locs_in_loop (loop, ref, &locs);
+ for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++)
+ rewrite_mem_ref_loc (loc, tmp_var);
+ VEC_free (mem_ref_loc_p, heap, locs);
+}
+
+/* The name and the length of the currently generated variable
+ for lsm. */
#define MAX_LSM_NAME_LENGTH 40
/* Buffer accumulating the generated name; +1 for the terminating NUL.  */
static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
/* Number of characters currently in lsm_tmp_name, excluding the NUL.  */
static int lsm_tmp_name_length;
+
+/* Adds S to lsm_tmp_name. */
+
+static void
+lsm_tmp_name_add (const char *s)
+{
+ int l = strlen (s) + lsm_tmp_name_length;
+ if (l > MAX_LSM_NAME_LENGTH)
+ return;
+
+ strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
+ lsm_tmp_name_length = l;
+}
+
+/* Stores the name for temporary variable that replaces REF to
+ lsm_tmp_name. */
+
+static void
+gen_lsm_tmp_name (tree ref)
+{
+ const char *name;
+
+ switch (TREE_CODE (ref))
+ {
+ case MISALIGNED_INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case INDIRECT_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_");
+ break;
+
+ case BIT_FIELD_REF:
+ case VIEW_CONVERT_EXPR:
+ case ARRAY_RANGE_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ break;
+
+ case REALPART_EXPR:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_RE");
+ break;
+
+ case IMAGPART_EXPR:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_IM");
+ break;
+
+ case COMPONENT_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_");
+ name = get_name (TREE_OPERAND (ref, 1));
+ if (!name)
+ name = "F";
+ lsm_tmp_name_add ("_");
+ lsm_tmp_name_add (name);
+
+ case ARRAY_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_I");
+ break;
+
+ case SSA_NAME:
+ ref = SSA_NAME_VAR (ref);
+ /* Fallthru. */
+
+ case VAR_DECL:
+ case PARM_DECL:
+ name = get_name (ref);
+ if (!name)
+ name = "D";
+ lsm_tmp_name_add (name);
+ break;
+
+ case STRING_CST:
+ lsm_tmp_name_add ("S");
+ break;
+
+ case RESULT_DECL:
+ lsm_tmp_name_add ("R");
+ break;
+
+ case INTEGER_CST:
+ /* Nothing. */
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+}
+
+/* Determines name for temporary variable that replaces REF.
+ The name is accumulated into the lsm_tmp_name variable.
+ N is added to the name of the temporary. */
+
+char *
+get_lsm_tmp_name (tree ref, unsigned n)
+{
+ char ns[2];
+
+ lsm_tmp_name_length = 0;
+ gen_lsm_tmp_name (ref);
+ lsm_tmp_name_add ("_lsm");
+ if (n < 10)
+ {
+ ns[0] = '0' + n;
+ ns[1] = 0;
+ lsm_tmp_name_add (ns);
+ }
+ return lsm_tmp_name;
+}
+
+/* Executes store motion of memory reference REF from LOOP.
+ Exits from the LOOP are stored in EXITS. The initialization of the
+ temporary variable is put to the preheader of the loop, and assignments
+ to the reference from the temporary variable are emitted to exits. */
+
+static void
+execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)