/* Loop invariant motion.
- Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
case BIT_FIELD_REF:
case VIEW_CONVERT_EXPR:
- case ARRAY_RANGE_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
nxt = &TREE_OPERAND (*addr_p, 0);
break;
case ARRAY_REF:
+ case ARRAY_RANGE_REF:
nxt = &TREE_OPERAND (*addr_p, 0);
if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
return false;
return MOVE_POSSIBLE;
}
- if (TREE_CODE (stmt) != MODIFY_EXPR)
+ if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
return MOVE_IMPOSSIBLE;
if (stmt_ends_bb_p (stmt))
if (stmt_ann (stmt)->has_volatile_ops)
return MOVE_IMPOSSIBLE;
- lhs = TREE_OPERAND (stmt, 0);
+ lhs = GIMPLE_STMT_OPERAND (stmt, 0);
if (TREE_CODE (lhs) == SSA_NAME
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
return MOVE_IMPOSSIBLE;
- rhs = TREE_OPERAND (stmt, 1);
+ rhs = GIMPLE_STMT_OPERAND (stmt, 1);
if (TREE_SIDE_EFFECTS (rhs))
return MOVE_IMPOSSIBLE;
if (class != tcc_unary
&& class != tcc_binary
&& class != tcc_expression
+ && class != tcc_vl_exp
&& class != tcc_comparison)
return NULL;
- nops = TREE_CODE_LENGTH (TREE_CODE (expr));
+ nops = TREE_OPERAND_LENGTH (expr);
for (i = 0; i < nops; i++)
{
aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
if (TREE_CODE (stmt) == COND_EXPR)
return LIM_EXPENSIVE;
- rhs = TREE_OPERAND (stmt, 1);
+ rhs = GENERIC_TREE_OPERAND (stmt, 1);
/* Hoisting memory references out should almost surely be a win. */
if (stmt_references_memory_p (stmt))
if (!add_dependency (val, lim_data, loop, true))
return false;
- FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
+ FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
if (!add_dependency (val, lim_data, loop, false))
return false;
/* If the divisor is invariant, convert a/b to a*(1/b), allowing the reciprocal
to be hoisted out of the loop and saving an expensive divide. */
if (pos == MOVE_POSSIBLE
- && (rhs = TREE_OPERAND (stmt, 1)) != NULL
+ && (rhs = GENERIC_TREE_OPERAND (stmt, 1)) != NULL
&& TREE_CODE (rhs) == RDIV_EXPR
&& flag_unsafe_math_optimizations
&& !flag_trapping_math
&& outermost_invariant_loop_expr (rhs,
loop_containing_stmt (stmt)) == NULL)
{
- tree lhs, stmt1, stmt2, var, name;
+ tree lhs, stmt1, stmt2, var, name, tmp;
- lhs = TREE_OPERAND (stmt, 0);
+ lhs = GENERIC_TREE_OPERAND (stmt, 0);
- /* stmt must be MODIFY_EXPR. */
+ /* stmt must be GIMPLE_MODIFY_STMT. */
var = create_tmp_var (TREE_TYPE (rhs), "reciptmp");
add_referenced_var (var);
- stmt1 = build2 (MODIFY_EXPR, void_type_node, var,
- build2 (RDIV_EXPR, TREE_TYPE (rhs),
- build_real (TREE_TYPE (rhs), dconst1),
- TREE_OPERAND (rhs, 1)));
+ tmp = build2 (RDIV_EXPR, TREE_TYPE (rhs),
+ build_real (TREE_TYPE (rhs), dconst1),
+ TREE_OPERAND (rhs, 1));
+ stmt1 = build_gimple_modify_stmt (var, tmp);
name = make_ssa_name (var, stmt1);
- TREE_OPERAND (stmt1, 0) = name;
- stmt2 = build2 (MODIFY_EXPR, void_type_node, lhs,
- build2 (MULT_EXPR, TREE_TYPE (rhs),
- name, TREE_OPERAND (rhs, 0)));
+ GIMPLE_STMT_OPERAND (stmt1, 0) = name;
+ tmp = build2 (MULT_EXPR, TREE_TYPE (rhs),
+ name, TREE_OPERAND (rhs, 0));
+ stmt2 = build_gimple_modify_stmt (lhs, tmp);
/* Replace division stmt with reciprocal and multiply stmts.
The multiply stmt is not invariant, so update iterator
fini_walk_dominator_tree (&walk_data);
}
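To illustrate the reciprocal transformation above, here is a minimal source-level sketch of its effect; the function and variable names are hypothetical and the pass itself rewrites GIMPLE statements, not C source. It assumes the divisor is loop-invariant and -funsafe-math-optimizations is in effect.

/* Before the transformation: a divide in every iteration.  */
void
scale_before (float *a, int n, float b)
{
  int i;

  for (i = 0; i < n; i++)
    a[i] = a[i] / b;        /* expensive divide, b is invariant */
}

/* After the transformation: the reciprocal 1/b is loop-invariant and is
   hoisted to the preheader, leaving only a multiply in the loop body.  */
void
scale_after (float *a, int n, float b)
{
  int i;
  float recip = 1.0f / b;   /* computed once before the loop */

  for (i = 0; i < n; i++)
    a[i] = a[i] * recip;
}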
-/* Commits edge insertions and updates loop structures. */
-
-void
-loop_commit_inserts (void)
-{
- unsigned old_last_basic_block, i;
- basic_block bb;
-
- old_last_basic_block = last_basic_block;
- bsi_commit_edge_inserts ();
- for (i = old_last_basic_block; i < (unsigned) last_basic_block; i++)
- {
- bb = BASIC_BLOCK (i);
- add_bb_to_loop (bb,
- find_common_loop (single_pred (bb)->loop_father,
- single_succ (bb)->loop_father));
- }
-}
-
/* Hoist the statements in basic block BB out of the loops prescribed by
data stored in LIM_DATA structures associated with each statement. Callback
for walk_dominator_tree. */
walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
fini_walk_dominator_tree (&walk_data);
- loop_commit_inserts ();
+ bsi_commit_edge_inserts ();
if (need_ssa_update_p ())
rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
}
if (class != tcc_unary
&& class != tcc_binary
&& class != tcc_expression
+ && class != tcc_vl_exp
&& class != tcc_comparison)
return;
- nops = TREE_CODE_LENGTH (TREE_CODE (expr));
+ nops = TREE_OPERAND_LENGTH (expr);
for (i = 0; i < nops; i++)
force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
}
/* Records request for store motion of memory reference REF from LOOP.
MEM_REFS is the list of occurrences of the reference REF inside LOOP;
these references are rewritten by a new temporary variable.
- Exits from the LOOP are stored in EXITS, there are N_EXITS of them.
- The initialization of the temporary variable is put to the preheader
- of the loop, and assignments to the reference from the temporary variable
- are emitted to exits. */
+ Exits from the LOOP are stored in EXITS. The initialization of the
+ temporary variable is placed in the preheader of the loop, and assignments
+ to the reference from the temporary variable are emitted on the exits. */
static void
-schedule_sm (struct loop *loop, edge *exits, unsigned n_exits, tree ref,
+schedule_sm (struct loop *loop, VEC (edge, heap) *exits, tree ref,
struct mem_ref_loc *mem_refs)
{
struct mem_ref_loc *aref;
unsigned i;
tree load, store;
struct fmt_data fmt_data;
+ edge ex;
if (dump_file && (dump_flags & TDF_DETAILS))
{
LIM_DATA (aref->stmt)->sm_done = true;
/* Emit the load & stores. */
- load = build2 (MODIFY_EXPR, void_type_node, tmp_var, ref);
+ load = build_gimple_modify_stmt (tmp_var, ref);
get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
LIM_DATA (load)->max_loop = loop;
LIM_DATA (load)->tgt_loop = loop;
all dependencies. */
bsi_insert_on_edge (loop_latch_edge (loop), load);
- for (i = 0; i < n_exits; i++)
+ for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
{
- store = build2 (MODIFY_EXPR, void_type_node,
- unshare_expr (ref), tmp_var);
- bsi_insert_on_edge (exits[i], store);
+ store = build_gimple_modify_stmt (unshare_expr (ref), tmp_var);
+ bsi_insert_on_edge (ex, store);
}
}
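As a hedged source-level sketch of what schedule_sm arranges (hypothetical names; the real transformation is performed on GIMPLE via the load and store statements built above): the memory reference is loaded into a temporary before the loop, the loop body works on the temporary, and the value is stored back on the exit edges.

/* Before store motion: *p is loaded and stored in every iteration.  */
void
accumulate_before (int *p, const int *a, int n)
{
  int i;

  for (i = 0; i < n; i++)
    *p = *p + a[i];
}

/* After store motion: the reference is read once before the loop,
   updated through a temporary, and written back on the loop exit.  */
void
accumulate_after (int *p, const int *a, int n)
{
  int i;
  int tmp = *p;             /* load emitted in the preheader */

  for (i = 0; i < n; i++)
    tmp = tmp + a[i];
  *p = tmp;                 /* store emitted on the exit edge */
}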
is true, prepare the statements that load the value of the memory reference
to a temporary variable in the loop preheader, store it back on the loop
exits, and replace all the references inside LOOP by this temporary variable.
- LOOP has N_EXITS stored in EXITS. CLOBBERED_VOPS is the bitmap of virtual
+ EXITS is the list of exits of LOOP. CLOBBERED_VOPS is the bitmap of virtual
operands that are clobbered by a call or accessed through multiple references
in loop. */
static void
-determine_lsm_ref (struct loop *loop, edge *exits, unsigned n_exits,
+determine_lsm_ref (struct loop *loop, VEC (edge, heap) *exits,
bitmap clobbered_vops, struct mem_ref *ref)
{
struct mem_ref_loc *aref;
return;
}
- schedule_sm (loop, exits, n_exits, ref->mem, ref->locs);
+ schedule_sm (loop, exits, ref->mem, ref->locs);
}
/* Hoists memory references MEM_REFS out of LOOP. CLOBBERED_VOPS is the list
of vops clobbered by call in loop or accessed by multiple memory references.
- EXITS is the list of N_EXITS exit edges of the LOOP. */
+ EXITS is the list of exit edges of the LOOP. */
static void
hoist_memory_references (struct loop *loop, struct mem_ref *mem_refs,
- bitmap clobbered_vops, edge *exits, unsigned n_exits)
+ bitmap clobbered_vops, VEC (edge, heap) *exits)
{
struct mem_ref *ref;
for (ref = mem_refs; ref; ref = ref->next)
- determine_lsm_ref (loop, exits, n_exits, clobbered_vops, ref);
+ determine_lsm_ref (loop, exits, clobbered_vops, ref);
}
-/* Checks whether LOOP (with N_EXITS exits stored in EXITS array) is suitable
+/* Checks whether LOOP (with exits stored in the vector EXITS) is suitable
for a store motion optimization (i.e. whether we can insert statements
on its exits). */
static bool
-loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED, edge *exits,
- unsigned n_exits)
+loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
+ VEC (edge, heap) *exits)
{
unsigned i;
+ edge ex;
- for (i = 0; i < n_exits; i++)
- if (exits[i]->flags & EDGE_ABNORMAL)
+ for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
+ if (ex->flags & EDGE_ABNORMAL)
return false;
return true;
return;
/* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns. */
- if (TREE_CODE (stmt) != MODIFY_EXPR)
+ if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
goto fail;
- lhs = &TREE_OPERAND (stmt, 0);
- rhs = &TREE_OPERAND (stmt, 1);
+ lhs = &GIMPLE_STMT_OPERAND (stmt, 0);
+ rhs = &GIMPLE_STMT_OPERAND (stmt, 1);
if (TREE_CODE (*lhs) == SSA_NAME)
{
}
ref->is_stored |= is_stored;
- FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi,
- SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
+ FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
record_mem_ref_loc (&ref->locs, stmt, mem);
return;
fail:
- FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi,
- SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
+ FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
bitmap_set_bit (clobbered_vops, DECL_UID (SSA_NAME_VAR (vname)));
}
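For reference, a small hedged illustration of the statement shapes the pattern match above accepts; the variables are hypothetical, and the matching is actually done on GIMPLE, where each assignment contains at most one memory reference.

void
candidate_patterns (int *mem, int x)
{
  int t;

  *mem = x;   /* MEM = SSA_NAME: recorded as a store */
  *mem = 42;  /* MEM = invariant: also recorded as a store */
  t = *mem;   /* SSA_NAME = MEM: recorded as a load */
  (void) t;   /* silence unused-variable warnings */
}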
static void
determine_lsm_loop (struct loop *loop)
{
- unsigned n_exits;
- edge *exits = get_loop_exit_edges (loop, &n_exits);
+ VEC (edge, heap) *exits = get_loop_exit_edges (loop);
bitmap clobbered_vops;
struct mem_ref *mem_refs;
- if (!loop_suitable_for_sm (loop, exits, n_exits))
+ if (!loop_suitable_for_sm (loop, exits))
{
- free (exits);
+ VEC_free (edge, heap, exits);
return;
}
find_more_ref_vops (mem_refs, clobbered_vops);
/* Hoist all suitable memory references. */
- hoist_memory_references (loop, mem_refs, clobbered_vops, exits, n_exits);
+ hoist_memory_references (loop, mem_refs, clobbered_vops, exits);
free_mem_refs (mem_refs);
- free (exits);
+ VEC_free (edge, heap, exits);
BITMAP_FREE (clobbered_vops);
}
/* Try to perform store motion for all memory references modified inside
- any of LOOPS. */
+ loops. */
static void
-determine_lsm (struct loops *loops)
+determine_lsm (void)
{
struct loop *loop;
-
- if (!loops->tree_root->inner)
- return;
+ loop_iterator li;
/* Traverse the loops from the outermost inward and perform store motion
where suitable. */
- loop = loops->tree_root->inner;
- while (1)
+ FOR_EACH_LOOP (li, loop, 0)
{
determine_lsm_loop (loop);
-
- if (loop->inner)
- {
- loop = loop->inner;
- continue;
- }
- while (!loop->next)
- {
- loop = loop->outer;
- if (loop == loops->tree_root)
- {
- loop_commit_inserts ();
- return;
- }
- }
- loop = loop->next;
}
+
+ bsi_commit_edge_inserts ();
}
/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
fill_always_executed_in (loop, contains_call);
}
-/* Compute the global information needed by the loop invariant motion pass.
- LOOPS is the loop tree. */
+/* Compute the global information needed by the loop invariant motion pass. */
static void
-tree_ssa_lim_initialize (struct loops *loops)
+tree_ssa_lim_initialize (void)
{
sbitmap contains_call = sbitmap_alloc (last_basic_block);
block_stmt_iterator bsi;
SET_BIT (contains_call, bb->index);
}
- for (loop = loops->tree_root->inner; loop; loop = loop->next)
+ for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
fill_always_executed_in (loop, contains_call);
sbitmap_free (contains_call);
}
}
-/* Moves invariants from LOOPS. Only "expensive" invariants are moved out --
+/* Moves invariants from loops. Only "expensive" invariants are moved out --
i.e. those that are likely to be a win regardless of the register pressure. */
void
-tree_ssa_lim (struct loops *loops)
+tree_ssa_lim (void)
{
- tree_ssa_lim_initialize (loops);
+ tree_ssa_lim_initialize ();
/* For each statement determine the outermost loop in which it is
invariant and the cost of computing the invariant. */
/* For each memory reference determine whether it is possible to hoist it
out of the loop. Force the necessary invariants to be moved out of the
loops as well. */
- determine_lsm (loops);
+ determine_lsm ();
/* Move the expressions that are expensive enough. */
move_computations ();
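Finally, a minimal source-level sketch of the kind of "expensive" invariant this pass targets; the code is hypothetical and only illustrates the intended effect of move_computations.

/* Before: x * y is recomputed in every iteration although it is
   invariant in the loop.  */
void
lim_before (int *a, int n, int x, int y)
{
  int i;

  for (i = 0; i < n; i++)
    a[i] = x * y + i;
}

/* After invariant motion: the multiplication is hoisted to the
   preheader and the loop body keeps only the cheap addition.  */
void
lim_after (int *a, int n, int x, int y)
{
  int i;
  int t = x * y;            /* hoisted invariant */

  for (i = 0; i < n; i++)
    a[i] = t + i;
}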