/* Loop invariant motion.
Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 Free Software
Foundation, Inc.
-
+
This file is part of GCC.
-
+
GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.
-
+
GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
-
+
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
struct depend
{
- tree stmt;
+ gimple stmt;
struct depend *next;
};
MAX_LOOP loop. */
};
-#define LIM_DATA(STMT) (TREE_CODE (STMT) == PHI_NODE \
- ? NULL \
- : (struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
+/* Maps statements to their lim_aux_data. */
+
+static struct pointer_map_t *lim_aux_data_map;
/* Description of a memory reference location. */
typedef struct mem_ref_loc
{
tree *ref; /* The reference itself. */
- tree stmt; /* The statement in that it occurs. */
+ gimple stmt; /* The statement in that it occurs. */
} *mem_ref_loc_p;
DEF_VEC_P(mem_ref_loc_p);
block will be executed. */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
+/* Allocate a fresh, zero-initialized lim_aux_data record for STMT,
+   register it in LIM_AUX_DATA_MAP, and return it.  Replaces the old
+   stmt_ann aux-field storage that LIM_DATA used to access.  */
+
+static struct lim_aux_data *
+init_lim_data (gimple stmt)
+{
+  void **p = pointer_map_insert (lim_aux_data_map, stmt);
+
+  /* XCNEW zero-fills, so all cost/loop/depends fields start cleared.  */
+  *p = XCNEW (struct lim_aux_data);
+  return (struct lim_aux_data *) *p;
+}
+
+/* Return the lim_aux_data record associated with STMT in
+   LIM_AUX_DATA_MAP, or NULL if none was recorded (or if it was
+   already released by clear_lim_data, which nulls the map slot).  */
+
+static struct lim_aux_data *
+get_lim_data (gimple stmt)
+{
+  void **p = pointer_map_contains (lim_aux_data_map, stmt);
+  if (!p)
+    return NULL;
+
+  return (struct lim_aux_data *) *p;
+}
+
+/* Releases the memory occupied by DATA, including every node of its
+   chained DATA->depends list.  */
+
+static void
+free_lim_aux_data (struct lim_aux_data *data)
+{
+  struct depend *dep, *next;
+
+  for (dep = data->depends; dep; dep = next)
+    {
+      /* Save the link before freeing the node it lives in.  */
+      next = dep->next;
+      free (dep);
+    }
+  free (data);
+}
+
+/* Release the lim_aux_data associated with STMT, if any.  The map slot
+   itself is retained but set to NULL, so a later get_lim_data for STMT
+   returns NULL rather than a dangling pointer.  */
+
+static void
+clear_lim_data (gimple stmt)
+{
+  void **p = pointer_map_contains (lim_aux_data_map, stmt);
+  if (!p)
+    return;
+
+  free_lim_aux_data ((struct lim_aux_data *) *p);
+  *p = NULL;
+}
+
/* Calls CBCK for each index in memory reference ADDR_P. There are two
kinds of situations handled; in each of these cases, the memory reference
and DATA are passed to the callback:
-
+
Access to an array: ARRAY_{RANGE_}REF (base, index). In this case we also
pass the pointer to the index to the callback.
Pointer dereference: INDIRECT_REF (addr). In this case we also pass the
pointer to addr to the callback.
-
+
If the callback returns false, the whole search stops and false is returned.
Otherwise the function returns true after traversing through the whole
reference *ADDR_P. */
Otherwise return MOVE_IMPOSSIBLE. */
enum move_pos
-movement_possibility (tree stmt)
+movement_possibility (gimple stmt)
{
- tree lhs, rhs;
+ tree lhs;
+ enum move_pos ret = MOVE_POSSIBLE;
if (flag_unswitch_loops
- && TREE_CODE (stmt) == COND_EXPR)
+ && gimple_code (stmt) == GIMPLE_COND)
{
/* If we perform unswitching, force the operands of the invariant
condition to be moved out of the loop. */
return MOVE_POSSIBLE;
}
- if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
- return MOVE_IMPOSSIBLE;
-
- if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
+ if (gimple_get_lhs (stmt) == NULL_TREE)
return MOVE_IMPOSSIBLE;
- if (stmt_ends_bb_p (stmt))
+ if (gimple_vdef (stmt))
return MOVE_IMPOSSIBLE;
- if (stmt_ann (stmt)->has_volatile_ops)
+ if (stmt_ends_bb_p (stmt)
+ || gimple_has_volatile_ops (stmt)
+ || gimple_has_side_effects (stmt)
+ || stmt_could_throw_p (stmt))
return MOVE_IMPOSSIBLE;
- lhs = GIMPLE_STMT_OPERAND (stmt, 0);
- if (TREE_CODE (lhs) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
- return MOVE_IMPOSSIBLE;
-
- rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
- if (TREE_SIDE_EFFECTS (rhs)
- || tree_could_throw_p (rhs))
- return MOVE_IMPOSSIBLE;
-
- if (TREE_CODE (lhs) != SSA_NAME
- || tree_could_trap_p (rhs))
- return MOVE_PRESERVE_EXECUTION;
-
- if (get_call_expr_in (stmt))
+ if (is_gimple_call (stmt))
{
/* While pure or const call is guaranteed to have no side effects, we
cannot move it arbitrarily. Consider code like
invalid arguments, moving out a function call that is not executed
may cause performance regressions in case the call is costly and
not executed at all. */
- return MOVE_PRESERVE_EXECUTION;
+ ret = MOVE_PRESERVE_EXECUTION;
+ lhs = gimple_call_lhs (stmt);
}
- return MOVE_POSSIBLE;
+ else if (is_gimple_assign (stmt))
+ lhs = gimple_assign_lhs (stmt);
+ else
+ return MOVE_IMPOSSIBLE;
+
+ if (TREE_CODE (lhs) == SSA_NAME
+ && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
+ return MOVE_IMPOSSIBLE;
+
+ if (TREE_CODE (lhs) != SSA_NAME
+ || gimple_could_trap_p (stmt))
+ return MOVE_PRESERVE_EXECUTION;
+
+ return ret;
}
/* Suppose that operand DEF is used inside the LOOP. Returns the outermost
static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
- tree def_stmt;
+ gimple def_stmt;
basic_block def_bb;
struct loop *max_loop;
+ struct lim_aux_data *lim_data;
- if (TREE_CODE (def) != SSA_NAME)
+ if (!def)
return superloop_at_depth (loop, 1);
+ if (TREE_CODE (def) != SSA_NAME)
+ {
+ gcc_assert (is_gimple_min_invariant (def));
+ return superloop_at_depth (loop, 1);
+ }
+
def_stmt = SSA_NAME_DEF_STMT (def);
- def_bb = bb_for_stmt (def_stmt);
+ def_bb = gimple_bb (def_stmt);
if (!def_bb)
return superloop_at_depth (loop, 1);
max_loop = find_common_loop (loop, def_bb->loop_father);
- if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
+ lim_data = get_lim_data (def_stmt);
+ if (lim_data != NULL && lim_data->max_loop != NULL)
max_loop = find_common_loop (max_loop,
- loop_outer (LIM_DATA (def_stmt)->max_loop));
+ loop_outer (lim_data->max_loop));
if (max_loop == loop)
return NULL;
max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);
return max_loop;
}
-/* Returns the outermost superloop of LOOP in that the expression EXPR is
- invariant. */
-
-static struct loop *
-outermost_invariant_loop_expr (tree expr, struct loop *loop)
-{
- enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
- unsigned i, nops;
- struct loop *max_loop = superloop_at_depth (loop, 1), *aloop;
-
- if (TREE_CODE (expr) == SSA_NAME
- || TREE_CODE (expr) == INTEGER_CST
- || is_gimple_min_invariant (expr))
- return outermost_invariant_loop (expr, loop);
-
- if (codeclass != tcc_unary
- && codeclass != tcc_binary
- && codeclass != tcc_expression
- && codeclass != tcc_vl_exp
- && codeclass != tcc_comparison)
- return NULL;
-
- nops = TREE_OPERAND_LENGTH (expr);
- for (i = 0; i < nops; i++)
- {
- aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
- if (!aloop)
- return NULL;
-
- if (flow_loop_nested_p (max_loop, aloop))
- max_loop = aloop;
- }
-
- return max_loop;
-}
-
/* DATA is a structure containing information associated with a statement
inside LOOP. DEF is one of the operands of this statement.
-
+
Find the outermost loop enclosing LOOP in that value of DEF is invariant
and record this in DATA->max_loop field. If DEF itself is defined inside
this loop as well (i.e. we need to hoist it out of the loop if we want
to hoist the statement represented by DATA), record the statement in that
DEF is defined to the DATA->depends list. Additionally if ADD_COST is true,
add the cost of the computation of DEF to the DATA->cost.
-
+
If DEF is not invariant in LOOP, return false. Otherwise return TRUE. */
static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
bool add_cost)
{
- tree def_stmt = SSA_NAME_DEF_STMT (def);
- basic_block def_bb = bb_for_stmt (def_stmt);
+ gimple def_stmt = SSA_NAME_DEF_STMT (def);
+ basic_block def_bb = gimple_bb (def_stmt);
struct loop *max_loop;
struct depend *dep;
+ struct lim_aux_data *def_data;
if (!def_bb)
return true;
if (flow_loop_nested_p (data->max_loop, max_loop))
data->max_loop = max_loop;
- if (!LIM_DATA (def_stmt))
+ def_data = get_lim_data (def_stmt);
+ if (!def_data)
return true;
if (add_cost
on it, we will be able to avoid creating a new register for
it (since it will be only used in these dependent invariants). */
&& def_bb->loop_father == loop)
- data->cost += LIM_DATA (def_stmt)->cost;
+ data->cost += def_data->cost;
dep = XNEW (struct depend);
dep->stmt = def_stmt;
values. */
static unsigned
-stmt_cost (tree stmt)
+stmt_cost (gimple stmt)
{
- tree rhs;
+ tree fndecl;
unsigned cost = 1;
/* Always try to create possibilities for unswitching. */
- if (TREE_CODE (stmt) == COND_EXPR)
+ if (gimple_code (stmt) == GIMPLE_COND)
return LIM_EXPENSIVE;
- rhs = GENERIC_TREE_OPERAND (stmt, 1);
-
/* Hoisting memory references out should almost surely be a win. */
- if (stmt_references_memory_p (stmt))
+ if (gimple_references_memory_p (stmt))
cost += 20;
- switch (TREE_CODE (rhs))
+ if (is_gimple_call (stmt))
{
- case CALL_EXPR:
/* We should be hoisting calls if possible. */
/* Unless the call is a builtin_constant_p; this always folds to a
constant, so moving it is useless. */
- rhs = get_callee_fndecl (rhs);
- if (DECL_BUILT_IN_CLASS (rhs) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
+ fndecl = gimple_call_fndecl (stmt);
+ if (fndecl
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
+ && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P)
return 0;
- cost += 20;
- break;
+ return cost + 20;
+ }
+
+ if (gimple_code (stmt) != GIMPLE_ASSIGN)
+ return cost;
+ switch (gimple_assign_rhs_code (stmt))
+ {
case MULT_EXPR:
case TRUNC_DIV_EXPR:
case CEIL_DIV_EXPR:
it is a store or load. Otherwise, returns NULL. */
static tree *
-simple_mem_ref_in_stmt (tree stmt, bool *is_store)
+simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
{
- tree *lhs, *rhs;
+ tree *lhs;
+ enum tree_code code;
/* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns. */
- if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+ if (gimple_code (stmt) != GIMPLE_ASSIGN)
return NULL;
- lhs = &GIMPLE_STMT_OPERAND (stmt, 0);
- rhs = &GIMPLE_STMT_OPERAND (stmt, 1);
+ code = gimple_assign_rhs_code (stmt);
+
+ lhs = gimple_assign_lhs_ptr (stmt);
if (TREE_CODE (*lhs) == SSA_NAME)
{
- if (!is_gimple_addressable (*rhs))
+ if (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
+ || !is_gimple_addressable (gimple_assign_rhs1 (stmt)))
return NULL;
*is_store = false;
- return rhs;
+ return gimple_assign_rhs1_ptr (stmt);
}
- else if (TREE_CODE (*rhs) == SSA_NAME
- || is_gimple_min_invariant (*rhs))
+ else if (code == SSA_NAME
+ || (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
+ && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
{
*is_store = true;
return lhs;
/* Returns the memory reference contained in STMT. */
static mem_ref_p
-mem_ref_in_stmt (tree stmt)
+mem_ref_in_stmt (gimple stmt)
{
bool store;
tree *mem = simple_mem_ref_in_stmt (stmt, &store);
If MUST_PRESERVE_EXEC is true, additionally choose such a loop that
we preserve the fact whether STMT is executed. It also fills other related
information to LIM_DATA (STMT).
-
+
The function returns false if STMT cannot be hoisted outside of the loop it
is defined in, and true otherwise. */
static bool
-determine_max_movement (tree stmt, bool must_preserve_exec)
+determine_max_movement (gimple stmt, bool must_preserve_exec)
{
- basic_block bb = bb_for_stmt (stmt);
+ basic_block bb = gimple_bb (stmt);
struct loop *loop = bb->loop_father;
struct loop *level;
- struct lim_aux_data *lim_data = LIM_DATA (stmt);
+ struct lim_aux_data *lim_data = get_lim_data (stmt);
tree val;
ssa_op_iter iter;
-
+
if (must_preserve_exec)
level = ALWAYS_EXECUTED_IN (bb);
else
if (!add_dependency (val, lim_data, loop, true))
return false;
- if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES))
+ if (gimple_vuse (stmt))
{
mem_ref_p ref = mem_ref_in_stmt (stmt);
}
else
{
- FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
+ if ((val = gimple_vuse (stmt)) != NULL_TREE)
{
if (!add_dependency (val, lim_data, loop, false))
return false;
operands) is hoisted at least out of the loop LEVEL. */
static void
-set_level (tree stmt, struct loop *orig_loop, struct loop *level)
+set_level (gimple stmt, struct loop *orig_loop, struct loop *level)
{
- struct loop *stmt_loop = bb_for_stmt (stmt)->loop_father;
+ struct loop *stmt_loop = gimple_bb (stmt)->loop_father;
struct depend *dep;
+ struct lim_aux_data *lim_data;
stmt_loop = find_common_loop (orig_loop, stmt_loop);
- if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
+ lim_data = get_lim_data (stmt);
+ if (lim_data != NULL && lim_data->tgt_loop != NULL)
stmt_loop = find_common_loop (stmt_loop,
- loop_outer (LIM_DATA (stmt)->tgt_loop));
+ loop_outer (lim_data->tgt_loop));
if (flow_loop_nested_p (stmt_loop, level))
return;
- gcc_assert (LIM_DATA (stmt));
- gcc_assert (level == LIM_DATA (stmt)->max_loop
- || flow_loop_nested_p (LIM_DATA (stmt)->max_loop, level));
+ gcc_assert (level == lim_data->max_loop
+ || flow_loop_nested_p (lim_data->max_loop, level));
- LIM_DATA (stmt)->tgt_loop = level;
- for (dep = LIM_DATA (stmt)->depends; dep; dep = dep->next)
+ lim_data->tgt_loop = level;
+ for (dep = lim_data->depends; dep; dep = dep->next)
set_level (dep->stmt, orig_loop, level);
}
information to set it more sanely. */
static void
-set_profitable_level (tree stmt)
+set_profitable_level (gimple stmt)
{
- set_level (stmt, bb_for_stmt (stmt)->loop_father, LIM_DATA (stmt)->max_loop);
+ set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop);
}
-/* Returns true if STMT is not a pure call. */
+/* Returns true if STMT is a call that has side effects. */
static bool
-nonpure_call_p (tree stmt)
+nonpure_call_p (gimple stmt)
{
- tree call = get_call_expr_in (stmt);
-
- if (!call)
+ if (gimple_code (stmt) != GIMPLE_CALL)
return false;
- return TREE_SIDE_EFFECTS (call) != 0;
-}
-
-/* Releases the memory occupied by DATA. */
-
-static void
-free_lim_aux_data (struct lim_aux_data *data)
-{
- struct depend *dep, *next;
-
- for (dep = data->depends; dep; dep = next)
- {
- next = dep->next;
- free (dep);
- }
- free (data);
+ return gimple_has_side_effects (stmt);
}
/* Rewrite a/b to a*(1/b). Return the invariant stmt to process. */
-static tree
-rewrite_reciprocal (block_stmt_iterator *bsi)
+static gimple
+rewrite_reciprocal (gimple_stmt_iterator *bsi)
{
- tree stmt, lhs, rhs, stmt1, stmt2, var, name, tmp;
+ gimple stmt, stmt1, stmt2;
+ tree var, name, lhs, type;
+ tree real_one;
+ gimple_stmt_iterator gsi;
- stmt = bsi_stmt (*bsi);
- lhs = GENERIC_TREE_OPERAND (stmt, 0);
- rhs = GENERIC_TREE_OPERAND (stmt, 1);
+ stmt = gsi_stmt (*bsi);
+ lhs = gimple_assign_lhs (stmt);
+ type = TREE_TYPE (lhs);
- /* stmt must be GIMPLE_MODIFY_STMT. */
- var = create_tmp_var (TREE_TYPE (rhs), "reciptmp");
+ var = create_tmp_var (type, "reciptmp");
add_referenced_var (var);
+ DECL_GIMPLE_REG_P (var) = 1;
+
+ /* For vectors, create a VECTOR_CST full of 1's. */
+ if (TREE_CODE (type) == VECTOR_TYPE)
+ {
+ int i, len;
+ tree list = NULL_TREE;
+ real_one = build_real (TREE_TYPE (type), dconst1);
+ len = TYPE_VECTOR_SUBPARTS (type);
+ for (i = 0; i < len; i++)
+ list = tree_cons (NULL, real_one, list);
+ real_one = build_vector (type, list);
+ }
+ else
+ real_one = build_real (type, dconst1);
- tmp = build2 (RDIV_EXPR, TREE_TYPE (rhs),
- build_real (TREE_TYPE (rhs), dconst1),
- TREE_OPERAND (rhs, 1));
- stmt1 = build_gimple_modify_stmt (var, tmp);
+ stmt1 = gimple_build_assign_with_ops (RDIV_EXPR,
+ var, real_one, gimple_assign_rhs2 (stmt));
name = make_ssa_name (var, stmt1);
- GIMPLE_STMT_OPERAND (stmt1, 0) = name;
- tmp = build2 (MULT_EXPR, TREE_TYPE (rhs),
- name, TREE_OPERAND (rhs, 0));
- stmt2 = build_gimple_modify_stmt (lhs, tmp);
+ gimple_assign_set_lhs (stmt1, name);
+
+ stmt2 = gimple_build_assign_with_ops (MULT_EXPR, lhs, name,
+ gimple_assign_rhs1 (stmt));
/* Replace division stmt with reciprocal and multiply stmts.
The multiply stmt is not invariant, so update iterator
and avoid rescanning. */
- bsi_replace (bsi, stmt1, true);
- bsi_insert_after (bsi, stmt2, BSI_NEW_STMT);
- SSA_NAME_DEF_STMT (lhs) = stmt2;
+ gsi = *bsi;
+ gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
+ gsi_replace (&gsi, stmt2, true);
/* Continue processing with invariant reciprocal statement. */
return stmt1;
/* Check if the pattern at *BSI is a bittest of the form
(A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0. */
-static tree
-rewrite_bittest (block_stmt_iterator *bsi)
+static gimple
+rewrite_bittest (gimple_stmt_iterator *bsi)
{
- tree stmt, lhs, rhs, var, name, use_stmt, stmt1, stmt2, t;
+ gimple stmt, use_stmt, stmt1, stmt2;
+ tree lhs, var, name, t, a, b;
use_operand_p use;
- stmt = bsi_stmt (*bsi);
- lhs = GENERIC_TREE_OPERAND (stmt, 0);
- rhs = GENERIC_TREE_OPERAND (stmt, 1);
+ stmt = gsi_stmt (*bsi);
+ lhs = gimple_assign_lhs (stmt);
/* Verify that the single use of lhs is a comparison against zero. */
if (TREE_CODE (lhs) != SSA_NAME
|| !single_imm_use (lhs, &use, &use_stmt)
- || TREE_CODE (use_stmt) != COND_EXPR)
+ || gimple_code (use_stmt) != GIMPLE_COND)
return stmt;
- t = COND_EXPR_COND (use_stmt);
- if (TREE_OPERAND (t, 0) != lhs
- || (TREE_CODE (t) != NE_EXPR
- && TREE_CODE (t) != EQ_EXPR)
- || !integer_zerop (TREE_OPERAND (t, 1)))
+ if (gimple_cond_lhs (use_stmt) != lhs
+ || (gimple_cond_code (use_stmt) != NE_EXPR
+ && gimple_cond_code (use_stmt) != EQ_EXPR)
+ || !integer_zerop (gimple_cond_rhs (use_stmt)))
return stmt;
/* Get at the operands of the shift. The rhs is TMP1 & 1. */
- stmt1 = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
- if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
+ stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
+ if (gimple_code (stmt1) != GIMPLE_ASSIGN)
return stmt;
/* There is a conversion in between possibly inserted by fold. */
- t = GIMPLE_STMT_OPERAND (stmt1, 1);
- if (CONVERT_EXPR_P (t))
+ if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt1)))
{
- t = TREE_OPERAND (t, 0);
+ t = gimple_assign_rhs1 (stmt1);
if (TREE_CODE (t) != SSA_NAME
|| !has_single_use (t))
return stmt;
stmt1 = SSA_NAME_DEF_STMT (t);
- if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
+ if (gimple_code (stmt1) != GIMPLE_ASSIGN)
return stmt;
- t = GIMPLE_STMT_OPERAND (stmt1, 1);
}
/* Verify that B is loop invariant but A is not. Verify that with
all the stmt walking we are still in the same loop. */
- if (TREE_CODE (t) == RSHIFT_EXPR
- && loop_containing_stmt (stmt1) == loop_containing_stmt (stmt)
- && outermost_invariant_loop_expr (TREE_OPERAND (t, 1),
- loop_containing_stmt (stmt1)) != NULL
- && outermost_invariant_loop_expr (TREE_OPERAND (t, 0),
- loop_containing_stmt (stmt1)) == NULL)
+ if (gimple_assign_rhs_code (stmt1) != RSHIFT_EXPR
+ || loop_containing_stmt (stmt1) != loop_containing_stmt (stmt))
+ return stmt;
+
+ a = gimple_assign_rhs1 (stmt1);
+ b = gimple_assign_rhs2 (stmt1);
+
+ if (outermost_invariant_loop (b, loop_containing_stmt (stmt1)) != NULL
+ && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL)
{
- tree a = TREE_OPERAND (t, 0);
- tree b = TREE_OPERAND (t, 1);
+ gimple_stmt_iterator rsi;
/* 1 << B */
var = create_tmp_var (TREE_TYPE (a), "shifttmp");
add_referenced_var (var);
t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
build_int_cst (TREE_TYPE (a), 1), b);
- stmt1 = build_gimple_modify_stmt (var, t);
+ stmt1 = gimple_build_assign (var, t);
name = make_ssa_name (var, stmt1);
- GIMPLE_STMT_OPERAND (stmt1, 0) = name;
+ gimple_assign_set_lhs (stmt1, name);
/* A & (1 << B) */
t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
- stmt2 = build_gimple_modify_stmt (var, t);
+ stmt2 = gimple_build_assign (var, t);
name = make_ssa_name (var, stmt2);
- GIMPLE_STMT_OPERAND (stmt2, 0) = name;
+ gimple_assign_set_lhs (stmt2, name);
/* Replace the SSA_NAME we compare against zero. Adjust
the type of zero accordingly. */
SET_USE (use, name);
- TREE_OPERAND (COND_EXPR_COND (use_stmt), 1)
- = build_int_cst_type (TREE_TYPE (name), 0);
+ gimple_cond_set_rhs (use_stmt, build_int_cst_type (TREE_TYPE (name), 0));
- bsi_insert_before (bsi, stmt1, BSI_SAME_STMT);
- bsi_replace (bsi, stmt2, true);
+ /* Don't use gsi_replace here, none of the new assignments sets
+ the variable originally set in stmt. Move bsi to stmt1, and
+ then remove the original stmt, so that we get a chance to
+ retain debug info for it. */
+ rsi = *bsi;
+ gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
+ gsi_insert_before (&rsi, stmt2, GSI_SAME_STMT);
+ gsi_remove (&rsi, true);
return stmt1;
}
basic_block bb)
{
enum move_pos pos;
- block_stmt_iterator bsi;
- tree stmt, rhs;
+ gimple_stmt_iterator bsi;
+ gimple stmt;
bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
+ struct lim_aux_data *lim_data;
if (!loop_outer (bb->loop_father))
return;
fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
bb->index, bb->loop_father->num, loop_depth (bb->loop_father));
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- stmt = bsi_stmt (bsi);
+ stmt = gsi_stmt (bsi);
pos = movement_possibility (stmt);
if (pos == MOVE_IMPOSSIBLE)
store-motion work. */
else if (stmt_makes_single_store (stmt))
{
- stmt_ann (stmt)->common.aux
- = xcalloc (1, sizeof (struct lim_aux_data));
- LIM_DATA (stmt)->always_executed_in = outermost;
+ struct lim_aux_data *lim_data = init_lim_data (stmt);
+ lim_data->always_executed_in = outermost;
}
continue;
}
- if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+ if (is_gimple_assign (stmt)
+ && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+ == GIMPLE_BINARY_RHS))
{
- rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+ tree op0 = gimple_assign_rhs1 (stmt);
+ tree op1 = gimple_assign_rhs2 (stmt);
+ struct loop *ol1 = outermost_invariant_loop (op1,
+ loop_containing_stmt (stmt));
/* If divisor is invariant, convert a/b to a*(1/b), allowing reciprocal
to be hoisted out of loop, saving expensive divide. */
if (pos == MOVE_POSSIBLE
- && TREE_CODE (rhs) == RDIV_EXPR
+ && gimple_assign_rhs_code (stmt) == RDIV_EXPR
&& flag_unsafe_math_optimizations
&& !flag_trapping_math
- && outermost_invariant_loop_expr (TREE_OPERAND (rhs, 1),
- loop_containing_stmt (stmt)) != NULL
- && outermost_invariant_loop_expr (rhs,
- loop_containing_stmt (stmt)) == NULL)
+ && ol1 != NULL
+ && outermost_invariant_loop (op0, ol1) == NULL)
stmt = rewrite_reciprocal (&bsi);
/* If the shift count is invariant, convert (A >> B) & 1 to
A & (1 << B) allowing the bit mask to be hoisted out of the loop
saving an expensive shift. */
if (pos == MOVE_POSSIBLE
- && TREE_CODE (rhs) == BIT_AND_EXPR
- && integer_onep (TREE_OPERAND (rhs, 1))
- && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
- && has_single_use (TREE_OPERAND (rhs, 0)))
+ && gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
+ && integer_onep (op1)
+ && TREE_CODE (op0) == SSA_NAME
+ && has_single_use (op0))
stmt = rewrite_bittest (&bsi);
}
- stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
- LIM_DATA (stmt)->always_executed_in = outermost;
+ lim_data = init_lim_data (stmt);
+ lim_data->always_executed_in = outermost;
if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
continue;
if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
{
- LIM_DATA (stmt)->max_loop = NULL;
+ lim_data->max_loop = NULL;
continue;
}
if (dump_file && (dump_flags & TDF_DETAILS))
{
- print_generic_stmt_indented (dump_file, stmt, 0, 2);
+ print_gimple_stmt (dump_file, stmt, 2, 0);
fprintf (dump_file, " invariant up to level %d, cost %d.\n\n",
- loop_depth (LIM_DATA (stmt)->max_loop),
- LIM_DATA (stmt)->cost);
+ loop_depth (lim_data->max_loop),
+ lim_data->cost);
}
- if (LIM_DATA (stmt)->cost >= LIM_EXPENSIVE)
+ if (lim_data->cost >= LIM_EXPENSIVE)
set_profitable_level (stmt);
}
}
memset (&walk_data, 0, sizeof (struct dom_walk_data));
walk_data.dom_direction = CDI_DOMINATORS;
- walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;
+ walk_data.before_dom_children = determine_invariantness_stmt;
init_walk_dominator_tree (&walk_data);
walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
basic_block bb)
{
struct loop *level;
- block_stmt_iterator bsi;
- tree stmt;
+ gimple_stmt_iterator bsi;
+ gimple stmt;
unsigned cost = 0;
+ struct lim_aux_data *lim_data;
if (!loop_outer (bb->loop_father))
return;
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
+ for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); )
{
- stmt = bsi_stmt (bsi);
+ stmt = gsi_stmt (bsi);
- if (!LIM_DATA (stmt))
+ lim_data = get_lim_data (stmt);
+ if (lim_data == NULL)
{
- bsi_next (&bsi);
+ gsi_next (&bsi);
continue;
}
- cost = LIM_DATA (stmt)->cost;
- level = LIM_DATA (stmt)->tgt_loop;
- free_lim_aux_data (LIM_DATA (stmt));
- stmt_ann (stmt)->common.aux = NULL;
+ cost = lim_data->cost;
+ level = lim_data->tgt_loop;
+ clear_lim_data (stmt);
if (!level)
{
- bsi_next (&bsi);
+ gsi_next (&bsi);
continue;
}
/* We do not really want to move conditionals out of the loop; we just
placed it here to force its operands to be moved if necessary. */
- if (TREE_CODE (stmt) == COND_EXPR)
+ if (gimple_code (stmt) == GIMPLE_COND)
continue;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Moving statement\n");
- print_generic_stmt (dump_file, stmt, 0);
+ print_gimple_stmt (dump_file, stmt, 0, 0);
fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
cost, level->num);
}
mark_virtual_ops_for_renaming (stmt);
- bsi_insert_on_edge (loop_preheader_edge (level), stmt);
- bsi_remove (&bsi, false);
+ gsi_insert_on_edge (loop_preheader_edge (level), stmt);
+ gsi_remove (&bsi, false);
}
}
memset (&walk_data, 0, sizeof (struct dom_walk_data));
walk_data.dom_direction = CDI_DOMINATORS;
- walk_data.before_dom_children_before_stmts = move_computations_stmt;
+ walk_data.before_dom_children = move_computations_stmt;
init_walk_dominator_tree (&walk_data);
walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
fini_walk_dominator_tree (&walk_data);
- bsi_commit_edge_inserts ();
- if (need_ssa_update_p ())
+ gsi_commit_edge_inserts ();
+ if (need_ssa_update_p (cfun))
rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
}
static bool
may_move_till (tree ref, tree *index, void *data)
{
- struct loop *loop = (struct loop*) data, *max_loop;
+ struct loop *loop = (struct loop *) data, *max_loop;
/* If REF is an array reference, check also that the step and the lower
bound is invariant in LOOP. */
if (TREE_CODE (ref) == ARRAY_REF)
{
- tree step = array_ref_element_size (ref);
- tree lbound = array_ref_low_bound (ref);
+ tree step = TREE_OPERAND (ref, 3);
+ tree lbound = TREE_OPERAND (ref, 2);
- max_loop = outermost_invariant_loop_expr (step, loop);
+ max_loop = outermost_invariant_loop (step, loop);
if (!max_loop)
return false;
- max_loop = outermost_invariant_loop_expr (lbound, loop);
+ max_loop = outermost_invariant_loop (lbound, loop);
if (!max_loop)
return false;
}
return true;
}
-/* Forces statements defining (invariant) SSA names in expression EXPR to be
+/* If OP is SSA NAME, force the statement that defines it to be
moved out of the LOOP.  ORIG_LOOP is the loop in which OP is used.  */
static void
-force_move_till_expr (tree expr, struct loop *orig_loop, struct loop *loop)
+force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop)
{
- enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
- unsigned i, nops;
+ gimple stmt;
- if (TREE_CODE (expr) == SSA_NAME)
- {
- tree stmt = SSA_NAME_DEF_STMT (expr);
- if (IS_EMPTY_STMT (stmt))
- return;
+ if (!op
+ || is_gimple_min_invariant (op))
+ return;
- set_level (stmt, orig_loop, loop);
- return;
- }
+ gcc_assert (TREE_CODE (op) == SSA_NAME);
- if (codeclass != tcc_unary
- && codeclass != tcc_binary
- && codeclass != tcc_expression
- && codeclass != tcc_vl_exp
- && codeclass != tcc_comparison)
+ stmt = SSA_NAME_DEF_STMT (op);
+ if (gimple_nop_p (stmt))
return;
- nops = TREE_OPERAND_LENGTH (expr);
- for (i = 0; i < nops; i++)
- force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
+ set_level (stmt, orig_loop, loop);
}
/* Forces statement defining invariants in REF (and *INDEX) to be moved out of
static bool
force_move_till (tree ref, tree *index, void *data)
{
- tree stmt;
struct fmt_data *fmt_data = (struct fmt_data *) data;
if (TREE_CODE (ref) == ARRAY_REF)
{
- tree step = array_ref_element_size (ref);
- tree lbound = array_ref_low_bound (ref);
+ tree step = TREE_OPERAND (ref, 3);
+ tree lbound = TREE_OPERAND (ref, 2);
- force_move_till_expr (step, fmt_data->orig_loop, fmt_data->loop);
- force_move_till_expr (lbound, fmt_data->orig_loop, fmt_data->loop);
+ force_move_till_op (step, fmt_data->orig_loop, fmt_data->loop);
+ force_move_till_op (lbound, fmt_data->orig_loop, fmt_data->loop);
}
- if (TREE_CODE (*index) != SSA_NAME)
- return true;
-
- stmt = SSA_NAME_DEF_STMT (*index);
- if (IS_EMPTY_STMT (stmt))
- return true;
-
- set_level (stmt, fmt_data->orig_loop, fmt_data->loop);
+ force_move_till_op (*index, fmt_data->orig_loop, fmt_data->loop);
return true;
}
description REF. The reference occurs in statement STMT. */
static void
-record_mem_ref_loc (mem_ref_p ref, struct loop *loop, tree stmt, tree *loc)
+record_mem_ref_loc (mem_ref_p ref, struct loop *loop, gimple stmt, tree *loc)
{
mem_ref_loc_p aref = XNEW (struct mem_ref_loc);
mem_ref_locs_p accs;
well. */
static void
-gather_mem_refs_stmt (struct loop *loop, tree stmt)
+gather_mem_refs_stmt (struct loop *loop, gimple stmt)
{
tree *mem = NULL;
hashval_t hash;
PTR *slot;
mem_ref_p ref;
- ssa_op_iter oi;
tree vname;
bool is_stored;
bitmap clvops;
unsigned id;
- if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
+ if (!gimple_vuse (stmt))
return;
mem = simple_mem_ref_in_stmt (stmt, &is_stored);
if (is_stored)
mark_ref_stored (ref, loop);
- FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
+ if ((vname = gimple_vuse (stmt)) != NULL_TREE)
bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
record_mem_ref_loc (ref, loop, stmt, mem);
return;
fail:
clvops = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
- FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
+ if ((vname = gimple_vuse (stmt)) != NULL_TREE)
bitmap_set_bit (clvops, DECL_UID (SSA_NAME_VAR (vname)));
}
static void
gather_mem_refs_in_loops (void)
{
- block_stmt_iterator bsi;
+ gimple_stmt_iterator bsi;
basic_block bb;
struct loop *loop;
loop_iterator li;
if (loop == current_loops->tree_root)
continue;
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- gather_mem_refs_stmt (loop, bsi_stmt (bsi));
+ for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ gather_mem_refs_stmt (loop, gsi_stmt (bsi));
}
/* Propagate the information about clobbered vops and accessed memory
gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
lsm_tmp_name_add ("_RE");
break;
-
+
case IMAGPART_EXPR:
gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
lsm_tmp_name_add ("_IM");
lsm_tmp_name_add ("R");
break;
+ case INTEGER_CST:
+ /* Nothing. */
+ break;
+
default:
gcc_unreachable ();
}
{
tree tmp_var;
unsigned i;
- tree load, store;
+ gimple load, store;
struct fmt_data fmt_data;
edge ex;
+ struct lim_aux_data *lim_data;
if (dump_file && (dump_flags & TDF_DETAILS))
{
rewrite_mem_refs (loop, ref, tmp_var);
/* Emit the load & stores. */
- load = build_gimple_modify_stmt (tmp_var, unshare_expr (ref->mem));
- get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
- LIM_DATA (load)->max_loop = loop;
- LIM_DATA (load)->tgt_loop = loop;
+ load = gimple_build_assign (tmp_var, unshare_expr (ref->mem));
+ lim_data = init_lim_data (load);
+ lim_data->max_loop = loop;
+ lim_data->tgt_loop = loop;
/* Put this into the latch, so that we are sure it will be processed after
all dependencies. */
- bsi_insert_on_edge (loop_latch_edge (loop), load);
+ gsi_insert_on_edge (loop_latch_edge (loop), load);
for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
{
- store = build_gimple_modify_stmt (unshare_expr (ref->mem), tmp_var);
- bsi_insert_on_edge (ex, store);
+ store = gimple_build_assign (unshare_expr (ref->mem), tmp_var);
+ gsi_insert_on_edge (ex, store);
}
}
get_all_locs_in_loop (loop, ref, &locs);
for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++)
{
- if (!LIM_DATA (loc->stmt))
+ if (!get_lim_data (loc->stmt))
continue;
- must_exec = LIM_DATA (loc->stmt)->always_executed_in;
+ must_exec = get_lim_data (loc->stmt)->always_executed_in;
if (!must_exec)
continue;
store_motion_loop (loop, sm_executed);
BITMAP_FREE (sm_executed);
- bsi_commit_edge_inserts ();
+ gsi_commit_edge_inserts ();
}
/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
tree_ssa_lim_initialize (void)
{
sbitmap contains_call = sbitmap_alloc (last_basic_block);
- block_stmt_iterator bsi;
+ gimple_stmt_iterator bsi;
struct loop *loop;
basic_block bb;
sbitmap_zero (contains_call);
FOR_EACH_BB (bb)
{
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- if (nonpure_call_p (bsi_stmt (bsi)))
+ if (nonpure_call_p (gsi_stmt (bsi)))
break;
}
- if (!bsi_end_p (bsi))
+ if (!gsi_end_p (bsi))
SET_BIT (contains_call, bb->index);
}
fill_always_executed_in (loop, contains_call);
sbitmap_free (contains_call);
+
+ lim_aux_data_map = pointer_map_create ();
}
/* Cleans up after the invariant motion pass. */
bb->aux = NULL;
}
+ pointer_map_destroy (lim_aux_data_map);
+
VEC_free (mem_ref_p, heap, memory_accesses.refs_list);
htab_delete (memory_accesses.refs);