/* The single loop exit if it dominates the latch, NULL otherwise. */
-static edge
+edge
single_dom_exit (struct loop *loop)
{
edge exit = loop->single_exit;
nfe_desc = xmalloc (sizeof (struct nfe_cache_elt));
nfe_desc->exit = exit;
nfe_desc->valid_p = number_of_iterations_exit (data->current_loop,
- exit, &nfe_desc->niter);
+ exit, &nfe_desc->niter,
+ true);
*slot = nfe_desc;
}
else
unsigned i;
data->version_info_size = 2 * num_ssa_names;
- data->version_info = xcalloc (data->version_info_size,
- sizeof (struct version_info));
+ data->version_info = XCNEWVEC (struct version_info, data->version_info_size);
data->relevant = BITMAP_ALLOC (NULL);
data->important_candidates = BITMAP_ALLOC (NULL);
data->max_inv_id = 0;
static struct iv *
alloc_iv (tree base, tree step)
{
- struct iv *iv = xcalloc (1, sizeof (struct iv));
+ struct iv *iv = XCNEW (struct iv);
if (step && integer_zerop (step))
step = NULL_TREE;
determine_biv_step (tree phi)
{
struct loop *loop = bb_for_stmt (phi)->loop_father;
- tree name = PHI_RESULT (phi), base, step;
+ tree name = PHI_RESULT (phi);
+ affine_iv iv;
if (!is_gimple_reg (name))
return NULL_TREE;
- if (!simple_iv (loop, phi, name, &base, &step, true))
+ if (!simple_iv (loop, phi, name, &iv, true))
return NULL_TREE;
- if (zero_p (step))
- return NULL_TREE;
-
- return step;
+ return (zero_p (iv.step) ? NULL_TREE : iv.step);
}
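
For orientation, here is a sketch of the record the new simple_iv interface is assumed to fill in; the shape is an illustration only (the real definition lives in GCC's headers and may carry additional fields), and the caller pattern mirrors determine_biv_step above.

    struct tree_node;                 /* stand-in for GCC's tree */
    typedef struct tree_node *tree;

    typedef struct
    {
      tree base;                      /* value of the iv in the first iteration */
      tree step;                      /* amount added on every iteration */
    } affine_iv;

    /* Hypothetical caller, mirroring determine_biv_step above:

         affine_iv iv;
         if (!simple_iv (loop, phi, name, &iv, true))
           return NULL_TREE;
         return zero_p (iv.step) ? NULL_TREE : iv.step;   */
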
/* Returns true if EXP is a ssa name that occurs in an abnormal phi node. */
}
/* Checks whether STMT defines a linear induction variable and stores its
- parameters to BASE and STEP. */
+ parameters to IV. */
static bool
-find_givs_in_stmt_scev (struct ivopts_data *data, tree stmt,
- tree *base, tree *step)
+find_givs_in_stmt_scev (struct ivopts_data *data, tree stmt, affine_iv *iv)
{
tree lhs;
struct loop *loop = data->current_loop;
- *base = NULL_TREE;
- *step = NULL_TREE;
+ iv->base = NULL_TREE;
+ iv->step = NULL_TREE;
if (TREE_CODE (stmt) != MODIFY_EXPR)
return false;
if (TREE_CODE (lhs) != SSA_NAME)
return false;
- if (!simple_iv (loop, stmt, TREE_OPERAND (stmt, 1), base, step, true))
+ if (!simple_iv (loop, stmt, TREE_OPERAND (stmt, 1), iv, true))
return false;
- *base = expand_simple_operations (*base);
+ iv->base = expand_simple_operations (iv->base);
- if (contains_abnormal_ssa_name_p (*base)
- || contains_abnormal_ssa_name_p (*step))
+ if (contains_abnormal_ssa_name_p (iv->base)
+ || contains_abnormal_ssa_name_p (iv->step))
return false;
return true;
static void
find_givs_in_stmt (struct ivopts_data *data, tree stmt)
{
- tree base, step;
+ affine_iv iv;
- if (!find_givs_in_stmt_scev (data, stmt, &base, &step))
+ if (!find_givs_in_stmt_scev (data, stmt, &iv))
return;
- set_iv (data, TREE_OPERAND (stmt, 0), base, step);
+ set_iv (data, TREE_OPERAND (stmt, 0), iv.base, iv.step);
}
/* Finds general ivs in basic block BB. */
record_use (struct ivopts_data *data, tree *use_p, struct iv *iv,
tree stmt, enum use_type use_type)
{
- struct iv_use *use = xcalloc (1, sizeof (struct iv_use));
+ struct iv_use *use = XCNEW (struct iv_use);
use->id = n_iv_uses (data);
use->type = use_type;
}
iv->have_use_for = true;
- civ = xmalloc (sizeof (struct iv));
+ civ = XNEW (struct iv);
*civ = *iv;
stmt = SSA_NAME_DEF_STMT (op);
return;
}
- civ = xmalloc (sizeof (struct iv));
+ civ = XNEW (struct iv);
*civ = zero_p (iv0->step) ? *iv1: *iv0;
record_use (data, cond_p, civ, stmt, USE_COMPARE);
}
/* The step for pointer arithmetics already is 1 byte. */
step = build_int_cst (sizetype, 1);
+ /* FIXME: convert_step should not be used outside chrec_convert; fix
+ this by calling chrec_convert instead. */
iv_step = convert_step (dta->ivopts_data->current_loop,
sizetype, iv->base, iv->step, dta->stmt);
if (i == n_iv_cands (data))
{
- cand = xcalloc (1, sizeof (struct iv_cand));
+ cand = XCNEW (struct iv_cand);
cand->id = i;
if (!base && !step)
}
use->n_map_members = size;
- use->cost_map = xcalloc (size, sizeof (struct cost_pair));
+ use->cost_map = XCNEWVEC (struct cost_pair, size);
}
}
comb->n--;
comb->coefs[i] = comb->coefs[comb->n];
comb->elts[i] = comb->elts[comb->n];
+
+ if (comb->rest)
+ {
+ gcc_assert (comb->n == MAX_AFF_ELTS - 1);
+ comb->coefs[comb->n] = 1;
+ comb->elts[comb->n] = comb->rest;
+ comb->rest = NULL_TREE;
+ comb->n++;
+ }
return;
}
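
The hunk above maintains the invariant that an affine combination keeps at most MAX_AFF_ELTS explicit elements and parks any overflow term in `rest' with an implicit coefficient of 1; removing an element frees a slot, so `rest' can be folded back into the array. A self-contained analog of that fold-back (toy types, not GCC code):

    #include <assert.h>
    #include <stddef.h>

    #define MAX_AFF_ELTS 8

    struct aff_comb                    /* toy stand-in for affine_tree_combination */
    {
      unsigned n;                      /* number of explicit elements */
      int coefs[MAX_AFF_ELTS];
      const char *elts[MAX_AFF_ELTS];
      const char *rest;                /* overflow term, implicit coefficient 1 */
    };

    static void
    aff_comb_remove_elt (struct aff_comb *comb, unsigned i)
    {
      comb->n--;
      comb->coefs[i] = comb->coefs[comb->n];
      comb->elts[i] = comb->elts[comb->n];

      if (comb->rest)
        {
          /* rest is only ever set once the element array is full.  */
          assert (comb->n == MAX_AFF_ELTS - 1);
          comb->coefs[comb->n] = 1;
          comb->elts[comb->n] = comb->rest;
          comb->rest = NULL;
          comb->n++;
        }
    }

    int
    main (void)
    {
      struct aff_comb c = { 0 };
      unsigned i;

      for (i = 0; i < MAX_AFF_ELTS; i++)
        {
          c.coefs[i] = (int) i + 1;
          c.elts[i] = "elt";
        }
      c.n = MAX_AFF_ELTS;
      c.rest = "overflow";

      aff_comb_remove_elt (&c, 0);
      assert (c.n == MAX_AFF_ELTS && c.rest == NULL);   /* rest folded back in */
      return 0;
    }
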
if (comb->n < MAX_AFF_ELTS)
unsigned i;
comb1->offset = (comb1->offset + comb2->offset) & comb1->mask;
- for (i = 0; i < comb2-> n; i++)
+ for (i = 0; i < comb2->n; i++)
aff_combination_add_elt (comb1, comb2->elts[i], comb2->coefs[i]);
if (comb2->rest)
aff_combination_add_elt (comb1, comb2->rest, 1);
unsigned HOST_WIDE_INT ustepi, cstepi;
HOST_WIDE_INT ratioi;
struct affine_tree_combination cbase_aff, expr_aff;
+ tree cstep_orig = cstep, ustep_orig = ustep;
if (TYPE_PRECISION (utype) > TYPE_PRECISION (ctype))
{
expr = fold_convert (uutype, expr);
cbase = fold_convert (uutype, cbase);
cstep = fold_convert (uutype, cstep);
+
+ /* If the conversion is not a no-op, we must take it into account when
+ considering the value of the step. */
+ if (TYPE_PRECISION (utype) < TYPE_PRECISION (ctype))
+ cstep_orig = cstep;
}
- if (cst_and_fits_in_hwi (cstep)
- && cst_and_fits_in_hwi (ustep))
+ if (cst_and_fits_in_hwi (cstep_orig)
+ && cst_and_fits_in_hwi (ustep_orig))
{
- ustepi = int_cst_value (ustep);
- cstepi = int_cst_value (cstep);
+ ustepi = int_cst_value (ustep_orig);
+ cstepi = int_cst_value (cstep_orig);
if (!divide (TYPE_PRECISION (uutype), ustepi, cstepi, &ratioi))
{
}
else
{
- ratio = constant_multiple_of (uutype, ustep, cstep);
+ ratio = constant_multiple_of (uutype, ustep_orig, cstep_orig);
if (!ratio)
return false;
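
The cstep_orig/ustep_orig handling above matters because truncating a step to a narrower use type can change whether one step is a constant multiple of the other. A small, self-contained illustration of the effect (plain C, not GCC code):

    #include <stdint.h>
    #include <stdio.h>

    int
    main (void)
    {
      int32_t cstep = 65537;                     /* candidate step, 32-bit type */
      int32_t ustep = 2;                         /* use step */
      uint16_t cstep_narrow = (uint16_t) cstep;  /* value after conversion: 1 */

      /* In the wide type, ustep is not a multiple of cstep...  */
      printf ("wide:   ustep %% cstep = %d\n", ustep % cstep);
      /* ...but in the narrower use type it is (2 == 2 * 1), so the ratio
         must be computed from the converted step.  */
      printf ("narrow: ustep %% cstep = %d\n", ustep % (int32_t) cstep_narrow);
      return 0;
    }
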
if (*cached)
return (*cached)->cost;
- *cached = xmalloc (sizeof (struct mbc_entry));
+ *cached = XNEW (struct mbc_entry);
(*cached)->mode = mode;
(*cached)->cst = cst;
acost = costs[symbol_present][var_present][offset_p][ratio_p];
if (!acost)
{
+ int old_cse_not_expected;
acost = 0;
addr = gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1);
addr = gen_rtx_fmt_ee (PLUS, Pmode, addr, base);
start_sequence ();
+ /* To avoid splitting addressing modes, pretend that no CSE will
+ follow. */
+ old_cse_not_expected = cse_not_expected;
+ cse_not_expected = true;
addr = memory_address (Pmode, addr);
+ cse_not_expected = old_cse_not_expected;
seq = get_insns ();
end_sequence ();
return cost + acost;
}
-/* Estimates cost of forcing EXPR into a variable. DEPENDS_ON is a set of the
- invariants the computation depends on. */
-static unsigned
-force_var_cost (struct ivopts_data *data,
- tree expr, bitmap *depends_on)
+/* Estimates cost of forcing expression EXPR into a variable. */
+
+unsigned
+force_expr_to_var_cost (tree expr)
{
static bool costs_initialized = false;
static unsigned integer_cost;
build_int_cst_type (type, 2000))) + 1;
if (dump_file && (dump_flags & TDF_DETAILS))
{
- fprintf (dump_file, "force_var_cost:\n");
+ fprintf (dump_file, "force_expr_to_var_cost:\n");
fprintf (dump_file, " integer %d\n", (int) integer_cost);
fprintf (dump_file, " symbol %d\n", (int) symbol_cost);
fprintf (dump_file, " address %d\n", (int) address_cost);
STRIP_NOPS (expr);
- if (depends_on)
- {
- fd_ivopts_data = data;
- walk_tree (&expr, find_depends, depends_on, NULL);
- }
-
if (SSA_VAR_P (expr))
return 0;
if (is_gimple_val (op0))
cost0 = 0;
else
- cost0 = force_var_cost (data, op0, NULL);
+ cost0 = force_expr_to_var_cost (op0);
if (is_gimple_val (op1))
cost1 = 0;
else
- cost1 = force_var_cost (data, op1, NULL);
+ cost1 = force_expr_to_var_cost (op1);
break;
return cost < target_spill_cost ? cost : target_spill_cost;
}
+/* Estimates cost of forcing EXPR into a variable. DEPENDS_ON is a set of the
+ invariants the computation depends on. */
+
+static unsigned
+force_var_cost (struct ivopts_data *data,
+ tree expr, bitmap *depends_on)
+{
+ if (depends_on)
+ {
+ fd_ivopts_data = data;
+ walk_tree (&expr, find_depends, depends_on, NULL);
+ }
+
+ return force_expr_to_var_cost (expr);
+}
+
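
The split above separates the pure cost estimate (force_expr_to_var_cost) from the bookkeeping of which invariants the expression uses (force_var_cost). A toy, self-contained analog of that division of labour, with hypothetical names:

    #include <stddef.h>

    struct expr                       /* toy expression: a variable or a binary op */
    {
      int var_id;                     /* > 0: leaf variable number var_id */
      struct expr *op0, *op1;
    };

    /* Pure part: one cost unit per operation, variables are free.  */
    unsigned
    expr_to_var_cost (const struct expr *e)
    {
      if (e == NULL || e->var_id > 0)
        return 0;
      return 1 + expr_to_var_cost (e->op0) + expr_to_var_cost (e->op1);
    }

    /* Bookkeeping part: record the variables used, as a toy bitmap.  */
    static void
    record_depends (const struct expr *e, unsigned *depends_on)
    {
      if (e == NULL)
        return;
      if (e->var_id > 0)
        *depends_on |= 1u << (e->var_id - 1);
      record_depends (e->op0, depends_on);
      record_depends (e->op1, depends_on);
    }

    /* The original entry point becomes a thin wrapper over the pure part.  */
    unsigned
    var_cost (const struct expr *e, unsigned *depends_on)
    {
      if (depends_on)
        record_depends (e, depends_on);
      return expr_to_var_cost (e);
    }
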
/* Estimates cost of expressing address ADDR as var + symbol + offset. The
value of offset is added to OFFSET, SYMBOL_PRESENT and VAR_PRESENT are set
to false if the corresponding part is missing. DEPENDS_ON is a set of the
iv_ca_delta_add (struct iv_use *use, struct cost_pair *old_cp,
struct cost_pair *new_cp, struct iv_ca_delta *next_change)
{
- struct iv_ca_delta *change = xmalloc (sizeof (struct iv_ca_delta));
+ struct iv_ca_delta *change = XNEW (struct iv_ca_delta);
change->use = use;
change->old_cp = old_cp;
static struct iv_ca *
iv_ca_new (struct ivopts_data *data)
{
- struct iv_ca *nw = xmalloc (sizeof (struct iv_ca));
+ struct iv_ca *nw = XNEW (struct iv_ca);
nw->upto = 0;
nw->bad_uses = 0;
- nw->cand_for_use = xcalloc (n_iv_uses (data), sizeof (struct cost_pair *));
- nw->n_cand_uses = xcalloc (n_iv_cands (data), sizeof (unsigned));
+ nw->cand_for_use = XCNEWVEC (struct cost_pair *, n_iv_uses (data));
+ nw->n_cand_uses = XCNEWVEC (unsigned, n_iv_cands (data));
nw->cands = BITMAP_ALLOC (NULL);
nw->n_cands = 0;
nw->n_regs = 0;
nw->cand_use_cost = 0;
nw->cand_cost = 0;
- nw->n_invariant_uses = xcalloc (data->max_inv_id + 1, sizeof (unsigned));
+ nw->n_invariant_uses = XCNEWVEC (unsigned, data->max_inv_id + 1);
nw->cost = 0;
return nw;
{
block_stmt_iterator bsi = bsi_for_stmt (stmt);
- bsi_remove (&bsi);
+ bsi_remove (&bsi, true);
}
}
introduce a new computation (that might also need casting the
variable to unsigned and back). */
if (cand->pos == IP_ORIGINAL
- && TREE_CODE (use->stmt) == MODIFY_EXPR
- && TREE_OPERAND (use->stmt, 0) == cand->var_after)
+ && cand->incremented_at == use->stmt)
{
+ tree step, ctype, utype;
+ enum tree_code incr_code = PLUS_EXPR;
+
+ gcc_assert (TREE_CODE (use->stmt) == MODIFY_EXPR);
+ gcc_assert (TREE_OPERAND (use->stmt, 0) == cand->var_after);
+
+ step = cand->iv->step;
+ ctype = TREE_TYPE (step);
+ utype = TREE_TYPE (cand->var_after);
+ if (TREE_CODE (step) == NEGATE_EXPR)
+ {
+ incr_code = MINUS_EXPR;
+ step = TREE_OPERAND (step, 0);
+ }
+
+ /* Check whether we may leave the computation unchanged.
+ This is the case only if it does not rely on other
+ computations in the loop -- otherwise, the computation
+ we rely upon may be removed in remove_unused_ivs,
+ thus leading to an ICE. */
op = TREE_OPERAND (use->stmt, 1);
+ if (TREE_CODE (op) == PLUS_EXPR
+ || TREE_CODE (op) == MINUS_EXPR)
+ {
+ if (TREE_OPERAND (op, 0) == cand->var_before)
+ op = TREE_OPERAND (op, 1);
+ else if (TREE_CODE (op) == PLUS_EXPR
+ && TREE_OPERAND (op, 1) == cand->var_before)
+ op = TREE_OPERAND (op, 0);
+ else
+ op = NULL_TREE;
+ }
+ else
+ op = NULL_TREE;
- /* Be a bit careful. In case variable is expressed in some
- complicated way, rewrite it so that we may get rid of this
- complicated expression. */
- if ((TREE_CODE (op) == PLUS_EXPR
- || TREE_CODE (op) == MINUS_EXPR)
- && TREE_OPERAND (op, 0) == cand->var_before
- && TREE_CODE (TREE_OPERAND (op, 1)) == INTEGER_CST)
+ if (op
+ && (TREE_CODE (op) == INTEGER_CST
+ || operand_equal_p (op, step, 0)))
return;
+
+ /* Otherwise, add the necessary computations to express
+ the iv. */
+ op = fold_convert (ctype, cand->var_before);
+ comp = fold_convert (utype,
+ build2 (incr_code, ctype, op,
+ unshare_expr (step)));
}
+ else
+ comp = get_computation (data->current_loop, use, cand);
- comp = get_computation (data->current_loop, use, cand);
switch (TREE_CODE (use->stmt))
{
case PHI_NODE:
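
To make the intent of the IP_ORIGINAL case concrete, a hypothetical source loop (illustration only): an increment whose right-hand side uses only the variable itself plus a constant or the candidate's own step is the kind this hunk can leave untouched; an increment that pulls in another loop computation is instead re-expressed from var_before, since that other computation might later be deleted by remove_unused_ivs.

    void
    copy_ints (int *dst, const int *src, int n)
    {
      int i;

      for (i = 0; i < n; i++)        /* i = i + 1: may be left unchanged */
        dst[i] = src[i];
    }
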
and extracts this single useful piece of information. */
static tree
-get_ref_tag (tree ref)
+get_ref_tag (tree ref, tree orig)
{
tree var = get_base_address (ref);
- tree tag;
+ tree aref = NULL_TREE, tag, sv;
+ HOST_WIDE_INT offset, size, maxsize;
+
+ for (sv = orig; handled_component_p (sv); sv = TREE_OPERAND (sv, 0))
+ {
+ aref = get_ref_base_and_extent (sv, &offset, &size, &maxsize);
+ if (ref)
+ break;
+ }
+
+ if (aref && SSA_VAR_P (aref) && get_subvars_for_var (aref))
+ return unshare_expr (sv);
if (!var)
return NULL_TREE;
if (TREE_CODE (var) == INDIRECT_REF)
- var = TREE_OPERAND (var, 0);
- if (TREE_CODE (var) == SSA_NAME)
{
+ /* If the base is a dereference of a pointer, first check its name
+ mem tag; if it does not have one, fall back to its type mem tag. */
+ var = TREE_OPERAND (var, 0);
+ if (TREE_CODE (var) != SSA_NAME)
+ return NULL_TREE;
+
if (SSA_NAME_PTR_INFO (var))
{
tag = SSA_NAME_PTR_INFO (var)->name_mem_tag;
}
var = SSA_NAME_VAR (var);
+ tag = var_ann (var)->type_mem_tag;
+ gcc_assert (tag != NULL_TREE);
+ return tag;
}
-
- if (DECL_P (var))
- {
+ else
+ {
+ if (!DECL_P (var))
+ return NULL_TREE;
+
tag = var_ann (var)->type_mem_tag;
if (tag)
return tag;
return var;
}
-
- return NULL_TREE;
}
/* Copies the reference information from OLD_REF to NEW_REF. */
copy_mem_ref_info (new_ref, old_ref);
else
{
- TMR_TAG (new_ref) = get_ref_tag (old_ref);
TMR_ORIGINAL (new_ref) = unshare_and_remove_ssa_names (old_ref);
+ TMR_TAG (new_ref) = get_ref_tag (old_ref, TMR_ORIGINAL (new_ref));
}
}
so that they are emitted on the correct place, and so that the loop closed
ssa form is preserved. */
-static void
+void
compute_phi_arg_on_exit (edge exit, tree stmts, tree op)
{
tree_stmt_iterator tsi;
{
for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
{
- bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_NEW_STMT);
- protect_loop_closed_ssa_form (exit, bsi_stmt (bsi));
+ tree stmt = tsi_stmt (tsi);
+ bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
+ protect_loop_closed_ssa_form (exit, stmt);
}
}
else
{
- bsi_insert_after (&bsi, stmts, BSI_NEW_STMT);
- protect_loop_closed_ssa_form (exit, bsi_stmt (bsi));
+ bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);
+ protect_loop_closed_ssa_form (exit, stmts);
}
if (!op)
stmt = build2 (MODIFY_EXPR, TREE_TYPE (op),
def, op);
SSA_NAME_DEF_STMT (def) = stmt;
- bsi_insert_after (&bsi, stmt, BSI_CONTINUE_LINKING);
+ bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
}
}
}
default:
gcc_unreachable ();
}
- update_stmt (use->stmt);
+ mark_new_vars_to_rename (use->stmt);
}
/* Rewrite the uses using the selected induction variables. */
{
data->version_info_size = 2 * num_ssa_names;
free (data->version_info);
- data->version_info = xcalloc (data->version_info_size,
- sizeof (struct version_info));
+ data->version_info = XCNEWVEC (struct version_info, data->version_info_size);
}
data->max_inv_id = 0;