/* Induction variable optimizations.
- Copyright (C) 2003 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004 Free Software Foundation, Inc.
This file is part of GCC.
struct iv
{
tree base; /* Initial value of the iv. */
+ tree base_object; /* A memory object to which the induction variable points. */
tree step; /* Step of the iv (constant only). */
tree ssa_name; /* The ssa name with the value. */
bool biv_p; /* Is it a biv? */
/* The candidates. */
varray_type iv_candidates;
+ /* A bitmap of important candidates. */
+ bitmap important_candidates;
+
/* Whether to consider just related and important candidates when replacing a
use. */
bool consider_all_candidates;
#define CONSIDER_ALL_CANDIDATES_BOUND \
((unsigned) PARAM_VALUE (PARAM_IV_CONSIDER_ALL_CANDIDATES_BOUND))
-/* If there are more iv occurences, we just give up (it is quite unlikely that
+/* If there are more iv occurrences, we just give up (it is quite unlikely that
optimizing such a loop would help, and it would take ages). */
#define MAX_CONSIDERED_USES \
void
dump_iv (FILE *file, struct iv *iv)
{
- fprintf (file, "ssa name ");
- print_generic_expr (file, iv->ssa_name, TDF_SLIM);
+ if (iv->ssa_name)
+ {
+ fprintf (file, "ssa name ");
+ print_generic_expr (file, iv->ssa_name, TDF_SLIM);
+ fprintf (file, "\n");
+ }
+
+ fprintf (file, " type ");
+ print_generic_expr (file, TREE_TYPE (iv->base), TDF_SLIM);
fprintf (file, "\n");
if (iv->step)
fprintf (file, "\n");
}
+ if (iv->base_object)
+ {
+ fprintf (file, " base object ");
+ print_generic_expr (file, iv->base_object, TDF_SLIM);
+ fprintf (file, "\n");
+ }
+
if (iv->biv_p)
fprintf (file, " is a biv\n");
}
void
dump_use (FILE *file, struct iv_use *use)
{
- struct iv *iv = use->iv;
-
fprintf (file, "use %d\n", use->id);
switch (use->type)
break;
default:
- abort ();
- }
-
- fprintf (file, " in statement ");
- print_generic_expr (file, use->stmt, TDF_SLIM);
- fprintf (file, "\n");
-
- fprintf (file, " at position ");
- if (use->op_p)
- print_generic_expr (file, *use->op_p, TDF_SLIM);
- fprintf (file, "\n");
-
- if (iv->step)
- {
- fprintf (file, " base ");
- print_generic_expr (file, iv->base, TDF_SLIM);
- fprintf (file, "\n");
-
- fprintf (file, " step ");
- print_generic_expr (file, iv->step, TDF_SLIM);
- fprintf (file, "\n");
- }
- else
- {
- fprintf (file, " invariant ");
- print_generic_expr (file, iv->base, TDF_SLIM);
- fprintf (file, "\n");
- }
-
- fprintf (file, " related candidates ");
- dump_bitmap (file, use->related_cands);
+ gcc_unreachable ();
+ }
+
+ fprintf (file, " in statement ");
+ print_generic_expr (file, use->stmt, TDF_SLIM);
+ fprintf (file, "\n");
+
+ fprintf (file, " at position ");
+ if (use->op_p)
+ print_generic_expr (file, *use->op_p, TDF_SLIM);
+ fprintf (file, "\n");
+
+ dump_iv (file, use->iv);
+
+ fprintf (file, " related candidates ");
+ dump_bitmap (file, use->related_cands);
}
/* Dumps information about the uses to FILE. */
break;
}
- if (iv->step)
- {
- fprintf (file, " base ");
- print_generic_expr (file, iv->base, TDF_SLIM);
- fprintf (file, "\n");
-
- fprintf (file, " step ");
- print_generic_expr (file, iv->step, TDF_SLIM);
- fprintf (file, "\n");
- }
- else
- {
- fprintf (file, " invariant ");
- print_generic_expr (file, iv->base, TDF_SLIM);
- fprintf (file, "\n");
- }
+ dump_iv (file, iv);
}
/* Returns the info for ssa version VER. */
val = (a * inv) & mask;
- if (((val * b) & mask) != a)
- abort ();
+ gcc_assert (((val * b) & mask) == a);
if ((val >> (bits - 1)) & 1)
val |= ~mask;
{
basic_block bb = ip_normal_pos (loop), sbb = bb_for_stmt (stmt);
- if (!bb)
- abort ();
+ gcc_assert (bb);
if (sbb == loop->latch)
return true;
return stmt_after_ip_original_pos (cand, stmt);
default:
- abort ();
+ gcc_unreachable ();
}
}
VARRAY_GENERIC_PTR_NOGC_INIT (decl_rtl_to_reset, 20, "decl_rtl_to_reset");
}
+/* Returns the memory object to which EXPR points. In case we are able to
+ determine that it does not point to any such object, NULL is returned. */
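+/* For example, the base object of &a[i] is &a, while an expression of
+ non-pointer type (or an integer constant) has no base object. */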
+
+static tree
+determine_base_object (tree expr)
+{
+ enum tree_code code = TREE_CODE (expr);
+ tree base, obj, op0, op1;
+
+ if (!POINTER_TYPE_P (TREE_TYPE (expr)))
+ return NULL_TREE;
+
+ switch (code)
+ {
+ case INTEGER_CST:
+ return NULL_TREE;
+
+ case ADDR_EXPR:
+ obj = TREE_OPERAND (expr, 0);
+ base = get_base_address (obj);
+
+ if (!base)
+ return fold_convert (ptr_type_node, expr);
+
+ return fold (build1 (ADDR_EXPR, ptr_type_node, base));
+
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ op0 = determine_base_object (TREE_OPERAND (expr, 0));
+ op1 = determine_base_object (TREE_OPERAND (expr, 1));
+
+ if (!op1)
+ return op0;
+
+ if (!op0)
+ return (code == PLUS_EXPR
+ ? op1
+ : fold (build1 (NEGATE_EXPR, ptr_type_node, op1)));
+
+ return fold (build (code, ptr_type_node, op0, op1));
+
+ default:
+ return fold_convert (ptr_type_node, expr);
+ }
+}
+
/* Allocates an induction variable with given initial value BASE and step STEP
for loop LOOP. */
step = NULL_TREE;
iv->base = base;
+ iv->base_object = determine_base_object (base);
iv->step = step;
iv->biv_p = false;
iv->have_use_for = false;
{
struct version_info *info = name_info (data, iv);
- if (info->iv)
- abort ();
+ gcc_assert (!info->iv);
bitmap_set_bit (data->relevant, SSA_NAME_VERSION (iv));
info->iv = alloc_iv (base, step);
return NULL_TREE;
if (!step)
- return fold_convert (type, integer_zero_node);
+ return build_int_cst (type, 0);
return step;
}
-/* Returns false if INDEX is a ssa name that occurs in an
+/* Returns true if EXP is a ssa name that occurs in an abnormal phi node. */
+
+static bool
+abnormal_ssa_name_p (tree exp)
+{
+ if (!exp)
+ return false;
+
+ if (TREE_CODE (exp) != SSA_NAME)
+ return false;
+
+ return SSA_NAME_OCCURS_IN_ABNORMAL_PHI (exp) != 0;
+}
+
+/* Returns false if BASE or INDEX contains a ssa name that occurs in an
abnormal phi node. Callback for for_each_index. */
static bool
-idx_contains_abnormal_ssa_name_p (tree base ATTRIBUTE_UNUSED, tree *index,
+idx_contains_abnormal_ssa_name_p (tree base, tree *index,
void *data ATTRIBUTE_UNUSED)
{
- if (TREE_CODE (*index) != SSA_NAME)
- return true;
+ if (TREE_CODE (base) == ARRAY_REF)
+ {
+ if (abnormal_ssa_name_p (TREE_OPERAND (base, 2)))
+ return false;
+ if (abnormal_ssa_name_p (TREE_OPERAND (base, 3)))
+ return false;
+ }
- return SSA_NAME_OCCURS_IN_ABNORMAL_PHI (*index) == 0;
+ return !abnormal_ssa_name_p (*index);
}
/* Returns true if EXPR contains a ssa name that occurs in an
contains_abnormal_ssa_name_p (tree expr)
{
enum tree_code code = TREE_CODE (expr);
- char class = TREE_CODE_CLASS (code);
+ enum tree_code_class class = TREE_CODE_CLASS (code);
if (code == SSA_NAME)
return SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr) != 0;
switch (class)
{
- case '2':
+ case tcc_binary:
+ case tcc_comparison:
if (contains_abnormal_ssa_name_p (TREE_OPERAND (expr, 1)))
return true;
/* Fallthru. */
- case '1':
+ case tcc_unary:
if (contains_abnormal_ssa_name_p (TREE_OPERAND (expr, 0)))
return true;
break;
default:
- abort ();
+ gcc_unreachable ();
}
return false;
{
unsigned i;
struct loop *loop = data->current_loop;
+ bitmap_iterator bi;
if (!find_bivs (data))
return false;
fprintf (dump_file, "Induction variables:\n\n");
- EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i, bi)
{
if (ver_info (data, i)->iv)
dump_iv (dump_file, ver_info (data, i)->iv);
- });
+ }
}
return true;
use->op_p = use_p;
use->related_cands = BITMAP_XMALLOC ();
+ /* Reset the ssa name, so that it is not shown in the dumps in case it
+ was not already reset by the caller. */
+ iv->ssa_name = NULL_TREE;
+
if (dump_file && (dump_flags & TDF_DETAILS))
dump_use (dump_file, use);
{
use = iv_use (data, iv->use_id);
- if (use->type != USE_NONLINEAR_EXPR
- && use->type != USE_OUTER)
- abort ();
+ gcc_assert (use->type == USE_NONLINEAR_EXPR
+ || use->type == USE_OUTER);
if (type == USE_NONLINEAR_EXPR)
use->type = USE_NONLINEAR_EXPR;
*civ = *iv;
stmt = SSA_NAME_DEF_STMT (op);
- if (TREE_CODE (stmt) != PHI_NODE
- && TREE_CODE (stmt) != MODIFY_EXPR)
- abort ();
+ gcc_assert (TREE_CODE (stmt) == PHI_NODE
+ || TREE_CODE (stmt) == MODIFY_EXPR);
use = record_use (data, NULL, civ, stmt, type);
iv->use_id = use->id;
record_use (data, cond_p, civ, stmt, USE_COMPARE);
}
+/* Returns true if expression EXPR is obviously invariant in LOOP,
+ i.e. if all its operands are defined outside of the LOOP. */
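+/* For instance, if N is defined before the loop, both N and N + 1 are
+ invariant in it, whereas an ssa name defined by a statement inside the
+ loop is not. */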
+
+bool
+expr_invariant_in_loop_p (struct loop *loop, tree expr)
+{
+ basic_block def_bb;
+ unsigned i, len;
+
+ if (is_gimple_min_invariant (expr))
+ return true;
+
+ if (TREE_CODE (expr) == SSA_NAME)
+ {
+ def_bb = bb_for_stmt (SSA_NAME_DEF_STMT (expr));
+ if (def_bb
+ && flow_bb_inside_loop_p (loop, def_bb))
+ return false;
+
+ return true;
+ }
+
+ if (!EXPR_P (expr))
+ return false;
+
+ len = first_rtl_op (TREE_CODE (expr));
+ for (i = 0; i < len; i++)
+ if (!expr_invariant_in_loop_p (loop, TREE_OPERAND (expr, i)))
+ return false;
+
+ return true;
+}
+
/* Cumulates the steps of indices into DATA and replaces their values with the
initial ones. Returns false when the value of the index cannot be determined.
Callback for for_each_index. */
{
struct ifs_ivopts_data *dta = data;
struct iv *iv;
- tree step, type, iv_type, iv_step;
-
+ tree step, type, iv_type, iv_step, lbound, off;
+ struct loop *loop = dta->ivopts_data->current_loop;
+
+ if (TREE_CODE (base) == MISALIGNED_INDIRECT_REF
+ || TREE_CODE (base) == ALIGN_INDIRECT_REF)
+ return false;
+
+ /* If base is a component ref, require that the offset of the reference
+ is invariant. */
+ if (TREE_CODE (base) == COMPONENT_REF)
+ {
+ off = component_ref_field_offset (base);
+ return expr_invariant_in_loop_p (loop, off);
+ }
+
+ /* If base is an array ref, first check whether we will be able to move
+ the reference out of the loop (in order to take its address in strength
+ reduction). For this to work we need both the lower bound and the step
+ to be loop invariant. */
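+ /* The element size and the lower bound are typically constants; they may
+ be ssa names e.g. for variable-length array types, in which case they
+ still have to be defined outside of the loop. */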
+ if (TREE_CODE (base) == ARRAY_REF)
+ {
+ step = array_ref_element_size (base);
+ lbound = array_ref_low_bound (base);
+
+ if (!expr_invariant_in_loop_p (loop, step)
+ || !expr_invariant_in_loop_p (loop, lbound))
+ return false;
+ }
+
if (TREE_CODE (*idx) != SSA_NAME)
return true;
iv_type = TREE_TYPE (iv->base);
type = build_pointer_type (TREE_TYPE (base));
if (TREE_CODE (base) == ARRAY_REF)
- step = array_ref_element_size (base);
- else
{
- /* The step for pointer arithmetics already is 1 byte. */
- step = fold_convert (type, integer_one_node);
+ step = array_ref_element_size (base);
+
+ /* We only handle addresses whose step is an integer constant. */
+ if (TREE_CODE (step) != INTEGER_CST)
+ return false;
}
+ else
+ /* The step for pointer arithmetic is already one byte. */
+ step = build_int_cst (type, 1);
if (TYPE_PRECISION (iv_type) < TYPE_PRECISION (type))
iv_step = can_count_iv_in_wider_type (dta->ivopts_data->current_loop,
object is passed to it in DATA. */
static bool
-idx_record_use (tree base ATTRIBUTE_UNUSED, tree *idx,
+idx_record_use (tree base, tree *idx,
void *data)
{
find_interesting_uses_op (data, *idx);
+ if (TREE_CODE (base) == ARRAY_REF)
+ {
+ find_interesting_uses_op (data, array_ref_element_size (base));
+ find_interesting_uses_op (data, array_ref_low_bound (base));
+ }
return true;
}
|| zero_p (step))
goto fail;
+ gcc_assert (TREE_CODE (base) != ALIGN_INDIRECT_REF);
+ gcc_assert (TREE_CODE (base) != MISALIGNED_INDIRECT_REF);
+
if (TREE_CODE (base) == INDIRECT_REF)
base = TREE_OPERAND (base, 0);
else
switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
{
- case '<':
+ case tcc_comparison:
find_interesting_uses_cond (data, stmt, &TREE_OPERAND (stmt, 1));
return;
- case 'r':
+ case tcc_reference:
find_interesting_uses_address (data, stmt, &TREE_OPERAND (stmt, 1));
- if (TREE_CODE_CLASS (TREE_CODE (lhs)) == 'r')
+ if (REFERENCE_CLASS_P (lhs))
find_interesting_uses_address (data, stmt, &TREE_OPERAND (stmt, 0));
return;
default: ;
}
- if (TREE_CODE_CLASS (TREE_CODE (lhs)) == 'r')
+ if (REFERENCE_CLASS_P (lhs)
+ && is_gimple_val (rhs))
{
find_interesting_uses_address (data, stmt, &TREE_OPERAND (stmt, 0));
find_interesting_uses_op (data, rhs);
return;
}
+
+ /* TODO -- we should also handle address uses of type
+
+ memory = call (whatever);
+
+ and
+
+ call (memory). */
}
if (TREE_CODE (stmt) == PHI_NODE
for (i = 0; i < data->current_loop->num_nodes; i++)
{
+ edge_iterator ei;
bb = body[i];
- for (e = bb->succ; e; e = e->succ_next)
+ FOR_EACH_EDGE (e, ei, bb->succs)
if (e->dest != EXIT_BLOCK_PTR
&& !flow_bb_inside_loop_p (data->current_loop, e->dest))
find_interesting_uses_outside (data, e);
if (dump_file && (dump_flags & TDF_DETAILS))
{
+ bitmap_iterator bi;
+
fprintf (dump_file, "\n");
- EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i, bi)
{
info = ver_info (data, i);
if (info->inv_id)
fprintf (dump_file, " is invariant (%d)%s\n",
info->inv_id, info->has_nonlin_use ? "" : ", eliminable");
}
- });
+ }
fprintf (dump_file, "\n");
}
{
/* Add 0 + 1 * iteration candidate. */
add_candidate (data,
- fold_convert (unsigned_type_node, integer_zero_node),
- fold_convert (unsigned_type_node, integer_one_node),
+ build_int_cst (unsigned_intSI_type_node, 0),
+ build_int_cst (unsigned_intSI_type_node, 1),
true, NULL);
- /* The same for a long type. */
- add_candidate (data,
- fold_convert (long_unsigned_type_node, integer_zero_node),
- fold_convert (long_unsigned_type_node, integer_one_node),
- true, NULL);
+ /* The same for a long type if it is still fast enough. */
+ if (BITS_PER_WORD > 32)
+ add_candidate (data,
+ build_int_cst (unsigned_intDI_type_node, 0),
+ build_int_cst (unsigned_intDI_type_node, 1),
+ true, NULL);
}
/* The same, but with initial value zero. */
add_candidate (data,
- fold_convert (TREE_TYPE (iv->base), integer_zero_node),
+ build_int_cst (TREE_TYPE (iv->base), 0),
iv->step, true, NULL);
phi = SSA_NAME_DEF_STMT (iv->ssa_name);
{
unsigned i;
struct iv *iv;
+ bitmap_iterator bi;
- EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i, bi)
{
iv = ver_info (data, i)->iv;
if (iv && iv->biv_p && !zero_p (iv->step))
add_old_iv_candidates (data, iv);
- });
+ }
}
/* Adds candidates based on the value of the induction variable IV and USE. */
add_candidate (data, iv->base, iv->step, false, use);
/* The same, but with initial value zero. */
- add_candidate (data,
- fold_convert (TREE_TYPE (iv->base), integer_zero_node),
+ add_candidate (data, build_int_cst (TREE_TYPE (iv->base), 0),
iv->step, false, use);
}
if (base != TREE_OPERAND (iv->base, 0))
{
+ gcc_assert (TREE_CODE (base) != ALIGN_INDIRECT_REF);
+ gcc_assert (TREE_CODE (base) != MISALIGNED_INDIRECT_REF);
+
if (TREE_CODE (base) == INDIRECT_REF)
base = TREE_OPERAND (base, 0);
else
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
}
for (i = 0; i < n_iv_uses (data); i++)
{
struct iv_use *use = iv_use (data, i);
+ bitmap_iterator bi;
if (data->consider_all_candidates)
{
else
{
size = n_imp;
- EXECUTE_IF_SET_IN_BITMAP (use->related_cands, 0, j, size++);
+ EXECUTE_IF_SET_IN_BITMAP (use->related_cands, 0, j, bi)
+ {
+ size++;
+ }
use->n_map_members = 0;
}
return cost;
}
+/* Produce DECL_RTL for object OBJ so that it looks like it is stored in memory. */
+static rtx
+produce_memory_decl_rtl (tree obj, int *regno)
+{
+ rtx x;
+
+ gcc_assert (obj);
+
+ if (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
+ {
+ const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
+ x = gen_rtx_SYMBOL_REF (Pmode, name);
+ }
+ else
+ x = gen_raw_REG (Pmode, (*regno)++);
+
+ return gen_rtx_MEM (DECL_MODE (obj), x);
+}
+
/* Prepares decl_rtl for variables referred in *EXPR_P. Callback for
walk_tree. DATA contains the actual fake register number. */
switch (TREE_CODE (*expr_p))
{
+ case ADDR_EXPR:
+ for (expr_p = &TREE_OPERAND (*expr_p, 0);
+ (handled_component_p (*expr_p)
+ || TREE_CODE (*expr_p) == REALPART_EXPR
+ || TREE_CODE (*expr_p) == IMAGPART_EXPR);
+ expr_p = &TREE_OPERAND (*expr_p, 0));
+ obj = *expr_p;
+ if (DECL_P (obj))
+ x = produce_memory_decl_rtl (obj, regno);
+ break;
+
case SSA_NAME:
*ws = 0;
obj = SSA_NAME_VAR (*expr_p);
break;
if (DECL_MODE (obj) == BLKmode)
- {
- if (TREE_STATIC (obj)
- || DECL_EXTERNAL (obj))
- {
- const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
- x = gen_rtx_SYMBOL_REF (Pmode, name);
- }
- else
- x = gen_raw_REG (Pmode, (*regno)++);
-
- x = gen_rtx_MEM (DECL_MODE (obj), x);
- }
+ x = produce_memory_decl_rtl (obj, regno);
else
x = gen_raw_REG (DECL_MODE (obj), (*regno)++);
get_computation_at (struct loop *loop,
struct iv_use *use, struct iv_cand *cand, tree at)
{
- tree ubase = use->iv->base, ustep = use->iv->step;
- tree cbase = cand->iv->base, cstep = cand->iv->step;
+ tree ubase = use->iv->base;
+ tree ustep = use->iv->step;
+ tree cbase = cand->iv->base;
+ tree cstep = cand->iv->step;
tree utype = TREE_TYPE (ubase), ctype = TREE_TYPE (cbase);
tree uutype;
tree expr, delta;
s_offset = offset;
cost = 0;
- offset_p = (min_offset <= s_offset && s_offset <= max_offset);
+ offset_p = (s_offset != 0
+ && min_offset <= s_offset && s_offset <= max_offset);
ratio_p = (ratio != 1
&& -MAX_RATIO <= ratio && ratio <= MAX_RATIO
&& TEST_BIT (valid_mult, ratio + MAX_RATIO));
if (ratio_p)
addr = gen_rtx_fmt_ee (MULT, Pmode, addr, GEN_INT (rat));
+ if (var_present)
+ addr = gen_rtx_fmt_ee (PLUS, Pmode, reg1, addr);
+
if (symbol_present)
{
base = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (""));
gen_rtx_fmt_ee (PLUS, Pmode,
base,
GEN_INT (off)));
- if (var_present)
- base = gen_rtx_fmt_ee (PLUS, Pmode, reg1, base);
- }
-
- else if (var_present)
- {
- base = reg1;
- if (offset_p)
- base = gen_rtx_fmt_ee (PLUS, Pmode, base, GEN_INT (off));
}
else if (offset_p)
base = GEN_INT (off);
return target_spill_cost;
}
-/* Peels a single layer of ADDR. If DIFF is not NULL, do it only if the
- offset is constant and add the offset to DIFF. */
-
-static tree
-peel_address (tree addr, unsigned HOST_WIDE_INT *diff)
-{
- tree off, size;
- HOST_WIDE_INT bit_offset;
-
- switch (TREE_CODE (addr))
- {
- case SSA_NAME:
- case INDIRECT_REF:
- case BIT_FIELD_REF:
- case VAR_DECL:
- case PARM_DECL:
- case RESULT_DECL:
- case STRING_CST:
- case REALPART_EXPR:
- case IMAGPART_EXPR:
- return NULL_TREE;
-
- case COMPONENT_REF:
- off = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (addr, 1));
- bit_offset = TREE_INT_CST_LOW (off);
-
- if (bit_offset % BITS_PER_UNIT)
- abort ();
-
- if (diff)
- *diff += bit_offset / BITS_PER_UNIT;
-
- return TREE_OPERAND (addr, 0);
-
- case ARRAY_REF:
- off = TREE_OPERAND (addr, 1);
-
- if (diff)
- {
- if (!cst_and_fits_in_hwi (off))
- return NULL_TREE;
-
- size = TYPE_SIZE_UNIT (TREE_TYPE (addr));
- if (!cst_and_fits_in_hwi (size))
- return NULL_TREE;
-
- *diff += TREE_INT_CST_LOW (off) * TREE_INT_CST_LOW (size);
- }
-
- return TREE_OPERAND (addr, 0);
-
- default:
- abort ();
- }
-}
-
-/* Checks whether E1 and E2 have constant difference, and if they do,
- store it in *DIFF. */
-
-static bool
-ptr_difference_const (tree e1, tree e2, unsigned HOST_WIDE_INT *diff)
-{
- int d1 = 0, d2 = 0;
- tree x;
- unsigned HOST_WIDE_INT delta1 = 0, delta2 = 0;
-
- /* Find depths of E1 and E2. */
- for (x = e1; x; x = peel_address (x, NULL))
- d1++;
- for (x = e2; x; x = peel_address (x, NULL))
- d2++;
-
- for (; e1 && d1 > d2; e1 = peel_address (e1, &delta1))
- d1--;
- for (; e2 && d2 > d1; e2 = peel_address (e2, &delta2))
- d2--;
-
- while (e1 && e2 && !operand_equal_p (e1, e2, 0))
- {
- e1 = peel_address (e1, &delta1);
- e2 = peel_address (e2, &delta1);
- }
-
- if (!e1 || !e2)
- return false;
-
- *diff = delta1 - delta2;
- return true;
-}
-
/* Estimates cost of expressing address ADDR as var + symbol + offset. The
value of offset is added to OFFSET, SYMBOL_PRESENT and VAR_PRESENT are set
to false if the corresponding part is missing. DEPENDS_ON is a set of the
tree addr, bool *symbol_present, bool *var_present,
unsigned HOST_WIDE_INT *offset, bitmap *depends_on)
{
- tree core = addr;
-
- while (core
- && TREE_CODE (core) != VAR_DECL)
- core = peel_address (core, offset);
+ tree core;
+ HOST_WIDE_INT bitsize;
+ HOST_WIDE_INT bitpos;
+ tree toffset;
+ enum machine_mode mode;
+ int unsignedp, volatilep;
+
+ core = get_inner_reference (addr, &bitsize, &bitpos, &toffset, &mode,
+ &unsignedp, &volatilep);
- if (!core)
+ if (toffset != 0
+ || bitpos % BITS_PER_UNIT != 0
+ || TREE_CODE (core) != VAR_DECL)
{
*symbol_present = false;
*var_present = true;
fd_ivopts_data = data;
walk_tree (&addr, find_depends, depends_on, NULL);
return target_spill_cost;
- }
-
+ }
+
+ *offset += bitpos / BITS_PER_UNIT;
if (TREE_STATIC (core)
|| DECL_EXTERNAL (core))
{
tree e1, tree e2, bool *symbol_present, bool *var_present,
unsigned HOST_WIDE_INT *offset, bitmap *depends_on)
{
- unsigned HOST_WIDE_INT diff = 0;
+ HOST_WIDE_INT diff = 0;
unsigned cost;
- if (TREE_CODE (e1) != ADDR_EXPR)
- abort ();
+ gcc_assert (TREE_CODE (e1) == ADDR_EXPR);
if (TREE_CODE (e2) == ADDR_EXPR
&& ptr_difference_const (TREE_OPERAND (e1, 0),
return INFTY;
}
+ if (address_p)
+ {
+ /* Do not try to express the address of one object with a computation
+ based on the address of a different object. This may cause problems
+ in rtl-level alias analysis (which does not expect this to happen,
+ since such pointer arithmetic is undefined in C), and would be
+ unlikely to be useful anyway. */
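+ /* For example, with two distinct arrays a[] and b[], we do not want to
+ express the address &b[i] using a candidate whose base object is &a. */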
+ if (use->iv->base_object
+ && cand->iv->base_object
+ && !operand_equal_p (use->iv->base_object, cand->iv->base_object, 0))
+ return INFTY;
+ }
+
if (!cst_and_fits_in_hwi (ustep)
|| !cst_and_fits_in_hwi (cstep))
return INFTY;
struct iv_use *use, struct iv_cand *cand,
enum tree_code *compare, tree *bound)
{
+ basic_block ex_bb;
edge exit;
- struct tree_niter_desc *niter, new_niter;
+ struct tree_niter_desc niter, new_niter;
tree wider_type, type, base;
-
- /* For now just very primitive -- we work just for the single exit condition,
- and are quite conservative about the possible overflows. TODO -- both of
- these can be improved. */
- exit = single_dom_exit (loop);
- if (!exit)
+
+ /* For now this works only for exits that dominate the loop latch.
+ TODO -- extend it to other conditions inside the loop body. */
+ ex_bb = bb_for_stmt (use->stmt);
+ if (use->stmt != last_stmt (ex_bb)
+ || TREE_CODE (use->stmt) != COND_EXPR)
return false;
- if (use->stmt != last_stmt (exit->src))
+ if (!dominated_by_p (CDI_DOMINATORS, loop->latch, ex_bb))
return false;
- niter = &loop_data (loop)->niter;
- if (!niter->niter
- || !integer_nonzerop (niter->assumptions)
- || !integer_zerop (niter->may_be_zero))
+ exit = EDGE_SUCC (ex_bb, 0);
+ if (flow_bb_inside_loop_p (loop, exit->dest))
+ exit = EDGE_SUCC (ex_bb, 1);
+ if (flow_bb_inside_loop_p (loop, exit->dest))
+ return false;
+
+ niter.niter = NULL_TREE;
+ number_of_iterations_exit (loop, exit, &niter);
+ if (!niter.niter
+ || !integer_nonzerop (niter.assumptions)
+ || !integer_zerop (niter.may_be_zero))
return false;
if (exit->flags & EDGE_TRUE_VALUE)
else
*compare = NE_EXPR;
- *bound = cand_value_at (loop, cand, use->stmt, niter->niter);
+ *bound = cand_value_at (loop, cand, use->stmt, niter.niter);
/* Let us check there is not some problem with overflows, by checking that
the number of iterations is unchanged. */
return false;
wider_type = TREE_TYPE (new_niter.niter);
- if (TYPE_PRECISION (wider_type) < TYPE_PRECISION (TREE_TYPE (niter->niter)))
- wider_type = TREE_TYPE (niter->niter);
- if (!operand_equal_p (fold_convert (wider_type, niter->niter),
+ if (TYPE_PRECISION (wider_type) < TYPE_PRECISION (TREE_TYPE (niter.niter)))
+ wider_type = TREE_TYPE (niter.niter);
+ if (!operand_equal_p (fold_convert (wider_type, niter.niter),
fold_convert (wider_type, new_niter.niter), 0))
return false;
if (!exit)
return false;
- if (!dominated_by_p (CDI_DOMINATORS, exit->src,
- bb_for_stmt (use->stmt)))
- abort ();
+ gcc_assert (dominated_by_p (CDI_DOMINATORS, exit->src,
+ bb_for_stmt (use->stmt)));
niter = &loop_data (loop)->niter;
if (!niter->niter
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
}
else
{
- EXECUTE_IF_SET_IN_BITMAP (use->related_cands, 0, j,
+ bitmap_iterator bi;
+
+ EXECUTE_IF_SET_IN_BITMAP (use->related_cands, 0, j, bi)
{
cand = iv_cand (data, j);
if (!cand->important)
determine_use_iv_cost (data, use, cand);
- });
+ }
}
}
unsigned j, n;
tree phi, op;
struct loop *loop = data->current_loop;
+ bitmap_iterator bi;
/* We use the following model (definitely improvable, especially the
cost function -- TODO):
n++;
}
- EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, j,
+ EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, j, bi)
{
struct version_info *info = ver_info (data, j);
if (info->inv_id && info->has_nonlin_use)
n++;
- });
+ }
loop_data (loop)->regs_used = n;
if (dump_file && (dump_flags & TDF_DETAILS))
unsigned best_cost = INFTY, cost;
struct iv_cand *cnd = NULL, *acnd;
bitmap depends_on = NULL, asol;
+ bitmap_iterator bi, bi1;
if (data->consider_all_candidates)
asol = sol;
else
{
asol = BITMAP_XMALLOC ();
- bitmap_a_and_b (asol, sol, use->related_cands);
+
+ bitmap_ior (asol, data->important_candidates, use->related_cands);
+ bitmap_and_into (asol, sol);
}
- EXECUTE_IF_SET_IN_BITMAP (asol, 0, c,
+ EXECUTE_IF_SET_IN_BITMAP (asol, 0, c, bi)
{
acnd = iv_cand (data, c);
cost = get_use_iv_cost (data, use, acnd, &depends_on);
if (cost == INFTY)
- goto next_cand;
+ continue;
if (cost > best_cost)
- goto next_cand;
+ continue;
if (cost == best_cost)
{
/* Prefer the cheaper iv. */
if (acnd->cost >= cnd->cost)
- goto next_cand;
+ continue;
}
if (depends_on)
{
- EXECUTE_IF_AND_COMPL_IN_BITMAP (depends_on, inv, 0, d,
- goto next_cand);
+ EXECUTE_IF_AND_COMPL_IN_BITMAP (depends_on, inv, 0, d, bi1)
+ {
+ goto next_cand;
+ }
if (used_inv)
- bitmap_a_or_b (used_inv, used_inv, depends_on);
+ bitmap_ior_into (used_inv, depends_on);
}
cnd = acnd;
best_cost = cost;
+
next_cand: ;
- });
+ }
if (cnd && used_ivs)
bitmap_set_bit (used_ivs, cnd->id);
struct iv_use *use;
struct iv_cand *cand;
bitmap used_ivs = BITMAP_XMALLOC (), used_inv = BITMAP_XMALLOC ();
+ bitmap_iterator bi;
for (i = 0; i < max_use; i++)
{
cost += acost;
}
- EXECUTE_IF_SET_IN_BITMAP (used_ivs, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (used_ivs, 0, i, bi)
{
cand = iv_cand (data, i);
size++;
cost += cand->cost;
- });
- EXECUTE_IF_SET_IN_BITMAP (used_inv, 0, i, size++);
+ }
+ EXECUTE_IF_SET_IN_BITMAP (used_inv, 0, i, bi)
+ {
+ size++;
+ }
cost += ivopts_global_cost_for_size (data, size);
bitmap_copy (sol, used_ivs);
bitmap act_inv = BITMAP_XMALLOC ();
unsigned i;
struct cost_pair *cp;
+ bitmap_iterator bi;
+ struct iv_cand *cand;
+ bitmap depends_on;
bitmap_copy (best_ivs, ivs);
bitmap_copy (best_inv, inv);
- for (i = 0; i < use->n_map_members; i++)
+ /* First try the important candidates, and only if that fails, try the
+ candidates specific to this use. Rationale -- in loops with many
+ variables the best choice is often to use just one generic biv. If we
+ added many ivs specific to individual uses here, the optimization
+ algorithm would later be likely to get stuck in a local minimum,
+ causing us to create too many ivs. Going from few ivs to more seems
+ more likely to succeed -- starting from few ivs, replacing an expensive
+ use by a specific iv should always be a win. */
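+ /* For instance, in a loop accessing a[i], b[i] and c[i], a single counter
+ iv shared by all three addresses is usually preferable to three separate
+ address ivs. */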
+ EXECUTE_IF_SET_IN_BITMAP (data->important_candidates, 0, i, bi)
{
- cp = use->cost_map + i;
- if (cp->cost == INFTY)
+ cand = iv_cand (data, i);
+
+ if (get_use_iv_cost (data, use, cand, &depends_on) == INFTY)
continue;
bitmap_copy (act_ivs, ivs);
- bitmap_set_bit (act_ivs, cp->cand->id);
- if (cp->depends_on)
- bitmap_a_or_b (act_inv, inv, cp->depends_on);
+ bitmap_set_bit (act_ivs, cand->id);
+ if (depends_on)
+ bitmap_ior (act_inv, inv, depends_on);
else
bitmap_copy (act_inv, inv);
act_cost = set_cost_up_to (data, act_ivs, act_inv, use->id + 1);
}
}
+ if (best_cost == INFTY)
+ {
+ for (i = 0; i < use->n_map_members; i++)
+ {
+ cp = use->cost_map + i;
+ if (cp->cost == INFTY)
+ continue;
+
+ /* Already tried this. */
+ if (cp->cand->important)
+ continue;
+
+ bitmap_copy (act_ivs, ivs);
+ bitmap_set_bit (act_ivs, cp->cand->id);
+ if (cp->depends_on)
+ bitmap_ior (act_inv, inv, cp->depends_on);
+ else
+ bitmap_copy (act_inv, inv);
+ act_cost = set_cost_up_to (data, act_ivs, act_inv, use->id + 1);
+
+ if (act_cost < best_cost)
+ {
+ best_cost = act_cost;
+ bitmap_copy (best_ivs, act_ivs);
+ bitmap_copy (best_inv, act_inv);
+ }
+ }
+ }
+
bitmap_copy (ivs, best_ivs);
bitmap_copy (inv, best_inv);
bitmap inv = BITMAP_XMALLOC ();
struct iv_use *use;
+ data->important_candidates = BITMAP_XMALLOC ();
+ for (i = 0; i < n_iv_cands (data); i++)
+ {
+ struct iv_cand *cand = iv_cand (data, i);
+
+ if (cand->important)
+ bitmap_set_bit (data->important_candidates, i);
+ }
+
/* Set the upper bound. */
cost = get_initial_solution (data, set, inv);
if (cost == INFTY)
}
BITMAP_XFREE (inv);
+ BITMAP_XFREE (data->important_candidates);
return set;
}
{
unsigned i;
struct iv_cand *cand;
+ bitmap_iterator bi;
- EXECUTE_IF_SET_IN_BITMAP (set, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
{
cand = iv_cand (data, i);
create_new_iv (data, cand);
- });
+ }
}
/* Removes statement STMT (real or a phi node). If INCLUDING_DEFINED_NAME
}
else
{
- block_stmt_iterator bsi = stmt_for_bsi (stmt);
+ block_stmt_iterator bsi = bsi_for_stmt (stmt);
bsi_remove (&bsi);
}
tree op, stmts, tgt, ass;
block_stmt_iterator bsi, pbsi;
- if (TREE_CODE (use->stmt) == PHI_NODE)
+ switch (TREE_CODE (use->stmt))
{
+ case PHI_NODE:
tgt = PHI_RESULT (use->stmt);
/* If we should keep the biv, do not replace it. */
bsi = pbsi;
bsi_next (&pbsi);
}
- }
- else if (TREE_CODE (use->stmt) == MODIFY_EXPR)
- {
+ break;
+
+ case MODIFY_EXPR:
tgt = TREE_OPERAND (use->stmt, 0);
- bsi = stmt_for_bsi (use->stmt);
+ bsi = bsi_for_stmt (use->stmt);
+ break;
+
+ default:
+ gcc_unreachable ();
}
- else
- abort ();
op = force_gimple_operand (comp, &stmts, false, SSA_NAME_VAR (tgt));
for_each_index. */
static bool
-idx_remove_ssa_names (tree base ATTRIBUTE_UNUSED, tree *idx,
+idx_remove_ssa_names (tree base, tree *idx,
void *data ATTRIBUTE_UNUSED)
{
+ tree *op;
+
if (TREE_CODE (*idx) == SSA_NAME)
*idx = SSA_NAME_VAR (*idx);
+
+ if (TREE_CODE (base) == ARRAY_REF)
+ {
+ op = &TREE_OPERAND (base, 2);
+ if (*op
+ && TREE_CODE (*op) == SSA_NAME)
+ *op = SSA_NAME_VAR (*op);
+ op = &TREE_OPERAND (base, 3);
+ if (*op
+ && TREE_CODE (*op) == SSA_NAME)
+ *op = SSA_NAME_VAR (*op);
+ }
+
return true;
}
static void
rewrite_address_base (block_stmt_iterator *bsi, tree *op, tree with)
{
- tree var = get_base_address (*op), new_var, new_name, copy, name;
+ tree bvar, var, new_var, new_name, copy, name;
tree orig;
+ var = bvar = get_base_address (*op);
+
if (!var || TREE_CODE (with) != SSA_NAME)
goto do_rewrite;
+ gcc_assert (TREE_CODE (var) != ALIGN_INDIRECT_REF);
+ gcc_assert (TREE_CODE (var) != MISALIGNED_INDIRECT_REF);
if (TREE_CODE (var) == INDIRECT_REF)
var = TREE_OPERAND (var, 0);
if (TREE_CODE (var) == SSA_NAME)
do_rewrite:
orig = NULL_TREE;
+ gcc_assert (TREE_CODE (*op) != ALIGN_INDIRECT_REF);
+ gcc_assert (TREE_CODE (*op) != MISALIGNED_INDIRECT_REF);
+
if (TREE_CODE (*op) == INDIRECT_REF)
orig = REF_ORIGINAL (*op);
if (!orig)
orig = unshare_and_remove_ssa_names (*op);
*op = build1 (INDIRECT_REF, TREE_TYPE (*op), with);
+
/* Record the original reference, for purposes of alias analysis. */
REF_ORIGINAL (*op) = orig;
}
{
tree comp = unshare_expr (get_computation (data->current_loop,
use, cand));
- block_stmt_iterator bsi = stmt_for_bsi (use->stmt);
+ block_stmt_iterator bsi = bsi_for_stmt (use->stmt);
tree stmts;
tree op = force_gimple_operand (comp, &stmts, true, NULL_TREE);
{
tree comp;
tree *op_p, cond, op, stmts, bound;
- block_stmt_iterator bsi = stmt_for_bsi (use->stmt);
+ block_stmt_iterator bsi = bsi_for_stmt (use->stmt);
enum tree_code compare;
if (may_eliminate_iv (data->current_loop,
block_stmt_iterator bsi;
tree phi, stmt, def, next;
- if (exit->dest->pred->pred_next)
+ if (EDGE_COUNT (exit->dest->preds) > 1)
split_loop_exit_edge (exit);
if (TREE_CODE (stmts) == STATEMENT_LIST)
tree value, op, stmts, tgt;
tree phi;
- if (TREE_CODE (use->stmt) == PHI_NODE)
- tgt = PHI_RESULT (use->stmt);
- else if (TREE_CODE (use->stmt) == MODIFY_EXPR)
- tgt = TREE_OPERAND (use->stmt, 0);
- else
- abort ();
+ switch (TREE_CODE (use->stmt))
+ {
+ case PHI_NODE:
+ tgt = PHI_RESULT (use->stmt);
+ break;
+ case MODIFY_EXPR:
+ tgt = TREE_OPERAND (use->stmt, 0);
+ break;
+ default:
+ gcc_unreachable ();
+ }
+
exit = single_dom_exit (data->current_loop);
if (exit)
{
if (!cand->iv)
{
- if (!may_replace_final_value (data->current_loop, use, &value))
- abort ();
+ bool ok = may_replace_final_value (data->current_loop, use, &value);
+ gcc_assert (ok);
}
else
value = get_computation_at (data->current_loop,
use, cand, last_stmt (exit->src));
+ value = unshare_expr (value);
op = force_gimple_operand (value, &stmts, true, SSA_NAME_VAR (tgt));
/* If we will preserve the iv anyway and we would need to perform
break;
default:
- abort ();
+ gcc_unreachable ();
}
modify_stmt (use->stmt);
}
{
use = iv_use (data, i);
cand = use->selected;
- if (!cand)
- abort ();
+ gcc_assert (cand);
rewrite_use (data, use, cand);
}
remove_unused_ivs (struct ivopts_data *data)
{
unsigned j;
+ bitmap_iterator bi;
- EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, j,
+ EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, j, bi)
{
struct version_info *info;
&& !info->iv->have_use_for
&& !info->preserve_biv)
remove_statement (SSA_NAME_DEF_STMT (info->iv->ssa_name), true);
- });
+ }
}
/* Frees data allocated by the optimization of a single loop. */
free_loop_data (struct ivopts_data *data)
{
unsigned i, j;
+ bitmap_iterator bi;
- EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i, bi)
{
struct version_info *info;
info->has_nonlin_use = false;
info->preserve_biv = false;
info->inv_id = 0;
- });
+ }
bitmap_clear (data->relevant);
for (i = 0; i < n_iv_uses (data); i++)
#ifdef ENABLE_CHECKING
verify_loop_closed_ssa ();
+ verify_stmts ();
#endif
/* Scan the loops, inner ones first. */
while (loop != loops->tree_root)
{
- if (tree_ssa_iv_optimize_loop (&data, loop))
- {
-#ifdef ENABLE_CHECKING
- verify_loop_closed_ssa ();
-#endif
- }
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ flow_loop_dump (loop, dump_file, NULL, 1);
+
+ tree_ssa_iv_optimize_loop (&data, loop);
if (loop->next)
{
loop = loop->outer;
}
+#ifdef ENABLE_CHECKING
+ verify_loop_closed_ssa ();
+ verify_stmts ();
+#endif
+
tree_ssa_iv_optimize_finalize (loops, &data);
}