static bool exist_non_indexing_operands_for_use_p (tree, tree);
static tree vect_get_loop_niters (struct loop *, tree *);
static bool vect_analyze_data_ref_dependence
- (struct data_dependence_relation *, loop_vec_info, bool);
+ (struct data_dependence_relation *, loop_vec_info);
static bool vect_compute_data_ref_alignment (struct data_reference *);
static bool vect_analyze_data_ref_access (struct data_reference *);
static bool vect_can_advance_ivs_p (loop_vec_info);
arguments (e.g. demotion, promotion), vectype will be reset
appropriately (later). Note that we have to visit the smallest
datatype in this function, because that determines the VF.
- If the samallest datatype in the loop is present only as the
+ If the smallest datatype in the loop is present only as the
rhs of a promotion operation - we'd miss it here.
However, in such a case, that a variable of this datatype
does not appear in the lhs anywhere in the loop, it shouldn't
tree phi;
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
basic_block bb = loop->header;
- tree dummy;
+ tree dumy;
+ VEC(tree,heap) *worklist = VEC_alloc (tree, heap, 64);
if (vect_print_dump_info (REPORT_DETAILS))
fprintf (vect_dump, "=== vect_analyze_scalar_cycles ===");
+ /* First - identify all inductions. */
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
{
tree access_fn = NULL;
tree def = PHI_RESULT (phi);
stmt_vec_info stmt_vinfo = vinfo_for_stmt (phi);
- tree reduc_stmt;
if (vect_print_dump_info (REPORT_DETAILS))
{
- fprintf (vect_dump, "Analyze phi: ");
- print_generic_expr (vect_dump, phi, TDF_SLIM);
+ fprintf (vect_dump, "Analyze phi: ");
+ print_generic_expr (vect_dump, phi, TDF_SLIM);
}
/* Skip virtual phi's. The data dependences that are associated with
virtual defs/uses (i.e., memory accesses) are analyzed elsewhere. */
-
if (!is_gimple_reg (SSA_NAME_VAR (def)))
- {
- if (vect_print_dump_info (REPORT_DETAILS))
- fprintf (vect_dump, "virtual phi. skip.");
- continue;
- }
+ continue;
STMT_VINFO_DEF_TYPE (stmt_vinfo) = vect_unknown_def_type;
/* Analyze the evolution function. */
-
access_fn = analyze_scalar_evolution (loop, def);
+ if (access_fn && vect_print_dump_info (REPORT_DETAILS))
+ {
+ fprintf (vect_dump, "Access function of PHI: ");
+ print_generic_expr (vect_dump, access_fn, TDF_SLIM);
+ }
- if (!access_fn)
- continue;
+ if (!access_fn
+ || !vect_is_simple_iv_evolution (loop->num, access_fn, &dumy, &dumy))
+ {
+ VEC_safe_push (tree, heap, worklist, phi);
+ continue;
+ }
if (vect_print_dump_info (REPORT_DETAILS))
- {
- fprintf (vect_dump, "Access function of PHI: ");
- print_generic_expr (vect_dump, access_fn, TDF_SLIM);
- }
+ fprintf (vect_dump, "Detected induction.");
+ STMT_VINFO_DEF_TYPE (stmt_vinfo) = vect_induction_def;
+ }
- if (vect_is_simple_iv_evolution (loop->num, access_fn, &dummy, &dummy))
- {
- if (vect_print_dump_info (REPORT_DETAILS))
- fprintf (vect_dump, "Detected induction.");
- STMT_VINFO_DEF_TYPE (stmt_vinfo) = vect_induction_def;
- continue;
- }
- /* TODO: handle invariant phis */
+ /* Second - identify all reductions. */
+ while (VEC_length (tree, worklist) > 0)
+ {
+ tree phi = VEC_pop (tree, worklist);
+ tree def = PHI_RESULT (phi);
+ stmt_vec_info stmt_vinfo = vinfo_for_stmt (phi);
+ tree reduc_stmt;
+
+ if (vect_print_dump_info (REPORT_DETAILS))
+ {
+ fprintf (vect_dump, "Analyze phi: ");
+ print_generic_expr (vect_dump, phi, TDF_SLIM);
+ }
+
+ gcc_assert (is_gimple_reg (SSA_NAME_VAR (def)));
+ gcc_assert (STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_unknown_def_type);
reduc_stmt = vect_is_simple_reduction (loop, phi);
if (reduc_stmt)
else
if (vect_print_dump_info (REPORT_DETAILS))
fprintf (vect_dump, "Unknown def-use cycle pattern.");
-
}
+ VEC_free (tree, heap, worklist);
return;
}
static bool
vect_analyze_data_ref_dependence (struct data_dependence_relation *ddr,
- loop_vec_info loop_vinfo,
- bool check_interleaving)
+ loop_vec_info loop_vinfo)
{
unsigned int i;
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
if (DDR_ARE_DEPENDENT (ddr) == chrec_known)
{
/* Independent data accesses. */
- if (check_interleaving)
- vect_check_interleaving (dra, drb);
+ vect_check_interleaving (dra, drb);
return false;
}
fprintf (vect_dump, " and ");
print_generic_expr (vect_dump, DR_REF (drb), TDF_SLIM);
}
- continue;
+
+ /* For interleaving, mark that there is a read-write dependency if
+ necessary. We checked earlier that one of the data-refs is a store. */
+ if (DR_IS_READ (dra))
+ DR_GROUP_READ_WRITE_DEPENDENCE (stmtinfo_a) = true;
+ else
+ {
+ if (DR_IS_READ (drb))
+ DR_GROUP_READ_WRITE_DEPENDENCE (stmtinfo_b) = true;
+ }
+
+ continue;
}
if (abs (dist) >= vectorization_factor)
}
-/* Function vect_check_dependences.
-
- Return TRUE if there is a store-store or load-store dependence between
- data-refs in DDR, otherwise return FALSE. */
-
-static bool
-vect_check_dependences (struct data_dependence_relation *ddr)
-{
- struct data_reference *dra = DDR_A (ddr);
- struct data_reference *drb = DDR_B (ddr);
-
- if (DDR_ARE_DEPENDENT (ddr) == chrec_known || dra == drb)
- /* Independent or same data accesses. */
- return false;
-
- if (DR_IS_READ (dra) == DR_IS_READ (drb) && DR_IS_READ (dra))
- /* Two loads. */
- return false;
-
- if (vect_print_dump_info (REPORT_DR_DETAILS))
- {
- fprintf (vect_dump, "possible store or store/load dependence between ");
- print_generic_expr (vect_dump, DR_REF (dra), TDF_SLIM);
- fprintf (vect_dump, " and ");
- print_generic_expr (vect_dump, DR_REF (drb), TDF_SLIM);
- }
- return true;
-}
-
-
/* Function vect_analyze_data_ref_dependences.
Examine all the data references in the loop, and make sure there do not
unsigned int i;
VEC (ddr_p, heap) *ddrs = LOOP_VINFO_DDRS (loop_vinfo);
struct data_dependence_relation *ddr;
- bool check_interleaving = true;
if (vect_print_dump_info (REPORT_DETAILS))
fprintf (vect_dump, "=== vect_analyze_dependences ===");
- /* We allow interleaving only if there are no store-store and load-store
- dependencies in the loop. */
- for (i = 0; VEC_iterate (ddr_p, ddrs, i, ddr); i++)
- {
- if (vect_check_dependences (ddr))
- {
- check_interleaving = false;
- break;
- }
- }
-
for (i = 0; VEC_iterate (ddr_p, ddrs, i, ddr); i++)
- if (vect_analyze_data_ref_dependence (ddr, loop_vinfo, check_interleaving))
+ if (vect_analyze_data_ref_dependence (ddr, loop_vinfo))
return false;
return true;
DR_INIT (STMT_VINFO_DATA_REF (
vinfo_for_stmt (next)))))
{
- /* For load use the same data-ref load. (We check in
- vect_check_dependences() that there are no two stores to the
- same location). */
+ if (!DR_IS_READ (data_ref))
+ {
+ if (vect_print_dump_info (REPORT_DETAILS))
+ fprintf (vect_dump, "Two store stmts share the same dr.");
+ return false;
+ }
+
+ /* Check that there are no load-store dependencies for these loads,
+ to prevent a load-store-load sequence to the same location. */
+ if (DR_GROUP_READ_WRITE_DEPENDENCE (vinfo_for_stmt (next))
+ || DR_GROUP_READ_WRITE_DEPENDENCE (vinfo_for_stmt (prev)))
+ {
+ if (vect_print_dump_info (REPORT_DETAILS))
+ fprintf (vect_dump,
+ "READ_WRITE dependence in interleaving.");
+ return false;
+ }
+
+ /* For load use the same data-ref load. */
DR_GROUP_SAME_DR_STMT (vinfo_for_stmt (next)) = prev;
prev = next;
tree scalar_type;
if (vect_print_dump_info (REPORT_DETAILS))
- fprintf (vect_dump, "=== vect_analyze_data_refs ===");
+ fprintf (vect_dump, "=== vect_analyze_data_refs ===\n");
compute_data_dependences_for_loop (loop, true,
&LOOP_VINFO_DATAREFS (loop_vinfo),
/* Update DR field in stmt_vec_info struct. */
stmt = DR_STMT (dr);
stmt_info = vinfo_for_stmt (stmt);
-
+
if (STMT_VINFO_DATA_REF (stmt_info))
{
if (vect_print_dump_info (REPORT_UNVECTORIZED_LOOPS))
/* case 2.2: */
if (STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_reduction_def)
- {
- gcc_assert (relevant == vect_unused_in_loop && live_p);
- relevant = vect_used_by_reduction;
- live_p = false;
- }
+ {
+ gcc_assert (relevant == vect_unused_in_loop && live_p);
+ relevant = vect_used_by_reduction;
+ live_p = false;
+ }
+ i = 0;
FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
{
+ if (vect_print_dump_info (REPORT_DETAILS))
+ {
+ fprintf (vect_dump, "worklist: examine use %d: ", i++);
+ print_generic_expr (vect_dump, use, TDF_SLIM);
+ }
+
/* case 1: we are only interested in uses that need to be vectorized.
Uses that are used for address computation are not considered
relevant.
continue;
if (!vect_is_simple_use (use, loop_vinfo, &def_stmt, &def, &dt))
- {
- if (vect_print_dump_info (REPORT_UNVECTORIZED_LOOPS))
- fprintf (vect_dump, "not vectorized: unsupported use in stmt.");
+ {
+ if (vect_print_dump_info (REPORT_UNVECTORIZED_LOOPS))
+ fprintf (vect_dump, "not vectorized: unsupported use in stmt.");
VEC_free (tree, heap, worklist);
- return false;
+ return false;
}
if (!def_stmt || IS_EMPTY_STMT (def_stmt))
continue;
- if (vect_print_dump_info (REPORT_DETAILS))
- {
- fprintf (vect_dump, "worklist: examine use %d: ", i);
- print_generic_expr (vect_dump, use, TDF_SLIM);
- }
-
bb = bb_for_stmt (def_stmt);
- if (!flow_bb_inside_loop_p (loop, bb))
- continue;
+ if (!flow_bb_inside_loop_p (loop, bb))
+ continue;
/* case 2.1: the reduction-use does not mark the defining-phi
as relevant. */
&& TREE_CODE (def_stmt) == PHI_NODE)
continue;
+ if (dt == vect_induction_def && TREE_CODE (def_stmt) == PHI_NODE)
+ continue;
+
vect_mark_relevant (&worklist, def_stmt, relevant, live_p);
}
} /* while worklist */