/* Conversion of SESE regions to Polyhedra.
- Copyright (C) 2009, 2010 Free Software Foundation, Inc.
+ Copyright (C) 2009, 2010, 2011 Free Software Foundation, Inc.
Contributed by Sebastian Pop <sebastian.pop@amd.com>.
This file is part of GCC.
{
base_alias_pair *bap = (base_alias_pair *)(dr->aux);
- if (bap->alias_set)
- free (bap->alias_set);
+ free (bap->alias_set);
free (bap);
dr->aux = NULL;
VEC_free (scop_p, heap, scops);
}
+/* Same as outermost_loop_in_sese, returns the outermost loop
+ containing BB in REGION, but makes sure that the returned loop
+ belongs to the REGION, and so this returns the first loop in the
+ REGION when the loop containing BB does not belong to REGION. */
+
+static loop_p
+outermost_loop_in_sese_1 (sese region, basic_block bb)
+{
+ loop_p nest = outermost_loop_in_sese (region, bb);
+
+ /* Common case: the loop computed by outermost_loop_in_sese already
+ lies inside REGION, so return it directly. */
+ if (loop_in_sese_p (nest, region))
+ return nest;
+
+ /* When the basic block BB does not belong to a loop in the region,
+ return the first loop in the region. */
+ nest = nest->inner;
+ while (nest)
+ if (loop_in_sese_p (nest, region))
+ break;
+ else
+ nest = nest->next;
+
+ /* NOTE(review): only the immediate children of the enclosing loop are
+ scanned here — presumably a SESE region handled at this point always
+ contains at least one loop at that depth, hence the assert. */
+ gcc_assert (nest);
+ return nest;
+}
+
/* Generates a polyhedral black box only if the bb contains interesting
information. */
try_generate_gimple_bb (scop_p scop, basic_block bb)
{
VEC (data_reference_p, heap) *drs = VEC_alloc (data_reference_p, heap, 5);
- loop_p nest = outermost_loop_in_sese (SCOP_REGION (scop), bb);
+ sese region = SCOP_REGION (scop);
+ loop_p nest = outermost_loop_in_sese_1 (region, bb);
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
- if (!is_gimple_debug (stmt))
- graphite_find_data_references_in_stmt (nest, stmt, &drs);
+ loop_p loop;
+
+ if (is_gimple_debug (stmt))
+ continue;
+
+ loop = loop_containing_stmt (stmt);
+ if (!loop_in_sese_p (loop, region))
+ loop = nest;
+
+ graphite_find_data_references_in_stmt (nest, loop, stmt, &drs);
}
return new_gimple_bb (bb, drs);
scan_tree_for_params (s, TREE_OPERAND (e, 0), c, k);
break;
+ case ADDR_EXPR:
+ break;
+
default:
gcc_unreachable ();
break;
scan_tree_for_params (SCOP_REGION (scop), nb_iters, ub_expr, one);
mpz_clear (one);
- if (estimated_loop_iterations (loop, true, &nit))
+ if (max_stmt_executions (loop, true, &nit))
add_upper_bounds_from_estimated_nit (scop, nit, dim, ub_expr);
/* loop_i <= expr_nb_iters */
FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr1)
for (j = i + 1; VEC_iterate (data_reference_p, drs, j, dr2); j++)
- if (dr_may_alias_p (dr1, dr2))
+ if (dr_may_alias_p (dr1, dr2, true))
edge_num++;
fprintf (file, "$\n");
FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr1)
for (j = i + 1; VEC_iterate (data_reference_p, drs, j, dr2); j++)
- if (dr_may_alias_p (dr1, dr2))
+ if (dr_may_alias_p (dr1, dr2, true))
fprintf (file, "e %d %d\n", i + 1, j + 1);
return true;
FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr1)
for (j = i + 1; VEC_iterate (data_reference_p, drs, j, dr2); j++)
- if (dr_may_alias_p (dr1, dr2))
+ if (dr_may_alias_p (dr1, dr2, true))
fprintf (file, "n%d n%d\n", i, j);
return true;
FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr1)
for (j = i + 1; VEC_iterate (data_reference_p, drs, j, dr2); j++)
- if (dr_may_alias_p (dr1, dr2))
+ if (dr_may_alias_p (dr1, dr2, true))
fprintf (file, "%d %d\n", i, j);
return true;
FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr1)
for (j = i+1; VEC_iterate (data_reference_p, drs, j, dr2); j++)
- if (dr_may_alias_p (dr1, dr2))
+ if (dr_may_alias_p (dr1, dr2, true))
{
add_edge (g, i, j);
add_edge (g, j, i);
gimple_bb_p gbb;
gimple stmt;
int i;
+ sese region = SCOP_REGION (scop);
- if (!bb_in_sese_p (bb, SCOP_REGION (scop)))
+ if (!bb_in_sese_p (bb, region))
return;
- nest = outermost_loop_in_sese (SCOP_REGION (scop), bb);
+ nest = outermost_loop_in_sese_1 (region, bb);
gbb = gbb_from_bb (bb);
FOR_EACH_VEC_ELT (gimple, stmts, i, stmt)
- if (!is_gimple_debug (stmt))
- graphite_find_data_references_in_stmt (nest, stmt,
+ {
+ loop_p loop;
+
+ if (is_gimple_debug (stmt))
+ continue;
+
+ loop = loop_containing_stmt (stmt);
+ if (!loop_in_sese_p (loop, region))
+ loop = nest;
+
+ graphite_find_data_references_in_stmt (nest, loop, stmt,
&GBB_DATA_REFS (gbb));
+ }
}
/* Insert STMT at the end of the STMTS sequence and then insert the
if (VEC_index (poly_bb_p, SCOP_BBS (scop), index) == pbb)
break;
+ if (PBB_DOMAIN (pbb))
+ ppl_new_Pointset_Powerset_C_Polyhedron_from_Pointset_Powerset_C_Polyhedron
+ (&PBB_DOMAIN (pbb1), PBB_DOMAIN (pbb));
+
GBB_PBB (gbb1) = pbb1;
- ppl_new_Pointset_Powerset_C_Polyhedron_from_Pointset_Powerset_C_Polyhedron
- (&PBB_DOMAIN (pbb1), PBB_DOMAIN (pbb));
GBB_CONDITIONS (gbb1) = VEC_copy (gimple, heap, GBB_CONDITIONS (gbb));
GBB_CONDITION_CASES (gbb1) = VEC_copy (gimple, heap, GBB_CONDITION_CASES (gbb));
VEC_safe_insert (poly_bb_p, heap, SCOP_BBS (scop), index + 1, pbb1);
/* Do not split basic blocks with no writes to memory: the reduction
will be the only write to memory. */
- if (nb_data_writes_in_bb (bb) == 0)
+ if (nb_data_writes_in_bb (bb) == 0
+ /* Or if we have already marked BB as a reduction. */
+ || PBB_IS_REDUCTION (pbb_from_bb (bb)))
return bb;
e1 = split_pbb (scop, pbb, bb, stmt);
return NULL_TREE;
}
+/* Returns true when DEF is used outside the reduction cycle of
+ LOOP_PHI. */
+
+static bool
+used_outside_reduction (tree def, gimple loop_phi)
+{
+ use_operand_p use_p;
+ imm_use_iterator imm_iter;
+ loop_p loop = loop_containing_stmt (loop_phi);
+
+ /* In LOOP, DEF should be used only in LOOP_PHI. */
+ FOR_EACH_IMM_USE_FAST (use_p, imm_iter, def)
+ {
+ gimple stmt = USE_STMT (use_p);
+
+ /* A use is "outside the reduction cycle" when it is some statement
+ other than LOOP_PHI that still executes inside LOOP. Debug
+ statements are not real uses and are ignored. */
+ if (stmt != loop_phi
+ && !is_gimple_debug (stmt)
+ && flow_bb_inside_loop_p (loop, gimple_bb (stmt)))
+ return true;
+ }
+
+ return false;
+}
+
/* Detect commutative and associative scalar reductions belonging to
the SCOP starting at the loop closed phi node STMT. Return the phi
node of the reduction cycle, or NULL. */
{
if (scalar_close_phi_node_p (stmt))
{
- tree arg = gimple_phi_arg_def (stmt, 0);
- gimple def, loop_phi;
+ gimple def, loop_phi, phi, close_phi = stmt;
+ tree init, lhs, arg = gimple_phi_arg_def (close_phi, 0);
if (TREE_CODE (arg) != SSA_NAME)
return NULL;
/* Note that loop close phi nodes should have a single argument
because we translated the representation into a canonical form
before Graphite: see canonicalize_loop_closed_ssa_form. */
- gcc_assert (gimple_phi_num_args (stmt) == 1);
+ gcc_assert (gimple_phi_num_args (close_phi) == 1);
def = SSA_NAME_DEF_STMT (arg);
- if (!stmt_in_sese_p (def, SCOP_REGION (scop)))
+ if (!stmt_in_sese_p (def, SCOP_REGION (scop))
+ || !(loop_phi = detect_commutative_reduction (scop, def, in, out)))
return NULL;
- loop_phi = detect_commutative_reduction (scop, def, in, out);
-
- if (loop_phi)
- {
- tree lhs = gimple_phi_result (stmt);
- tree init = initial_value_for_loop_phi (loop_phi);
- gimple phi = follow_inital_value_to_phi (init, lhs);
+ lhs = gimple_phi_result (close_phi);
+ init = initial_value_for_loop_phi (loop_phi);
+ phi = follow_inital_value_to_phi (init, lhs);
- VEC_safe_push (gimple, heap, *in, loop_phi);
- VEC_safe_push (gimple, heap, *out, stmt);
- return phi;
- }
- else
+ if (phi && (used_outside_reduction (lhs, phi)
+ || !has_single_use (gimple_phi_result (phi))))
return NULL;
+
+ VEC_safe_push (gimple, heap, *in, loop_phi);
+ VEC_safe_push (gimple, heap, *out, close_phi);
+ return phi;
}
if (gimple_code (stmt) == GIMPLE_ASSIGN)
gimple stmt, gimple loop_phi)
{
tree res = gimple_phi_result (loop_phi);
- gimple assign = gimple_build_assign (res, red);
+ gimple assign = gimple_build_assign (res, unshare_expr (red));
gimple_stmt_iterator gsi;
insert_stmts (scop, assign, NULL, gsi_after_labels (gimple_bb (loop_phi)));
- assign = gimple_build_assign (red, gimple_assign_lhs (stmt));
+ assign = gimple_build_assign (unshare_expr (red), gimple_assign_lhs (stmt));
gsi = gsi_for_stmt (stmt);
gsi_next (&gsi);
insert_stmts (scop, assign, NULL, gsi);
remove_phi_node (&gsi, false);
}
<![CDATA[
+/* Helper function for for_each_index. For each INDEX of the data
+ reference REF, returns true when its indices are valid in the loop
+ nest LOOP passed in as DATA. */
+
+static bool
+dr_indices_valid_in_loop (tree ref ATTRIBUTE_UNUSED, tree *index, void *data)
+{
+ loop_p loop;
+ basic_block header, def_bb;
+ gimple stmt;
+
+ /* Non-SSA indices (constants, invariants) are always valid. */
+ if (TREE_CODE (*index) != SSA_NAME)
+ return true;
+
+ loop = *((loop_p *) data);
+ header = loop->header;
+ stmt = SSA_NAME_DEF_STMT (*index);
+
+ /* An SSA name with no defining statement cannot be defined inside
+ the loop. */
+ if (!stmt)
+ return true;
+
+ def_bb = gimple_bb (stmt);
+
+ /* Defs with no basic block (e.g. default definitions) are defined
+ outside any loop. */
+ if (!def_bb)
+ return true;
+
+ /* The index is valid when its definition dominates the loop header,
+ i.e. it is computed before entering LOOP. */
+ return dominated_by_p (CDI_DOMINATORS, header, def_bb);
+}
]]>
+
<![CDATA[
+/* When the result of a CLOSE_PHI is written to a memory location,
+ return a pointer to that memory reference, otherwise return
+ NULL_TREE. */
+
+static tree
+close_phi_written_to_memory (gimple close_phi)
+{
+ imm_use_iterator imm_iter;
+ use_operand_p use_p;
+ gimple stmt;
+ tree res, def = gimple_phi_result (close_phi);
+
+ /* Look for an assignment that stores the close-phi result. */
+ FOR_EACH_IMM_USE_FAST (use_p, imm_iter, def)
+ if ((stmt = USE_STMT (use_p))
+ && gimple_code (stmt) == GIMPLE_ASSIGN
+ && (res = gimple_assign_lhs (stmt)))
+ {
+ switch (TREE_CODE (res))
+ {
+ /* A store into a scalar declaration is always usable. */
+ case VAR_DECL:
+ case PARM_DECL:
+ case RESULT_DECL:
+ return res;
+
+ case ARRAY_REF:
+ case MEM_REF:
+ {
+ tree arg = gimple_phi_arg_def (close_phi, 0);
+ loop_p nest = loop_containing_stmt (SSA_NAME_DEF_STMT (arg));
+
+ /* FIXME: this restriction is for id-{24,25}.f and
+ could be handled by duplicating the computation of
+ array indices before the loop of the close_phi. */
+ if (for_each_index (&res, dr_indices_valid_in_loop, &nest))
+ return res;
+ }
+ /* Fallthru. */
+
+ default:
+ /* `continue' here resumes the FOR_EACH_IMM_USE_FAST loop,
+ not the switch: keep scanning the remaining uses. */
+ continue;
+ }
+ }
+ return NULL_TREE;
+}
]]>
+
/* Rewrite out of SSA the reduction described by the loop phi nodes
IN, and the close phi nodes OUT. IN and OUT are structured by loop
levels like this:
VEC (gimple, heap) *in,
VEC (gimple, heap) *out)
{
- unsigned int i;
gimple loop_phi;
- tree red = NULL_TREE;
+ unsigned int i = VEC_length (gimple, out) - 1;
+ tree red = close_phi_written_to_memory (VEC_index (gimple, out, i));
FOR_EACH_VEC_ELT (gimple, in, i, loop_phi)
{
PBB_IS_REDUCTION (pbb) = true;
gcc_assert (close_phi == loop_phi);
- red = create_zero_dim_array
- (gimple_assign_lhs (stmt), "Commutative_Associative_Reduction");
+ if (!red)
+ red = create_zero_dim_array
+ (gimple_assign_lhs (stmt), "Commutative_Associative_Reduction");
+
translate_scalar_reduction_to_array_for_stmt
(scop, red, stmt, VEC_index (gimple, in, 1));
continue;
if (i == VEC_length (gimple, in) - 1)
{
- insert_out_of_ssa_copy (scop, gimple_phi_result (close_phi), red,
- close_phi);
+ insert_out_of_ssa_copy (scop, gimple_phi_result (close_phi),
+ unshare_expr (red), close_phi);
insert_out_of_ssa_copy_on_edge
(scop, edge_initial_value_for_loop_phi (loop_phi),
- red, initial_value_for_loop_phi (loop_phi));
+ unshare_expr (red), initial_value_for_loop_phi (loop_phi));
}
remove_phi (loop_phi);
VEC (gimple, heap) *out = VEC_alloc (gimple, heap, 10);
detect_commutative_reduction (scop, close_phi, &in, &out);
- res = VEC_length (gimple, in) > 0;
+ res = VEC_length (gimple, in) > 1;
if (res)
translate_scalar_reduction_to_array (scop, in, out);
}
}
-/* Java does not initialize long_long_integer_type_node. */
-#define my_long_long (long_long_integer_type_node ? long_long_integer_type_node : ssizetype)
-
/* Can all ivs be represented by a signed integer?
As CLooG might generate negative values in its expressions, signed loop ivs
are required in the backend. */
loop_iterator li;
loop_p loop;
gimple_stmt_iterator psi;
+ bool result = true;
FOR_EACH_LOOP (li, loop, 0)
{
tree type = TREE_TYPE (res);
if (TYPE_UNSIGNED (type)
- && TYPE_PRECISION (type) >= TYPE_PRECISION (my_long_long))
- return false;
+ && TYPE_PRECISION (type) >= TYPE_PRECISION (long_long_integer_type_node))
+ {
+ result = false;
+ break;
+ }
}
+ if (!result)
+ FOR_EACH_LOOP_BREAK (li);
}
- return true;
+ return result;
}
-#undef my_long_long
-
/* Builds the polyhedral representation for a SESE region. */
void
if (!scop_ivs_can_be_represented (scop))
return;
+ if (flag_associative_math)
+ rewrite_commutative_reductions_out_of_ssa (scop);
+
build_sese_loop_nests (region);
build_sese_conditions (region);
find_scop_parameters (scop);
representation to the polyhedral representation to avoid scev
analysis failures. That means that these functions will insert
new data references that they create in the right place. */
- if (flag_associative_math)
- rewrite_commutative_reductions_out_of_ssa (scop);
rewrite_reductions_out_of_ssa (scop);
rewrite_cross_bb_scalar_deps_out_of_ssa (scop);