GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
/*
Matrix flattening optimization tries to replace a N-dimensional
initial address and index of each dimension. */
struct access_site_info
{
- /* The statement (INDIRECT_REF or PLUS_EXPR). */
+ /* The statement (INDIRECT_REF or POINTER_PLUS_EXPR). */
tree stmt;
- /* In case of PLUS_EXPR, what is the offset. */
+ /* In case of POINTER_PLUS_EXPR, what is the offset. */
tree offset;
/* The index which created the offset. */
static hashval_t
mtt_info_hash (const void *mtt)
{
- return htab_hash_pointer (((struct matrix_info *) mtt)->decl);
+ return htab_hash_pointer (((const struct matrix_info *) mtt)->decl);
}
/* Return true if MTT1 and MTT2 (which are really both of type
/* Find if the SSA variable is accessed inside the
tree and record the tree containing it.
The only relevant uses are the case of SSA_NAME, or SSA inside
- INDIRECT_REF, CALL_EXPR, PLUS_EXPR, MULT_EXPR. */
+ INDIRECT_REF, CALL_EXPR, PLUS_EXPR, POINTER_PLUS_EXPR, MULT_EXPR. */
static void
ssa_accessed_in_tree (tree t, struct ssa_acc_in_tree *a)
{
}
}
break;
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
case MULT_EXPR:
op1 = TREE_OPERAND (t, 0);
return;
}
}
- /* This is a call to malloc. Check to see if this is the first
- call in this indirection level; if so, mark it; if not, mark
- as escaping. */
+ /* This is a call to malloc of level 'level'.
+ mi->max_malloced_level-1 == level means that we've
+ seen a malloc statement of level 'level' before.
+ If the statement is not the same one that we've
+ seen before, then there's another malloc statement
+ for the same level, which means that we need to
+ mark it as escaping. */
if (mi->malloc_for_level
- && mi->malloc_for_level[level]
+ && mi->max_malloced_level-1 == level
&& mi->malloc_for_level[level] != stmt)
{
mark_min_matrix_escape_level (mi, level, stmt);
for (i = 0; VEC_iterate (access_site_info_p, mi->access_l, i, acc_info);
i++)
{
- if (TREE_CODE (GIMPLE_STMT_OPERAND (acc_info->stmt, 1)) == PLUS_EXPR
+ if (TREE_CODE (GIMPLE_STMT_OPERAND (acc_info->stmt, 1)) == POINTER_PLUS_EXPR
&& acc_info->level < min_escape_l)
{
loop = loop_containing_stmt (acc_info->stmt);
return current_indirect_level;
}
if (rhs_acc.t_code != INDIRECT_REF
- && rhs_acc.t_code != PLUS_EXPR && rhs_acc.t_code != SSA_NAME)
+ && rhs_acc.t_code != POINTER_PLUS_EXPR && rhs_acc.t_code != SSA_NAME)
{
mark_min_matrix_escape_level (mi, current_indirect_level, use_stmt);
return current_indirect_level;
current_indirect_level, true);
current_indirect_level += 1;
}
- else if (rhs_acc.t_code == PLUS_EXPR)
+ else if (rhs_acc.t_code == POINTER_PLUS_EXPR)
{
- /* ??? maybe we should check
- the type of the PLUS_EXP and make sure it's
- integral type. */
gcc_assert (rhs_acc.second_op);
if (last_op)
/* Currently we support only one PLUS expression on the
/* We are placing it in an SSA, follow that SSA. */
analyze_matrix_accesses (mi, lhs,
current_indirect_level,
- rhs_acc.t_code == PLUS_EXPR,
+ rhs_acc.t_code == POINTER_PLUS_EXPR,
visited, record_accesses);
}
}
/* Now go over the uses of the SSA_NAME and check how it is used in
each one of them. We are mainly looking for the pattern INDIRECT_REF,
- then a PLUS_EXPR, then INDIRECT_REF etc. while in between there could
+ then a POINTER_PLUS_EXPR, then INDIRECT_REF etc. while in between there could
be any number of copies and casts. */
gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
case PARM_DECL:
case INTEGER_CST:
return expr;
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
case MINUS_EXPR:
case MULT_EXPR:
block_stmt_iterator bsi;
basic_block bb_level_0;
struct matrix_info *mi = *slot;
- sbitmap visited = sbitmap_alloc (num_ssa_names);
+ sbitmap visited;
if (!mi->malloc_for_level)
return 1;
+
+ visited = sbitmap_alloc (num_ssa_names);
+
/* Do nothing if the current function is not the allocation
function of MI. */
if (mi->allocation_function_decl != current_function_decl
static int
transform_access_sites (void **slot, void *data ATTRIBUTE_UNUSED)
{
- tree stmts;
block_stmt_iterator bsi;
struct matrix_info *mi = *slot;
int min_escape_l = mi->min_indirect_level_escape;
GIMPLE_STMT_OPERAND (orig, 0));
GIMPLE_STMT_OPERAND (acc_info->stmt, 1) = orig;
}
- else if (TREE_CODE (orig) == PLUS_EXPR
+ else if (TREE_CODE (orig) == POINTER_PLUS_EXPR
&& acc_info->level < (min_escape_l))
{
imm_use_iterator imm_iter;
tree new_offset;
tree d_type_size, d_type_size_k;
- d_type_size =
- build_int_cst (type,
- mi->dimension_type_size[min_escape_l]);
- d_type_size_k =
- build_int_cst (type, mi->dimension_type_size[k + 1]);
+ d_type_size = size_int (mi->dimension_type_size[min_escape_l]);
+ d_type_size_k = size_int (mi->dimension_type_size[k + 1]);
new_offset =
compute_offset (mi->dimension_type_size[min_escape_l],
total_elements = new_offset;
if (new_offset != offset)
{
- tmp1 =
- force_gimple_operand (total_elements, &stmts, true,
- NULL);
- if (stmts)
- {
- tree_stmt_iterator tsi;
-
- for (tsi = tsi_start (stmts); !tsi_end_p (tsi);
- tsi_next (&tsi))
- mark_symbols_for_renaming (tsi_stmt (tsi));
- bsi = bsi_for_stmt (acc_info->stmt);
- bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);
- }
+ bsi = bsi_for_stmt (acc_info->stmt);
+ tmp1 = force_gimple_operand_bsi (&bsi, total_elements,
+ true, NULL,
+ true, BSI_SAME_STMT);
}
else
tmp1 = offset;
{
d_size = mi->dimension_size[mi->dim_map[k] + 1];
num_elements =
- fold_build2 (MULT_EXPR, type, acc_info->index, d_size);
- tmp1 = force_gimple_operand (num_elements, &stmts, true, NULL);
+ fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, acc_info->index),
+ fold_convert (sizetype, d_size));
add_referenced_var (d_size);
- if (stmts)
- {
- tree_stmt_iterator tsi;
-
- for (tsi = tsi_start (stmts); !tsi_end_p (tsi);
- tsi_next (&tsi))
- mark_symbols_for_renaming (tsi_stmt (tsi));
- bsi = bsi_for_stmt (acc_info->stmt);
- bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);
- }
+ bsi = bsi_for_stmt (acc_info->stmt);
+ tmp1 = force_gimple_operand_bsi (&bsi, num_elements, true,
+ NULL, true, BSI_SAME_STMT);
}
/* Replace the offset if needed. */
if (tmp1 != offset)
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, offset)
FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
- if (use_stmt == acc_info->stmt)
- SET_USE (use_p, tmp1);
+ if (use_stmt == acc_info->stmt)
+ SET_USE (use_p, tmp1);
}
else
{
{
int i;
struct matrix_info *mi;
- tree type, call_stmt_0, malloc_stmt, oldfn, stmts, prev_dim_size, use_stmt;
+ tree type, call_stmt_0, malloc_stmt, oldfn, prev_dim_size, use_stmt;
struct cgraph_node *c_node;
struct cgraph_edge *e;
block_stmt_iterator bsi;
/* To be able to produce gimple temporaries. */
oldfn = current_function_decl;
current_function_decl = mi->allocation_function_decl;
- cfun = DECL_STRUCT_FUNCTION (mi->allocation_function_decl);
+ push_cfun (DECL_STRUCT_FUNCTION (mi->allocation_function_decl));
/* Set the dimension sizes as follows:
DIM_SIZE[i] = DIM_SIZE[n] * ... * DIM_SIZE[i]
{
tree dim_size, dim_var, tmp;
tree d_type_size;
- tree_stmt_iterator tsi;
/* Now put the size expression in a global variable and initialize it to
the size expression before the malloc of level 0. */
dim_size = fold_build2 (MULT_EXPR, type, dim_size, prev_dim_size);
}
- dim_size = force_gimple_operand (dim_size, &stmts, true, NULL);
- if (stmts)
- {
- for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
- mark_symbols_for_renaming (tsi_stmt (tsi));
- bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);
- bsi = bsi_for_stmt (call_stmt_0);
- }
+ dim_size = force_gimple_operand_bsi (&bsi, dim_size, true, NULL,
+ true, BSI_SAME_STMT);
/* GLOBAL_HOLDING_THE_SIZE = DIM_SIZE. */
tmp = fold_build2 (GIMPLE_MODIFY_STMT, type, dim_var, dim_size);
GIMPLE_STMT_OPERAND (tmp, 0) = dim_var;
mark_symbols_for_renaming (tmp);
- bsi_insert_before (&bsi, tmp, BSI_NEW_STMT);
- bsi = bsi_for_stmt (call_stmt_0);
+ bsi_insert_before (&bsi, tmp, BSI_SAME_STMT);
prev_dim_size = mi->dimension_size[i] = dim_var;
}
malloc_stmt = GIMPLE_STMT_OPERAND (call_stmt_0, 1);
c_node = cgraph_node (mi->allocation_function_decl);
old_size_0 = CALL_EXPR_ARG (malloc_stmt, 0);
- bsi = bsi_for_stmt (call_stmt_0);
- tmp = force_gimple_operand (mi->dimension_size[0], &stmts, true, NULL);
- if (stmts)
- {
- tree_stmt_iterator tsi;
-
- for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
- mark_symbols_for_renaming (tsi_stmt (tsi));
- bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);
- bsi = bsi_for_stmt (call_stmt_0);
- }
+ tmp = force_gimple_operand_bsi (&bsi, mi->dimension_size[0], true,
+ NULL, true, BSI_SAME_STMT);
if (TREE_CODE (old_size_0) == SSA_NAME)
{
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, old_size_0)
gcc_assert (e);
cgraph_remove_edge (e);
current_function_decl = mi->free_stmts[i].func;
- cfun = DECL_STRUCT_FUNCTION (mi->free_stmts[i].func);
+ set_cfun (DECL_STRUCT_FUNCTION (mi->free_stmts[i].func));
bsi = bsi_for_stmt (mi->free_stmts[i].stmt);
bsi_remove (&bsi, true);
}
/* Return to the previous situation. */
current_function_decl = oldfn;
- cfun = oldfn ? DECL_STRUCT_FUNCTION (oldfn) : NULL;
+ pop_cfun ();
return 1;
}
htab_traverse (matrices_to_reorg, dump_matrix_reorg_analysis, NULL);
current_function_decl = NULL;
- cfun = NULL;
+ set_cfun (NULL);
matrices_to_reorg = NULL;
return 0;
}
static bool
gate_matrix_reorg (void)
{
- return flag_ipa_matrix_reorg /*&& flag_whole_program */ ;
+ return flag_ipa_matrix_reorg && flag_whole_program;
}
struct tree_opt_pass pass_ipa_matrix_reorg = {