/* Scalar Replacement of Aggregates (SRA) converts some structure
references into scalar references, exposing them to the scalar
optimizers.
- Copyright (C) 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
#include "target.h"
/* expr.h is needed for MOVE_RATIO. */
#include "expr.h"
+#include "params.h"
/* The object of this pass is to replace a non-addressable aggregate with a
return false;
/* The type must have a definite nonzero size. */
- if (TYPE_SIZE (type) == NULL || integer_zerop (TYPE_SIZE (type)))
+ if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
+ || integer_zerop (TYPE_SIZE (type)))
goto fail;
/* The type must be a non-union aggregate. */
};
#ifdef ENABLE_CHECKING
-/* Invoked via walk_tree, if *TP contains an candidate decl, return it. */
+/* Invoked via walk_tree, if *TP contains a candidate decl, return it. */
static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
return;
}
+ /* If the RHS is scalarizable, handle it. There are only two cases. */
+ if (rhs_elt)
+ {
+ if (!rhs_elt->is_scalar)
+ fns->ldst (rhs_elt, lhs, bsi, false);
+ else
+ fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false);
+ }
+
+ /* If it isn't scalarizable, there may be scalarizable variables within, so
+ check for a call or else walk the RHS to see if we need to do any
+ copy-in operations. We need to do it before the LHS is scalarized so
+ that the statements get inserted in the proper place, before any
+ copy-out operations. */
+ else
+ {
+ tree call = get_call_expr_in (rhs);
+ if (call)
+ sra_walk_call_expr (call, bsi, fns);
+ else
+ sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
+ }
+
+ /* Likewise, handle the LHS being scalarizable. We have cases similar
+ to those above, but also want to handle RHS being constant. */
if (lhs_elt)
{
/* If this is an assignment from a constant, or constructor, then
else
fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true);
}
- else
- {
- /* LHS_ELT being null only means that the LHS as a whole is not a
- scalarizable reference. There may be occurrences of scalarizable
- variables within, which implies a USE. */
- sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
- }
- /* Likewise for the right-hand side. The only difference here is that
- we don't have to handle constants, and the RHS may be a call. */
- if (rhs_elt)
- {
- if (!rhs_elt->is_scalar)
- fns->ldst (rhs_elt, lhs, bsi, false);
- else
- fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false);
- }
+ /* Similarly to above, LHS_ELT being null only means that the LHS as a
+ whole is not a scalarizable reference. There may be occurrences of
+ scalarizable variables within, which implies a USE. */
else
- {
- tree call = get_call_expr_in (rhs);
- if (call)
- sra_walk_call_expr (call, bsi, fns);
- else
- sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
- }
+ sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
}
/* Entry point to the walk functions. Search the entire function,
static const struct sra_walk_fns fns = {
scan_use, scan_copy, scan_init, scan_ldst, true
};
+ bitmap_iterator bi;
sra_walk_function (&fns);
if (dump_file && (dump_flags & TDF_DETAILS))
{
- size_t i;
+ unsigned i;
fputs ("\nScan results:\n", dump_file);
- EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
{
tree var = referenced_var (i);
struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
if (elt)
scan_dump (elt);
- });
+ }
fputc ('\n', dump_file);
}
}
DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
DECL_ARTIFICIAL (var) = DECL_ARTIFICIAL (base);
+ DECL_IGNORED_P (var) = DECL_IGNORED_P (base);
if (DECL_NAME (base) && !DECL_IGNORED_P (base))
{
tree size_tree = TYPE_SIZE_UNIT (elt->type);
bool use_block_copy = true;
+ /* Tradeoffs for COMPLEX types pretty much always make it better
+ to go ahead and split the components. */
+ if (TREE_CODE (elt->type) == COMPLEX_TYPE)
+ use_block_copy = false;
+
/* Don't bother trying to figure out the rest if the structure is
so large we can't do easy arithmetic. This also forces block
copies for variable sized structures. */
- if (host_integerp (size_tree, 1))
+ else if (host_integerp (size_tree, 1))
{
unsigned HOST_WIDE_INT full_size, inst_size = 0;
unsigned int inst_count;
+ unsigned int max_size;
+
+ /* If the sra-max-structure-size parameter is 0, then the
+ user has not overridden the parameter and we can choose a
+ sensible default. */
+ max_size = SRA_MAX_STRUCTURE_SIZE
+ ? SRA_MAX_STRUCTURE_SIZE
+ : MOVE_RATIO * UNITS_PER_WORD;
full_size = tree_low_cst (size_tree, 1);
/* If the structure is small, and we've made copies, go ahead
and instantiate, hoping that the copies will go away. */
- if (full_size <= (unsigned) MOVE_RATIO * UNITS_PER_WORD
+ if (full_size <= max_size
&& elt->n_copies > elt->n_uses)
use_block_copy = false;
else
{
inst_count = sum_instantiated_sizes (elt, &inst_size);
- if (inst_size * 4 >= full_size * 3)
+ if (inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
use_block_copy = false;
}
{
unsigned int i;
bool cleared_any;
- struct bitmap_head_def done_head;
+ bitmap_head done_head;
+ bitmap_iterator bi;
/* We cannot clear bits from a bitmap we're iterating over,
so save up all the bits to clear until the end. */
- bitmap_initialize (&done_head, 1);
+ bitmap_initialize (&done_head, &bitmap_default_obstack);
cleared_any = false;
- EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
{
tree var = referenced_var (i);
struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
bitmap_set_bit (&done_head, i);
cleared_any = true;
}
- });
+ }
if (cleared_any)
{
- bitmap_operation (sra_candidates, sra_candidates, &done_head,
- BITMAP_AND_COMPL);
- bitmap_operation (needs_copy_in, needs_copy_in, &done_head,
- BITMAP_AND_COMPL);
+ bitmap_and_compl_into (sra_candidates, &done_head);
+ bitmap_and_compl_into (needs_copy_in, &done_head);
}
bitmap_clear (&done_head);
get_stmt_operands (stmt);
- FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_VIRTUAL_DEFS)
+ FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
{
if (TREE_CODE (sym) == SSA_NAME)
sym = SSA_NAME_VAR (sym);
}
}
-/* Find all variables within the gimplified statement that were not previously
- visible to the function and add them to the referenced variables list. */
-
-static tree
-find_new_referenced_vars_1 (tree *tp, int *walk_subtrees,
- void *data ATTRIBUTE_UNUSED)
-{
- tree t = *tp;
-
- if (TREE_CODE (t) == VAR_DECL && !var_ann (t))
- add_referenced_tmp_var (t);
-
- if (IS_TYPE_OR_DECL_P (t))
- *walk_subtrees = 0;
-
- return NULL;
-}
-
-static inline void
-find_new_referenced_vars (tree *stmt_p)
-{
- walk_tree (stmt_p, find_new_referenced_vars_1, NULL, NULL);
-}
-
/* Generate an assignment VAR = INIT, where INIT may need gimplification.
Add the result to *LIST_P. */
static void
generate_one_element_init (tree var, tree init, tree *list_p)
{
- tree stmt;
-
/* The replacement can be almost arbitrarily complex. Gimplify. */
- stmt = build (MODIFY_EXPR, void_type_node, var, init);
- gimplify_stmt (&stmt);
-
- /* The replacement can expose previously unreferenced variables. */
- if (TREE_CODE (stmt) == STATEMENT_LIST)
- {
- tree_stmt_iterator i;
- for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
- find_new_referenced_vars (tsi_stmt_ptr (i));
- }
- else
- find_new_referenced_vars (&stmt);
-
- append_to_statement_list (stmt, list_p);
+ tree stmt = build (MODIFY_EXPR, void_type_node, var, init);
+ gimplify_and_add (stmt, list_p);
}
/* Generate a set of assignment statements in *LIST_P to set all instantiated
handle. */
static bool
-generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
+generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
{
bool result = true;
enum tree_code init_code;
else
t = (init_code == COMPLEX_EXPR
? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
- result &= generate_element_init (sub, t, list_p);
+ result &= generate_element_init_1 (sub, t, list_p);
}
break;
sub = lookup_element (elt, TREE_PURPOSE (t), NULL, NO_INSERT);
if (sub == NULL)
continue;
- result &= generate_element_init (sub, TREE_VALUE (t), list_p);
+ result &= generate_element_init_1 (sub, TREE_VALUE (t), list_p);
}
break;
return result;
}
+/* A wrapper function for generate_element_init_1 that handles cleanup after
+ gimplification. */
+
+static bool
+generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
+{
+ bool ret;
+
+ push_gimplify_context ();
+ ret = generate_element_init_1 (elt, init, list_p);
+ pop_gimplify_context (NULL);
+
+ /* The replacement can expose previously unreferenced variables. */
+ if (ret && *list_p)
+ {
+ tree_stmt_iterator i;
+ size_t old, new, j;
+
+ old = num_referenced_vars;
+
+ for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
+ find_new_referenced_vars (tsi_stmt_ptr (i));
+
+ new = num_referenced_vars;
+ for (j = old; j < new; ++j)
+ bitmap_set_bit (vars_to_rename, j);
+ }
+
+ return ret;
+}
+
/* Insert STMT on all the outgoing edges out of BB. Note that if BB
has more than one edge, STMT will be replicated for each edge. Also,
abnormal edges will be ignored. */
insert_edge_copies (tree stmt, basic_block bb)
{
edge e;
+ edge_iterator ei;
bool first_copy;
first_copy = true;
- for (e = bb->succ; e; e = e->succ_next)
+ FOR_EACH_EDGE (e, ei, bb->succs)
{
/* We don't need to insert copies on abnormal edges. The
value of the scalar replacement is not guaranteed to
/* Generate initialization statements for all members extant in the RHS. */
if (rhs)
{
- push_gimplify_context ();
+ /* Unshare the expression just in case this is from a decl's initial. */
+ rhs = unshare_expr (rhs);
result = generate_element_init (lhs_elt, rhs, &list);
- pop_gimplify_context (NULL);
}
/* CONSTRUCTOR is defined such that any member not mentioned is assigned
scalarize_parms (void)
{
tree list = NULL;
- size_t i;
+ unsigned i;
+ bitmap_iterator bi;
- EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
{
tree var = referenced_var (i);
struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
generate_copy_inout (elt, true, var, &list);
- });
+ }
if (list)
insert_edge_copies (list, ENTRY_BLOCK_PTR);
sra_walk_function (&fns);
scalarize_parms ();
- bsi_commit_edge_inserts (NULL);
+ bsi_commit_edge_inserts ();
}
\f