/* Scalar Replacement of Aggregates (SRA) converts some structure
references into scalar references, exposing them to the scalar
optimizers.
- Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007
+ Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "errors.h"
#include "ggc.h"
#include "tree.h"
*/
+/* True if this is the "early" pass, before inlining. */
+static bool early_sra;
+
+/* The set of todo flags to return from tree_sra. */
+static unsigned int todoflags;
+
/* The set of aggregate variables that are candidates for scalarization. */
static bitmap sra_candidates;
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;
-/* One of these structures is created for each candidate aggregate
- and each (accessed) member of such an aggregate. */
+/* One of these structures is created for each candidate aggregate and
+ each (accessed) member or group of members of such an aggregate. */
struct sra_elt
{
/* A tree of the elements. Used when we want to traverse everything. */
struct sra_elt *parent;
+ struct sra_elt *groups;
struct sra_elt *children;
struct sra_elt *sibling;
/* If this element is a root, then this is the VAR_DECL. If this is
a sub-element, this is some token used to identify the reference.
In the case of COMPONENT_REF, this is the FIELD_DECL. In the case
- of an ARRAY_REF, this is the (constant) index. In the case of a
- complex number, this is a zero or one. */
+ of an ARRAY_REF, this is the (constant) index. In the case of an
+ ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR. In the case
+ of a complex number, this is a zero or one. */
tree element;
/* The type of the element. */
/* True if TYPE is scalar. */
bool is_scalar;
+ /* True if this element is a group of members of its parent. */
+ bool is_group;
+
/* True if we saw something about this element that prevents scalarization,
such as non-constant indexing. */
bool cannot_scalarize;
should happen via memcpy and not per-element. */
bool use_block_copy;
+ /* True if everything under this element has been marked TREE_NO_WARNING. */
+ bool all_no_warning;
+
/* A flag for use with/after random access traversals. */
bool visited;
+
+  /* True if this element is a vector and a BIT_FIELD_REF of it appears
+     on the left hand side of an assignment.  */
+ bool is_vector_lhs;
+
+ /* 1 if the element is a field that is part of a block, 2 if the field
+ is the block itself, 0 if it's neither. */
+ char in_bitfld_block;
};
+#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)
+
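+/* Iterate over the actual children of ELT, taking groups into account:
+   for a group element this walks the parent's children restricted to
+   the group's range; otherwise it walks ELT's own child list.  */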
+#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT) \
+ for ((CHILD) = (ELT)->is_group \
+ ? next_child_for_group (NULL, (ELT)) \
+ : (ELT)->children; \
+ (CHILD); \
+ (CHILD) = (ELT)->is_group \
+ ? next_child_for_group ((CHILD), (ELT)) \
+ : (CHILD)->sibling)
+
+/* Helper function for above macro. Return next child in group. */
+static struct sra_elt *
+next_child_for_group (struct sra_elt *child, struct sra_elt *group)
+{
+ gcc_assert (group->is_group);
+
+ /* Find the next child in the parent. */
+ if (child)
+ child = child->sibling;
+ else
+ child = group->parent->children;
+
+ /* Skip siblings that do not belong to the group. */
+ while (child)
+ {
+ tree g_elt = group->element;
+ if (TREE_CODE (g_elt) == RANGE_EXPR)
+ {
+ if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
+ && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
+ break;
+ }
+ else
+ gcc_unreachable ();
+
+ child = child->sibling;
+ }
+
+ return child;
+}
+
/* Random access to the child of a parent is performed by hashing.
This prevents quadratic behavior, and allows SRA to function
reasonably on larger records. */
static bool
is_sra_candidate_decl (tree decl)
{
- return DECL_P (decl) && bitmap_bit_p (sra_candidates, var_ann (decl)->uid);
+ return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}
/* Return true if TYPE is a scalar type. */
enum tree_code code = TREE_CODE (type);
return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
|| code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
- || code == CHAR_TYPE || code == POINTER_TYPE || code == OFFSET_TYPE
+ || code == POINTER_TYPE || code == OFFSET_TYPE
|| code == REFERENCE_TYPE);
}
instantiated, just that if we decide to break up the type into
separate pieces that it can be done. */
-static bool
-type_can_be_decomposed_p (tree type)
+bool
+sra_type_can_be_decomposed_p (tree type)
{
unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
tree t;
}
/* We must be able to decompose the variable's type. */
- if (!type_can_be_decomposed_p (TREE_TYPE (var)))
+ if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
return false;
}
+ /* HACK: if we decompose a va_list_type_node before inlining, then we'll
+ confuse tree-stdarg.c, and we won't be able to figure out which and
+ how many arguments are accessed. This really should be improved in
+     tree-stdarg.c, as the decomposition is truly a win.  This could also
+     be fixed if the stdarg pass ran early, but this can't be done until
+     we have aliasing information early too.  See PR 30791.  */
+ if (early_sra
+ && TYPE_MAIN_VARIANT (TREE_TYPE (var))
+ == TYPE_MAIN_VARIANT (va_list_type_node))
+ return false;
+
return true;
}
{
if (is_sra_scalar_type (type))
return true;
- if (!type_can_be_decomposed_p (type))
+ if (!sra_type_can_be_decomposed_p (type))
return false;
switch (TREE_CODE (type))
if (elt->cannot_scalarize)
return false;
- for (c = elt->children; c ; c = c->sibling)
+ for (c = elt->children; c; c = c->sibling)
+ if (!can_completely_scalarize_p (c))
+ return false;
+
+ for (c = elt->groups; c; c = c->sibling)
if (!can_completely_scalarize_p (c))
return false;
h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
break;
+ case RANGE_EXPR:
+ h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
+ h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
+ break;
+
case FIELD_DECL:
/* We can have types that are compatible, but have different member
lists, so we can't hash fields by ID. Use offsets instead. */
h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
break;
+ case BIT_FIELD_REF:
+ /* Don't take operand 0 into account, that's our parent. */
+ h = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
+ h = iterative_hash_expr (TREE_OPERAND (t, 2), h);
+ break;
+
default:
gcc_unreachable ();
}
h = sra_hash_tree (e->element);
- /* Take into account everything back up the chain. Given that chain
- lengths are rarely very long, this should be acceptable. If we
- truly identify this as a performance problem, it should work to
- hash the pointer value "e->parent". */
+ /* Take into account everything except bitfield blocks back up the
+ chain. Given that chain lengths are rarely very long, this
+ should be acceptable. If we truly identify this as a performance
+ problem, it should work to hash the pointer value
+ "e->parent". */
for (p = e->parent; p ; p = p->parent)
- h = (h * 65521) ^ sra_hash_tree (p->element);
+ if (!p->in_bitfld_block)
+ h = (h * 65521) ^ sra_hash_tree (p->element);
return h;
}
const struct sra_elt *a = x;
const struct sra_elt *b = y;
tree ae, be;
+ const struct sra_elt *ap = a->parent;
+ const struct sra_elt *bp = b->parent;
+
+ if (ap)
+ while (ap->in_bitfld_block)
+ ap = ap->parent;
+ if (bp)
+ while (bp->in_bitfld_block)
+ bp = bp->parent;
- if (a->parent != b->parent)
+ if (ap != bp)
return false;
ae = a->element;
/* Integers are not pointer unique, so compare their values. */
return tree_int_cst_equal (ae, be);
+ case RANGE_EXPR:
+ return
+ tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
+ && tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));
+
case FIELD_DECL:
/* Fields are unique within a record, but not between
compatible records. */
return false;
return fields_compatible_p (ae, be);
+ case BIT_FIELD_REF:
+ return
+ tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1))
+ && tree_int_cst_equal (TREE_OPERAND (ae, 2), TREE_OPERAND (be, 2));
+
default:
gcc_unreachable ();
}
struct sra_elt **slot;
struct sra_elt *elt;
- dummy.parent = parent;
+ if (parent)
+ dummy.parent = parent->is_group ? parent->parent : parent;
+ else
+ dummy.parent = NULL;
dummy.element = child;
slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
if (parent)
{
- elt->sibling = parent->children;
- parent->children = elt;
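+      /* Range elements represent groups and go on the parent's group
+         list; ordinary children go on the child list.  */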
+ if (IS_ELEMENT_FOR_GROUP (elt->element))
+ {
+ elt->is_group = true;
+ elt->sibling = parent->groups;
+ parent->groups = elt;
+ }
+ else
+ {
+ elt->sibling = parent->children;
+ parent->children = elt;
+ }
}
/* If this is a parameter, then if we want to scalarize, we have
if (TREE_CODE (child) == PARM_DECL)
{
elt->n_copies = 1;
- bitmap_set_bit (needs_copy_in, var_ann (child)->uid);
+ bitmap_set_bit (needs_copy_in, DECL_UID (child));
}
}
return elt;
}
-/* Return true if the ARRAY_REF in EXPR is a constant, in bounds access. */
-
-static bool
-is_valid_const_index (tree expr)
-{
- tree dom, t, index = TREE_OPERAND (expr, 1);
-
- if (TREE_CODE (index) != INTEGER_CST)
- return false;
-
- /* Watch out for stupid user tricks, indexing outside the array.
-
- Careful, we're not called only on scalarizable types, so do not
- assume constant array bounds. We needn't do anything with such
- cases, since they'll be referring to objects that we should have
- already rejected for scalarization, so returning false is fine. */
-
- dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0)));
- if (dom == NULL)
- return false;
-
- t = TYPE_MIN_VALUE (dom);
- if (!t || TREE_CODE (t) != INTEGER_CST)
- return false;
- if (tree_int_cst_lt (index, t))
- return false;
-
- t = TYPE_MAX_VALUE (dom);
- if (!t || TREE_CODE (t) != INTEGER_CST)
- return false;
- if (tree_int_cst_lt (t, index))
- return false;
-
- return true;
-}
-
/* Create or return the SRA_ELT structure for EXPR if the expression
refers to a scalarizable variable. */
return NULL;
case ARRAY_REF:
- /* We can't scalarize variable array indicies. */
- if (is_valid_const_index (expr))
+ /* We can't scalarize variable array indices. */
+ if (in_array_bounds_p (expr))
child = TREE_OPERAND (expr, 1);
else
return NULL;
break;
+ case ARRAY_RANGE_REF:
+ /* We can't scalarize variable array indices. */
+ if (range_in_array_bounds_p (expr))
+ {
+ tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
+ child = build2 (RANGE_EXPR, integer_type_node,
+ TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
+ }
+ else
+ return NULL;
+ break;
+
case COMPONENT_REF:
/* Don't look through unions. */
if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);
/* Invoked when we have a copy between one scalarizable reference ELT
- and one non-scalarizable reference OTHER. IS_OUTPUT is true if ELT
- is on the left-hand side. */
+ and one non-scalarizable reference OTHER without side-effects.
+ IS_OUTPUT is true if ELT is on the left-hand side. */
void (*ldst) (struct sra_elt *elt, tree other,
block_stmt_iterator *bsi, bool is_output);
the effort. */
/* ??? Hack. Figure out how to push this into the scan routines
without duplicating too much code. */
- if (!is_valid_const_index (inner))
+ if (!in_array_bounds_p (inner))
{
disable_scalarization = true;
goto use_all;
inner = TREE_OPERAND (inner, 0);
break;
+ case ARRAY_RANGE_REF:
+ if (!range_in_array_bounds_p (inner))
+ {
+ disable_scalarization = true;
+ goto use_all;
+ }
+      /* ??? See the comment above about non-constant bounds and stride.  */
+ if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
+ goto use_all;
+ inner = TREE_OPERAND (inner, 0);
+ break;
+
case COMPONENT_REF:
/* A reference to a union member constitutes a reference to the
entire union. */
break;
case BIT_FIELD_REF:
+      /* A bit field reference to a specific vector is scalarized, but a
+         reference used on the left hand side needs to be recorded, so that
+         when we scalarize the vector we can mark its replacement variable
+         as not renamable.  */
+ if (is_output
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
+ {
+ struct sra_elt *elt
+ = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
+ if (elt)
+ elt->is_vector_lhs = true;
+ }
/* A bit field reference (access to *multiple* fields simultaneously)
is not currently scalarized. Consider this an access to the
complete outer element, to which walk_tree will bring us next. */
- goto use_all;
-
- case ARRAY_RANGE_REF:
- /* Similarly, an subrange reference is used to modify indexing. Which
- means that the canonical element names that we have won't work. */
+
goto use_all;
case VIEW_CONVERT_EXPR:
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
const struct sra_walk_fns *fns)
{
- sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
+ int i;
+ int nargs = call_expr_nargs (expr);
+ for (i = 0; i < nargs; i++)
+ sra_walk_expr (&CALL_EXPR_ARG (expr, i), bsi, false, fns);
}
/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}
-/* Walk a MODIFY_EXPR and categorize the assignment appropriately. */
+static void sra_replace (block_stmt_iterator *bsi, tree list);
+static tree sra_build_elt_assignment (struct sra_elt *elt, tree src);
+
+/* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately. */
static void
-sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi,
- const struct sra_walk_fns *fns)
+sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
+ const struct sra_walk_fns *fns)
{
struct sra_elt *lhs_elt, *rhs_elt;
tree lhs, rhs;
- lhs = TREE_OPERAND (expr, 0);
- rhs = TREE_OPERAND (expr, 1);
+ lhs = GIMPLE_STMT_OPERAND (expr, 0);
+ rhs = GIMPLE_STMT_OPERAND (expr, 1);
lhs_elt = maybe_lookup_element_for_expr (lhs);
rhs_elt = maybe_lookup_element_for_expr (rhs);
/* If the RHS is scalarizable, handle it. There are only two cases. */
if (rhs_elt)
{
- if (!rhs_elt->is_scalar)
+ if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
fns->ldst (rhs_elt, lhs, bsi, false);
else
- fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false);
+ fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false);
}
/* If it isn't scalarizable, there may be scalarizable variables within, so
if (call)
sra_walk_call_expr (call, bsi, fns);
else
- sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
+ sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 1), bsi, false, fns);
}
/* Likewise, handle the LHS being scalarizable. We have cases similar
The lvalue requirement prevents us from trying to directly scalarize
the result of a function call. Which would result in trying to call
the function multiple times, and other evil things. */
- else if (!lhs_elt->is_scalar && is_gimple_addressable (rhs))
+ else if (!lhs_elt->is_scalar
+ && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
fns->ldst (lhs_elt, rhs, bsi, true);
/* Otherwise we're being used in some context that requires the
aggregate to be seen as a whole. Invoke USE. */
else
- fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true);
+ {
+ fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true);
+ }
}
/* Similarly to above, LHS_ELT being null only means that the LHS as a
whole is not a scalarizable reference. There may be occurrences of
scalarizable variables within, which implies a USE. */
else
- sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
+ sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 0), bsi, true, fns);
}
/* Entry point to the walk functions. Search the entire function,
/* If the statement has no virtual operands, then it doesn't
make any structure references that we care about. */
- if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) == 0
- && NUM_VUSES (VUSE_OPS (ann)) == 0
- && NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) == 0)
- continue;
+ if (gimple_aliases_computed_p (cfun)
+ && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
+ continue;
switch (TREE_CODE (stmt))
{
as a USE of the variable on the RHS of this assignment. */
t = TREE_OPERAND (stmt, 0);
- if (TREE_CODE (t) == MODIFY_EXPR)
- sra_walk_expr (&TREE_OPERAND (t, 1), &si, false, fns);
+ if (t == NULL_TREE)
+ ;
+ else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
+ sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
else
sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
break;
- case MODIFY_EXPR:
- sra_walk_modify_expr (stmt, &si, fns);
+ case GIMPLE_MODIFY_STMT:
+ sra_walk_gimple_modify_stmt (stmt, &si, fns);
break;
case CALL_EXPR:
sra_walk_call_expr (stmt, &si, fns);
static bool
find_candidates_for_sra (void)
{
- size_t i;
bool any_set = false;
+ tree var;
+ referenced_var_iterator rvi;
- for (i = 0; i < num_referenced_vars; i++)
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
if (decl_can_be_decomposed_p (var))
{
- bitmap_set_bit (sra_candidates, var_ann (var)->uid);
+ bitmap_set_bit (sra_candidates, DECL_UID (var));
any_set = true;
}
}
for (c = elt->children; c ; c = c->sibling)
scan_dump (c);
+
+ for (c = elt->groups; c ; c = c->sibling)
+ scan_dump (c);
}
/* Entry point to phase 2. Scan the entire function, building up
sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
obstack_grow (&sra_obstack, buffer, strlen (buffer));
}
+ else if (TREE_CODE (t) == BIT_FIELD_REF)
+ {
+ sprintf (buffer, "B" HOST_WIDE_INT_PRINT_DEC,
+ tree_low_cst (TREE_OPERAND (t, 2), 1));
+ obstack_grow (&sra_obstack, buffer, strlen (buffer));
+ sprintf (buffer, "F" HOST_WIDE_INT_PRINT_DEC,
+ tree_low_cst (TREE_OPERAND (t, 1), 1));
+ obstack_grow (&sra_obstack, buffer, strlen (buffer));
+ }
else
{
tree name = DECL_NAME (t);
{
build_element_name_1 (elt);
obstack_1grow (&sra_obstack, '\0');
- return obstack_finish (&sra_obstack);
+ return XOBFINISH (&sra_obstack, char *);
}
/* Instantiate an element as an independent variable. */
{
struct sra_elt *base_elt;
tree var, base;
+ bool nowarn = TREE_NO_WARNING (elt->element);
for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
- continue;
+ if (!nowarn)
+ nowarn = base_elt->parent->n_uses
+ || TREE_NO_WARNING (base_elt->parent->element);
base = base_elt->element;
elt->replacement = var = make_rename_temp (elt->type, "SR");
+
+ /* For vectors, if used on the left hand side with BIT_FIELD_REF,
+ they are not a gimple register. */
+ if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
+ DECL_GIMPLE_REG_P (var) = 0;
+
DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
DECL_ARTIFICIAL (var) = 1;
+ if (TREE_THIS_VOLATILE (elt->type))
+ {
+ TREE_THIS_VOLATILE (var) = 1;
+ TREE_SIDE_EFFECTS (var) = 1;
+ }
+
if (DECL_NAME (base) && !DECL_IGNORED_P (base))
{
char *pretty_name = build_element_name (elt);
DECL_NAME (var) = get_identifier (pretty_name);
obstack_free (&sra_obstack, pretty_name);
- DECL_DEBUG_EXPR (var) = generate_element_ref (elt);
+ SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
DECL_DEBUG_EXPR_IS_FROM (var) = 1;
-
+
DECL_IGNORED_P (var) = 0;
- TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
+ TREE_NO_WARNING (var) = nowarn;
}
else
{
}
else
{
- struct sra_elt *c;
+ struct sra_elt *c, *group;
unsigned int this_uses = elt->n_uses + parent_uses;
unsigned int this_copies = elt->n_copies + parent_copies;
+      /* Consider groups of sub-elements as weighing in favour of
+         instantiation, regardless of their size.  */
+ for (group = elt->groups; group ; group = group->sibling)
+ FOR_EACH_ACTUAL_CHILD (c, group)
+ {
+ c->n_uses += group->n_uses;
+ c->n_copies += group->n_copies;
+ }
+
for (c = elt->children; c ; c = c->sibling)
decide_instantiation_1 (c, this_uses, this_copies);
}
static void instantiate_missing_elements (struct sra_elt *elt);
-static void
+static struct sra_elt *
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
}
else
instantiate_missing_elements (sub);
+ return sub;
+}
+
+/* Obtain the canonical type for field F of ELEMENT. */
+
+static tree
+canon_type_for_field (tree f, tree element)
+{
+ tree field_type = TREE_TYPE (f);
+
+ /* canonicalize_component_ref() unwidens some bit-field types (not
+     marked as DECL_BIT_FIELD in C++), so we must do the same, lest we
+     introduce type mismatches.  */
+ if (INTEGRAL_TYPE_P (field_type)
+ && DECL_MODE (f) != TYPE_MODE (field_type))
+ field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
+ field_type,
+ element,
+ f, NULL_TREE),
+ NULL_TREE));
+
+ return field_type;
+}
+
+/* Look for adjacent fields of ELT starting at F that we'd like to
+ scalarize as a single variable. Return the last field of the
+ group. */
+
+static tree
+try_instantiate_multiple_fields (struct sra_elt *elt, tree f)
+{
+ unsigned HOST_WIDE_INT align, oalign, word, bit, size, alchk;
+ enum machine_mode mode;
+ tree first = f, prev;
+ tree type, var;
+ struct sra_elt *block;
+
+ if (!is_sra_scalar_type (TREE_TYPE (f))
+ || !host_integerp (DECL_FIELD_OFFSET (f), 1)
+ || !host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
+ || !host_integerp (DECL_SIZE (f), 1)
+ || lookup_element (elt, f, NULL, NO_INSERT))
+ return f;
+
+ /* Taking the alignment of elt->element is not enough, since it
+ might be just an array index or some such. We shouldn't need to
+ initialize align here, but our optimizers don't always realize
+ that, if we leave the loop without initializing align, we'll fail
+ the assertion right after the loop. */
+ align = (unsigned HOST_WIDE_INT)-1;
+ for (block = elt; block; block = block->parent)
+ if (DECL_P (block->element))
+ {
+ align = DECL_ALIGN (block->element);
+ break;
+ }
+ gcc_assert (block);
+
+ oalign = DECL_OFFSET_ALIGN (f);
+ word = tree_low_cst (DECL_FIELD_OFFSET (f), 1);
+ bit = tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
+ size = tree_low_cst (DECL_SIZE (f), 1);
+
+ if (align > oalign)
+ align = oalign;
+
+ alchk = align - 1;
+ alchk = ~alchk;
+
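+  /* The check below rejects the field when its first and last bits fall
+     in different alignment words; e.g., with align == 32, a field at
+     bit 24 with size 16 spans two words.  */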
+ if ((bit & alchk) != ((bit + size - 1) & alchk))
+ return f;
+
+ /* Find adjacent fields in the same alignment word. */
+
+ for (prev = f, f = TREE_CHAIN (f);
+ f && TREE_CODE (f) == FIELD_DECL
+ && is_sra_scalar_type (TREE_TYPE (f))
+ && host_integerp (DECL_FIELD_OFFSET (f), 1)
+ && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
+ && host_integerp (DECL_SIZE (f), 1)
+ && (HOST_WIDE_INT)word == tree_low_cst (DECL_FIELD_OFFSET (f), 1)
+ && !lookup_element (elt, f, NULL, NO_INSERT);
+ prev = f, f = TREE_CHAIN (f))
+ {
+ unsigned HOST_WIDE_INT nbit, nsize;
+
+ nbit = tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
+ nsize = tree_low_cst (DECL_SIZE (f), 1);
+
+ if (bit + size == nbit)
+ {
+ if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
+ break;
+ size += nsize;
+ }
+ else if (nbit + nsize == bit)
+ {
+ if ((nbit & alchk) != ((bit + size - 1) & alchk))
+ break;
+ bit = nbit;
+ size += nsize;
+ }
+ else
+ break;
+ }
+
+ f = prev;
+
+ if (f == first)
+ return f;
+
+ gcc_assert ((bit & alchk) == ((bit + size - 1) & alchk));
+
+ /* Try to widen the bit range so as to cover padding bits as well. */
+
+ if ((bit & ~alchk) || size != align)
+ {
+ unsigned HOST_WIDE_INT mbit = bit & alchk;
+ unsigned HOST_WIDE_INT msize = align;
+
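+      /* MBIT/MSIZE start out covering the whole alignment word that
+         contains the group; the loop below shrinks them to exclude bits
+         occupied by fields outside the group.  */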
+      for (f = TYPE_FIELDS (elt->type); f; f = TREE_CHAIN (f))
+ {
+ unsigned HOST_WIDE_INT fword, fbit, fsize;
+
+ /* Skip the fields from first to prev. */
+ if (f == first)
+ {
+ f = prev;
+ continue;
+ }
+
+ if (!(TREE_CODE (f) == FIELD_DECL
+ && host_integerp (DECL_FIELD_OFFSET (f), 1)
+ && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)))
+ continue;
+
+ fword = tree_low_cst (DECL_FIELD_OFFSET (f), 1);
+          /* A field that starts after the selected word cannot overlap it.  */
+ if (word < fword)
+ continue;
+
+ fbit = tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
+
+ if (host_integerp (DECL_SIZE (f), 1))
+ fsize = tree_low_cst (DECL_SIZE (f), 1);
+ else
+ /* Assume a variable-sized field takes up all space till
+ the end of the word. ??? Endianness issues? */
+ fsize = align - fbit;
+
+ if (fword < word)
+ {
+ /* A large field might start at a previous word and
+ extend into the selected word. Exclude those
+ bits. ??? Endianness issues? */
+ HOST_WIDE_INT diff = fbit + fsize
+ - (HOST_WIDE_INT)((word - fword) * BITS_PER_UNIT + mbit);
+
+ if (diff <= 0)
+ continue;
+
+ mbit += diff;
+ msize -= diff;
+ }
+ else
+ {
+ gcc_assert (fword == word);
+
+ /* Non-overlapping, great. */
+ if (fbit + fsize <= mbit
+ || mbit + msize <= fbit)
+ continue;
+
+ if (fbit <= mbit)
+ {
+ unsigned HOST_WIDE_INT diff = fbit + fsize - mbit;
+ mbit += diff;
+ msize -= diff;
+ }
+ else if (fbit > mbit)
+ msize -= (mbit + msize - fbit);
+ else
+ gcc_unreachable ();
+ }
+ }
+
+ bit = mbit;
+ size = msize;
+ }
+
+ /* Now we know the bit range we're interested in. Find the smallest
+ machine mode we can use to access it. */
+
+ for (mode = smallest_mode_for_size (size, MODE_INT);
+ ;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ gcc_assert (mode != VOIDmode);
+
+ alchk = GET_MODE_PRECISION (mode) - 1;
+ alchk = ~alchk;
+
+ if ((bit & alchk) == ((bit + size - 1) & alchk))
+ break;
+ }
+
+ gcc_assert (~alchk < align);
+
+ /* Create the field group as a single variable. */
+
+ type = lang_hooks.types.type_for_mode (mode, 1);
+ gcc_assert (type);
+ var = build3 (BIT_FIELD_REF, type, NULL_TREE,
+ bitsize_int (size),
+ bitsize_int (word * BITS_PER_UNIT + bit));
+ BIT_FIELD_REF_UNSIGNED (var) = 1;
+
+ block = instantiate_missing_elements_1 (elt, var, type);
+ gcc_assert (block && block->is_scalar);
+
+ var = block->replacement;
+
+ if (((word * BITS_PER_UNIT + bit) & ~alchk)
+ || (HOST_WIDE_INT)size != tree_low_cst (DECL_SIZE (var), 1))
+ {
+ block->replacement = build3 (BIT_FIELD_REF,
+ TREE_TYPE (block->element), var,
+ bitsize_int (size),
+ bitsize_int ((word * BITS_PER_UNIT
+ + bit) & ~alchk));
+ BIT_FIELD_REF_UNSIGNED (block->replacement) = 1;
+ TREE_NO_WARNING (block->replacement) = 1;
+ }
+
+ block->in_bitfld_block = 2;
+
+ /* Add the member fields to the group, such that they access
+ portions of the group variable. */
+
+ for (f = first; f != TREE_CHAIN (prev); f = TREE_CHAIN (f))
+ {
+ tree field_type = canon_type_for_field (f, elt->element);
+ struct sra_elt *fld = lookup_element (block, f, field_type, INSERT);
+
+ gcc_assert (fld && fld->is_scalar && !fld->replacement);
+
+ fld->replacement = build3 (BIT_FIELD_REF, field_type, var,
+ DECL_SIZE (f),
+ bitsize_int
+ ((word * BITS_PER_UNIT
+ + (TREE_INT_CST_LOW
+ (DECL_FIELD_BIT_OFFSET (f))))
+ & ~alchk));
+ BIT_FIELD_REF_UNSIGNED (fld->replacement) = TYPE_UNSIGNED (field_type);
+      TREE_NO_WARNING (fld->replacement) = 1;
+ fld->in_bitfld_block = 1;
+ }
+
+ return prev;
}
static void
tree f;
for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
if (TREE_CODE (f) == FIELD_DECL)
- instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
+ {
+ tree last = try_instantiate_multiple_fields (elt, f);
+
+ if (last != f)
+ {
+ f = last;
+ continue;
+ }
+
+ instantiate_missing_elements_1 (elt, f,
+ canon_type_for_field
+ (f, elt->element));
+ }
break;
}
}
}
+/* Return true if there is only one non-aggregate field in the record, TYPE.
+ Return false otherwise. */
+
+static bool
+single_scalar_field_in_record_p (tree type)
+{
+ int num_fields = 0;
+ tree field;
+ if (TREE_CODE (type) != RECORD_TYPE)
+ return false;
+
+ for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ if (TREE_CODE (field) == FIELD_DECL)
+ {
+ num_fields++;
+
+ if (num_fields == 2)
+ return false;
+
+ if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
+ return false;
+ }
+
+ return true;
+}
+
/* Make one pass across an element tree deciding whether to perform block
or element copies. If we decide on element copies, instantiate all
elements. Return true if there are any instantiated sub-elements. */
struct sra_elt *c;
bool any_inst;
+ /* We shouldn't be invoked on groups of sub-elements as they must
+ behave like their parent as far as block copy is concerned. */
+ gcc_assert (!elt->is_group);
+
/* If scalarization is disabled, respect it. */
if (elt->cannot_scalarize)
{
fputc ('\n', dump_file);
}
+      /* Disable scalarization of sub-elements.  */
+ for (c = elt->children; c; c = c->sibling)
+ {
+ c->cannot_scalarize = 1;
+ decide_block_copy (c);
+ }
+
+ /* Groups behave like their parent. */
+ for (c = elt->groups; c; c = c->sibling)
+ {
+ c->cannot_scalarize = 1;
+ c->use_block_copy = 1;
+ }
+
return false;
}
else if (host_integerp (size_tree, 1))
{
unsigned HOST_WIDE_INT full_size, inst_size = 0;
- unsigned int inst_count;
- unsigned int max_size;
+ unsigned int max_size, max_count, inst_count, full_count;
/* If the sra-max-structure-size parameter is 0, then the
user has not overridden the parameter and we can choose a
max_size = SRA_MAX_STRUCTURE_SIZE
? SRA_MAX_STRUCTURE_SIZE
: MOVE_RATIO * UNITS_PER_WORD;
+ max_count = SRA_MAX_STRUCTURE_COUNT
+ ? SRA_MAX_STRUCTURE_COUNT
+ : MOVE_RATIO;
full_size = tree_low_cst (size_tree, 1);
+ full_count = count_type_elements (elt->type, false);
+ inst_count = sum_instantiated_sizes (elt, &inst_size);
+
+ /* If there is only one scalar field in the record, don't block copy. */
+ if (single_scalar_field_in_record_p (elt->type))
+ use_block_copy = false;
/* ??? What to do here. If there are two fields, and we've only
instantiated one, then instantiating the other is clearly a win.
/* If the structure is small, and we've made copies, go ahead
and instantiate, hoping that the copies will go away. */
if (full_size <= max_size
+ && (full_count - inst_count) <= max_count
&& elt->n_copies > elt->n_uses)
use_block_copy = false;
- else
- {
- inst_count = sum_instantiated_sizes (elt, &inst_size);
-
- if (inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
- use_block_copy = false;
- }
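+      /* Otherwise, only scalarize if both the fraction of instantiated
+         elements and the fraction of instantiated bytes reach the
+         structure ratio; e.g., with a ratio of 75, at least 75% of both
+         must already be instantiated.  */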
+ else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
+ && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
+ use_block_copy = false;
/* In order to avoid block copy, we have to be able to instantiate
all elements of the type. See if this is possible. */
|| !type_can_instantiate_all_elements (elt->type)))
use_block_copy = true;
}
+
elt->use_block_copy = use_block_copy;
+ /* Groups behave like their parent. */
+ for (c = elt->groups; c; c = c->sibling)
+ c->use_block_copy = use_block_copy;
+
if (dump_file)
{
fprintf (dump_file, "Using %s for ",
bitmap_and_compl_into (needs_copy_in, &done_head);
}
bitmap_clear (&done_head);
+
+ mark_set_for_renaming (sra_candidates);
if (dump_file)
fputc ('\n', dump_file);
\f
/* Phase Four: Update the function to match the replacements created. */
-/* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
- renaming. This becomes necessary when we modify all of a non-scalar. */
+/* Mark all the variables in VDEF/VUSE operators for STMT for
+ renaming. This becomes necessary when we modify all of a
+ non-scalar. */
static void
-mark_all_v_defs (tree stmt)
+mark_all_v_defs_1 (tree stmt)
{
tree sym;
ssa_op_iter iter;
- get_stmt_operands (stmt);
+ update_stmt_if_modified (stmt);
FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
{
if (TREE_CODE (sym) == SSA_NAME)
sym = SSA_NAME_VAR (sym);
- bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
+ mark_sym_for_renaming (sym);
+ }
+}
+
+
+/* Mark all the variables in virtual operands in all the statements in
+ LIST for renaming. */
+
+static void
+mark_all_v_defs (tree list)
+{
+ if (TREE_CODE (list) != STATEMENT_LIST)
+ mark_all_v_defs_1 (list);
+ else
+ {
+ tree_stmt_iterator i;
+ for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
+ mark_all_v_defs_1 (tsi_stmt (i));
+ }
+}
+
+
+/* Mark every replacement under ELT with TREE_NO_WARNING. */
+
+static void
+mark_no_warning (struct sra_elt *elt)
+{
+ if (!elt->all_no_warning)
+ {
+ if (elt->replacement)
+ TREE_NO_WARNING (elt->replacement) = 1;
+ else
+ {
+ struct sra_elt *c;
+ FOR_EACH_ACTUAL_CHILD (c, elt)
+ mark_no_warning (c);
+ }
+ elt->all_no_warning = true;
}
}
{
tree field = elt->element;
+  /* We can't test elt->in_bitfld_block here because, when this is
+     called from instantiate_element, we haven't set this field
+     yet.  */
+ if (TREE_CODE (field) == BIT_FIELD_REF)
+ {
+ tree ret = copy_node (field);
+ TREE_OPERAND (ret, 0) = base;
+ return ret;
+ }
+
/* Watch out for compatible records with differing field lists. */
if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
field = find_compatible_field (TREE_TYPE (base), field);
- return build (COMPONENT_REF, elt->type, base, field, NULL);
+ return build3 (COMPONENT_REF, elt->type, base, field, NULL);
}
case ARRAY_TYPE:
- return build (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
+ if (TREE_CODE (elt->element) == RANGE_EXPR)
+ return build4 (ARRAY_RANGE_REF, elt->type, base,
+ TREE_OPERAND (elt->element, 0), NULL, NULL);
+ else
+ return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
case COMPLEX_TYPE:
if (elt->element == integer_zero_node)
- return build (REALPART_EXPR, elt->type, base);
+ return build1 (REALPART_EXPR, elt->type, base);
else
- return build (IMAGPART_EXPR, elt->type, base);
+ return build1 (IMAGPART_EXPR, elt->type, base);
default:
gcc_unreachable ();
return elt->element;
}
+/* Create an assignment statement from SRC to DST. */
+
+static tree
+sra_build_assignment (tree dst, tree src)
+{
+ /* It was hoped that we could perform some type sanity checking
+ here, but since front-ends can emit accesses of fields in types
+ different from their nominal types and copy structures containing
+ them as a whole, we'd have to handle such differences here.
+ Since such accesses under different types require compatibility
+ anyway, there's little point in making tests and/or adding
+ conversions to ensure the types of src and dst are the same.
+ So we just assume type differences at this point are ok. */
+ return build_gimple_modify_stmt (dst, src);
+}
+
+/* BIT_FIELD_REFs must not be shared. sra_build_elt_assignment()
+ takes care of assignments, but we must create copies for uses. */
+#define REPLDUP(t) (TREE_CODE (t) != BIT_FIELD_REF ? (t) : copy_node (t))
+
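+/* Emit an assignment from SRC to the replacement of ELT.  If the
+   replacement is a BIT_FIELD_REF within a scalarized bit-field block,
+   this may expand into a read-modify-write sequence.  */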
+static tree
+sra_build_elt_assignment (struct sra_elt *elt, tree src)
+{
+ tree dst = elt->replacement;
+ tree var, type, tmp, tmp2, tmp3;
+ tree list, stmt;
+ tree cst, cst2, mask;
+ tree minshift, maxshift;
+
+ if (TREE_CODE (dst) != BIT_FIELD_REF
+ || !elt->in_bitfld_block)
+ return sra_build_assignment (REPLDUP (dst), src);
+
+ var = TREE_OPERAND (dst, 0);
+
+ /* Try to widen the assignment to the entire variable.
+ We need the source to be a BIT_FIELD_REF as well, such that, for
+ BIT_FIELD_REF<d,sz,dp> = BIT_FIELD_REF<s,sz,sp>,
+ if sp >= dp, we can turn it into
+     d = BIT_FIELD_REF<s,dp+sz,sp-dp>.  */
+ if (elt->in_bitfld_block == 2
+ && TREE_CODE (src) == BIT_FIELD_REF
+ && !tree_int_cst_lt (TREE_OPERAND (src, 2), TREE_OPERAND (dst, 2)))
+ {
+ src = fold_build3 (BIT_FIELD_REF, TREE_TYPE (var),
+ TREE_OPERAND (src, 0),
+ size_binop (PLUS_EXPR, TREE_OPERAND (src, 1),
+ TREE_OPERAND (dst, 2)),
+ size_binop (MINUS_EXPR, TREE_OPERAND (src, 2),
+ TREE_OPERAND (dst, 2)));
+ BIT_FIELD_REF_UNSIGNED (src) = 1;
+
+ return sra_build_assignment (var, src);
+ }
+
+ if (!is_gimple_reg (var))
+ return sra_build_assignment (REPLDUP (dst), src);
+
+ list = alloc_stmt_list ();
+
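+  /* Otherwise the replacement is a bit-field in a scalar variable that
+     is a gimple register: emit a read-modify-write sequence that masks
+     out the destination bits in VAR, shifts SRC into position, and ORs
+     the two together.  */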
+ cst = TREE_OPERAND (dst, 2);
+ if (WORDS_BIG_ENDIAN)
+ {
+ cst = size_binop (MINUS_EXPR, DECL_SIZE (var), cst);
+ maxshift = cst;
+ }
+ else
+ minshift = cst;
+
+ cst2 = size_binop (PLUS_EXPR, TREE_OPERAND (dst, 1),
+ TREE_OPERAND (dst, 2));
+ if (WORDS_BIG_ENDIAN)
+ {
+ cst2 = size_binop (MINUS_EXPR, DECL_SIZE (var), cst2);
+ minshift = cst2;
+ }
+ else
+ maxshift = cst2;
+
+ type = TREE_TYPE (var);
+
+ mask = build_int_cst_wide (type, 1, 0);
+ cst = int_const_binop (LSHIFT_EXPR, mask, maxshift, 1);
+ cst2 = int_const_binop (LSHIFT_EXPR, mask, minshift, 1);
+ mask = int_const_binop (MINUS_EXPR, cst, cst2, 1);
+ mask = fold_build1 (BIT_NOT_EXPR, type, mask);
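+  /* For instance, with a 32-bit TYPE on a little-endian target, a
+     destination field at bit 8 with size 16 gives minshift 8 and
+     maxshift 24: cst is 1<<24, cst2 is 1<<8, and mask ends up as
+     ~0x00ffff00, clearing exactly the destination bits 8..23.  */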
+
+ if (!WORDS_BIG_ENDIAN)
+ cst2 = TREE_OPERAND (dst, 2);
+
+ tmp = make_rename_temp (type, "SR");
+ stmt = build_gimple_modify_stmt (tmp,
+ fold_build2 (BIT_AND_EXPR, type,
+ var, mask));
+ append_to_statement_list (stmt, &list);
+
+ if (is_gimple_reg (src))
+ tmp2 = src;
+ else
+ {
+ tmp2 = make_rename_temp (TREE_TYPE (src), "SR");
+ stmt = sra_build_assignment (tmp2, src);
+ append_to_statement_list (stmt, &list);
+ }
+
+ if (!TYPE_UNSIGNED (TREE_TYPE (tmp2))
+ || TYPE_MAIN_VARIANT (TREE_TYPE (tmp2)) != TYPE_MAIN_VARIANT (type))
+ {
+ tmp3 = make_rename_temp (type, "SR");
+ tmp2 = fold_build3 (BIT_FIELD_REF, type, tmp2, TREE_OPERAND (dst, 1),
+ bitsize_int (0));
+ if (TREE_CODE (tmp2) == BIT_FIELD_REF)
+ BIT_FIELD_REF_UNSIGNED (tmp2) = 1;
+ stmt = sra_build_assignment (tmp3, tmp2);
+ append_to_statement_list (stmt, &list);
+ tmp2 = tmp3;
+ }
+
+ if (!integer_zerop (minshift))
+ {
+ tmp3 = make_rename_temp (type, "SR");
+ stmt = build_gimple_modify_stmt (tmp3,
+ fold_build2 (LSHIFT_EXPR, type,
+ tmp2, minshift));
+ append_to_statement_list (stmt, &list);
+ tmp2 = tmp3;
+ }
+
+ stmt = build_gimple_modify_stmt (var,
+ fold_build2 (BIT_IOR_EXPR, type,
+ tmp, tmp2));
+ append_to_statement_list (stmt, &list);
+
+ return list;
+}
+
/* Generate a set of assignment statements in *LIST_P to copy all
instantiated elements under ELT to or from the equivalent structure
rooted at EXPR. COPY_OUT controls the direction of the copy, with
struct sra_elt *c;
tree t;
- if (elt->replacement)
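+  /* When copying the scalarized parts back into a complex variable in
+     SSA form, we cannot assign the real and imaginary parts separately;
+     build the whole value with a COMPLEX_EXPR and make it the defining
+     statement of the SSA_NAME.  */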
+ if (!copy_out && TREE_CODE (expr) == SSA_NAME
+ && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
+ {
+ tree r, i;
+
+ c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
+ r = c->replacement;
+ c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
+ i = c->replacement;
+
+ t = build2 (COMPLEX_EXPR, elt->type, r, i);
+ t = sra_build_assignment (expr, t);
+ SSA_NAME_DEF_STMT (expr) = t;
+ append_to_statement_list (t, list_p);
+ }
+ else if (elt->replacement)
{
if (copy_out)
- t = build (MODIFY_EXPR, void_type_node, elt->replacement, expr);
+ t = sra_build_elt_assignment (elt, expr);
else
- t = build (MODIFY_EXPR, void_type_node, expr, elt->replacement);
+ t = sra_build_assignment (expr, REPLDUP (elt->replacement));
append_to_statement_list (t, list_p);
}
else
{
- for (c = elt->children; c ; c = c->sibling)
+ FOR_EACH_ACTUAL_CHILD (c, elt)
{
t = generate_one_element_ref (c, unshare_expr (expr));
generate_copy_inout (c, copy_out, t, list_p);
{
struct sra_elt *dc, *sc;
- for (dc = dst->children; dc ; dc = dc->sibling)
+ FOR_EACH_ACTUAL_CHILD (dc, dst)
{
sc = lookup_element (src, dc->element, NULL, NO_INSERT);
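+      /* If DC is a scalarized bit-field block with no direct counterpart
+         in SRC, copy its member fields individually instead.  */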
+ if (!sc && dc->in_bitfld_block == 2)
+ {
+ struct sra_elt *dcs;
+
+ FOR_EACH_ACTUAL_CHILD (dcs, dc)
+ {
+ sc = lookup_element (src, dcs->element, NULL, NO_INSERT);
+ gcc_assert (sc);
+ generate_element_copy (dcs, sc, list_p);
+ }
+
+ continue;
+ }
gcc_assert (sc);
generate_element_copy (dc, sc, list_p);
}
gcc_assert (src->replacement);
- t = build (MODIFY_EXPR, void_type_node, dst->replacement,
- src->replacement);
+ t = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
append_to_statement_list (t, list_p);
}
}
return;
}
- for (c = elt->children; c ; c = c->sibling)
- generate_element_zero (c, list_p);
+ if (!elt->in_bitfld_block)
+ FOR_EACH_ACTUAL_CHILD (c, elt)
+ generate_element_zero (c, list_p);
if (elt->replacement)
{
gcc_assert (elt->is_scalar);
t = fold_convert (elt->type, integer_zero_node);
- t = build (MODIFY_EXPR, void_type_node, elt->replacement, t);
+ t = sra_build_elt_assignment (elt, t);
append_to_statement_list (t, list_p);
}
}
Add the result to *LIST_P. */
static void
-generate_one_element_init (tree var, tree init, tree *list_p)
+generate_one_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
/* The replacement can be almost arbitrarily complex. Gimplify. */
- tree stmt = build (MODIFY_EXPR, void_type_node, var, init);
+ tree stmt = sra_build_elt_assignment (elt, init);
gimplify_and_add (stmt, list_p);
}
enum tree_code init_code;
struct sra_elt *sub;
tree t;
+ unsigned HOST_WIDE_INT idx;
+ tree value, purpose;
/* We can be passed DECL_INITIAL of a static variable. It might have a
conversion, which we strip off here. */
{
if (elt->replacement)
{
- generate_one_element_init (elt->replacement, init, list_p);
+ generate_one_element_init (elt, init, list_p);
elt->visited = true;
}
return result;
{
case COMPLEX_CST:
case COMPLEX_EXPR:
- for (sub = elt->children; sub ; sub = sub->sibling)
+ FOR_EACH_ACTUAL_CHILD (sub, elt)
{
if (sub->element == integer_zero_node)
t = (init_code == COMPLEX_EXPR
break;
case CONSTRUCTOR:
- for (t = CONSTRUCTOR_ELTS (init); t ; t = TREE_CHAIN (t))
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
{
- sub = lookup_element (elt, TREE_PURPOSE (t), NULL, NO_INSERT);
- if (sub == NULL)
- continue;
- result &= generate_element_init_1 (sub, TREE_VALUE (t), list_p);
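+          /* A RANGE_EXPR purpose initializes every index in
+             [lower, upper] with the same value; visit each index in the
+             range in turn.  */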
+ if (TREE_CODE (purpose) == RANGE_EXPR)
+ {
+ tree lower = TREE_OPERAND (purpose, 0);
+ tree upper = TREE_OPERAND (purpose, 1);
+
+ while (1)
+ {
+ sub = lookup_element (elt, lower, NULL, NO_INSERT);
+ if (sub != NULL)
+ result &= generate_element_init_1 (sub, value, list_p);
+ if (tree_int_cst_equal (lower, upper))
+ break;
+ lower = int_const_binop (PLUS_EXPR, lower,
+ integer_one_node, true);
+ }
+ }
+ else
+ {
+ sub = lookup_element (elt, purpose, NULL, NO_INSERT);
+ if (sub != NULL)
+ result &= generate_element_init_1 (sub, value, list_p);
+ }
}
break;
if (ret && *list_p)
{
tree_stmt_iterator i;
- size_t old, new, j;
-
- old = num_referenced_vars;
for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
find_new_referenced_vars (tsi_stmt_ptr (i));
-
- new = num_referenced_vars;
- for (j = old; j < new; ++j)
- bitmap_set_bit (vars_to_rename, j);
}
return ret;
/* Helper function to insert LIST before BSI, and set up line number info. */
-static void
+void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
tree stmt = bsi_stmt (*bsi);
/* Similarly, but insert after BSI. Handles insertion onto edges as well. */
-static void
+void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
tree stmt = bsi_stmt (*bsi);
sra_replace (block_stmt_iterator *bsi, tree list)
{
sra_insert_before (bsi, list);
- bsi_remove (bsi);
+ bsi_remove (bsi, false);
if (bsi_end_p (*bsi))
*bsi = bsi_last (bsi->bb);
else
/* If we have a replacement, then updating the reference is as
simple as modifying the existing statement in place. */
if (is_output)
- mark_all_v_defs (stmt);
- *expr_p = elt->replacement;
- modify_stmt (stmt);
+ {
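+          /* If the replacement is a bit-field within a gimple register,
+             we cannot store to it directly; rebuild the whole assignment
+             via sra_build_elt_assignment and replace the statement.  */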
+ if (TREE_CODE (elt->replacement) == BIT_FIELD_REF
+ && is_gimple_reg (TREE_OPERAND (elt->replacement, 0))
+ && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+ && &GIMPLE_STMT_OPERAND (stmt, 0) == expr_p)
+ {
+ tree newstmt = sra_build_elt_assignment
+ (elt, GIMPLE_STMT_OPERAND (stmt, 1));
+ if (TREE_CODE (newstmt) != STATEMENT_LIST)
+ {
+ tree list = alloc_stmt_list ();
+ append_to_statement_list (newstmt, &list);
+ newstmt = list;
+ }
+ sra_replace (bsi, newstmt);
+ return;
+ }
+
+ mark_all_v_defs (stmt);
+ }
+ *expr_p = REPLDUP (elt->replacement);
+ update_stmt (stmt);
}
else
{
This optimization would be most effective if sra_walk_function
processed the blocks in dominator order. */
- generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
- if (list == NULL)
- return;
- mark_all_v_defs (expr_first (list));
+ generate_copy_inout (elt, false, generate_element_ref (elt), &list);
+ if (list)
+ {
+ mark_all_v_defs (list);
+ sra_insert_before (bsi, list);
+ mark_no_warning (elt);
+ }
+
if (is_output)
- sra_insert_after (bsi, list);
- else
- sra_insert_before (bsi, list);
+ {
+ list = NULL;
+ generate_copy_inout (elt, true, generate_element_ref (elt), &list);
+ if (list)
+ {
+ mark_all_v_defs (list);
+ sra_insert_after (bsi, list);
+ }
+ }
}
}
/* See the commentary in sra_walk_function concerning
RETURN_EXPR, and why we should never see one here. */
- gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);
+ gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
- TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
- TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
- modify_stmt (stmt);
+ GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
+ GIMPLE_STMT_OPERAND (stmt, 1) = REPLDUP (rhs_elt->replacement);
+ update_stmt (stmt);
}
else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
{
generate_element_ref (rhs_elt), &list);
if (list)
{
- mark_all_v_defs (expr_first (list));
+ mark_all_v_defs (list);
sra_insert_before (bsi, list);
}
generate_copy_inout (lhs_elt, true,
generate_element_ref (lhs_elt), &list);
if (list)
- sra_insert_after (bsi, list);
+ {
+ mark_all_v_defs (list);
+ sra_insert_after (bsi, list);
+ }
}
else
{
list = NULL;
generate_element_copy (lhs_elt, rhs_elt, &list);
gcc_assert (list);
+ mark_all_v_defs (list);
sra_replace (bsi, list);
}
}
exposes constants to later optimizations. */
if (list)
{
- mark_all_v_defs (expr_first (list));
+ mark_all_v_defs (list);
sra_insert_after (bsi, list);
}
}
replaces the original structure assignment. */
gcc_assert (list);
mark_all_v_defs (bsi_stmt (*bsi));
+ mark_all_v_defs (list);
sra_replace (bsi, list);
}
}
mark_all_v_defs (stmt);
generate_copy_inout (elt, is_output, other, &list);
gcc_assert (list);
+ mark_all_v_defs (list);
/* Preserve EH semantics. */
if (stmt_ends_bb_p (stmt))
}
if (list)
- insert_edge_copies (list, ENTRY_BLOCK_PTR);
+ {
+ insert_edge_copies (list, ENTRY_BLOCK_PTR);
+ mark_all_v_defs (list);
+ }
}
/* Entry point to phase 4. Update the function to match replacements. */
fputc ('.', f);
print_generic_expr (f, elt->element, dump_flags);
}
+ else if (TREE_CODE (elt->element) == BIT_FIELD_REF)
+ fprintf (f, "$B" HOST_WIDE_INT_PRINT_DEC "F" HOST_WIDE_INT_PRINT_DEC,
+ tree_low_cst (TREE_OPERAND (elt->element, 2), 1),
+ tree_low_cst (TREE_OPERAND (elt->element, 1), 1));
+ else if (TREE_CODE (elt->element) == RANGE_EXPR)
+    fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC ".." HOST_WIDE_INT_PRINT_DEC "]",
+ TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
+ TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
else
fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
TREE_INT_CST_LOW (elt->element));
fputc ('\n', stderr);
}
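+/* Allocate the type decomposition caches if they do not exist yet.  */
+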
+void
+sra_init_cache (void)
+{
+ if (sra_type_decomp_cache)
+ return;
+
+ sra_type_decomp_cache = BITMAP_ALLOC (NULL);
+ sra_type_inst_cache = BITMAP_ALLOC (NULL);
+}
+
/* Main entry point. */
-static void
+static unsigned int
tree_sra (void)
{
/* Initialize local variables. */
+ todoflags = 0;
gcc_obstack_init (&sra_obstack);
sra_candidates = BITMAP_ALLOC (NULL);
needs_copy_in = BITMAP_ALLOC (NULL);
- sra_type_decomp_cache = BITMAP_ALLOC (NULL);
- sra_type_inst_cache = BITMAP_ALLOC (NULL);
+ sra_init_cache ();
sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);
/* Scan. If we find anything, instantiate and scalarize. */
BITMAP_FREE (sra_type_decomp_cache);
BITMAP_FREE (sra_type_inst_cache);
obstack_free (&sra_obstack, NULL);
+ return todoflags;
+}
+
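+/* Entry point for the early SRA pass: run tree_sra with EARLY_SRA set.  */
+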
+static unsigned int
+tree_sra_early (void)
+{
+ unsigned int ret;
+
+ early_sra = true;
+ ret = tree_sra ();
+ early_sra = false;
+
+ return ret;
}
static bool
return flag_tree_sra != 0;
}
+struct tree_opt_pass pass_sra_early =
+{
+ "esra", /* name */
+ gate_sra, /* gate */
+ tree_sra_early, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_TREE_SRA, /* tv_id */
+ PROP_cfg | PROP_ssa, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func
+ | TODO_update_ssa
+ | TODO_ggc_collect
+ | TODO_verify_ssa, /* todo_flags_finish */
+ 0 /* letter */
+};
+
struct tree_opt_pass pass_sra =
{
"sra", /* name */
NULL, /* next */
0, /* static_pass_number */
TV_TREE_SRA, /* tv_id */
- PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
+ PROP_cfg | PROP_ssa, /* properties_required */
0, /* properties_provided */
- 0, /* properties_destroyed */
+ 0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_rename_vars
- | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
+ TODO_dump_func
+ | TODO_update_ssa
+ | TODO_ggc_collect
+ | TODO_verify_ssa, /* todo_flags_finish */
0 /* letter */
};