/* Scalar Replacement of Aggregates (SRA) converts some structure
references into scalar references, exposing them to the scalar
optimizers.
- Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007
+ Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
-Free Software Foundation; either version 2, or (at your option) any
+Free Software Foundation; either version 3, or (at your option) any
later version.
GCC is distributed in the hope that it will be useful, but WITHOUT
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
*/
+/* True if this is the "early" pass, before inlining. */
+static bool early_sra;
+
/* The set of todo flags to return from tree_sra. */
static unsigned int todoflags;
{
enum tree_code code = TREE_CODE (type);
return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
+ || code == FIXED_POINT_TYPE
|| code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
|| code == POINTER_TYPE || code == OFFSET_TYPE
|| code == REFERENCE_TYPE);
return false;
}
+ /* HACK: if we decompose a va_list_type_node before inlining, then we'll
+ confuse tree-stdarg.c, and we won't be able to figure out which and
+ how many arguments are accessed. This really should be improved in
+ tree-stdarg.c, as the decomposition is truly a win. This could also
+ be fixed if the stdarg pass ran early, but this can't be done until
+ we have aliasing information early too. See PR 30791. */
+ if (early_sra
+ && TYPE_MAIN_VARIANT (TREE_TYPE (var))
+ == TYPE_MAIN_VARIANT (va_list_type_node))
+ return false;
+
return true;
}
void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);
/* Invoked when we have a copy between one scalarizable reference ELT
- and one non-scalarizable reference OTHER. IS_OUTPUT is true if ELT
- is on the left-hand side. */
+ and one non-scalarizable reference OTHER without side-effects.
+ IS_OUTPUT is true if ELT is on the left-hand side. */
void (*ldst) (struct sra_elt *elt, tree other,
block_stmt_iterator *bsi, bool is_output);
/* A bit field reference to a specific vector is scalarized but for
ones for inputs need to be marked as used on the left hand size so
when we scalarize it, we can mark that variable as non renamable. */
- if (is_output && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
+ if (is_output
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
{
- struct sra_elt *elt = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
- elt->is_vector_lhs = true;
+ struct sra_elt *elt
+ = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
+ if (elt)
+ elt->is_vector_lhs = true;
}
/* A bit field reference (access to *multiple* fields simultaneously)
is not currently scalarized. Consider this an access to the
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
const struct sra_walk_fns *fns)
{
- sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
+ int i;
+ int nargs = call_expr_nargs (expr);
+ for (i = 0; i < nargs; i++)
+ sra_walk_expr (&CALL_EXPR_ARG (expr, i), bsi, false, fns);
}
/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
/* If the RHS is scalarizable, handle it. There are only two cases. */
if (rhs_elt)
{
- if (!rhs_elt->is_scalar)
+ if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
fns->ldst (rhs_elt, lhs, bsi, false);
else
fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
The lvalue requirement prevents us from trying to directly scalarize
the result of a function call. Which would result in trying to call
the function multiple times, and other evil things. */
- else if (!lhs_elt->is_scalar && is_gimple_addressable (rhs))
+ else if (!lhs_elt->is_scalar
+ && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
fns->ldst (lhs_elt, rhs, bsi, true);
/* Otherwise we're being used in some context that requires the
/* If the statement has no virtual operands, then it doesn't
make any structure references that we care about. */
- if (ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
- continue;
+ if (gimple_aliases_computed_p (cfun)
+ && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
+ continue;
switch (TREE_CODE (stmt))
{
as a USE of the variable on the RHS of this assignment. */
t = TREE_OPERAND (stmt, 0);
- if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
+ if (t == NULL_TREE)
+ ;
+ else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
else
sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
DECL_IGNORED_P (var) = 0;
TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
+ if (elt->element && TREE_NO_WARNING (elt->element))
+ TREE_NO_WARNING (var) = 1;
}
else
{
tree f;
for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
if (TREE_CODE (f) == FIELD_DECL)
- instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
+ {
+ tree field_type = TREE_TYPE (f);
+
+ /* canonicalize_component_ref() unwidens some bit-field
+ types (not marked as DECL_BIT_FIELD in C++), so we
+ must do the same, lest we may introduce type
+ mismatches. */
+ if (INTEGRAL_TYPE_P (field_type)
+ && DECL_MODE (f) != TYPE_MODE (field_type))
+ field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
+ field_type,
+ elt->element,
+ f, NULL_TREE),
+ NULL_TREE));
+
+ instantiate_missing_elements_1 (elt, f, field_type);
+ }
break;
}
}
bitmap_clear (&done_head);
- if (!bitmap_empty_p (sra_candidates))
- todoflags |= TODO_update_smt_usage;
-
mark_set_for_renaming (sra_candidates);
if (dump_file)
}
case ARRAY_TYPE:
- todoflags |= TODO_update_smt_usage;
if (TREE_CODE (elt->element) == RANGE_EXPR)
return build4 (ARRAY_RANGE_REF, elt->type, base,
TREE_OPERAND (elt->element, 0), NULL, NULL);
return elt->element;
}
+/* Create and return a GIMPLE_MODIFY_STMT assigning SRC to DST.  No
+   type checking or conversion is performed; see the comment in the
+   body for the rationale.  */
+
+static tree
+sra_build_assignment (tree dst, tree src)
+{
+ /* It was hoped that we could perform some type sanity checking
+ here, but since front-ends can emit accesses of fields in types
+ different from their nominal types and copy structures containing
+ them as a whole, we'd have to handle such differences here.
+ Since such accesses under different types require compatibility
+ anyway, there's little point in making tests and/or adding
+ conversions to ensure the types of src and dst are the same.
+ So we just assume type differences at this point are ok. */
+ return build_gimple_modify_stmt (dst, src);
+}
+
/* Generate a set of assignment statements in *LIST_P to copy all
instantiated elements under ELT to or from the equivalent structure
rooted at EXPR. COPY_OUT controls the direction of the copy, with
i = c->replacement;
t = build2 (COMPLEX_EXPR, elt->type, r, i);
- t = build2 (GIMPLE_MODIFY_STMT, void_type_node, expr, t);
+ t = sra_build_assignment (expr, t);
SSA_NAME_DEF_STMT (expr) = t;
append_to_statement_list (t, list_p);
}
else if (elt->replacement)
{
if (copy_out)
- t = build2 (GIMPLE_MODIFY_STMT, void_type_node, elt->replacement, expr);
+ t = sra_build_assignment (elt->replacement, expr);
else
- t = build2 (GIMPLE_MODIFY_STMT, void_type_node, expr, elt->replacement);
+ t = sra_build_assignment (expr, elt->replacement);
append_to_statement_list (t, list_p);
}
else
gcc_assert (src->replacement);
- t = build2 (GIMPLE_MODIFY_STMT, void_type_node, dst->replacement,
- src->replacement);
+ t = sra_build_assignment (dst->replacement, src->replacement);
append_to_statement_list (t, list_p);
}
}
gcc_assert (elt->is_scalar);
t = fold_convert (elt->type, integer_zero_node);
- t = build2 (GIMPLE_MODIFY_STMT, void_type_node, elt->replacement, t);
+ t = sra_build_assignment (elt->replacement, t);
append_to_statement_list (t, list_p);
}
}
generate_one_element_init (tree var, tree init, tree *list_p)
{
/* The replacement can be almost arbitrarily complex. Gimplify. */
- tree stmt = build2 (GIMPLE_MODIFY_STMT, void_type_node, var, init);
+ tree stmt = sra_build_assignment (var, init);
gimplify_and_add (stmt, list_p);
}
scan_function ();
decide_instantiations ();
scalarize_function ();
+ if (!bitmap_empty_p (sra_candidates))
+ todoflags |= TODO_rebuild_alias;
}
/* Free allocated memory. */
return todoflags;
}
+/* Entry point for the early SRA pass, run before inlining.  Invokes
+   the normal SRA driver with EARLY_SRA set, then masks
+   TODO_rebuild_alias out of the returned todo flags -- presumably
+   because alias information is not yet available this early in the
+   pipeline (see the va_list HACK comment and PR 30791); confirm.  */
+
+static unsigned int
+tree_sra_early (void)
+{
+ unsigned int ret;
+
+ early_sra = true;
+ ret = tree_sra ();
+ early_sra = false;
+
+ return ret & ~TODO_rebuild_alias;
+}
+
static bool
gate_sra (void)
{
return flag_tree_sra != 0;
}
+/* Pass descriptor for the early variant of SRA ("esra"), executed
+   before inlining via tree_sra_early.  Unlike the late pass, it does
+   not require alias information (PROP_alias) to be computed.  */
+struct tree_opt_pass pass_sra_early =
+{
+ "esra", /* name */
+ gate_sra, /* gate */
+ tree_sra_early, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_TREE_SRA, /* tv_id */
+ PROP_cfg | PROP_ssa, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func
+ | TODO_update_ssa
+ | TODO_ggc_collect
+ | TODO_verify_ssa, /* todo_flags_finish */
+ 0 /* letter */
+};
+
struct tree_opt_pass pass_sra =
{
"sra", /* name */
NULL, /* next */
0, /* static_pass_number */
TV_TREE_SRA, /* tv_id */
- PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
+ PROP_cfg | PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */