/* Is this particular access write access? */
unsigned write : 1;
- /* Is this access an artificial one created to scalarize some record
- entirely? */
- unsigned total_scalarization : 1;
-
/* Is this access an access to a non-addressable field? */
unsigned non_addressable : 1;
is not propagated in the access tree in any direction. */
unsigned grp_scalar_write : 1;
+ /* Is this access an artificial one created to scalarize some record
+ entirely? */
+ unsigned grp_total_scalarization : 1;
+
/* Other passes of the analysis use this bit to make function
analyze_access_subtree create scalar replacements for this group if
possible. */
fprintf (f, ", type = ");
print_generic_expr (f, access->type, 0);
if (grp)
- fprintf (f, ", total_scalarization = %d, grp_read = %d, grp_write = %d, "
- "grp_assignment_read = %d, grp_assignment_write = %d, "
- "grp_scalar_read = %d, grp_scalar_write = %d, "
+ fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
+ "grp_assignment_write = %d, grp_scalar_read = %d, "
+ "grp_scalar_write = %d, grp_total_scalarization = %d, "
"grp_hint = %d, grp_covered = %d, "
"grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
"grp_partial_lhs = %d, grp_to_be_replaced = %d, "
"grp_maybe_modified = %d, "
"grp_not_necessarilly_dereferenced = %d\n",
- access->total_scalarization, access->grp_read, access->grp_write,
- access->grp_assignment_read, access->grp_assignment_write,
- access->grp_scalar_read, access->grp_scalar_write,
+ access->grp_read, access->grp_write, access->grp_assignment_read,
+ access->grp_assignment_write, access->grp_scalar_read,
+ access->grp_scalar_write, access->grp_total_scalarization,
access->grp_hint, access->grp_covered,
access->grp_unscalarizable_region, access->grp_unscalarized_data,
access->grp_partial_lhs, access->grp_to_be_replaced,
access->grp_maybe_modified,
access->grp_not_necessarilly_dereferenced);
else
- fprintf (f, ", write = %d, total_scalarization = %d, "
+ fprintf (f, ", write = %d, grp_total_scalarization = %d, "
"grp_partial_lhs = %d\n",
- access->write, access->total_scalarization,
+ access->write, access->grp_total_scalarization,
access->grp_partial_lhs);
}
scalarization. */
static bool
-type_internals_preclude_sra_p (tree type)
+type_internals_preclude_sra_p (tree type, const char **msg)
{
tree fld;
tree et;
{
tree ft = TREE_TYPE (fld);
- if (TREE_THIS_VOLATILE (fld)
- || !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
- || !host_integerp (DECL_FIELD_OFFSET (fld), 1)
- || !host_integerp (DECL_SIZE (fld), 1)
- || (AGGREGATE_TYPE_P (ft)
- && int_bit_position (fld) % BITS_PER_UNIT != 0))
- return true;
+ if (TREE_THIS_VOLATILE (fld))
+ {
+ *msg = "volatile structure field";
+ return true;
+ }
+ if (!DECL_FIELD_OFFSET (fld))
+ {
+ *msg = "no structure field offset";
+ return true;
+ }
+ if (!DECL_SIZE (fld))
+ {
+ *msg = "zero structure field size";
+ return true;
+ }
+ if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
+ {
+ *msg = "structure field offset not fixed";
+ return true;
+ }
+ if (!host_integerp (DECL_SIZE (fld), 1))
+ {
+ *msg = "structure field size not fixed";
+ return true;
+ }
+ if (AGGREGATE_TYPE_P (ft)
+ && int_bit_position (fld) % BITS_PER_UNIT != 0)
+ {
+ *msg = "structure field is bit field";
+ return true;
+ }
- if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft))
+ if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
return true;
}
et = TREE_TYPE (type);
if (TYPE_VOLATILE (et))
- return true;
+ {
+ *msg = "element type is volatile";
+ return true;
+ }
- if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et))
+ if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
return true;
return false;
access = create_access_1 (base, pos, size);
access->expr = nref;
access->type = ft;
- access->total_scalarization = 1;
+ access->grp_total_scalarization = 1;
/* Accesses for intraprocedural SRA can have their stmt NULL. */
}
else
}
}
+/* Create total_scalarization accesses for all scalar type fields in VAR and
+   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
+ type_consists_of_records_p. */
+
+static void
+completely_scalarize_var (tree var)
+{
+ HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
+ struct access *access;
+
+ access = create_access_1 (var, 0, size);
+ access->expr = var;
+ access->type = TREE_TYPE (var);
+ access->grp_total_scalarization = 1;
+
+ completely_scalarize_record (var, var, 0, var);
+}
/* Search the given tree for a declaration by skipping handled components and
exclude it from the candidates. */
return false;
}
+/* Return true iff type of EXP is not sufficiently aligned. */
+
+static bool
+tree_non_mode_aligned_mem_p (tree exp)
+{
+ enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+ unsigned int align;
+
+ if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
+ exp = TREE_OPERAND (exp, 0);
+
+ if (TREE_CODE (exp) == SSA_NAME
+ || TREE_CODE (exp) == MEM_REF
+ || mode == BLKmode
+ || is_gimple_min_invariant (exp)
+ || !STRICT_ALIGNMENT)
+ return false;
+
+ align = get_object_alignment (exp);
+ if (GET_MODE_ALIGNMENT (mode) > align)
+ return true;
+
+ return false;
+}
+
/* Scan expressions occuring in STMT, create access structures for all accesses
to candidates for scalarization and remove those candidates which occur in
statements or expressions that prevent them from being split apart. Return
tree lhs, rhs;
struct access *lacc, *racc;
- if (!gimple_assign_single_p (stmt))
+ if (!gimple_assign_single_p (stmt)
+ /* Scope clobbers don't influence scalarization. */
+ || gimple_clobber_p (stmt))
return false;
lhs = gimple_assign_lhs (stmt);
lacc = build_access_from_expr_1 (lhs, stmt, true);
if (lacc)
- lacc->grp_assignment_write = 1;
+ {
+ lacc->grp_assignment_write = 1;
+ lacc->grp_unscalarizable_region |= tree_non_mode_aligned_mem_p (rhs);
+ }
if (racc)
{
if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
&& !is_gimple_reg_type (racc->type))
bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
+ racc->grp_unscalarizable_region |= tree_non_mode_aligned_mem_p (lhs);
}
if (lacc && racc
offset -= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fld));
exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
- return fold_build3_loc (loc, COMPONENT_REF, model->type, t, fld,
+ return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
TREE_OPERAND (model->expr, 2));
}
else
return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}
+/* Print message to dump file why a variable was rejected. */
+
+static void
+reject (tree var, const char *msg)
+{
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
+ print_generic_expr (dump_file, var, 0);
+ fprintf (dump_file, "\n");
+ }
+}
+
/* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
those with type which is suitable for scalarization. */
tree var, type;
referenced_var_iterator rvi;
bool ret = false;
+ const char *msg;
FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
{
continue;
type = TREE_TYPE (var);
- if (!AGGREGATE_TYPE_P (type)
- || needs_to_live_in_memory (var)
- || TREE_THIS_VOLATILE (var)
- || !COMPLETE_TYPE_P (type)
- || !host_integerp (TYPE_SIZE (type), 1)
- || tree_low_cst (TYPE_SIZE (type), 1) == 0
- || type_internals_preclude_sra_p (type)
- /* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
+ if (!AGGREGATE_TYPE_P (type))
+ {
+ reject (var, "not aggregate");
+ continue;
+ }
+ if (needs_to_live_in_memory (var))
+ {
+ reject (var, "needs to live in memory");
+ continue;
+ }
+ if (TREE_THIS_VOLATILE (var))
+ {
+ reject (var, "is volatile");
+ continue;
+ }
+ if (!COMPLETE_TYPE_P (type))
+ {
+ reject (var, "has incomplete type");
+ continue;
+ }
+ if (!host_integerp (TYPE_SIZE (type), 1))
+ {
+ reject (var, "type size not fixed");
+ continue;
+ }
+ if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
+ {
+ reject (var, "type size is zero");
+ continue;
+ }
+ if (type_internals_preclude_sra_p (type, &msg))
+ {
+ reject (var, msg);
+ continue;
+ }
+ if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
we also want to schedule it rather late. Thus we ignore it in
the early pass. */
- || (sra_mode == SRA_MODE_EARLY_INTRA
+ (sra_mode == SRA_MODE_EARLY_INTRA
&& is_va_list_type (type)))
- continue;
+ {
+ reject (var, "is va_list");
+ continue;
+ }
bitmap_set_bit (candidate_bitmap, DECL_UID (var));
bool grp_assignment_read = access->grp_assignment_read;
bool grp_assignment_write = access->grp_assignment_write;
bool multiple_scalar_reads = false;
- bool total_scalarization = access->total_scalarization;
+ bool total_scalarization = access->grp_total_scalarization;
bool grp_partial_lhs = access->grp_partial_lhs;
bool first_scalar = is_gimple_reg_type (access->type);
bool unscalarizable_region = access->grp_unscalarizable_region;
grp_assignment_write |= ac2->grp_assignment_write;
grp_partial_lhs |= ac2->grp_partial_lhs;
unscalarizable_region |= ac2->grp_unscalarizable_region;
- total_scalarization |= ac2->total_scalarization;
+ total_scalarization |= ac2->grp_total_scalarization;
relink_to_new_repr (access, ac2);
/* If there are both aggregate-type and scalar-type accesses with
access->grp_assignment_read = grp_assignment_read;
access->grp_assignment_write = grp_assignment_write;
access->grp_hint = multiple_scalar_reads || total_scalarization;
+ access->grp_total_scalarization = total_scalarization;
access->grp_partial_lhs = grp_partial_lhs;
access->grp_unscalarizable_region = unscalarizable_region;
if (access->first_link)
tree repl;
repl = create_tmp_var (access->type, "SR");
- get_var_ann (repl);
add_referenced_var (repl);
if (rename)
mark_sym_for_renaming (repl);
root->grp_write = 1;
if (parent->grp_assignment_write)
root->grp_assignment_write = 1;
+ if (parent->grp_total_scalarization)
+ root->grp_total_scalarization = 1;
}
if (root->grp_unscalarizable_region)
for (child = root->first_child; child; child = child->next_sibling)
{
- if (!hole && child->offset < covered_to)
- hole = true;
- else
- covered_to += child->size;
-
+ hole |= covered_to < child->offset;
sth_created |= analyze_access_subtree (child, root,
allow_replacements && !scalar);
root->grp_unscalarized_data |= child->grp_unscalarized_data;
- hole |= !child->grp_covered;
+ root->grp_total_scalarization &= child->grp_total_scalarization;
+ if (child->grp_covered)
+ covered_to += child->size;
+ else
+ hole = true;
}
if (allow_replacements && scalar && !root->first_child
|| ((root->grp_scalar_read || root->grp_assignment_read)
&& (root->grp_scalar_write || root->grp_assignment_write))))
{
+ bool new_integer_type;
+ if (TREE_CODE (root->type) == ENUMERAL_TYPE)
+ {
+ tree rt = root->type;
+ root->type = build_nonstandard_integer_type (TYPE_PRECISION (rt),
+ TYPE_UNSIGNED (rt));
+ new_integer_type = true;
+ }
+ else
+ new_integer_type = false;
+
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Marking ");
print_generic_expr (dump_file, root->base, 0);
- fprintf (dump_file, " offset: %u, size: %u: ",
+ fprintf (dump_file, " offset: %u, size: %u ",
(unsigned) root->offset, (unsigned) root->size);
- fprintf (dump_file, " to be replaced.\n");
+ fprintf (dump_file, " to be replaced%s.\n",
+ new_integer_type ? " with an integer": "");
}
root->grp_to_be_replaced = 1;
sth_created = true;
hole = false;
}
- else if (covered_to < limit)
- hole = true;
+ else
+ {
+ if (covered_to < limit)
+ hole = true;
+ if (scalar)
+ root->grp_total_scalarization = 0;
+ }
- if (sth_created && !hole)
+ if (sth_created
+ && (!hole || root->grp_total_scalarization))
{
root->grp_covered = 1;
return true;
tree var = referenced_var (i);
if (TREE_CODE (var) == VAR_DECL
- && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
- <= max_total_scalarization_size)
&& type_consists_of_records_p (TREE_TYPE (var)))
{
- completely_scalarize_record (var, var, 0, var);
- if (dump_file && (dump_flags & TDF_DETAILS))
+ if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
+ <= max_total_scalarization_size)
+ {
+ completely_scalarize_var (var);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Will attempt to totally scalarize ");
+ print_generic_expr (dump_file, var, 0);
+ fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
+ }
+ }
+ else if (dump_file && (dump_flags & TDF_DETAILS))
{
- fprintf (dump_file, "Will attempt to totally scalarize ");
+ fprintf (dump_file, "Too big to totally scalarize: ");
print_generic_expr (dump_file, var, 0);
- fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
+ fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
}
}
}
there to do the copying and then load the scalar replacements of the LHS.
This is what the first branch does. */
- if (gimple_has_volatile_ops (*stmt)
+ if (modify_this_stmt
+ || gimple_has_volatile_ops (*stmt)
|| contains_vce_or_bfcref_p (rhs)
|| contains_vce_or_bfcref_p (lhs))
{
&& TREE_OPERAND (lhs, 0) == name
&& integer_zerop (TREE_OPERAND (lhs, 1))
&& types_compatible_p (TREE_TYPE (lhs),
- TREE_TYPE (TREE_TYPE (name))))
+ TREE_TYPE (TREE_TYPE (name)))
+ && !TREE_THIS_VOLATILE (lhs))
uses_ok++;
}
if (gimple_assign_single_p (stmt))
&& TREE_OPERAND (rhs, 0) == name
&& integer_zerop (TREE_OPERAND (rhs, 1))
&& types_compatible_p (TREE_TYPE (rhs),
- TREE_TYPE (TREE_TYPE (name))))
+ TREE_TYPE (TREE_TYPE (name)))
+ && !TREE_THIS_VOLATILE (rhs))
uses_ok++;
}
else if (is_gimple_call (stmt))
&& TREE_OPERAND (arg, 0) == name
&& integer_zerop (TREE_OPERAND (arg, 1))
&& types_compatible_p (TREE_TYPE (arg),
- TREE_TYPE (TREE_TYPE (name))))
+ TREE_TYPE (TREE_TYPE (name)))
+ && !TREE_THIS_VOLATILE (arg))
uses_ok++;
}
}
tree parm;
int count = 0;
bool ret = false;
+ const char *msg;
for (parm = DECL_ARGUMENTS (current_function_decl);
parm;
|| !host_integerp (TYPE_SIZE (type), 1)
|| tree_low_cst (TYPE_SIZE (type), 1) == 0
|| (AGGREGATE_TYPE_P (type)
- && type_internals_preclude_sra_p (type)))
+ && type_internals_preclude_sra_p (type, &msg)))
continue;
bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
|| gimple_code (access->stmt) == GIMPLE_ASM))
return true;
+ if (tree_non_mode_aligned_mem_p (access->expr))
+ return true;
+
return false;
}
DECL_NAME (repl) = get_identifier (pretty_name);
obstack_free (&name_obstack, pretty_name);
- get_var_ann (repl);
add_referenced_var (repl);
adj->new_ssa_base = repl;
}
VEC (cgraph_edge_p, heap) * redirect_callers = collect_callers_of_node (node);
rebuild_cgraph_edges ();
+ free_dominance_info (CDI_DOMINATORS);
pop_cfun ();
current_function_decl = NULL_TREE;