/* Is this particular access write access? */
unsigned write : 1;
- /* Is this access an artificial one created to scalarize some record
- entirely? */
- unsigned total_scalarization : 1;
-
/* Is this access an access to a non-addressable field? */
unsigned non_addressable : 1;
is not propagated in the access tree in any direction. */
unsigned grp_scalar_write : 1;
+ /* Is this access an artificial one created to scalarize some record
+ entirely? */
+ unsigned grp_total_scalarization : 1;
+
/* Other passes of the analysis use this bit to make function
analyze_access_subtree create scalar replacements for this group if
possible. */
fprintf (f, ", type = ");
print_generic_expr (f, access->type, 0);
if (grp)
- fprintf (f, ", total_scalarization = %d, grp_read = %d, grp_write = %d, "
- "grp_assignment_read = %d, grp_assignment_write = %d, "
- "grp_scalar_read = %d, grp_scalar_write = %d, "
+ fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
+ "grp_assignment_write = %d, grp_scalar_read = %d, "
+ "grp_scalar_write = %d, grp_total_scalarization = %d, "
"grp_hint = %d, grp_covered = %d, "
"grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
"grp_partial_lhs = %d, grp_to_be_replaced = %d, "
"grp_maybe_modified = %d, "
"grp_not_necessarilly_dereferenced = %d\n",
- access->total_scalarization, access->grp_read, access->grp_write,
- access->grp_assignment_read, access->grp_assignment_write,
- access->grp_scalar_read, access->grp_scalar_write,
+ access->grp_read, access->grp_write, access->grp_assignment_read,
+ access->grp_assignment_write, access->grp_scalar_read,
+ access->grp_scalar_write, access->grp_total_scalarization,
access->grp_hint, access->grp_covered,
access->grp_unscalarizable_region, access->grp_unscalarized_data,
access->grp_partial_lhs, access->grp_to_be_replaced,
access->grp_maybe_modified,
access->grp_not_necessarilly_dereferenced);
else
- fprintf (f, ", write = %d, total_scalarization = %d, "
+ fprintf (f, ", write = %d, grp_total_scalarization = %d, "
"grp_partial_lhs = %d\n",
- access->write, access->total_scalarization,
+ access->write, access->grp_total_scalarization,
access->grp_partial_lhs);
}
access = create_access_1 (base, pos, size);
access->expr = nref;
access->type = ft;
- access->total_scalarization = 1;
+ access->grp_total_scalarization = 1;
/* Accesses for intraprocedural SRA can have their stmt NULL. */
}
else
}
}
+/* Create total_scalarization accesses for all scalar type fields in VAR and
+ for VAR as a whole. VAR must be of a RECORD_TYPE conforming to
+ type_consists_of_records_p. */
+
+static void
+completely_scalarize_var (tree var)
+{
+ HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
+ struct access *access;
+
+ access = create_access_1 (var, 0, size);
+ access->expr = var;
+ access->type = TREE_TYPE (var);
+ access->grp_total_scalarization = 1;
+
+ completely_scalarize_record (var, var, 0, var);
+}
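+
+/* As an illustration, for a hypothetical variable
+
+     struct { int i; float f; } var;
+
+ this creates one artificial access covering all of VAR plus, through
+ completely_scalarize_record, one access per scalar field, so that VAR
+ can later be replaced by the scalars var$i and var$f even where the
+ original code only copied it wholesale.  */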
/* Search the given tree for a declaration by skipping handled components and
exclude it from the candidates. */
return false;
}
+/* Return true if EXP is a memory reference less aligned than ALIGN. This is
+ invoked only on strict-alignment targets. */
+
+static bool
+tree_non_aligned_mem_p (tree exp, unsigned int align)
+{
+ unsigned int exp_align;
+
+ if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
+ exp = TREE_OPERAND (exp, 0);
+
+ if (TREE_CODE (exp) == SSA_NAME || is_gimple_min_invariant (exp))
+ return false;
+
+ /* get_object_alignment will fall back to BITS_PER_UNIT if it cannot
+ compute an explicit alignment. Pretend that dereferenced pointers
+ are always aligned on strict-alignment targets. */
+ if (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF)
+ exp_align = get_object_or_type_alignment (exp);
+ else
+ exp_align = get_object_alignment (exp);
+
+ return exp_align < align;
+}
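+
+/* As an illustration, assume a hypothetical packed record on a
+ strict-alignment target:
+
+     struct __attribute__ ((packed)) s { char c; int i; } *p;
+
+ The reference p->i only carries byte alignment, so this predicate
+ returns true for it whenever ALIGN is the natural alignment of int,
+ whereas SSA names and invariants are never rejected.  */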
+
+/* Return true if EXP is a memory reference less aligned than what the access
+ ACC would require. This is invoked only on strict-alignment targets. */
+
+static bool
+tree_non_aligned_mem_for_access_p (tree exp, struct access *acc)
+{
+ unsigned int acc_align;
+
+ /* The alignment of the access is that of its expression. However, it may
+ have been artificially increased, e.g. by a local alignment promotion,
+ so we cap it to the alignment of the type of the base, on the grounds
+ that valid sub-accesses cannot be more aligned than that. */
+ acc_align = get_object_alignment (acc->expr);
+ if (acc->base && acc_align > TYPE_ALIGN (TREE_TYPE (acc->base)))
+ acc_align = TYPE_ALIGN (TREE_TYPE (acc->base));
+
+ return tree_non_aligned_mem_p (exp, acc_align);
+}
+
/* Scan expressions occurring in STMT, create access structures for all accesses
to candidates for scalarization and remove those candidates which occur in
statements or expressions that prevent them from being split apart. Return
tree lhs, rhs;
struct access *lacc, *racc;
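+
+ /* A scope clobber, e.g. x ={v} {CLOBBER}, only marks the end of the
+ clobbered variable's lifetime; it neither reads nor writes data that
+ scalarization would have to preserve, hence the check below.  */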
- if (!gimple_assign_single_p (stmt))
+ if (!gimple_assign_single_p (stmt)
+ /* Scope clobbers don't influence scalarization. */
+ || gimple_clobber_p (stmt))
return false;
lhs = gimple_assign_lhs (stmt);
lacc = build_access_from_expr_1 (lhs, stmt, true);
if (lacc)
- lacc->grp_assignment_write = 1;
+ {
+ lacc->grp_assignment_write = 1;
+ if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (rhs, lacc))
+ lacc->grp_unscalarizable_region = 1;
+ }
if (racc)
{
if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
&& !is_gimple_reg_type (racc->type))
bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
+ if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (lhs, racc))
+ racc->grp_unscalarizable_region = 1;
}
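+
+ /* For instance, in a hypothetical aggregate copy s = *p where *p turns
+ out to be under-aligned, scalarizing S would split the copy into
+ piecewise accesses performed at the natural alignment of the fields,
+ so the corresponding access must be kept in one piece.  */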
if (lacc && racc
return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
}
+DEF_VEC_ALLOC_P_STACK (tree);
+#define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)
+
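+/* The two lines above instantiate the stack-allocated flavor of GCC's
+ VEC API for trees: a VEC(tree,stack) vector is created with
+ VEC_alloc (tree, stack, N), grown with VEC_safe_push and released with
+ VEC_free, as done in build_ref_for_model below.  */
+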
/* Construct a memory reference to a part of an aggregate BASE at the given
- OFFSET and of the same type as MODEL. In case this is a reference to a
- component, the function will replicate the last COMPONENT_REF of model's
- expr to access it. GSI and INSERT_AFTER have the same meaning as in
- build_ref_for_offset. */
+ OFFSET and of the type of MODEL. In case this is a chain of references
+ to a component, the function will replicate the chain of COMPONENT_REFs of
+ the expression of MODEL to access it. GSI and INSERT_AFTER have the same
+ meaning as in build_ref_for_offset. */
static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
struct access *model, gimple_stmt_iterator *gsi,
bool insert_after)
{
+ tree type = model->type, t;
+ VEC(tree,stack) *cr_stack = NULL;
+
+ if (TREE_CODE (model->expr) == COMPONENT_REF)
+ {
+ tree expr = model->expr;
+
+ /* Create a stack of the COMPONENT_REFs so later we can walk them in
+ order from inner to outer. */
+ cr_stack = VEC_alloc (tree, stack, 6);
+
+ do {
+ tree field = TREE_OPERAND (expr, 1);
+ tree cr_offset = component_ref_field_offset (expr);
+ HOST_WIDE_INT bit_pos
+ = tree_low_cst (cr_offset, 1) * BITS_PER_UNIT
+ + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
+
+ /* We can be called with a model different from the one associated
+ with BASE, so we need to avoid going up the chain too far. */
+ if (offset - bit_pos < 0)
+ break;
+
+ offset -= bit_pos;
+ VEC_safe_push (tree, stack, cr_stack, expr);
+
+ expr = TREE_OPERAND (expr, 0);
+ type = TREE_TYPE (expr);
+ } while (TREE_CODE (expr) == COMPONENT_REF);
+ }
+
+ t = build_ref_for_offset (loc, base, offset, type, gsi, insert_after);
+
if (TREE_CODE (model->expr) == COMPONENT_REF)
{
- tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
- tree cr_offset = component_ref_field_offset (model->expr);
-
- gcc_assert (cr_offset && host_integerp (cr_offset, 1));
- offset -= TREE_INT_CST_LOW (cr_offset) * BITS_PER_UNIT;
- offset -= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fld));
- exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
- t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
- return fold_build3_loc (loc, COMPONENT_REF, model->type, t, fld,
- TREE_OPERAND (model->expr, 2));
+ unsigned i;
+ tree expr;
+
+ /* Now replicate the chain of COMPONENT_REFs from inner to outer. */
+ FOR_EACH_VEC_ELT_REVERSE (tree, cr_stack, i, expr)
+ {
+ tree field = TREE_OPERAND (expr, 1);
+ t = fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (field), t, field,
+ TREE_OPERAND (expr, 2));
+ }
+
+ VEC_free (tree, stack, cr_stack);
}
- else
- return build_ref_for_offset (loc, base, offset, model->type,
- gsi, insert_after);
+
+ return t;
}
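+
+/* As an illustration, given a hypothetical MODEL whose expression is the
+ chain rec.mid.fld, the code above first builds a reference to the part
+ of BASE corresponding to rec and then re-applies the COMPONENT_REFs for
+ mid and fld on top of it, whereas the previous version replicated only
+ the last COMPONENT_REF.  */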
/* Construct a memory reference consisting of component_refs and array_refs to
bool grp_assignment_read = access->grp_assignment_read;
bool grp_assignment_write = access->grp_assignment_write;
bool multiple_scalar_reads = false;
- bool total_scalarization = access->total_scalarization;
+ bool total_scalarization = access->grp_total_scalarization;
bool grp_partial_lhs = access->grp_partial_lhs;
bool first_scalar = is_gimple_reg_type (access->type);
bool unscalarizable_region = access->grp_unscalarizable_region;
grp_assignment_write |= ac2->grp_assignment_write;
grp_partial_lhs |= ac2->grp_partial_lhs;
unscalarizable_region |= ac2->grp_unscalarizable_region;
- total_scalarization |= ac2->total_scalarization;
+ total_scalarization |= ac2->grp_total_scalarization;
relink_to_new_repr (access, ac2);
/* If there are both aggregate-type and scalar-type accesses with
access->grp_assignment_read = grp_assignment_read;
access->grp_assignment_write = grp_assignment_write;
access->grp_hint = multiple_scalar_reads || total_scalarization;
+ access->grp_total_scalarization = total_scalarization;
access->grp_partial_lhs = grp_partial_lhs;
access->grp_unscalarizable_region = unscalarizable_region;
if (access->first_link)
tree repl;
repl = create_tmp_var (access->type, "SR");
- get_var_ann (repl);
add_referenced_var (repl);
if (rename)
mark_sym_for_renaming (repl);
root->grp_write = 1;
if (parent->grp_assignment_write)
root->grp_assignment_write = 1;
+ if (parent->grp_total_scalarization)
+ root->grp_total_scalarization = 1;
}
if (root->grp_unscalarizable_region)
for (child = root->first_child; child; child = child->next_sibling)
{
- if (!hole && child->offset < covered_to)
- hole = true;
- else
- covered_to += child->size;
-
+ hole |= covered_to < child->offset;
sth_created |= analyze_access_subtree (child, root,
allow_replacements && !scalar);
root->grp_unscalarized_data |= child->grp_unscalarized_data;
- hole |= !child->grp_covered;
+ root->grp_total_scalarization &= child->grp_total_scalarization;
+ if (child->grp_covered)
+ covered_to += child->size;
+ else
+ hole = true;
}
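+ /* For example, in a hypothetical 96-bit record whose two scalar fields
+ occupy bits [0, 32) and [64, 96) with padding in between, the gap
+ before the second child sets HOLE; if both fields stem from total
+ scalarization, the root can still be marked as covered below despite
+ the padding.  */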
if (allow_replacements && scalar && !root->first_child
|| ((root->grp_scalar_read || root->grp_assignment_read)
&& (root->grp_scalar_write || root->grp_assignment_write))))
{
+ bool new_integer_type;
+ if (TREE_CODE (root->type) == ENUMERAL_TYPE)
+ {
+ tree rt = root->type;
+ root->type = build_nonstandard_integer_type (TYPE_PRECISION (rt),
+ TYPE_UNSIGNED (rt));
+ new_integer_type = true;
+ }
+ else
+ new_integer_type = false;
+
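+ /* E.g. a root of a hypothetical enumeral type with 8-bit precision gets
+ the matching nonstandard integer type, so that values outside the
+ enumeration's range, which total scalarization may well load into the
+ replacement, do not mislead later range-based optimizations.  */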
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Marking ");
print_generic_expr (dump_file, root->base, 0);
- fprintf (dump_file, " offset: %u, size: %u: ",
+ fprintf (dump_file, " offset: %u, size: %u ",
(unsigned) root->offset, (unsigned) root->size);
- fprintf (dump_file, " to be replaced.\n");
+ fprintf (dump_file, " to be replaced%s.\n",
+ new_integer_type ? " with an integer" : "");
}
root->grp_to_be_replaced = 1;
sth_created = true;
hole = false;
}
- else if (covered_to < limit)
- hole = true;
+ else
+ {
+ if (covered_to < limit)
+ hole = true;
+ if (scalar)
+ root->grp_total_scalarization = 0;
+ }
- if (sth_created && !hole)
+ if (sth_created
+ && (!hole || root->grp_total_scalarization))
{
root->grp_covered = 1;
return true;
|| racc->grp_unscalarizable_region)
return false;
- if (!lacc->first_child && !racc->first_child
- && is_gimple_reg_type (racc->type))
+ if (is_gimple_reg_type (racc->type))
{
- tree t = lacc->base;
-
- lacc->type = racc->type;
- if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t), lacc->offset,
- racc->type))
- lacc->expr = t;
- else
+ if (!lacc->first_child && !racc->first_child)
{
- lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
- lacc->base, lacc->offset,
- racc, NULL, false);
- lacc->grp_no_warning = true;
+ tree t = lacc->base;
+
+ lacc->type = racc->type;
+ if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
+ lacc->offset, racc->type))
+ lacc->expr = t;
+ else
+ {
+ lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
+ lacc->base, lacc->offset,
+ racc, NULL, false);
+ lacc->grp_no_warning = true;
+ }
}
return false;
}
tree var = referenced_var (i);
if (TREE_CODE (var) == VAR_DECL
- && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
- <= max_total_scalarization_size)
&& type_consists_of_records_p (TREE_TYPE (var)))
{
- completely_scalarize_record (var, var, 0, var);
- if (dump_file && (dump_flags & TDF_DETAILS))
+ if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
+ <= max_total_scalarization_size)
{
- fprintf (dump_file, "Will attempt to totally scalarize ");
+ completely_scalarize_var (var);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Will attempt to totally scalarize ");
+ print_generic_expr (dump_file, var, 0);
+ fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
+ }
+ }
+ else if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Too big to totally scalarize: ");
print_generic_expr (dump_file, var, 0);
- fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
+ fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
}
}
}
rhs = get_access_replacement (racc);
if (!useless_type_conversion_p (lacc->type, racc->type))
rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
+
+ if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
+ rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
+ true, GSI_SAME_STMT);
}
else
{
else
rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
new_gsi, true);
+ if (lacc->grp_partial_lhs)
+ rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
+ false, GSI_NEW_STMT);
}
stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
there to do the copying and then load the scalar replacements of the LHS.
This is what the first branch does. */
- if (gimple_has_volatile_ops (*stmt)
+ if (modify_this_stmt
+ || gimple_has_volatile_ops (*stmt)
|| contains_vce_or_bfcref_p (rhs)
|| contains_vce_or_bfcref_p (lhs))
{
&& TREE_OPERAND (lhs, 0) == name
&& integer_zerop (TREE_OPERAND (lhs, 1))
&& types_compatible_p (TREE_TYPE (lhs),
- TREE_TYPE (TREE_TYPE (name))))
+ TREE_TYPE (TREE_TYPE (name)))
+ && !TREE_THIS_VOLATILE (lhs))
uses_ok++;
}
if (gimple_assign_single_p (stmt))
&& TREE_OPERAND (rhs, 0) == name
&& integer_zerop (TREE_OPERAND (rhs, 1))
&& types_compatible_p (TREE_TYPE (rhs),
- TREE_TYPE (TREE_TYPE (name))))
+ TREE_TYPE (TREE_TYPE (name)))
+ && !TREE_THIS_VOLATILE (rhs))
uses_ok++;
}
else if (is_gimple_call (stmt))
&& TREE_OPERAND (arg, 0) == name
&& integer_zerop (TREE_OPERAND (arg, 1))
&& types_compatible_p (TREE_TYPE (arg),
- TREE_TYPE (TREE_TYPE (name))))
+ TREE_TYPE (TREE_TYPE (name)))
+ && !TREE_THIS_VOLATILE (arg))
uses_ok++;
}
}
|| gimple_code (access->stmt) == GIMPLE_ASM))
return true;
+ if (STRICT_ALIGNMENT
+ && tree_non_aligned_mem_p (access->expr, TYPE_ALIGN (access->type)))
+ return true;
+
return false;
}
DECL_NAME (repl) = get_identifier (pretty_name);
obstack_free (&name_obstack, pretty_name);
- get_var_ann (repl);
add_referenced_var (repl);
adj->new_ssa_base = repl;
}
VEC (cgraph_edge_p, heap) * redirect_callers = collect_callers_of_node (node);
rebuild_cgraph_edges ();
+ free_dominance_info (CDI_DOMINATORS);
pop_cfun ();
current_function_decl = NULL_TREE;