statement? This flag is propagated down the access tree. */
unsigned grp_assignment_read : 1;
+ /* Does this group contain a write access that comes from an assignment
+ statement? This flag is propagated down the access tree. */
+ unsigned grp_assignment_write : 1;
+
/* Other passes of the analysis use this bit to make function
analyze_access_subtree create scalar replacements for this group if
possible. */
cannot be called from within FOR_EACH_REFERENCED_VAR. */
unsigned grp_to_be_replaced : 1;
+ /* Should TREE_NO_WARNING of a replacement be set? */
+ unsigned grp_no_warning : 1;
+
/* Is it possible that the group refers to data which might be (directly or
otherwise) modified? */
unsigned grp_maybe_modified : 1;
if (grp)
fprintf (f, ", grp_write = %d, total_scalarization = %d, "
"grp_read = %d, grp_hint = %d, grp_assignment_read = %d,"
- "grp_covered = %d, grp_unscalarizable_region = %d, "
- "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
- "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
+ "grp_assignment_write = %d, grp_covered = %d, "
+ "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
+ "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
+ "grp_maybe_modified = %d, "
"grp_not_necessarilly_dereferenced = %d\n",
access->grp_write, access->total_scalarization,
access->grp_read, access->grp_hint, access->grp_assignment_read,
- access->grp_covered, access->grp_unscalarizable_region,
- access->grp_unscalarized_data, access->grp_partial_lhs,
- access->grp_to_be_replaced, access->grp_maybe_modified,
+ access->grp_assignment_write, access->grp_covered,
+ access->grp_unscalarizable_region, access->grp_unscalarized_data,
+ access->grp_partial_lhs, access->grp_to_be_replaced,
+ access->grp_maybe_modified,
access->grp_not_necessarilly_dereferenced);
else
fprintf (f, ", write = %d, total_scalarization = %d, "
if (TREE_THIS_VOLATILE (fld)
|| !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
|| !host_integerp (DECL_FIELD_OFFSET (fld), 1)
- || !host_integerp (DECL_SIZE (fld), 1))
+ || !host_integerp (DECL_SIZE (fld), 1)
+ || (DECL_BIT_FIELD (fld) && AGGREGATE_TYPE_P (ft)))
return true;
if (AGGREGATE_TYPE_P (ft)
disqualify_candidate (base, "Encountered a variable sized access.");
return NULL;
}
- if ((offset % BITS_PER_UNIT) != 0 || (size % BITS_PER_UNIT) != 0)
+ if (TREE_CODE (expr) == COMPONENT_REF
+ && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
{
- disqualify_candidate (base,
- "Encountered an acces not aligned to a byte.");
+ disqualify_candidate (base, "Encountered a bit-field access.");
return NULL;
}
+ gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
if (ptr)
mark_parm_dereference (base, offset + size, stmt);
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
register types or (recursively) records with only these two kinds of fields.
- It also returns false if any of these records has a zero-size field as its
- last field or has a bit-field. */
+ It also returns false if any of these records contains a bit-field. */
static bool
type_consists_of_records_p (tree type)
{
tree fld;
- bool last_fld_has_zero_size = false;
if (TREE_CODE (type) != RECORD_TYPE)
return false;
if (!is_gimple_reg_type (ft)
&& !type_consists_of_records_p (ft))
return false;
-
- last_fld_has_zero_size = tree_low_cst (DECL_SIZE (fld), 1) == 0;
}
- if (last_fld_has_zero_size)
- return false;
-
return true;
}
racc = build_access_from_expr_1 (rhs, stmt, false);
lacc = build_access_from_expr_1 (lhs, stmt, true);
+ if (lacc)
+ lacc->grp_assignment_write = 1;
+
if (racc)
{
racc->grp_assignment_read = 1;
by INSERT_AFTER. This function is not capable of handling bitfields. */
tree
-build_ref_for_offset (tree base, HOST_WIDE_INT offset,
+build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
tree exp_type, gimple_stmt_iterator *gsi,
bool insert_after)
{
tree prev_base = base;
tree off;
- location_t loc = EXPR_LOCATION (base);
HOST_WIDE_INT base_offset;
gcc_checking_assert (offset % BITS_PER_UNIT == 0);
tmp = make_ssa_name (tmp, NULL);
addr = build_fold_addr_expr (unshare_expr (prev_base));
stmt = gimple_build_assign (tmp, addr);
+ gimple_set_location (stmt, loc);
SSA_NAME_DEF_STMT (tmp) = stmt;
if (insert_after)
gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
/* Construct a memory reference to a part of an aggregate BASE at the given
OFFSET and of the same type as MODEL. In case this is a reference to a
- bit-field, the function will replicate the last component_ref of model's
+ component, the function will replicate the last COMPONENT_REF of model's
expr to access it. GSI and INSERT_AFTER have the same meaning as in
build_ref_for_offset. */
static tree
-build_ref_for_model (tree base, HOST_WIDE_INT offset,
+build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
struct access *model, gimple_stmt_iterator *gsi,
bool insert_after)
{
- if (TREE_CODE (model->expr) == COMPONENT_REF
- && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
+ if (TREE_CODE (model->expr) == COMPONENT_REF)
{
- /* This access represents a bit-field. */
tree t, exp_type;
-
offset -= int_bit_position (TREE_OPERAND (model->expr, 1));
exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
- t = build_ref_for_offset (base, offset, exp_type, gsi, insert_after);
- return fold_build3_loc (EXPR_LOCATION (base), COMPONENT_REF,
- model->type, t, TREE_OPERAND (model->expr, 1),
- NULL_TREE);
+ t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
+ return fold_build3_loc (loc, COMPONENT_REF, model->type, t,
+ TREE_OPERAND (model->expr, 1), NULL_TREE);
}
else
- return build_ref_for_offset (base, offset, model->type, gsi, insert_after);
+ return build_ref_for_offset (loc, base, offset, model->type,
+ gsi, insert_after);
}
/* Construct a memory reference consisting of component_refs and array_refs to
access_count = VEC_length (access_p, access_vec);
/* Sort by <OFFSET, SIZE>. */
- qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
- compare_access_positions);
+ VEC_qsort (access_p, access_vec, compare_access_positions);
i = 0;
while (i < access_count)
bool grp_write = access->write;
bool grp_read = !access->write;
bool grp_assignment_read = access->grp_assignment_read;
+ bool grp_assignment_write = access->grp_assignment_write;
bool multiple_reads = false;
bool total_scalarization = access->total_scalarization;
bool grp_partial_lhs = access->grp_partial_lhs;
grp_read = true;
}
grp_assignment_read |= ac2->grp_assignment_read;
+ grp_assignment_write |= ac2->grp_assignment_write;
grp_partial_lhs |= ac2->grp_partial_lhs;
unscalarizable_region |= ac2->grp_unscalarizable_region;
total_scalarization |= ac2->total_scalarization;
access->grp_write = grp_write;
access->grp_read = grp_read;
access->grp_assignment_read = grp_assignment_read;
+ access->grp_assignment_write = grp_assignment_write;
access->grp_hint = multiple_reads || total_scalarization;
access->grp_partial_lhs = grp_partial_lhs;
access->grp_unscalarizable_region = unscalarizable_region;
}
SET_DECL_DEBUG_EXPR (repl, debug_expr);
DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
- TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
+ if (access->grp_no_warning)
+ TREE_NO_WARNING (repl) = 1;
+ else
+ TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
}
else
TREE_NO_WARNING (repl) = 1;
return false;
}
-enum mark_read_status { SRA_MR_NOT_READ, SRA_MR_READ, SRA_MR_ASSIGN_READ};
+enum mark_rw_status { SRA_MRRW_NOTHING, SRA_MRRW_DIRECT, SRA_MRRW_ASSIGN};
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
both seeming beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
sorts of access flags appropriately along the way, notably always set
grp_read and grp_assign_read according to MARK_READ and grp_write when
- MARK_WRITE is true. */
+ MARK_WRITE is true.
+
+ Creating a replacement for a scalar access is considered beneficial if its
+ grp_hint is set (this means we are either attempting total scalarization or
+ there is more than one direct read access) or according to the following
+ table:
+
+ Access written to individually (once or more times)
+ |
+ | Parent written to in an assignment statement
+ | |
+ | | Access read individually _once_
+ | | |
+ | | | Parent read in an assignment statement
+ | | | |
+ | | | | Scalarize Comment
+-----------------------------------------------------------------------------
+ 0 0 0 0 No access for the scalar
+ 0 0 0 1 No access for the scalar
+ 0 0 1 0 No Single read - won't help
+ 0 0 1 1 No The same case
+ 0 1 0 0 No access for the scalar
+ 0 1 0 1 No access for the scalar
+ 0 1 1 0 Yes s = *g; return s.i;
+ 0 1 1 1 Yes The same case as above
+ 1 0 0 0 No Won't help
+ 1 0 0 1 Yes s.i = 1; *g = s;
+ 1 0 1 0 Yes s.i = 5; g = s.i;
+ 1 0 1 1 Yes The same case as above
+ 1 1 0 0 No Won't help.
+ 1 1 0 1 Yes s.i = 1; *g = s;
+ 1 1 1 0 Yes s = *g; return s.i;
+ 1 1 1 1 Yes Any of the above yeses */
static bool
analyze_access_subtree (struct access *root, bool allow_replacements,
- enum mark_read_status mark_read, bool mark_write)
+ enum mark_rw_status mark_read,
+ enum mark_rw_status mark_write)
{
struct access *child;
HOST_WIDE_INT limit = root->offset + root->size;
bool scalar = is_gimple_reg_type (root->type);
bool hole = false, sth_created = false;
bool direct_read = root->grp_read;
+ bool direct_write = root->grp_write;
- if (mark_read == SRA_MR_ASSIGN_READ)
+ if (root->grp_assignment_read)
+ mark_read = SRA_MRRW_ASSIGN;
+ else if (mark_read == SRA_MRRW_ASSIGN)
{
root->grp_read = 1;
root->grp_assignment_read = 1;
}
- if (mark_read == SRA_MR_READ)
+ else if (mark_read == SRA_MRRW_DIRECT)
root->grp_read = 1;
- else if (root->grp_assignment_read)
- mark_read = SRA_MR_ASSIGN_READ;
else if (root->grp_read)
- mark_read = SRA_MR_READ;
+ mark_read = SRA_MRRW_DIRECT;
- if (mark_write)
- root->grp_write = true;
+ if (root->grp_assignment_write)
+ mark_write = SRA_MRRW_ASSIGN;
+ else if (mark_write == SRA_MRRW_ASSIGN)
+ {
+ root->grp_write = 1;
+ root->grp_assignment_write = 1;
+ }
+ else if (mark_write == SRA_MRRW_DIRECT)
+ root->grp_write = 1;
else if (root->grp_write)
- mark_write = true;
+ mark_write = SRA_MRRW_DIRECT;
if (root->grp_unscalarizable_region)
allow_replacements = false;
if (allow_replacements && scalar && !root->first_child
&& (root->grp_hint
- || (root->grp_write && (direct_read || root->grp_assignment_read))))
+ || ((direct_write || root->grp_assignment_write)
+ && (direct_read || root->grp_assignment_read))))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
while (access)
{
- if (analyze_access_subtree (access, true, SRA_MR_NOT_READ, false))
+ if (analyze_access_subtree (access, true,
+ SRA_MRRW_NOTHING, SRA_MRRW_NOTHING))
ret = true;
access = access->next_grp;
}
tree expr = parent->base;
gcc_assert (!model->grp_unscalarizable_region);
- if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
- model->type))
- return NULL;
access = (struct access *) pool_alloc (access_pool);
memset (access, 0, sizeof (struct access));
+ if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
+ model->type))
+ {
+ access->grp_no_warning = true;
+ expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
+ new_offset, model, NULL, false);
+ }
+
access->base = parent->base;
access->expr = expr;
access->offset = new_offset;
{
tree t = lacc->base;
+ lacc->type = racc->type;
if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t), lacc->offset,
racc->type))
+ lacc->expr = t;
+ else
{
- lacc->expr = t;
- lacc->type = racc->type;
+ lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
+ lacc->base, lacc->offset,
+ racc, NULL, false);
+ lacc->grp_no_warning = true;
}
return false;
}
}
/* Generate statements copying scalar replacements of accesses within a subtree
- into or out of AGG. ACCESS is the first child of the root of the subtree to
- be processed. AGG is an aggregate type expression (can be a declaration but
- does not have to be, it can for example also be a mem_ref or a series of
- handled components). TOP_OFFSET is the offset of the processed subtree
- which has to be subtracted from offsets of individual accesses to get
- corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
+ into or out of AGG. ACCESS, all its children, siblings and their children
+ are to be processed. AGG is an aggregate type expression (can be a
+ declaration but does not have to be, it can for example also be a mem_ref or
+ a series of handled components). TOP_OFFSET is the offset of the processed
+ subtree which has to be subtracted from offsets of individual accesses to
+ get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
replacements in the interval <start_offset, start_offset + chunk_size>,
otherwise copy all. GSI is a statement iterator used to place the new
statements. WRITE should be true when the statements should write from AGG
HOST_WIDE_INT top_offset,
HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
gimple_stmt_iterator *gsi, bool write,
- bool insert_after)
+ bool insert_after, location_t loc)
{
do
{
tree expr, repl = get_access_replacement (access);
gimple stmt;
- expr = build_ref_for_model (agg, access->offset - top_offset,
+ expr = build_ref_for_model (loc, agg, access->offset - top_offset,
access, gsi, insert_after);
if (write)
: GSI_SAME_STMT);
stmt = gimple_build_assign (expr, repl);
}
+ gimple_set_location (stmt, loc);
if (insert_after)
gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
if (access->first_child)
generate_subtree_copies (access->first_child, agg, top_offset,
start_offset, chunk_size, gsi,
- write, insert_after);
+ write, insert_after, loc);
access = access->next_sibling;
}
static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
- bool insert_after)
+ bool insert_after, location_t loc)
{
struct access *child;
gimple stmt;
stmt = gimple_build_assign (get_access_replacement (access),
- fold_convert (access->type,
- integer_zero_node));
+ build_zero_cst (access->type));
if (insert_after)
gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
else
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
update_stmt (stmt);
+ gimple_set_location (stmt, loc);
}
for (child = access->first_child; child; child = child->next_sibling)
- init_subtree_with_zero (child, gsi, insert_after);
+ init_subtree_with_zero (child, gsi, insert_after, loc);
}
/* Search for an access representative for the given expression EXPR and
static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
+ location_t loc;
struct access *access;
tree type, bfr;
return false;
type = TREE_TYPE (*expr);
+ loc = gimple_location (gsi_stmt (*gsi));
if (access->grp_to_be_replaced)
{
tree repl = get_access_replacement (access);
{
tree ref;
- ref = build_ref_for_model (access->base, access->offset, access,
+ ref = build_ref_for_model (loc, access->base, access->offset, access,
NULL, false);
if (write)
ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
false, GSI_NEW_STMT);
stmt = gimple_build_assign (repl, ref);
+ gimple_set_location (stmt, loc);
gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
}
else
repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
true, GSI_SAME_STMT);
stmt = gimple_build_assign (ref, repl);
+ gimple_set_location (stmt, loc);
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
}
}
start_offset = chunk_size = 0;
generate_subtree_copies (access->first_child, access->base, 0,
- start_offset, chunk_size, gsi, write, write);
+ start_offset, chunk_size, gsi, write, write,
+ loc);
}
return true;
}
SRA_UDH_LEFT }; /* Data flushed to the LHS. */
/* Store all replacements in the access tree rooted in TOP_RACC either to their
- base aggregate if there are unscalarized data or directly to LHS
- otherwise. */
+ base aggregate if there are unscalarized data or directly to LHS of the
+ statement that is pointed to by GSI otherwise. */
static enum unscalarized_data_handling
-handle_unscalarized_data_in_subtree (struct access *top_racc, tree lhs,
+handle_unscalarized_data_in_subtree (struct access *top_racc,
gimple_stmt_iterator *gsi)
{
if (top_racc->grp_unscalarized_data)
{
generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
- gsi, false, false);
+ gsi, false, false,
+ gimple_location (gsi_stmt (*gsi)));
return SRA_UDH_RIGHT;
}
else
{
+ tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
- 0, 0, gsi, false, false);
+ 0, 0, gsi, false, false,
+ gimple_location (gsi_stmt (*gsi)));
return SRA_UDH_LEFT;
}
}
-/* Try to generate statements to load all sub-replacements in an access
- (sub)tree (LACC is the first child) from scalar replacements in the TOP_RACC
- (sub)tree. If that is not possible, refresh the TOP_RACC base aggregate and
- load the accesses from it. LEFT_OFFSET is the offset of the left whole
- subtree being copied, RIGHT_OFFSET is the same thing for the right subtree.
- NEW_GSI is stmt iterator used for statement insertions after the original
- assignment, OLD_GSI is used to insert statements before the assignment.
- *REFRESHED keeps the information whether we have needed to refresh
- replacements of the LHS and from which side of the assignments this takes
- place. */
+/* Try to generate statements to load all sub-replacements in an access subtree
+ formed by children of LACC from scalar replacements in the TOP_RACC subtree.
+ If that is not possible, refresh the TOP_RACC base aggregate and load the
+ accesses from it. LEFT_OFFSET is the offset of the left whole subtree being
+ copied. NEW_GSI is stmt iterator used for statement insertions after the
+ original assignment, OLD_GSI is used to insert statements before the
+ assignment. *REFRESHED keeps the information whether we have needed to
+ refresh replacements of the LHS and from which side of the assignments this
+ takes place. */
static void
load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
HOST_WIDE_INT left_offset,
- HOST_WIDE_INT right_offset,
gimple_stmt_iterator *old_gsi,
gimple_stmt_iterator *new_gsi,
- enum unscalarized_data_handling *refreshed,
- tree lhs)
+ enum unscalarized_data_handling *refreshed)
{
- location_t loc = EXPR_LOCATION (lacc->expr);
- do
+ location_t loc = gimple_location (gsi_stmt (*old_gsi));
+ for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
{
if (lacc->grp_to_be_replaced)
{
struct access *racc;
- HOST_WIDE_INT offset = lacc->offset - left_offset + right_offset;
+ HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
gimple stmt;
tree rhs;
the aggregate. See if we have to update it first... */
if (*refreshed == SRA_UDH_NONE)
*refreshed = handle_unscalarized_data_in_subtree (top_racc,
- lhs, old_gsi);
+ old_gsi);
if (*refreshed == SRA_UDH_LEFT)
- rhs = build_ref_for_model (lacc->base, lacc->offset, lacc,
+ rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
new_gsi, true);
else
- rhs = build_ref_for_model (top_racc->base, offset, lacc,
+ rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
new_gsi, true);
}
stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
+ gimple_set_location (stmt, loc);
update_stmt (stmt);
sra_stats.subreplacements++;
}
else if (*refreshed == SRA_UDH_NONE
&& lacc->grp_read && !lacc->grp_covered)
- *refreshed = handle_unscalarized_data_in_subtree (top_racc, lhs,
+ *refreshed = handle_unscalarized_data_in_subtree (top_racc,
old_gsi);
if (lacc->first_child)
- load_assign_lhs_subreplacements (lacc->first_child, top_racc,
- left_offset, right_offset,
- old_gsi, new_gsi, refreshed, lhs);
- lacc = lacc->next_sibling;
+ load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
+ old_gsi, new_gsi, refreshed);
}
- while (lacc);
}
/* Result code for SRA assignment modification. */
{
tree lhs = gimple_assign_lhs (*stmt);
struct access *acc;
+ location_t loc;
acc = get_access_for_expr (lhs);
if (!acc)
return SRA_AM_NONE;
+ loc = gimple_location (*stmt);
if (VEC_length (constructor_elt,
CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
{
following should handle it gracefully. */
if (access_has_children_p (acc))
generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
- true, true);
+ true, true, loc);
return SRA_AM_MODIFIED;
}
if (acc->grp_covered)
{
- init_subtree_with_zero (acc, gsi, false);
+ init_subtree_with_zero (acc, gsi, false, loc);
unlink_stmt_vdef (*stmt);
gsi_remove (gsi, true);
return SRA_AM_REMOVED;
}
else
{
- init_subtree_with_zero (acc, gsi, true);
+ init_subtree_with_zero (acc, gsi, true, loc);
return SRA_AM_MODIFIED;
}
}
return repl;
}
+/* Return true if REF has a COMPONENT_REF with a bit-field FIELD_DECL
+ somewhere in its chain of handled components. */
+
+static inline bool
+contains_bitfld_comp_ref_p (const_tree ref)
+{
+ while (handled_component_p (ref))
+ {
+ if (TREE_CODE (ref) == COMPONENT_REF
+ && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
+ return true;
+ ref = TREE_OPERAND (ref, 0);
+ }
+
+ return false;
+}
+
+/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
+ bit-field FIELD_DECL somewhere in its chain of handled components. */
+
+static inline bool
+contains_vce_or_bfcref_p (const_tree ref)
+{
+ while (handled_component_p (ref))
+ {
+ if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
+ || (TREE_CODE (ref) == COMPONENT_REF
+ && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
+ return true;
+ ref = TREE_OPERAND (ref, 0);
+ }
+
+ return false;
+}
+
/* Examine both sides of the assignment statement pointed to by STMT, replace
them with a scalare replacement if there is one and generate copying of
replacements if scalarized aggregates have been used in the assignment. GSI
tree lhs, rhs;
bool modify_this_stmt = false;
bool force_gimple_rhs = false;
- location_t loc = gimple_location (*stmt);
+ location_t loc;
gimple_stmt_iterator orig_gsi = *gsi;
if (!gimple_assign_single_p (*stmt))
if (!lacc && !racc)
return SRA_AM_NONE;
+ loc = gimple_location (*stmt);
if (lacc && lacc->grp_to_be_replaced)
{
lhs = get_access_replacement (lacc);
??? This should move to fold_stmt which we simply should
call after building a VIEW_CONVERT_EXPR here. */
if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
+ && !contains_bitfld_comp_ref_p (lhs)
&& !access_has_children_p (lacc))
{
- lhs = build_ref_for_offset (lhs, 0, TREE_TYPE (rhs), gsi, false);
+ lhs = build_ref_for_offset (loc, lhs, 0, TREE_TYPE (rhs),
+ gsi, false);
gimple_assign_set_lhs (*stmt, lhs);
}
else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
- && !contains_view_convert_expr_p (rhs)
+ && !contains_vce_or_bfcref_p (rhs)
&& !access_has_children_p (racc))
- rhs = build_ref_for_offset (rhs, 0, TREE_TYPE (lhs), gsi, false);
+ rhs = build_ref_for_offset (loc, rhs, 0, TREE_TYPE (lhs),
+ gsi, false);
if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
{
This is what the first branch does. */
if (gimple_has_volatile_ops (*stmt)
- || contains_view_convert_expr_p (rhs)
- || contains_view_convert_expr_p (lhs))
+ || contains_vce_or_bfcref_p (rhs)
+ || contains_vce_or_bfcref_p (lhs))
{
if (access_has_children_p (racc))
generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
- gsi, false, false);
+ gsi, false, false, loc);
if (access_has_children_p (lacc))
generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
- gsi, true, true);
+ gsi, true, true, loc);
sra_stats.separate_lhs_rhs_handling++;
}
else
enum unscalarized_data_handling refreshed;
if (lacc->grp_read && !lacc->grp_covered)
- refreshed = handle_unscalarized_data_in_subtree (racc, lhs, gsi);
+ refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
else
refreshed = SRA_UDH_NONE;
- load_assign_lhs_subreplacements (lacc->first_child, racc,
- lacc->offset, racc->offset,
- &orig_gsi, gsi, &refreshed, lhs);
+ load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
+ &orig_gsi, gsi, &refreshed);
if (refreshed != SRA_UDH_RIGHT)
{
gsi_next (gsi);
if (racc->first_child)
generate_subtree_copies (racc->first_child, lhs,
racc->offset, 0, 0, gsi,
- false, false);
+ false, false, loc);
gcc_assert (*stmt == gsi_stmt (*gsi));
unlink_stmt_vdef (*stmt);
}
}
else if (racc->first_child)
- generate_subtree_copies (racc->first_child, lhs,
- racc->offset, 0, 0, gsi, false, true);
+ generate_subtree_copies (racc->first_child, lhs, racc->offset,
+ 0, 0, gsi, false, true, loc);
}
if (access_has_children_p (lacc))
generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
- 0, 0, gsi, true, true);
+ 0, 0, gsi, true, true, loc);
}
}
for (access = VEC_index (access_p, access_vec, 0);
access;
access = access->next_grp)
- generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true);
+ generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
+ EXPR_LOCATION (parm));
}
if (seq)
return &no_accesses_representant;
access_count = VEC_length (access_p, access_vec);
- qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
- compare_access_positions);
+ VEC_qsort (access_p, access_vec, compare_access_positions);
i = 0;
total_size = 0;
{
/* V_C_Es of constructors can cause trouble (PR 42714). */
if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
- *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
+ *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
else
*rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
}