/* Scalar Replacement of Aggregates (SRA) converts some structure
references into scalar references, exposing them to the scalar
optimizers.
- Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
+ Copyright (C) 2008-2013 Free Software Foundation, Inc.
Contributed by Martin Jambor <mjambor@suse.cz>
This file is part of GCC.
typedef struct access *access_p;
-DEF_VEC_P (access_p);
-DEF_VEC_ALLOC_P (access_p, heap);
/* Alloc pool for allocating access structures. */
static alloc_pool access_pool;
/* Alloc pool for allocating assign link structures. */
static alloc_pool link_pool;
-/* Base (tree) -> Vector (VEC(access_p,heap) *) map. */
+/* Base (tree) -> Vector (vec<access_p> *) map. */
static struct pointer_map_t *base_access_vec;
/* Set of candidates. */
/* Return a vector of pointers to accesses for the variable given in BASE or
NULL if there is none. */
-static VEC (access_p, heap) *
+static vec<access_p> *
get_base_access_vector (tree base)
{
void **slot;
if (!slot)
return NULL;
else
- return *(VEC (access_p, heap) **) slot;
+ return *(vec<access_p> **) slot;
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
static struct access *
get_first_repr_for_decl (tree base)
{
- VEC (access_p, heap) *access_vec;
+ vec<access_p> *access_vec;
access_vec = get_base_access_vector (base);
if (!access_vec)
return NULL;
- return VEC_index (access_p, access_vec, 0);
+ return (*access_vec)[0];
}
/* Find an access representative for the variable BASE and given OFFSET and
sra_initialize (void)
{
candidate_bitmap = BITMAP_ALLOC (NULL);
- candidates = htab_create (VEC_length (tree, cfun->local_decls) / 2,
+ candidates = htab_create (vec_safe_length (cfun->local_decls) / 2,
uid_decl_map_hash, uid_decl_map_eq, NULL);
should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
void *data ATTRIBUTE_UNUSED)
{
- VEC (access_p, heap) *access_vec;
- access_vec = (VEC (access_p, heap) *) *value;
- VEC_free (access_p, heap, access_vec);
-
+ vec<access_p> *access_vec = (vec<access_p> *) *value;
+ vec_free (access_vec);
return true;
}
{
*msg = "structure field size not fixed";
return true;
- }
+ }
+ if (!host_integerp (bit_position (fld), 0))
+ {
+ *msg = "structure field size too big";
+ return true;
+ }
if (AGGREGATE_TYPE_P (ft)
&& int_bit_position (fld) % BITS_PER_UNIT != 0)
{
static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
- VEC (access_p, heap) *vec;
+ vec<access_p> *v;
struct access *access;
void **slot;
slot = pointer_map_contains (base_access_vec, base);
if (slot)
- vec = (VEC (access_p, heap) *) *slot;
+ v = (vec<access_p> *) *slot;
else
- vec = VEC_alloc (access_p, heap, 32);
+ vec_alloc (v, 32);
- VEC_safe_push (access_p, heap, vec, access);
+ v->safe_push (access);
- *((struct VEC (access_p,heap) **)
- pointer_map_insert (base_access_vec, base)) = vec;
+ *((vec<access_p> **)
+ pointer_map_insert (base_access_vec, base)) = v;
return access;
}
EXP_TYPE at the given OFFSET. If BASE is something for which
get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
to insert new statements either before or below the current one as specified
- by INSERT_AFTER. This function is not capable of handling bitfields. */
+ by INSERT_AFTER. This function is not capable of handling bitfields.
+
+ BASE must be either a declaration or a memory reference that has correct
+ alignment information embedded in it (e.g. a pre-existing one in SRA). */
tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
unsigned int align;
gcc_checking_assert (offset % BITS_PER_UNIT == 0);
-
+ get_object_alignment_1 (base, &align, &misalign);
base = get_addr_base_and_unit_offset (base, &base_offset);
/* get_addr_base_and_unit_offset returns NULL for references with a variable
base = build_fold_addr_expr (unshare_expr (base));
}
- /* If prev_base were always an originally performed access
- we can extract more optimistic alignment information
- by looking at the access mode. That would constrain the
- alignment of base + base_offset which we would need to
- adjust according to offset. */
- if (!get_pointer_alignment_1 (base, &align, &misalign))
- {
- gcc_assert (misalign == 0);
- if (TREE_CODE (prev_base) == MEM_REF
- || TREE_CODE (prev_base) == TARGET_MEM_REF)
- align = TYPE_ALIGN (TREE_TYPE (prev_base));
- }
- misalign += (tree_to_double_int (off)
- .sext (TYPE_PRECISION (TREE_TYPE (off))).low
- * BITS_PER_UNIT);
- misalign = misalign & (align - 1);
+ misalign = (misalign + offset) & (align - 1);
if (misalign != 0)
align = (misalign & -misalign);
if (align < TYPE_ALIGN (exp_type))
{
int i, j, access_count;
struct access *res, **prev_acc_ptr = &res;
- VEC (access_p, heap) *access_vec;
+ vec<access_p> *access_vec;
bool first = true;
HOST_WIDE_INT low = -1, high = 0;
access_vec = get_base_access_vector (var);
if (!access_vec)
return NULL;
- access_count = VEC_length (access_p, access_vec);
+ access_count = access_vec->length ();
/* Sort by <OFFSET, SIZE>. */
- VEC_qsort (access_p, access_vec, compare_access_positions);
+ access_vec->qsort (compare_access_positions);
i = 0;
while (i < access_count)
{
- struct access *access = VEC_index (access_p, access_vec, i);
+ struct access *access = (*access_vec)[i];
bool grp_write = access->write;
bool grp_read = !access->write;
bool grp_scalar_write = access->write
j = i + 1;
while (j < access_count)
{
- struct access *ac2 = VEC_index (access_p, access_vec, j);
+ struct access *ac2 = (*access_vec)[j];
if (ac2->offset != access->offset || ac2->size != access->size)
break;
if (ac2->write)
prev_acc_ptr = &access->next_grp;
}
- gcc_assert (res == VEC_index (access_p, access_vec, 0));
+ gcc_assert (res == (*access_vec)[0]);
return res;
}
and that get_ref_base_and_extent works properly on the
expression. It cannot handle accesses at a non-constant offset
though, so just give up in those cases. */
- for (d = debug_expr; !fail && handled_component_p (d);
+ for (d = debug_expr;
+ !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
d = TREE_OPERAND (d, 0))
switch (TREE_CODE (d))
{
&& TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
fail = true;
break;
+ case MEM_REF:
+ if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
+ fail = true;
+ else
+ d = TREE_OPERAND (d, 0);
+ break;
default:
break;
}
}
else
{
- if (MAY_HAVE_DEBUG_STMTS && allow_replacements
+ if (allow_replacements
&& scalar && !root->first_child
- && (root->grp_scalar_write || root->grp_assignment_write))
+ && (root->grp_scalar_write || root->grp_assignment_write)
+ && !bitmap_bit_p (cannot_scalarize_away_bitmap,
+ DECL_UID (root->base)))
{
gcc_checking_assert (!root->grp_scalar_read
&& !root->grp_assignment_read);
- root->grp_to_be_debug_replaced = 1;
- if (dump_file && (dump_flags & TDF_DETAILS))
+ sth_created = true;
+ if (MAY_HAVE_DEBUG_STMTS)
{
- fprintf (dump_file, "Marking ");
- print_generic_expr (dump_file, root->base, 0);
- fprintf (dump_file, " offset: %u, size: %u ",
- (unsigned) root->offset, (unsigned) root->size);
- fprintf (dump_file, " to be replaced with debug statements.\n");
+ root->grp_to_be_debug_replaced = 1;
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Marking ");
+ print_generic_expr (dump_file, root->base, 0);
+ fprintf (dump_file, " offset: %u, size: %u ",
+ (unsigned) root->offset, (unsigned) root->size);
+ fprintf (dump_file, " to be replaced with debug "
+ "statements.\n");
+ }
}
}
root->grp_total_scalarization = 0;
}
- if (sth_created
- && (!hole || root->grp_total_scalarization))
- {
- root->grp_covered = 1;
- return true;
- }
- if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
+ if (!hole || root->grp_total_scalarization)
+ root->grp_covered = 1;
+ else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
root->grp_unscalarized_data = 1; /* not covered and written to */
- if (sth_created)
- return true;
- return false;
+ return sth_created;
}
/* Analyze all access trees linked by next_grp by the means of
}
loc = gimple_location (*stmt);
- if (VEC_length (constructor_elt,
- CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
+ if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
{
/* I have never seen this code path trigger but if it can happen the
following should handle it gracefully. */
??? This should move to fold_stmt which we simply should
call after building a VIEW_CONVERT_EXPR here. */
if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
- && !contains_bitfld_comp_ref_p (lhs)
- && !access_has_children_p (lacc))
+ && !contains_bitfld_comp_ref_p (lhs))
{
lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
gimple_assign_set_lhs (*stmt, lhs);
}
else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
- && !contains_vce_or_bfcref_p (rhs)
- && !access_has_children_p (racc))
+ && !contains_vce_or_bfcref_p (rhs))
rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
if (lacc && lacc->grp_to_be_debug_replaced)
{
- gimple ds = gimple_build_debug_bind (get_access_replacement (lacc),
- unshare_expr (rhs), *stmt);
+ tree dlhs = get_access_replacement (lacc);
+ tree drhs = unshare_expr (rhs);
+ if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
+ {
+ if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
+ && !contains_vce_or_bfcref_p (drhs))
+ drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
+ if (drhs
+ && !useless_type_conversion_p (TREE_TYPE (dlhs),
+ TREE_TYPE (drhs)))
+ drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
+ TREE_TYPE (dlhs), drhs);
+ }
+ gimple ds = gimple_build_debug_bind (dlhs, drhs, *stmt);
gsi_insert_before (gsi, ds, GSI_SAME_STMT);
}
parm;
parm = DECL_CHAIN (parm))
{
- VEC (access_p, heap) *access_vec;
+ vec<access_p> *access_vec;
struct access *access;
if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
if (!access_vec)
continue;
- for (access = VEC_index (access_p, access_vec, 0);
+ for (access = (*access_vec)[0];
access;
access = access->next_grp)
generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
{
GIMPLE_PASS,
"esra", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
gate_intra_sra, /* gate */
early_intra_sra, /* execute */
NULL, /* sub */
{
GIMPLE_PASS,
"sra", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
gate_intra_sra, /* gate */
late_intra_sra, /* execute */
NULL, /* sub */
current function. */
static void
-analyze_modified_params (VEC (access_p, heap) *representatives)
+analyze_modified_params (vec<access_p> representatives)
{
int i;
{
struct access *repr;
- for (repr = VEC_index (access_p, representatives, i);
+ for (repr = representatives[i];
repr;
repr = repr->next_grp)
{
static void
propagate_dereference_distances (void)
{
- VEC (basic_block, heap) *queue;
+ vec<basic_block> queue;
basic_block bb;
- queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
- VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
+ queue.create (last_basic_block_for_function (cfun));
+ queue.quick_push (ENTRY_BLOCK_PTR);
FOR_EACH_BB (bb)
{
- VEC_quick_push (basic_block, queue, bb);
+ queue.quick_push (bb);
bb->aux = bb;
}
- while (!VEC_empty (basic_block, queue))
+ while (!queue.is_empty ())
{
edge_iterator ei;
edge e;
bool change = false;
int i;
- bb = VEC_pop (basic_block, queue);
+ bb = queue.pop ();
bb->aux = NULL;
if (bitmap_bit_p (final_bbs, bb->index))
continue;
e->src->aux = e->src;
- VEC_quick_push (basic_block, queue, e->src);
+ queue.quick_push (e->src);
}
}
- VEC_free (basic_block, heap, queue);
+ queue.release ();
}
/* Dump a dereferences TABLE with heading STR to file F. */
distances of each representative of a (fraction of a) parameter. */
static void
-analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
+analyze_caller_dereference_legality (vec<access_p> representatives)
{
int i;
for (i = 0; i < func_param_count; i++)
{
- struct access *repr = VEC_index (access_p, representatives, i);
+ struct access *repr = representatives[i];
int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
if (!repr || no_accesses_p (repr))
{
int i, access_count;
struct access *repr;
- VEC (access_p, heap) *access_vec;
+ vec<access_p> *access_vec;
access_vec = get_base_access_vector (parm);
gcc_assert (access_vec);
- repr = VEC_index (access_p, access_vec, 0);
+ repr = (*access_vec)[0];
if (repr->write)
return NULL;
repr->group_representative = repr;
- access_count = VEC_length (access_p, access_vec);
+ access_count = access_vec->length ();
for (i = 1; i < access_count; i++)
{
- struct access *access = VEC_index (access_p, access_vec, i);
+ struct access *access = (*access_vec)[i];
if (access->write)
return NULL;
access->group_representative = repr;
return repr;
}
-/* Return true iff this access precludes IPA-SRA of the parameter it is
- associated with. */
+/* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
+ associated with. REQ_ALIGN is the minimum required alignment. */
static bool
-access_precludes_ipa_sra_p (struct access *access)
+access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
{
+ unsigned int exp_align;
/* Avoid issues such as the second simple testcase in PR 42025. The problem
is incompatible assign in a call statement (and possibly even in asm
statements). This can be relaxed by using a new temporary but only for
|| gimple_code (access->stmt) == GIMPLE_ASM))
return true;
+ exp_align = get_object_alignment (access->expr);
+ if (exp_align < req_align)
+ return true;
+
return false;
}
int i, j, access_count, group_count;
int agg_size, total_size = 0;
struct access *access, *res, **prev_acc_ptr = &res;
- VEC (access_p, heap) *access_vec;
+ vec<access_p> *access_vec;
access_vec = get_base_access_vector (parm);
if (!access_vec)
return &no_accesses_representant;
- access_count = VEC_length (access_p, access_vec);
+ access_count = access_vec->length ();
- VEC_qsort (access_p, access_vec, compare_access_positions);
+ access_vec->qsort (compare_access_positions);
i = 0;
total_size = 0;
{
bool modification;
tree a1_alias_type;
- access = VEC_index (access_p, access_vec, i);
+ access = (*access_vec)[i];
modification = access->write;
- if (access_precludes_ipa_sra_p (access))
+ if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
return NULL;
a1_alias_type = reference_alias_ptr_type (access->expr);
j = i + 1;
while (j < access_count)
{
- struct access *ac2 = VEC_index (access_p, access_vec, j);
+ struct access *ac2 = (*access_vec)[j];
if (ac2->offset != access->offset)
{
/* All or nothing law for parameters. */
else if (ac2->size != access->size)
return NULL;
- if (access_precludes_ipa_sra_p (ac2)
+ if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
|| (ac2->type != access->type
&& (TREE_ADDRESSABLE (ac2->type)
|| TREE_ADDRESSABLE (access->type)))
IPA-SRA. Return result based on what representatives have been found. */
static enum ipa_splicing_result
-splice_all_param_accesses (VEC (access_p, heap) **representatives)
+splice_all_param_accesses (vec<access_p> &representatives)
{
enum ipa_splicing_result result = NO_GOOD_ACCESS;
tree parm;
struct access *repr;
- *representatives = VEC_alloc (access_p, heap, func_param_count);
+ representatives.create (func_param_count);
for (parm = DECL_ARGUMENTS (current_function_decl);
parm;
{
if (is_unused_scalar_param (parm))
{
- VEC_quick_push (access_p, *representatives,
- &no_accesses_representant);
+ representatives.quick_push (&no_accesses_representant);
if (result == NO_GOOD_ACCESS)
result = UNUSED_PARAMS;
}
&& bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
{
repr = unmodified_by_ref_scalar_representative (parm);
- VEC_quick_push (access_p, *representatives, repr);
+ representatives.quick_push (repr);
if (repr)
result = UNMODIF_BY_REF_ACCESSES;
}
{
bool ro_grp = false;
repr = splice_param_accesses (parm, &ro_grp);
- VEC_quick_push (access_p, *representatives, repr);
+ representatives.quick_push (repr);
if (repr && !no_accesses_p (repr))
{
result = UNUSED_PARAMS;
}
else
- VEC_quick_push (access_p, *representatives, NULL);
+ representatives.quick_push (NULL);
}
if (result == NO_GOOD_ACCESS)
{
- VEC_free (access_p, heap, *representatives);
- *representatives = NULL;
+ representatives.release ();
return NO_GOOD_ACCESS;
}
/* Return the index of BASE in PARMS. Abort if it is not found. */
static inline int
-get_param_index (tree base, VEC(tree, heap) *parms)
+get_param_index (tree base, vec<tree> parms)
{
int i, len;
- len = VEC_length (tree, parms);
+ len = parms.length ();
for (i = 0; i < len; i++)
- if (VEC_index (tree, parms, i) == base)
+ if (parms[i] == base)
return i;
gcc_unreachable ();
}
final number of adjustments. */
static ipa_parm_adjustment_vec
-turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
+turn_representatives_into_adjustments (vec<access_p> representatives,
int adjustments_count)
{
- VEC (tree, heap) *parms;
+ vec<tree> parms;
ipa_parm_adjustment_vec adjustments;
tree parm;
int i;
gcc_assert (adjustments_count > 0);
parms = ipa_get_vector_of_formal_parms (current_function_decl);
- adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
+ adjustments.create (adjustments_count);
parm = DECL_ARGUMENTS (current_function_decl);
for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
{
- struct access *repr = VEC_index (access_p, representatives, i);
+ struct access *repr = representatives[i];
if (!repr || no_accesses_p (repr))
{
adj.copy_param = 1;
else
adj.remove_param = 1;
- VEC_quick_push (ipa_parm_adjustment_t, adjustments, adj);
+ adjustments.quick_push (adj);
}
else
{
adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
&& (repr->grp_maybe_modified
|| repr->grp_not_necessarilly_dereferenced));
- VEC_quick_push (ipa_parm_adjustment_t, adjustments, adj);
+ adjustments.quick_push (adj);
}
}
}
- VEC_free (tree, heap, parms);
+ parms.release ();
return adjustments;
}
enum ipa_splicing_result repr_state;
bool proceed = false;
int i, adjustments_count = 0;
- VEC (access_p, heap) *representatives;
+ vec<access_p> representatives;
ipa_parm_adjustment_vec adjustments;
- repr_state = splice_all_param_accesses (&representatives);
+ repr_state = splice_all_param_accesses (representatives);
if (repr_state == NO_GOOD_ACCESS)
- return NULL;
+ return ipa_parm_adjustment_vec();
/* If there are any parameters passed by reference which are not modified
directly, we need to check whether they can be modified indirectly. */
for (i = 0; i < func_param_count; i++)
{
- struct access *repr = VEC_index (access_p, representatives, i);
+ struct access *repr = representatives[i];
if (repr && !no_accesses_p (repr))
{
adjustments_count++;
if (repr->grp_not_necessarilly_dereferenced
|| repr->grp_maybe_modified)
- VEC_replace (access_p, representatives, i, NULL);
+ representatives[i] = NULL;
else
{
proceed = true;
if (new_components == 0)
{
- VEC_replace (access_p, representatives, i, NULL);
+ representatives[i] = NULL;
adjustments_count++;
}
else
adjustments = turn_representatives_into_adjustments (representatives,
adjustments_count);
else
- adjustments = NULL;
+ adjustments = ipa_parm_adjustment_vec();
- VEC_free (access_p, heap, representatives);
+ representatives.release ();
return adjustments;
}
{
int i, len;
- len = VEC_length (ipa_parm_adjustment_t, adjustments);
+ len = adjustments.length ();
for (i = 0; i < len; i++)
{
struct ipa_parm_adjustment *adj;
- adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &adjustments[i];
if (!adj->copy_param && adj->base == base)
return adj;
}
HOST_WIDE_INT offset, size, max_size;
tree base, src;
- len = VEC_length (ipa_parm_adjustment_t, adjustments);
+ len = adjustments.length ();
if (TREE_CODE (*expr) == BIT_FIELD_REF
|| TREE_CODE (*expr) == IMAGPART_EXPR
for (i = 0; i < len; i++)
{
- adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &adjustments[i];
if (adj->base == base &&
(adj->offset == offset || adj->remove_param))
if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
else
- *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
+ *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
+ NULL);
}
else
new_rhs = fold_build1_loc (gimple_location (stmt),
gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
gsip = &gsi;
}
- len = VEC_length (ipa_parm_adjustment_t, adjustments);
+ len = adjustments.length ();
for (i = 0; i < len; i++)
{
struct ipa_parm_adjustment *adj;
tree name, vexpr, copy = NULL_TREE;
use_operand_p use_p;
- adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &adjustments[i];
if (adj->copy_param || !is_gimple_reg (adj->base))
continue;
name = ssa_default_def (cfun, adj->base);
convert_callers_for_node (struct cgraph_node *node,
void *data)
{
- ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec)data;
+ ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
bitmap recomputed_callers = BITMAP_ALLOC (NULL);
struct cgraph_edge *cs;
xstrdup (cgraph_node_name (cs->caller)),
xstrdup (cgraph_node_name (cs->callee)));
- ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
+ ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
pop_cfun ();
}
basic_block this_block;
cgraph_for_node_and_aliases (node, convert_callers_for_node,
- adjustments, false);
+ &adjustments, false);
if (!encountered_recursive_call)
return;
{
struct cgraph_node *new_node;
bool cfg_changed;
- VEC (cgraph_edge_p, heap) * redirect_callers = collect_callers_of_node (node);
+ vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);
rebuild_cgraph_edges ();
free_dominance_info (CDI_DOMINATORS);
pop_cfun ();
- new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
- false, NULL, NULL, "isra");
- VEC_free (cgraph_edge_p, heap, redirect_callers);
+ new_node = cgraph_function_versioning (node, redirect_callers,
+ NULL,
+ NULL, false, NULL, NULL, "isra");
+ redirect_callers.release ();
push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
}
adjustments = analyze_all_param_acesses ();
- if (!adjustments)
+ if (!adjustments.exists ())
goto out;
if (dump_file)
ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
ret = TODO_update_ssa | TODO_cleanup_cfg;
else
ret = TODO_update_ssa;
- VEC_free (ipa_parm_adjustment_t, heap, adjustments);
+ adjustments.release ();
statistics_counter_event (cfun, "Unused parameters deleted",
sra_stats.deleted_unused_parameters);
{
GIMPLE_PASS,
"eipa_sra", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
ipa_early_sra_gate, /* gate */
ipa_early_sra, /* execute */
NULL, /* sub */