variable, and that same variable occurs in the same operands cache, then
the new cache vector will also get the same SSA_NAME.
- i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
- vector for VUSE, then the new vector will also be modified such that
- it contains 'a_5' rather than 'a'. */
+ i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
+ operand vector for VUSE, then the new vector will also be modified
+ such that it contains 'a_5' rather than 'a'. */
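
For illustration, a minimal sketch of that renaming-preservation rule
(hypothetical names and vectors, not the actual cache code):

  /* Copying cached ops {a_5, b_7} onto a rebuilt vector {a, c}:
     'a' picks up its old SSA_NAME 'a_5', while 'c' stays bare.  */
  for (i = 0; i < num_new; i++)
    for (j = 0; j < num_old; j++)
      if (TREE_CODE (old_ops[j]) == SSA_NAME
          && SSA_NAME_VAR (old_ops[j]) == new_ops[i])
        new_ops[i] = old_ops[j];
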
/* Structure storing statistics on how many call clobbers we have, and
/* Comparison function for qsort used in operand_build_sort_virtual. */
-static int
+int
operand_build_cmp (const void *p, const void *q)
{
const_tree const e1 = *((const_tree const *)p);
gimple_ssa_operands (cfun)->ssa_operand_mem_size
= OP_SIZE_3 * sizeof (struct voptype_d);
- /* Fail if there is not enough space. If there are this many operands
- required, first make sure there isn't a different problem causing this
- many operands. If the decision is that this is OK, then we can
- specially allocate a buffer just for this request. */
- gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+  /* We can reliably trigger cases that need arbitrarily many
+     operands (see PR34093), so allocate a buffer just for this request.  */
+ if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size = size;
  ptr = (struct ssa_operand_memory_d *)
        ggc_alloc (sizeof (struct ssa_operand_memory_d)
                   + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
}
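
For reference, a sketch of the chunk layout the allocation above
assumes (reconstructed; the one-element trailing array is the usual C
flexible-array idiom, which is what the "- 1" compensates for):

  struct ssa_operand_memory_d
  {
    struct ssa_operand_memory_d *next;  /* chain of allocated chunks */
    char mem[1];                        /* grows to the chunk size */
  };
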
-/* Return a virtual op pointer with NUM elements which are all initialized to OP
- and are linked into the immediate uses for STMT. The new vop is appended
- after PREV. */
+/* Return a virtual op pointer with NUM elements which are all
+ initialized to OP and are linked into the immediate uses for STMT.
+ The new vop is appended after PREV. */
static inline voptype_p
add_vop (tree stmt, tree op, int num, voptype_p prev)
/* Don't allow duplicate entries. */
ann = get_var_ann (var);
- if (ann->in_vuse_list || ann->in_vdef_list)
+ if (ann->in_vuse_list)
return;
+ else if (ann->in_vdef_list)
+ {
+ /* We don't want a vuse if we already have a vdef, but we must
+ still put this in build_loads. */
+ bitmap_set_bit (build_loads, DECL_UID (var));
+ return;
+ }
ann->in_vuse_list = true;
sym = var;
}
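
To see why the vdef branch must still record a load: a statement can
read and write the same symbol, and if the symbol is already on the
vdef list the vuse is dropped, yet the read has to survive in
build_loads.  A hedged sketch (hypothetical GIMPLE):

  a = a + 1;  /* if the VDEF for 'a' was appended first, the vuse is
                 dropped, but DECL_UID (a) still lands in build_loads  */
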
To implement this, we just punt on accesses through union
pointers entirely.
+
+     Another case we have to allow is accessing a variable
+     through an array access at offset zero.  This happens with
+     code generated by the Fortran front end, like
+
+ char[1:1] & my_char_ref;
+ char my_char;
+ my_char_ref_1 = (char[1:1] &) &my_char;
+ D.874_2 = (*my_char_ref_1)[1]{lb: 1 sz: 1};
*/
else if (ref
&& flag_strict_aliasing
&& base
&& (TREE_CODE (base) != INDIRECT_REF
|| TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
+ && (TREE_CODE (base) != INDIRECT_REF
+ || TREE_CODE (ref) != ARRAY_REF
+ || offset != 0
+ || (DECL_SIZE (alias)
+ && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
+ && size != -1
+ && (unsigned HOST_WIDE_INT)size
+ != TREE_INT_CST_LOW (DECL_SIZE (alias))))
&& !AGGREGATE_TYPE_P (TREE_TYPE (alias))
&& TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
&& !var_ann (alias)->is_heapvar
return true;
}
-/* Add the actual variables FULL_REF can access, given a member of
- full_ref's points-to set VAR, where FULL_REF is an access of SIZE at
- OFFSET from var. IS_CALL_SITE is true if this is a call, and IS_DEF
- is true if this is supposed to be a vdef, and false if this should
- be a VUSE.
+/* Add the actual variables accessed, given a member of a points-to set
+   that is the SFT VAR, where the access is of SIZE at OFFSET from VAR.
+   IS_DEF is true if this is supposed to be a vdef, and false if this
+   should be a VUSE.
The real purpose of this function is to take a points-to set for a
pointer to a structure, say
SFT's for a structure. */
static bool
-add_vars_for_offset (tree full_ref, tree var, HOST_WIDE_INT offset,
- HOST_WIDE_INT size, bool is_call_site, bool is_def)
+add_vars_for_offset (tree var, unsigned HOST_WIDE_INT offset,
+ unsigned HOST_WIDE_INT size, bool is_def)
{
- /* Call-clobbered tags may have non-call-clobbered
- symbols in their alias sets. Ignore them if we are
- adding VOPs for a call site. */
- if (is_call_site && !is_call_clobbered (var))
+ bool added = false;
+ tree subvar;
+ subvar_t sv;
+ unsigned int i;
+
+ /* Adjust offset by the pointed-to location. */
+ offset += SFT_OFFSET (var);
+
+ /* Add all subvars of var that overlap with the access.
+ Binary search for the first relevant SFT. */
+ sv = get_subvars_for_var (SFT_PARENT_VAR (var));
+ if (!get_first_overlapping_subvar (sv, offset, size, &i))
return false;
- /* For offset 0, we already have the right variable. If there is no
- full_ref, this is not a place we care about (All component
- related accesses that go through pointers will have full_ref not
- NULL).
- Any var for which we didn't create SFT's can't be
- distinguished. */
- if (!full_ref || (offset == 0 && size != -1)
- || (TREE_CODE (var) != STRUCT_FIELD_TAG
- && (!var_can_have_subvars (var) || !get_subvars_for_var (var))))
+ for (; VEC_iterate (tree, sv, i, subvar); ++i)
{
- if (!access_can_touch_variable (full_ref, var, offset, size))
- return false;
+      /* Stop once SUBVAR starts at or past the end of the access;
+         subvars are sorted by offset, so no later one can overlap.  */
+      if (SFT_OFFSET (subvar) > offset
+          && size <= SFT_OFFSET (subvar) - offset)
+        break;
if (is_def)
- append_vdef (var);
- else
- append_vuse (var);
- return true;
- }
- else if (TREE_CODE (var) == STRUCT_FIELD_TAG)
- {
- if (size == -1)
- {
- bool added = false;
- subvar_t sv = get_subvars_for_var (SFT_PARENT_VAR (var));
- for (; sv; sv = sv->next)
- {
- if (overlap_subvar (SFT_OFFSET (var) + offset, size,
- sv->var, NULL)
- && access_can_touch_variable (full_ref, sv->var,
- offset, size))
- {
- added = true;
- if (is_def)
- append_vdef (sv->var);
- else
- append_vuse (sv->var);
- }
- }
- return added;
- }
+ append_vdef (subvar);
else
- {
- bool added = false;
- subvar_t sv = get_subvars_for_var (SFT_PARENT_VAR (var));
- for (; sv; sv = sv->next)
- {
- /* Once we hit the end of the parts that could touch,
- stop looking. */
- if (SFT_OFFSET (var) + offset + size <= SFT_OFFSET (sv->var))
- break;
- if (overlap_subvar (SFT_OFFSET (var) + offset, size,
- sv->var, NULL)
- && access_can_touch_variable (full_ref, sv->var, offset,
- size))
- {
- added = true;
- if (is_def)
- append_vdef (sv->var);
- else
- append_vuse (sv->var);
- }
- }
- return added;
- }
+ append_vuse (subvar);
+ added = true;
}
-
- return false;
+
+ return added;
}
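
A worked sketch of the loop bounds above, with a hypothetical SFT
layout: subvars at offsets 0, 4 and 8, four bytes each, and an access
of SIZE 4 at OFFSET 4.  The binary search lands on the offset-4
subvar; for the offset-8 subvar, SFT_OFFSET > 4 and 4 <= 8 - 4 hold,
so the loop breaks.  Note the test is phrased as
"size <= SFT_OFFSET - offset" rather than "offset + size <= SFT_OFFSET"
so that offset + size cannot overflow:

  /* Simplified predicate equivalent to the break test above.  */
  static bool
  subvar_past_access_p (unsigned HOST_WIDE_INT sft_offset,
                        unsigned HOST_WIDE_INT offset,
                        unsigned HOST_WIDE_INT size)
  {
    return sft_offset > offset && size <= sft_offset - offset;
  }
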
+
/* Add VAR to the virtual operands array. FLAGS is as in
get_expr_operands. FULL_REF is a tree that contains the entire
pointer dereference expression, if available, or NULL otherwise.
if (aliases == NULL)
{
- if (!gimple_aliases_computed_p (cfun))
+ if (!gimple_aliases_computed_p (cfun)
+ && (flags & opf_def))
s_ann->has_volatile_ops = true;
/* The variable is not aliased or it is an alias tag. */
{
bitmap_iterator bi;
unsigned int i;
- tree al;
+ bool none_added = true;
/* The variable is aliased. Add its aliases to the virtual
operands. */
gcc_assert (!bitmap_empty_p (aliases));
-
- if (flags & opf_def)
+
+ EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
{
- bool none_added = true;
- EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
+ tree al = referenced_var (i);
+
+ /* For SFTs we have to consider all subvariables of the parent var
+ if it is a potential points-to location. */
+ if (TREE_CODE (al) == STRUCT_FIELD_TAG
+ && TREE_CODE (var) == NAME_MEMORY_TAG)
{
- al = referenced_var (i);
- none_added &= !add_vars_for_offset (full_ref, al, offset, size,
- is_call_site, true);
+ if (SFT_BASE_FOR_COMPONENTS_P (al))
+ {
+ /* If AL is the first SFT of a component, it can be used
+ to find other SFTs at [offset, size] adjacent to it. */
+ none_added &= !add_vars_for_offset (al, offset, size,
+ flags & opf_def);
+ }
+ else if ((unsigned HOST_WIDE_INT)offset < SFT_SIZE (al))
+ {
+ /* Otherwise, we only need to consider it if
+ [offset, size] overlaps with AL. */
+ if (flags & opf_def)
+ append_vdef (al);
+ else
+ append_vuse (al);
+ none_added = false;
+ }
+ }
+ else
+ {
+ /* Call-clobbered tags may have non-call-clobbered
+ symbols in their alias sets. Ignore them if we are
+ adding VOPs for a call site. */
+ if (is_call_site && !is_call_clobbered (al))
+ continue;
+
+ /* If we do not know the full reference tree or if the access is
+ unspecified [0, -1], we cannot prune it. Otherwise try doing
+ so using access_can_touch_variable. */
+ if (full_ref
+ && !access_can_touch_variable (full_ref, al, offset, size))
+ continue;
+
+ if (flags & opf_def)
+ append_vdef (al);
+ else
+ append_vuse (al);
+ none_added = false;
}
+ }
+ if (flags & opf_def)
+ {
/* If the variable is also an alias tag, add a virtual
operand for it, otherwise we will miss representing
references to the members of the variable's alias set.
}
else
{
- bool none_added = true;
- EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
- {
- al = referenced_var (i);
- none_added &= !add_vars_for_offset (full_ref, al, offset, size,
- is_call_site, false);
-
- }
-
/* Even if no aliases have been added, we still need to
establish def-use and use-def chains, lest
transformations think that this is not a memory
add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}
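
The "0, -1" passed above is this file's convention for an access of
unknown extent: offset zero, size -1, meaning "the whole object, size
not known".  A hedged sketch of what that implies for any consumer
that prunes on [offset, size]:

  /* Sketch: with SIZE == -1 no [offset, size] pruning is valid, so
     conservatively assume the access can touch the variable.  */
  if (size == -1)
    return true;
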
-
-/* A subroutine of get_expr_operands to handle INDIRECT_REF,
- ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
-
- STMT is the statement being processed, EXPR is the INDIRECT_REF
- that got us here.
-
- FLAGS is as in get_expr_operands.
-
- FULL_REF contains the full pointer dereference expression, if we
- have it, or NULL otherwise.
-
- OFFSET and SIZE are the location of the access inside the
- dereferenced pointer, if known.
-
- RECURSE_ON_BASE should be set to true if we want to continue
- calling get_expr_operands on the base pointer, and false if
- something else will do it for us. */
+/* Subroutine of get_indirect_ref_operands.  ADDR is the address
+   that is dereferenced; the meaning of the rest of the arguments
+   is the same as in get_indirect_ref_operands.  */
static void
-get_indirect_ref_operands (tree stmt, tree expr, int flags,
- tree full_ref,
- HOST_WIDE_INT offset, HOST_WIDE_INT size,
- bool recurse_on_base)
+get_addr_dereference_operands (tree stmt, tree *addr, int flags, tree full_ref,
+ HOST_WIDE_INT offset, HOST_WIDE_INT size,
+ bool recurse_on_base)
{
- tree *pptr = &TREE_OPERAND (expr, 0);
- tree ptr = *pptr;
+ tree ptr = *addr;
stmt_ann_t s_ann = stmt_ann (stmt);
s_ann->references_memory = true;
- if (TREE_THIS_VOLATILE (expr))
- s_ann->has_volatile_ops = true;
if (SSA_VAR_P (ptr))
{
/* If we are emitting debugging dumps, display a warning if
PTR is an SSA_NAME with no flow-sensitive alias
information. That means that we may need to compute
- aliasing again. */
+ aliasing again or that a propagation pass forgot to
+ update the alias information on the pointers. */
if (dump_file
&& TREE_CODE (ptr) == SSA_NAME
- && pi == NULL)
+ && (pi == NULL
+ || pi->name_mem_tag == NULL_TREE))
{
fprintf (dump_file,
"NOTE: no flow-sensitive alias info for ");
print_generic_expr (dump_file, ptr, dump_flags);
fprintf (dump_file, " in ");
- print_generic_stmt (dump_file, stmt, dump_flags);
+ print_generic_stmt (dump_file, stmt, 0);
}
if (TREE_CODE (ptr) == SSA_NAME)
ptr = SSA_NAME_VAR (ptr);
v_ann = var_ann (ptr);
+  /* If we don't know what this pointer points to then we have
+     to make sure not to prune virtual operands based on offset
+     and size.  */
if (v_ann->symbol_mem_tag)
- add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
- full_ref, offset, size, false);
+ {
+ add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
+ full_ref, 0, -1, false);
+ /* Make sure we add the SMT itself. */
+ if (!(flags & opf_no_vops))
+ {
+ if (flags & opf_def)
+ append_vdef (v_ann->symbol_mem_tag);
+ else
+ append_vuse (v_ann->symbol_mem_tag);
+ }
+ }
/* Aliasing information is missing; mark statement as
volatile so we won't optimize it out too actively. */
/* If requested, add a USE operand for the base pointer. */
if (recurse_on_base)
- get_expr_operands (stmt, pptr, opf_use);
+ get_expr_operands (stmt, addr, opf_use);
+}
+
+
+/* A subroutine of get_expr_operands to handle INDIRECT_REF,
+ ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
+
+ STMT is the statement being processed, EXPR is the INDIRECT_REF
+ that got us here.
+
+ FLAGS is as in get_expr_operands.
+
+ FULL_REF contains the full pointer dereference expression, if we
+ have it, or NULL otherwise.
+
+ OFFSET and SIZE are the location of the access inside the
+ dereferenced pointer, if known.
+
+ RECURSE_ON_BASE should be set to true if we want to continue
+ calling get_expr_operands on the base pointer, and false if
+ something else will do it for us. */
+
+static void
+get_indirect_ref_operands (tree stmt, tree expr, int flags, tree full_ref,
+ HOST_WIDE_INT offset, HOST_WIDE_INT size,
+ bool recurse_on_base)
+{
+ tree *pptr = &TREE_OPERAND (expr, 0);
+ stmt_ann_t s_ann = stmt_ann (stmt);
+
+ if (TREE_THIS_VOLATILE (expr))
+ s_ann->has_volatile_ops = true;
+
+ get_addr_dereference_operands (stmt, pptr, flags, full_ref, offset, size,
+ recurse_on_base);
}
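
The refactoring above leaves the tree-specific bits (the
TREE_THIS_VOLATILE check) in the wrapper, while
get_addr_dereference_operands works from a bare address so it can
also serve callers that have no INDIRECT_REF tree at all, such as the
OMP_ATOMIC_LOAD case below.  A usage sketch for that situation:

  /* Dereference with no containing ref expression: FULL_REF is
     NULL_TREE and the access extent is the unknown [0, -1].  */
  get_addr_dereference_operands (stmt, &addr, opf_def,
                                 NULL_TREE, 0, -1, true);
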
if (var_can_have_subvars (expr)
&& (svars = get_subvars_for_var (expr)))
{
- subvar_t sv;
- for (sv = svars; sv; sv = sv->next)
- add_stmt_operand (&sv->var, s_ann, flags);
+ unsigned int i;
+ tree subvar;
+ for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
+ add_stmt_operand (&subvar, s_ann, flags);
}
else
add_stmt_operand (expr_p, s_ann, flags);
ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
{
- subvar_t sv;
subvar_t svars = get_subvars_for_var (ref);
+ unsigned int i;
+ tree subvar;
- for (sv = svars; sv; sv = sv->next)
+ for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
{
bool exact;
- if (overlap_subvar (offset, maxsize, sv->var, &exact))
+ if (overlap_subvar (offset, maxsize, subvar, &exact))
{
int subvar_flags = flags;
none = false;
- add_stmt_operand (&sv->var, s_ann, subvar_flags);
+ add_stmt_operand (&subvar, s_ann, subvar_flags);
}
}
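
Both subvar loops here rely on the same interval test; a simplified
sketch of the semantics assumed for overlap_subvar (ignoring the
exact-overlap flag and unknown sizes, not its actual implementation):

  /* True iff [off, off + size) intersects SUBVAR's extent.  */
  static bool
  ranges_overlap_sketch (unsigned HOST_WIDE_INT off,
                         unsigned HOST_WIDE_INT size,
                         tree subvar)
  {
    return off < SFT_OFFSET (subvar) + SFT_SIZE (subvar)
           && SFT_OFFSET (subvar) < off + size;
  }
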
get_expr_operands (stmt, &CHANGE_DYNAMIC_TYPE_LOCATION (expr), opf_use);
return;
+ case OMP_FOR:
+ {
+ tree init = OMP_FOR_INIT (expr);
+ tree cond = OMP_FOR_COND (expr);
+ tree incr = OMP_FOR_INCR (expr);
+ tree c, clauses = OMP_FOR_CLAUSES (stmt);
+
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (init, 0), opf_def);
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (init, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (cond, 1), opf_use);
+ get_expr_operands (stmt,
+ &TREE_OPERAND (GIMPLE_STMT_OPERAND (incr, 1), 1),
+ opf_use);
+
+ c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
+ if (c)
+ get_expr_operands (stmt, &OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c),
+ opf_use);
+ return;
+ }
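+
+      /* The operand positions in the OMP_FOR case above assume the
+         canonical gimplified shape (a sketch):
+
+             #pragma omp for schedule (dynamic, chunk)
+             for (i = lb; i < ub; i = i + step)
+
+         so init "i = lb" defs i and uses lb, cond "i < ub" uses ub,
+         and incr "i = i + step" needs only its inner second operand,
+         step, plus the schedule chunk expression when present.  */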
+
+ case OMP_CONTINUE:
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_def);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
+ return;
+ }
+
+ case OMP_PARALLEL:
+ {
+ tree c, clauses = OMP_PARALLEL_CLAUSES (stmt);
+
+ if (OMP_PARALLEL_DATA_ARG (stmt))
+ {
+ get_expr_operands (stmt, &OMP_PARALLEL_DATA_ARG (stmt), opf_use);
+ add_to_addressable_set (OMP_PARALLEL_DATA_ARG (stmt),
+ &s_ann->addresses_taken);
+ }
+
+ c = find_omp_clause (clauses, OMP_CLAUSE_IF);
+ if (c)
+ get_expr_operands (stmt, &OMP_CLAUSE_IF_EXPR (c), opf_use);
+ c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
+ if (c)
+ get_expr_operands (stmt, &OMP_CLAUSE_NUM_THREADS_EXPR (c), opf_use);
+ return;
+ }
+
+ case OMP_SECTIONS:
+ {
+ get_expr_operands (stmt, &OMP_SECTIONS_CONTROL (expr), opf_def);
+ return;
+ }
+
+ case OMP_ATOMIC_LOAD:
+ {
+ tree *addr = &TREE_OPERAND (expr, 1);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_def);
+
+ if (TREE_CODE (*addr) == ADDR_EXPR)
+ get_expr_operands (stmt, &TREE_OPERAND (*addr, 0), opf_def);
+ else
+ get_addr_dereference_operands (stmt, addr, opf_def,
+ NULL_TREE, 0, -1, true);
+ return;
+ }
+
+ case OMP_ATOMIC_STORE:
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
+ return;
+ }
+
case BLOCK:
case FUNCTION_DECL:
case EXC_PTR_EXPR:
case FILTER_EXPR:
case LABEL_DECL:
case CONST_DECL:
- case OMP_PARALLEL:
- case OMP_SECTIONS:
- case OMP_FOR:
case OMP_SINGLE:
case OMP_MASTER:
case OMP_ORDERED:
case OMP_CRITICAL:
case OMP_RETURN:
- case OMP_CONTINUE:
+ case OMP_SECTION:
+ case OMP_SECTIONS_SWITCH:
+ case PREDICT_EXPR:
/* Expressions that make no memory references. */
return;
if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
ann->addresses_taken = NULL;
+
/* For added safety, assume that statements with volatile operands
also reference memory. */
if (ann->has_volatile_ops)
}
+/* Release the operands of STMT back to their freelists, and clear
+   the stmt operand lists.  */
+
+void
+free_stmt_operands (tree stmt)
+{
+ def_optype_p defs = DEF_OPS (stmt), last_def;
+ use_optype_p uses = USE_OPS (stmt), last_use;
+ voptype_p vuses = VUSE_OPS (stmt);
+ voptype_p vdefs = VDEF_OPS (stmt), vdef, next_vdef;
+ unsigned i;
+
+ if (defs)
+ {
+ for (last_def = defs; last_def->next; last_def = last_def->next)
+ continue;
+ last_def->next = gimple_ssa_operands (cfun)->free_defs;
+ gimple_ssa_operands (cfun)->free_defs = defs;
+ DEF_OPS (stmt) = NULL;
+ }
+
+ if (uses)
+ {
+      /* De-link every immediate use.  The loop stops at the last
+         element, which is needed below to chain onto the freelist,
+         so its use is de-linked separately.  */
+      for (last_use = uses; last_use->next; last_use = last_use->next)
+        delink_imm_use (USE_OP_PTR (last_use));
+      delink_imm_use (USE_OP_PTR (last_use));
+ last_use->next = gimple_ssa_operands (cfun)->free_uses;
+ gimple_ssa_operands (cfun)->free_uses = uses;
+ USE_OPS (stmt) = NULL;
+ }
+
+ if (vuses)
+ {
+ for (i = 0; i < VUSE_NUM (vuses); i++)
+ delink_imm_use (VUSE_OP_PTR (vuses, i));
+ add_vop_to_freelist (vuses);
+ VUSE_OPS (stmt) = NULL;
+ }
+
+ if (vdefs)
+ {
+ for (vdef = vdefs; vdef; vdef = next_vdef)
+ {
+ next_vdef = vdef->next;
+ delink_imm_use (VDEF_OP_PTR (vdef, 0));
+ add_vop_to_freelist (vdef);
+ }
+ VDEF_OPS (stmt) = NULL;
+ }
+}
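+
+/* A hedged usage sketch (caller context hypothetical): a pass that
+   deletes a statement hands the operand vectors back before removing
+   the stmt itself, so the freelists can recycle them:
+
+     free_stmt_operands (stmt);
+     bsi_remove (&bsi, true);  */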
+
+
/* Free any operand vectors in OPS.  */
void
create an artificial stmt which looks like a load from the store; this can
be used to eliminate redundant loads. OLD_OPS are the operands from the
store stmt, and NEW_STMT is the new load which represents a load of the
- values stored. */
+   values stored.  If DELINK_IMM_USES_P is true, the immediate
+   uses of this stmt will be de-linked.  */
void
-create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
+create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt,
+ bool delink_imm_uses_p)
{
tree op;
ssa_op_iter iter;
use_operand_p use_p;
unsigned i;
+ stmt_ann_t ann;
- get_stmt_ann (new_stmt);
+  /* Create the stmt annotation but make sure not to mark the stmt
+     as modified, since we will build the operands ourselves.  */
+  ann = get_stmt_ann (new_stmt);
+  ann->modified = 0;
/* Process NEW_STMT looking for operands. */
start_ssa_stmt_operands ();
if (TREE_CODE (op) != SSA_NAME)
var_ann (op)->in_vuse_list = false;
- for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
+ for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
if (TREE_CODE (op) != SSA_NAME)
var_ann (op)->in_vdef_list = false;
VEC_truncate (tree, build_vdefs, 0);
VEC_truncate (tree, build_vuses, 0);
+ /* Clear the loads and stores bitmaps. */
+ bitmap_clear (build_loads);
+ bitmap_clear (build_stores);
+
/* For each VDEF on the original statement, we want to create a
VUSE of the VDEF result operand on the new statement. */
FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
finalize_ssa_stmt_operands (new_stmt);
-  /* All uses in this fake stmt must not be in the immediate use lists.  */
-  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
-    delink_imm_use (use_p);
+  /* If requested, make sure no use in this fake stmt is left in the
+     immediate use lists.  */
+  if (delink_imm_uses_p)
+    FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
+      delink_imm_use (use_p);
}
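
A usage sketch for the new flag (caller and variable names
hypothetical): a PRE-style pass builds a fake load that reads what a
store wrote, choosing whether the fake stmt's uses stay linked into
the immediate-use lists:

  /* Build "tmp = rhs" and give it the store's VDEF results as
     VUSEs; passing true de-links its immediate uses.  */
  tree fake_load = build_gimple_modify_stmt (tmp, rhs);
  create_ssa_artificial_load_stmt (fake_load, store_stmt, true);
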
if (var_can_have_subvars (var)
&& (svars = get_subvars_for_var (var)))
{
- subvar_t sv;
- for (sv = svars; sv; sv = sv->next)
+ unsigned int i;
+ tree subvar;
+ for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
{
- bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
- TREE_ADDRESSABLE (sv->var) = 1;
+ bitmap_set_bit (*addresses_taken, DECL_UID (subvar));
+ TREE_ADDRESSABLE (subvar) = 1;
}
}
else