having to keep track of too many V_MAY_DEF expressions at call sites. */
tree global_var;
+DEF_VEC_I(int);
+DEF_VEC_ALLOC_I(int,heap);
+
+/* qsort comparison function to sort type/name tags by DECL_UID. */
+
+static int
+sort_tags_by_id (const void *pa, const void *pb)
+{
+ tree a = *(tree *)pa;
+ tree b = *(tree *)pb;
+
+ /* DECL_UIDs are unique per declaration, so this yields a strict
+    total order; the subtraction gives qsort's three-way result.
+    NOTE(review): assumes UIDs are small enough that the difference
+    cannot overflow int -- confirm against how UIDs are assigned.  */
+ return DECL_UID (a) - DECL_UID (b);
+}
+
+/* Initialize WORKLIST to contain those memory tags that are marked call
+ clobbered. Initialize WORKLIST2 to contain the reasons these
+ memory tags escaped. */
+
+static void
+init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
+ VEC (int, heap) **worklist2)
+{
+ referenced_var_iterator rvi;
+ tree curr;
+
+ /* Seed the worklist with every memory tag already marked call
+    clobbered.  WORKLIST2 is kept in lock step with WORKLIST: entry I
+    of WORKLIST2 is the escape reason (escape_mask) for entry I of
+    WORKLIST.  */
+ FOR_EACH_REFERENCED_VAR (curr, rvi)
+ {
+ if (MTAG_P (curr) && is_call_clobbered (curr))
+ {
+ VEC_safe_push (tree, heap, *worklist, curr);
+ VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
+ }
+ }
+}
+
+/* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
+ ALIAS is not already marked call clobbered, and is a memory
+ tag. */
+
<imports>
</imports>
+static void
+add_to_worklist (tree alias, VEC (tree, heap) **worklist,
+ VEC (int, heap) **worklist2,
+ int reason)
+{
+ /* Only memory tags propagate clobbering transitively, and a tag
+    that is already call clobbered has been (or will be) processed,
+    so re-queueing it would be redundant.  */
+ if (MTAG_P (alias) && !is_call_clobbered (alias))
+ {
+ VEC_safe_push (tree, heap, *worklist, alias);
+ VEC_safe_push (int, heap, *worklist2, reason);
+ }
+}
+
+/* Mark aliases of TAG as call clobbered, and place any tags on the
+ alias list that were not already call clobbered on WORKLIST. */
+
+static void
+mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
+ VEC (int, heap) **worklist2)
+{
+ unsigned int i;
+ VEC (tree, gc) *ma;
+ tree entry;
+ var_ann_t ta = var_ann (tag);
+
+ /* Only memory tags carry a may-alias list worth walking here.  */
+ if (!MTAG_P (tag))
+ return;
+ ma = may_aliases (tag);
+ if (!ma)
+ return;
+
+ for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
+ {
+ /* Unmodifiable variables (e.g. read-only) cannot be clobbered,
+    so skip them entirely.  */
+ if (!unmodifiable_var_p (entry))
+ {
+ /* TAG's escape reason is inherited by each alias.  Aliases
+    that are themselves tags get queued so that their own
+    aliases are processed on a later iteration.  */
+ add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
+ mark_call_clobbered (entry, ta->escape_mask);
+ }
+ }
+}
+
+/* Tags containing global vars need to be marked as global.
+ Tags containing call clobbered vars need to be marked as call
+ clobbered. */
+
+static void
+compute_tag_properties (void)
+{
+ referenced_var_iterator rvi;
+ tree tag;
+ bool changed = true;
+ VEC (tree, heap) *taglist = NULL;
+
+ /* Collect every memory tag except struct field tags, which do not
+    participate in this propagation.  */
+ FOR_EACH_REFERENCED_VAR (tag, rvi)
+ {
+ if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
+ continue;
+ VEC_safe_push (tree, heap, taglist, tag);
+ }
+
+ /* We sort the taglist by DECL_UID, for two reasons.
+ 1. To get a sequential ordering to make the bitmap accesses
+ faster.
+ 2. Because of the way we compute aliases, it's more likely that
+ an earlier tag is included in a later tag, and this will reduce
+ the number of iterations.
+
+ If we had a real tag graph, we would just topo-order it and be
+ done with it. */
+ qsort (VEC_address (tree, taglist),
+ VEC_length (tree, taglist),
+ sizeof (tree),
+ sort_tags_by_id);
+
+ /* Go through each tag not marked as global, and if it aliases
+ global vars, mark it global.
+
+ If the tag contains call clobbered vars, mark it call
+ clobbered.
+
+ This loop iterates because tags may appear in the may-aliases
+ list of other tags when we group. */
+
+ /* Fixed-point iteration: keep sweeping until one full pass over the
+    taglist changes nothing.  Properties only ever go from unset to
+    set, so termination is guaranteed.  */
+ while (changed)
+ {
+ unsigned int k;
+
+ changed = false;
+ for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
+ {
+ VEC (tree, gc) *ma;
+ unsigned int i;
+ tree entry;
+ bool tagcc = is_call_clobbered (tag);
+ bool tagglobal = MTAG_GLOBAL (tag);
+
+ /* Both properties already set -- nothing left to learn.  */
+ if (tagcc && tagglobal)
+ continue;
+
+ ma = may_aliases (tag);
+ if (!ma)
+ continue;
+
+ for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
+ {
+ /* Call clobbered entries cause the tag to be marked
+ call clobbered. */
+ if (!tagcc && is_call_clobbered (entry))
+ {
+ mark_call_clobbered (tag, var_ann (entry)->escape_mask);
+ tagcc = true;
+ changed = true;
+ }
+
+ /* Global vars cause the tag to be marked global. */
+ if (!tagglobal && is_global_var (entry))
+ {
+ MTAG_GLOBAL (tag) = true;
+ changed = true;
+ tagglobal = true;
+ }
+
+ /* Early exit once both global and cc are set, since the
+ loop can't do any more than that. */
+ if (tagcc && tagglobal)
+ break;
+ }
+ }
+ }
+ VEC_free (tree, heap, taglist);
+}
+
+/* Set up the initial variable clobbers and globalness.
+ When this function completes, only tags whose aliases need to be
+ clobbered will be set clobbered. Tags clobbered because they
+ contain call clobbered vars are handled in compute_tag_properties. */
+
+static void
+set_initial_properties (struct alias_info *ai)
+{
+ unsigned int i;
+ referenced_var_iterator rvi;
+ tree var;
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
+ {
+ /* Global variables without subvariables are directly clobberable
+    by any call; vars with subvars are handled through the subvars
+    themselves.  */
+ if (is_global_var (var)
+ && (!var_can_have_subvars (var)
+ || get_subvars_for_var (var) == NULL))
+ {
+ if (!unmodifiable_var_p (var))
+ mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
+ }
+ /* Pointer arguments escape to the caller by construction: their
+    pointed-to memory may be accessed after return.  */
+ else if (TREE_CODE (var) == PARM_DECL
+ && default_def (var)
+ && POINTER_TYPE_P (TREE_TYPE (var)))
+ {
+ tree def = default_def (var);
+ get_ptr_info (def)->value_escapes_p = 1;
+ get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
+ }
+ }
+
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
+ {
+ tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
+ struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
+ var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
+
+ if (pi->value_escapes_p)
+ {
+ /* If PTR escapes then its associated memory tags and
+ pointed-to variables are call-clobbered. */
+ if (pi->name_mem_tag)
+ mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
+
+ if (v_ann->type_mem_tag)
+ mark_call_clobbered (v_ann->type_mem_tag, pi->escape_mask);
+
+ if (pi->pt_vars)
+ {
+ bitmap_iterator bi;
+ unsigned int j;
+ EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
+ if (!unmodifiable_var_p (referenced_var (j)))
+ mark_call_clobbered (referenced_var (j), pi->escape_mask);
+ }
+ }
+ /* If the name tag is call clobbered, so is the type tag
+ associated with the base VAR_DECL. */
+ if (pi->name_mem_tag
+ && v_ann->type_mem_tag
+ && is_call_clobbered (pi->name_mem_tag))
+ mark_call_clobbered (v_ann->type_mem_tag, pi->escape_mask);
+
+ /* Name tags and type tags that we don't know where they point
+ to, might point to global memory, and thus, are clobbered.
+
+ FIXME: This is not quite right. They should only be
+ clobbered if value_escapes_p is true, regardless of whether
+ they point to global memory or not.
+ So removing this code and fixing all the bugs would be nice.
+ It is the cause of a bunch of clobbering. */
+ if ((pi->pt_global_mem || pi->pt_anything)
+ && pi->is_dereferenced && pi->name_mem_tag)
+ {
+ mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
+ MTAG_GLOBAL (pi->name_mem_tag) = true;
+ }
+
+ if ((pi->pt_global_mem || pi->pt_anything)
+ && pi->is_dereferenced && v_ann->type_mem_tag)
+ {
+ mark_call_clobbered (v_ann->type_mem_tag, ESCAPE_IS_GLOBAL);
+ MTAG_GLOBAL (v_ann->type_mem_tag) = true;
+ }
+ }
+}
+
+/* Compute which variables need to be marked call clobbered because
+ their tag is call clobbered, and which tags need to be marked
+ global because they contain global variables. */
+
+static void
+compute_call_clobbered (struct alias_info *ai)
+{
+ VEC (tree, heap) *worklist = NULL;
+ VEC(int,heap) *worklist2 = NULL;
+
+ set_initial_properties (ai);
+ init_transitive_clobber_worklist (&worklist, &worklist2);
+ /* Transitive closure over the alias relation: popping a clobbered
+    tag clobbers its aliases, and any alias that is itself a tag is
+    queued for its own pass.  WORKLIST and WORKLIST2 are parallel
+    stacks of (tag, escape reason), so both are popped together.  */
+ while (VEC_length (tree, worklist) != 0)
+ {
+ tree curr = VEC_pop (tree, worklist);
+ int reason = VEC_pop (int, worklist2);
+
+ mark_call_clobbered (curr, reason);
+ mark_aliases_call_clobbered (curr, &worklist, &worklist2);
+ }
+ VEC_free (tree, heap, worklist);
+ VEC_free (int, heap, worklist2);
+ compute_tag_properties ();
+}
/* Compute may-alias information for every variable referenced in function
FNDECL.
memory tags. */
compute_flow_insensitive_aliasing (ai);
+ /* Determine if we need to enable alias grouping. */
+ if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
+ group_aliases (ai);
+
+ /* Compute call clobbering information. */
+ compute_call_clobbered (ai);
+
/* If the program has too many call-clobbered variables and/or function
calls, create .GLOBAL_VAR and use it to model call-clobbering
semantics at call sites. This reduces the number of virtual operands
var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
bitmap_iterator bi;
- if (pi->value_escapes_p || pi->pt_anything)
- {
- /* If PTR escapes or may point to anything, then its associated
- memory tags and pointed-to variables are call-clobbered. */
- if (pi->name_mem_tag)
- mark_call_clobbered (pi->name_mem_tag);
-
- if (v_ann->type_mem_tag)
- mark_call_clobbered (v_ann->type_mem_tag);
-
- if (pi->pt_vars)
- EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
- mark_call_clobbered (referenced_var (j));
- }
/* Set up aliasing information for PTR's name memory tag (if it has
one). Note that only pointers that have been dereferenced will
add_may_alias (pi->name_mem_tag, referenced_var (j));
add_may_alias (v_ann->type_mem_tag, referenced_var (j));
}
-
- /* If the name tag is call clobbered, so is the type tag
- associated with the base VAR_DECL. */
- if (pi->name_mem_tag
- && v_ann->type_mem_tag
- && is_call_clobbered (pi->name_mem_tag))
- mark_call_clobbered (v_ann->type_mem_tag);
}
}
fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
get_name (current_function_decl),
ai->total_alias_vops);
-
- /* Determine if we need to enable alias grouping. */
- if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
- group_aliases (ai);
}
size_t j;
tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
- varray_type aliases;
+ VEC(tree,gc) *aliases;
+ tree alias;
if (name_tag == NULL_TREE)
continue;
aliases = var_ann (name_tag)->may_aliases;
- for (j = 0; aliases && j < VARRAY_ACTIVE_SIZE (aliases); j++)
+ for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
{
- tree alias = VARRAY_TREE (aliases, j);
var_ann_t ann = var_ann (alias);
if ((!MTAG_P (alias)
{
tree new_alias;
- gcc_assert (VARRAY_ACTIVE_SIZE (ann->may_aliases) == 1);
+ gcc_assert (VEC_length (tree, ann->may_aliases) == 1);
- new_alias = VARRAY_TREE (ann->may_aliases, 0);
+ new_alias = VEC_index (tree, ann->may_aliases, 0);
replace_may_alias (name_tag, j, new_alias);
}
}
if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
bitmap_set_bit (ai->written_vars, DECL_UID (tag));
- /* If pointer VAR is a global variable or a PARM_DECL,
- then its memory tag should be considered a global
- variable. */
- if (TREE_CODE (var) == PARM_DECL || is_global_var (var))
- mark_call_clobbered (tag);
-
/* All the dereferences of pointer VAR count as
references of TAG. Since TAG can be associated with
several pointers, add the dereferences of VAR to the
call-clobbered variables. */
if (global_var && var != global_var)
{
- subvar_t svars;
add_may_alias (var, global_var);
- if (var_can_have_subvars (var)
- && (svars = get_subvars_for_var (var)))
- {
- subvar_t sv;
- for (sv = svars; sv; sv = sv->next)
- mark_sym_for_renaming (sv->var);
- }
+ gcc_assert (!get_subvars_for_var (var));
}
mark_sym_for_renaming (var);
size_t i;
var_ann_t v_ann = get_var_ann (var);
var_ann_t a_ann = get_var_ann (alias);
+ tree al;
/* Don't allow self-referential aliases. */
gcc_assert (var != alias);
#endif
if (v_ann->may_aliases == NULL)
- VARRAY_TREE_INIT (v_ann->may_aliases, 2, "aliases");
+ v_ann->may_aliases = VEC_alloc (tree, gc, 2);
/* Avoid adding duplicates. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (v_ann->may_aliases); i++)
- if (alias == VARRAY_TREE (v_ann->may_aliases, i))
+ for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
+ if (alias == al)
return;
- /* If VAR is a call-clobbered variable, so is its new ALIAS.
- FIXME, call-clobbering should only depend on whether an address
- escapes. It should be independent of aliasing. */
- if (is_call_clobbered (var))
- mark_call_clobbered (alias);
-
- /* Likewise. If ALIAS is call-clobbered, so is VAR. */
- else if (is_call_clobbered (alias))
- mark_call_clobbered (var);
-
- VARRAY_PUSH_TREE (v_ann->may_aliases, alias);
+ VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
a_ann->is_alias_tag = 1;
}
replace_may_alias (tree var, size_t i, tree new_alias)
{
var_ann_t v_ann = var_ann (var);
- VARRAY_TREE (v_ann->may_aliases, i) = new_alias;
-
- /* If VAR is a call-clobbered variable, so is NEW_ALIAS.
- FIXME, call-clobbering should only depend on whether an address
- escapes. It should be independent of aliasing. */
- if (is_call_clobbered (var))
- mark_call_clobbered (new_alias);
-
- /* Likewise. If NEW_ALIAS is call-clobbered, so is VAR. */
- else if (is_call_clobbered (new_alias))
- mark_call_clobbered (var);
+ VEC_replace (tree, v_ann->may_aliases, i, new_alias);
}
3- STMT is an assignment to a non-local variable, or
4- STMT is a return statement.
- AI points to the alias information collected so far. */
+ AI points to the alias information collected so far.
-bool
+ Return the type of escape site found, if we found one, or NO_ESCAPE
+ if none. */
+
+enum escape_type
is_escape_site (tree stmt, struct alias_info *ai)
{
tree call = get_call_expr_in (stmt);
ai->num_calls_found++;
if (!TREE_SIDE_EFFECTS (call))
- ai->num_pure_const_calls_found++;
+ {
+ ai->num_pure_const_calls_found++;
+ return ESCAPE_TO_PURE_CONST;
+ }
- return true;
+ return ESCAPE_TO_CALL;
}
else if (TREE_CODE (stmt) == ASM_EXPR)
- return true;
+ return ESCAPE_TO_ASM;
else if (TREE_CODE (stmt) == MODIFY_EXPR)
{
tree lhs = TREE_OPERAND (stmt, 0);
/* If we couldn't recognize the LHS of the assignment, assume that it
is a non-local store. */
if (lhs == NULL_TREE)
- return true;
+ return ESCAPE_UNKNOWN;
/* If the RHS is a conversion between a pointer and an integer, the
pointer escapes since we can't track the integer. */
&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
(TREE_OPERAND (stmt, 1), 0)))
&& !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
- return true;
+ return ESCAPE_BAD_CAST;
/* If the LHS is an SSA name, it can't possibly represent a non-local
memory store. */
if (TREE_CODE (lhs) == SSA_NAME)
- return false;
+ return NO_ESCAPE;
/* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
local variables we cannot be sure if it will escape, because we
Midkiff, ``Escape analysis for java,'' in Proceedings of the
Conference on Object-Oriented Programming Systems, Languages, and
Applications (OOPSLA), pp. 1-19, 1999. */
- return true;
+ return ESCAPE_STORED_IN_GLOBAL;
}
else if (TREE_CODE (stmt) == RETURN_EXPR)
- return true;
+ return ESCAPE_TO_RETURN;
- return false;
+ return NO_ESCAPE;
}
/* Create a new memory tag of type TYPE.
if (tag == NULL_TREE)
tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
-
- /* If PTR is a PARM_DECL, it points to a global variable or malloc,
- then its name tag should be considered a global variable. */
- if (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
- || pi->pt_global_mem)
- mark_call_clobbered (tag);
-
return tag;
}
TREE_THIS_VOLATILE (global_var) = 0;
TREE_ADDRESSABLE (global_var) = 0;
+ create_var_ann (global_var);
+ mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
add_referenced_tmp_var (global_var);
mark_sym_for_renaming (global_var);
}
void
dump_may_aliases_for (FILE *file, tree var)
{
- varray_type aliases;
+ VEC(tree, gc) *aliases;
if (TREE_CODE (var) == SSA_NAME)
var = SSA_NAME_VAR (var);
if (aliases)
{
size_t i;
+ tree al;
fprintf (file, "{ ");
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
{
- print_generic_expr (file, VARRAY_TREE (aliases, i), dump_flags);
+ print_generic_expr (file, al, dump_flags);
fprintf (file, " ");
}
fprintf (file, "}");
is_aliased_with (tree tag, tree sym)
{
size_t i;
- varray_type aliases;
+ VEC(tree,gc) *aliases;
+ tree al;
if (var_ann (sym)->is_alias_tag)
{
if (aliases == NULL)
return false;
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- if (VARRAY_TREE (aliases, i) == sym)
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ if (al == sym)
return true;
}
else
if (aliases == NULL)
return false;
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- if (VARRAY_TREE (aliases, i) == tag)
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ if (al == tag)
return true;
}
void
add_type_alias (tree ptr, tree var)
{
- varray_type aliases;
- tree tag;
+ VEC(tree, gc) *aliases;
+ tree tag, al;
var_ann_t ann = var_ann (ptr);
subvar_t svars;
VEC (tree, heap) *varvec = NULL;
+ unsigned i;
if (ann->type_mem_tag == NULL_TREE)
{
found_tag:
/* If VAR is not already PTR's type tag, add it to the may-alias set
for PTR's type tag. */
- gcc_assert (!MTAG_P (var_ann (var)->type_mem_tag));
+ gcc_assert (!MTAG_P (var));
tag = ann->type_mem_tag;
/* If VAR has subvars, add the subvars to the tag instead of the
mark_sym_for_renaming (tag);
if ((aliases = var_ann (tag)->may_aliases) != NULL)
{
- size_t i;
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- mark_sym_for_renaming (VARRAY_TREE (aliases, i));
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ mark_sym_for_renaming (al);
}
/* If we had grouped aliases, VAR may have aliases of its own. Mark
aliases of VAR will need to be updated. */
if ((aliases = var_ann (var)->may_aliases) != NULL)
{
- size_t i;
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- mark_sym_for_renaming (VARRAY_TREE (aliases, i));
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ mark_sym_for_renaming (al);
}
VEC_free (tree, heap, varvec);
}
same defs/uses/vdefs/vuses will be found after replacing a reference
to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
is the address of var. */
- varray_type aliases = v_ann->may_aliases;
+ VEC(tree, gc) *aliases = v_ann->may_aliases;
if ((aliases != NULL)
- && (VARRAY_ACTIVE_SIZE (aliases) == 1))
+ && (VEC_length (tree, aliases) == 1))
{
- tree ali = VARRAY_TREE (aliases, 0);
+ tree ali = VEC_index (tree, aliases, 0);
if (TREE_CODE (ali) == TYPE_MEMORY_TAG)
{
add_may_alias (tag, var);
else
{
- size_t i;
+ unsigned i;
+ tree al;
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- add_may_alias (tag, VARRAY_TREE (aliases, i));
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ add_may_alias (tag, al);
}
}
}
variable. Implicit uses occur when we can't tell what part we
are referencing, and have to make conservative assumptions. */
bool implicit_uses;
+ /* True if the structure is only written to or taken its address. */
+ bool write_only;
} *used_part_t;
/* An array of used_part structures, indexed by variable uid. */
up->maxused = 0;
up->explicit_uses = false;
up->implicit_uses = false;
+ up->write_only = true;
}
return up;
}
-/* Create and return a structure sub-variable for field FIELD of
+/* Create and return a structure sub-variable for field type FIELD of
variable VAR. */
static tree
create_sft (tree var, tree field)
{
var_ann_t ann;
- tree subvar = create_tag_raw (STRUCT_FIELD_TAG, TREE_TYPE (field), "SFT");
+ tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
/* We need to copy the various flags from VAR to SUBVAR, so that
they are is_global_var iff the original variable was. */
ann = get_var_ann (subvar);
ann->type_mem_tag = NULL;
add_referenced_tmp_var (subvar);
+ SFT_PARENT_VAR (subvar) = var;
return subvar;
}
used_part_t up;
size_t uid = DECL_UID (var);
- if (!up_lookup (uid))
+ up = up_lookup (uid);
+ if (!up
+ || up->write_only)
return;
- up = up_lookup (uid);
push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
if (VEC_length (fieldoff_s, fieldstack) != 0)
{
for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
{
- if (!DECL_SIZE (fo->field)
- || TREE_CODE (DECL_SIZE (fo->field)) != INTEGER_CST
- || TREE_CODE (TREE_TYPE (fo->field)) == ARRAY_TYPE
+ if (!fo->size
+ || TREE_CODE (fo->size) != INTEGER_CST
|| fo->offset < 0)
{
notokay = true;
HOST_WIDE_INT fosize;
tree currfotype;
- fosize = TREE_INT_CST_LOW (DECL_SIZE (fo->field));
- currfotype = TREE_TYPE (fo->field);
+ fosize = TREE_INT_CST_LOW (fo->size);
+ currfotype = fo->type;
/* If this field isn't in the used portion,
or it has the exact same offset and size as the last
sv->offset = fo->offset;
sv->size = fosize;
sv->next = *subvars;
- sv->var = create_sft (var, fo->field);
+ sv->var = create_sft (var, fo->type);
if (dump_file)
{
entire structure. */
static tree
-find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
+find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
{
switch (TREE_CODE (*tp))
{
+ case MODIFY_EXPR:
+ /* Recurse manually here to track whether the use is in the
+ LHS of an assignment. */
+ find_used_portions (&TREE_OPERAND (*tp, 0), walk_subtrees, tp);
+ return find_used_portions (&TREE_OPERAND (*tp, 1), walk_subtrees, NULL);
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
case COMPONENT_REF:
+ case ARRAY_REF:
{
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitmaxsize;
up->explicit_uses = true;
else
up->implicit_uses = true;
+ if (!lhs_p)
+ up->write_only = false;
up_insert (uid, up);
*walk_subtrees = 0;