}
+/* Return memory reference statistics for variable VAR in function FN.
+ This is computed by alias analysis, but it is not kept
+ incrementally up-to-date. So, these stats are only accurate if
+ pass_may_alias has been run recently. If no alias information
+ exists, this function returns NULL. */
+
+static mem_sym_stats_t
+mem_sym_stats (struct function *fn, tree var)
+{
+ void **slot;
+ struct pointer_map_t *stats_map = gimple_mem_ref_stats (fn)->mem_sym_stats;
+
+ if (stats_map == NULL)
+ return NULL;
+
+ slot = pointer_map_contains (stats_map, var);
+ if (slot == NULL)
+ return NULL;
+
+ return (mem_sym_stats_t) *slot;
+}
+
+
/* Set MPT to be the memory partition associated with symbol SYM. */
static inline void
static void
init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
- VEC (int, heap) **worklist2)
+ VEC (int, heap) **worklist2,
+ bitmap on_worklist)
{
referenced_var_iterator rvi;
tree curr;
if (MTAG_P (curr) && is_call_clobbered (curr))
{
VEC_safe_push (tree, heap, *worklist, curr);
- VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
+ VEC_safe_push (int, heap, *worklist2,
+ var_ann (curr)->escape_mask);
+ bitmap_set_bit (on_worklist, DECL_UID (curr));
}
}
}
static void
add_to_worklist (tree alias, VEC (tree, heap) **worklist,
- VEC (int, heap) **worklist2,
- int reason)
+ VEC (int, heap) **worklist2, int reason,
+ bitmap on_worklist)
{
- if (MTAG_P (alias) && !is_call_clobbered (alias))
+ if (MTAG_P (alias) && !is_call_clobbered (alias)
+ && !bitmap_bit_p (on_worklist, DECL_UID (alias)))
{
VEC_safe_push (tree, heap, *worklist, alias);
VEC_safe_push (int, heap, *worklist2, reason);
+ bitmap_set_bit (on_worklist, DECL_UID (alias));
}
}
static void
mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
- VEC (int, heap) **worklist2)
+ VEC (int, heap) **worklist2,
+ bitmap on_worklist, bitmap queued)
{
bitmap aliases;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
{
entry = referenced_var (i);
- if (!unmodifiable_var_p (entry))
+ /* If you clobber one part of a structure, you
+ clobber the entire thing. While this does not make
+ the world a particularly nice place, it is necessary
+ in order to allow C/C++ tricks that involve
+ pointer arithmetic to work. */
+ if (TREE_CODE (entry) == STRUCT_FIELD_TAG)
+ bitmap_set_bit (queued, DECL_UID (SFT_PARENT_VAR (entry)));
+ else if (!unmodifiable_var_p (entry))
{
- add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
+ add_to_worklist (entry, worklist, worklist2, ta->escape_mask,
+ on_worklist);
mark_call_clobbered (entry, ta->escape_mask);
}
}
+ if (!bitmap_empty_p (queued))
+ {
+ EXECUTE_IF_SET_IN_BITMAP (queued, 0, i, bi)
+ {
+ subvar_t svars = get_subvars_for_var (referenced_var (i));
+ unsigned int i;
+ tree subvar;
+
+ for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
+ if (!unmodifiable_var_p (subvar))
+ mark_call_clobbered (subvar, ta->escape_mask);
+ }
+ bitmap_clear (queued);
+ }
}
/* Tags containing global vars need to be marked as global.
referenced_var_iterator rvi;
tree var;
tree ptr;
+ bitmap queued;
+
+ /* Temporary bitmap to avoid quadratic behavior in marking
+ call clobbers. */
+ queued = BITMAP_ALLOC (&alias_bitmap_obstack);
FOR_EACH_REFERENCED_VAR (var, rvi)
{
bitmap_iterator bi;
unsigned int j;
EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
- if (!unmodifiable_var_p (referenced_var (j)))
- mark_call_clobbered (referenced_var (j), pi->escape_mask);
+ {
+ tree alias = referenced_var (j);
+
+ /* If you clobber one part of a structure, you
+ clobber the entire thing. While this does not make
+ the world a particularly nice place, it is necessary
+ in order to allow C/C++ tricks that involve
+ pointer arithmetic to work. */
+ if (TREE_CODE (alias) == STRUCT_FIELD_TAG)
+ bitmap_set_bit (queued, DECL_UID (SFT_PARENT_VAR (alias)));
+ else if (!unmodifiable_var_p (alias))
+ mark_call_clobbered (alias, pi->escape_mask);
+ }
+ /* Process variables we need to clobber all parts of. */
+ if (!bitmap_empty_p (queued))
+ {
+ EXECUTE_IF_SET_IN_BITMAP (queued, 0, j, bi)
+ {
+ subvar_t svars = get_subvars_for_var (referenced_var (j));
+ unsigned int i;
+ tree subvar;
+
+ for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
+ if (!unmodifiable_var_p (subvar))
+ mark_call_clobbered (subvar, pi->escape_mask);
+ }
+ bitmap_clear (queued);
+ }
}
}
MTAG_GLOBAL (tag) = true;
}
}
+
+ BITMAP_FREE (queued);
}
/* Compute which variables need to be marked call clobbered because
compute_call_clobbered (struct alias_info *ai)
{
VEC (tree, heap) *worklist = NULL;
- VEC(int,heap) *worklist2 = NULL;
-
+ VEC (int,heap) *worklist2 = NULL;
+ bitmap on_worklist, queued;
+
timevar_push (TV_CALL_CLOBBER);
+ on_worklist = BITMAP_ALLOC (NULL);
+ queued = BITMAP_ALLOC (NULL);
+
set_initial_properties (ai);
- init_transitive_clobber_worklist (&worklist, &worklist2);
+ init_transitive_clobber_worklist (&worklist, &worklist2, on_worklist);
while (VEC_length (tree, worklist) != 0)
{
tree curr = VEC_pop (tree, worklist);
int reason = VEC_pop (int, worklist2);
-
+
+ bitmap_clear_bit (on_worklist, DECL_UID (curr));
mark_call_clobbered (curr, reason);
- mark_aliases_call_clobbered (curr, &worklist, &worklist2);
+ mark_aliases_call_clobbered (curr, &worklist, &worklist2,
+ on_worklist, queued);
}
VEC_free (tree, heap, worklist);
VEC_free (int, heap, worklist2);
+ BITMAP_FREE (on_worklist);
+ BITMAP_FREE (queued);
compute_tag_properties ();
timevar_pop (TV_CALL_CLOBBER);
}
}
+/* The list is sorted by increasing partitioning score (PSCORE).
+ This score is computed such that symbols with high scores are
+ those that are least likely to be partitioned. Given a symbol
+ MP->VAR, PSCORE(S) is the result of the following weighted sum
+
+ PSCORE(S) = FW * 64 + FR * 32
+ + DW * 16 + DR * 8
+ + IW * 4 + IR * 2
+ + NO_ALIAS
+
+ where
+
+ FW Execution frequency of writes to S
+ FR Execution frequency of reads from S
+ DW Number of direct writes to S
+ DR Number of direct reads from S
+ IW Number of indirect writes to S
+ IR Number of indirect reads from S
+ NO_ALIAS State of the NO_ALIAS* flags
+
+ The basic idea here is that symbols that are frequently
+ written-to in hot paths of the code are the last to be considered
+ for partitioning. */
+
+static inline long
+mem_sym_score (mem_sym_stats_t mp)
+{
+ /* Unpartitionable SFTs are automatically thrown to the bottom of
+ the list. They are not stored in partitions, but they are used
+ for computing overall statistics. */
+ if (TREE_CODE (mp->var) == STRUCT_FIELD_TAG
+ && SFT_UNPARTITIONABLE_P (mp->var))
+ return LONG_MAX;
+
+ return mp->frequency_writes * 64 + mp->frequency_reads * 32
+ + mp->num_direct_writes * 16 + mp->num_direct_reads * 8
+ + mp->num_indirect_writes * 4 + mp->num_indirect_reads * 2
+ + var_ann (mp->var)->noalias_state;
+}
+
+
/* Dump memory reference stats for function CFUN to FILE. */
void
dump_mem_sym_stats (stderr, var);
}
+/* Dump memory reference stats for variable VAR to FILE.  For use
+   by tree-dfa.c:dump_variable.  */
+
+void
+dump_mem_sym_stats_for_var (FILE *file, tree var)
+{
+ mem_sym_stats_t stats = mem_sym_stats (cfun, var);
+
+ if (stats == NULL)
+ return;
+
+ fprintf (file, ", score: %ld", mem_sym_score (stats));
+ fprintf (file, ", direct reads: %ld", stats->num_direct_reads);
+ fprintf (file, ", direct writes: %ld", stats->num_direct_writes);
+ fprintf (file, ", indirect reads: %ld", stats->num_indirect_reads);
+ fprintf (file, ", indirect writes: %ld", stats->num_indirect_writes);
+}
/* Dump memory reference stats for all memory symbols to FILE. */
}
-/* The list is sorted by increasing partitioning score (PSCORE).
- This score is computed such that symbols with high scores are
- those that are least likely to be partitioned. Given a symbol
- MP->VAR, PSCORE(S) is the result of the following weighted sum
-
- PSCORE(S) = FW * 64 + FR * 32
- + DW * 16 + DR * 8
- + IW * 4 + IR * 2
- + NO_ALIAS
-
- where
-
- FW Execution frequency of writes to S
- FR Execution frequency of reads from S
- DW Number of direct writes to S
- DR Number of direct reads from S
- IW Number of indirect writes to S
- IR Number of indirect reads from S
- NO_ALIAS State of the NO_ALIAS* flags
-
- The basic idea here is that symbols that are frequently
- written-to in hot paths of the code are the last to be considered
- for partitioning. */
-
-static inline long
-pscore (mem_sym_stats_t mp)
-{
- return mp->frequency_writes * 64 + mp->frequency_reads * 32
- + mp->num_direct_writes * 16 + mp->num_direct_reads * 8
- + mp->num_indirect_writes * 4 + mp->num_indirect_reads * 2
- + var_ann (mp->var)->noalias_state;
-}
-
-
/* Given two MP_INFO entries MP1 and MP2, return -1 if MP1->VAR should
be partitioned before MP2->VAR, 0 if they are the same or 1 if
MP1->VAR should be partitioned after MP2->VAR. */
static inline int
compare_mp_info_entries (mem_sym_stats_t mp1, mem_sym_stats_t mp2)
{
- long pscore1 = pscore (mp1);
- long pscore2 = pscore (mp2);
+ long pscore1 = mem_sym_score (mp1);
+ long pscore2 = mem_sym_score (mp2);
if (pscore1 < pscore2)
return -1;
else if (pscore1 > pscore2)
return 1;
else
- return 0;
+ return DECL_UID (mp1->var) - DECL_UID (mp2->var);
}
static void
build_mp_info (struct mem_ref_stats_d *mem_ref_stats,
- VEC(mem_sym_stats_t,heap) **mp_info_p,
- VEC(tree,heap) **tags_p)
+ VEC(mem_sym_stats_t,heap) **mp_info_p,
+ VEC(tree,heap) **tags_p)
{
tree var;
referenced_var_iterator rvi;
if (!need_to_partition_p (mem_ref_stats))
break;
+      /* SFTs that are marked unpartitionable should not be added to
+	 partitions.  These SFTs are special because they mark the
+	 first SFT of the structure that a pointer points into.
+	 This is needed by the operand scanner to find adjacent
+	 fields.  See add_vars_for_offset for details.  */
+ if (TREE_CODE (mp_p->var) == STRUCT_FIELD_TAG
+ && SFT_UNPARTITIONABLE_P (mp_p->var))
+ continue;
+
mpt = find_partition_for (mp_p);
estimate_vop_reduction (mem_ref_stats, mp_p, mpt);
}
virtual operands. However, by reducing the size of the alias
sets to be scanned, the work needed inside the operand scanner is
significantly reduced. */
- new_aliases = BITMAP_ALLOC (NULL);
+ new_aliases = BITMAP_ALLOC (&alias_bitmap_obstack);
for (i = 0; VEC_iterate (tree, tags, i, tag); i++)
{
}
+/* Helper for init_alias_info. Reset existing aliasing information. */
+
+static void
+reset_alias_info (void)
+{
+ referenced_var_iterator rvi;
+ tree var;
+ unsigned i;
+ bitmap active_nmts, all_nmts;
+
+ /* Clear the set of addressable variables. We do not need to clear
+ the TREE_ADDRESSABLE bit on every symbol because we are going to
+ re-compute addressability here. */
+ bitmap_clear (gimple_addressable_vars (cfun));
+
+ active_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);
+ all_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);
+
+ /* Clear flow-insensitive alias information from each symbol. */
+ FOR_EACH_REFERENCED_VAR (var, rvi)
+ {
+ if (is_gimple_reg (var))
+ continue;
+
+ if (MTAG_P (var))
+ MTAG_ALIASES (var) = NULL;
+
+ /* Memory partition information will be computed from scratch. */
+ if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
+ MPT_SYMBOLS (var) = NULL;
+
+      /* Collect all the name tags to determine if we have any
+	 orphaned tags that need to be removed from the IL.  A name tag
+ will be orphaned if it is not associated with any active SSA
+ name. */
+ if (TREE_CODE (var) == NAME_MEMORY_TAG)
+ bitmap_set_bit (all_nmts, DECL_UID (var));
+
+ /* Since we are about to re-discover call-clobbered
+ variables, clear the call-clobbered flag. Variables that
+ are intrinsically call-clobbered (globals, local statics,
+ etc) will not be marked by the aliasing code, so we can't
+ remove them from CALL_CLOBBERED_VARS.
+
+ NB: STRUCT_FIELDS are still call clobbered if they are for a
+ global variable, so we *don't* clear their call clobberedness
+ just because they are tags, though we will clear it if they
+ aren't for global variables. */
+ if (TREE_CODE (var) == NAME_MEMORY_TAG
+ || TREE_CODE (var) == SYMBOL_MEMORY_TAG
+ || TREE_CODE (var) == MEMORY_PARTITION_TAG
+ || !is_global_var (var))
+ clear_call_clobbered (var);
+ }
+
+ /* Clear flow-sensitive points-to information from each SSA name. */
+ for (i = 1; i < num_ssa_names; i++)
+ {
+ tree name = ssa_name (i);
+
+ if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
+ continue;
+
+ if (SSA_NAME_PTR_INFO (name))
+ {
+ struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
+
+ /* Clear all the flags but keep the name tag to
+ avoid creating new temporaries unnecessarily. If
+ this pointer is found to point to a subset or
+ superset of its former points-to set, then a new
+ tag will need to be created in create_name_tags. */
+ pi->pt_anything = 0;
+ pi->pt_null = 0;
+ pi->value_escapes_p = 0;
+ pi->is_dereferenced = 0;
+ if (pi->pt_vars)
+ bitmap_clear (pi->pt_vars);
+
+ /* Add NAME's name tag to the set of active tags. */
+ if (pi->name_mem_tag)
+ bitmap_set_bit (active_nmts, DECL_UID (pi->name_mem_tag));
+ }
+ }
+
+ /* Name memory tags that are no longer associated with an SSA name
+ are considered stale and should be removed from the IL. All the
+ name tags that are in the set ALL_NMTS but not in ACTIVE_NMTS are
+ considered stale and marked for renaming. */
+ bitmap_and_compl_into (all_nmts, active_nmts);
+ mark_set_for_renaming (all_nmts);
+
+ BITMAP_FREE (all_nmts);
+ BITMAP_FREE (active_nmts);
+}
+
+
/* Initialize the data structures used for alias analysis. */
static struct alias_info *
/* If aliases have been computed before, clear existing information. */
if (gimple_aliases_computed_p (cfun))
- {
- unsigned i;
-
- /* Similarly, clear the set of addressable variables. In this
- case, we can just clear the set because addressability is
- only computed here. */
- bitmap_clear (gimple_addressable_vars (cfun));
-
- /* Clear flow-insensitive alias information from each symbol. */
- FOR_EACH_REFERENCED_VAR (var, rvi)
- {
- if (is_gimple_reg (var))
- continue;
-
- if (MTAG_P (var))
- MTAG_ALIASES (var) = NULL;
-
- /* Memory partition information will be computed from scratch. */
- if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
- MPT_SYMBOLS (var) = NULL;
-
- /* Since we are about to re-discover call-clobbered
- variables, clear the call-clobbered flag. Variables that
- are intrinsically call-clobbered (globals, local statics,
- etc) will not be marked by the aliasing code, so we can't
- remove them from CALL_CLOBBERED_VARS.
-
- NB: STRUCT_FIELDS are still call clobbered if they are
- for a global variable, so we *don't* clear their call
- clobberedness just because they are tags, though we will
- clear it if they aren't for global variables. */
- if (TREE_CODE (var) == NAME_MEMORY_TAG
- || TREE_CODE (var) == SYMBOL_MEMORY_TAG
- || TREE_CODE (var) == MEMORY_PARTITION_TAG
- || !is_global_var (var))
- clear_call_clobbered (var);
- }
-
- /* Clear flow-sensitive points-to information from each SSA name. */
- for (i = 1; i < num_ssa_names; i++)
- {
- tree name = ssa_name (i);
-
- if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
- continue;
-
- if (SSA_NAME_PTR_INFO (name))
- {
- struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
-
- /* Clear all the flags but keep the name tag to
- avoid creating new temporaries unnecessarily. If
- this pointer is found to point to a subset or
- superset of its former points-to set, then a new
- tag will need to be created in create_name_tags. */
- pi->pt_anything = 0;
- pi->pt_null = 0;
- pi->value_escapes_p = 0;
- pi->is_dereferenced = 0;
- if (pi->pt_vars)
- bitmap_clear (pi->pt_vars);
- }
- }
- }
+ reset_alias_info ();
else
{
/* If this is the first time we compute aliasing information,
else
{
*slot = pi;
+
/* If we didn't find a pointer with the same points-to set
as PTR, create a new name tag if needed. */
if (pi->name_mem_tag == NULL_TREE)
renaming. */
if (old_name_tag && old_name_tag != pi->name_mem_tag)
mark_sym_for_renaming (old_name_tag);
-
+
+ /* Inherit volatility from the pointed-to type. */
TREE_THIS_VOLATILE (pi->name_mem_tag)
- |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
+ |= TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
/* Mark the new name tag for renaming. */
mark_sym_for_renaming (pi->name_mem_tag);
if (var_can_have_subvars (var)
&& (svars = get_subvars_for_var (var)))
{
- subvar_t sv;
+ unsigned int i;
+ tree subvar;
- for (sv = svars; sv; sv = sv->next)
+ for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
{
if (bitmap_bit_p (gimple_addressable_vars (cfun),
- DECL_UID (sv->var)))
+ DECL_UID (subvar)))
okay_to_mark = false;
- mark_sym_for_renaming (sv->var);
+ mark_sym_for_renaming (subvar);
}
}
So, if we have some pure/const and some regular calls in the
program we create .GLOBAL_VAR to avoid missing these
relations. */
- if (bitmap_count_bits (gimple_call_clobbered_vars (cfun)) == 0
+ if (bitmap_empty_p (gimple_call_clobbered_vars (cfun))
&& stats->num_call_sites > 0
&& stats->num_pure_const_call_sites > 0
&& stats->num_call_sites > stats->num_pure_const_call_sites)
}
if (pi->is_dereferenced)
- fprintf (file, ", is dereferenced (R=%ld, W=%ld)",
- get_mem_sym_stats_for (ptr)->num_direct_reads,
- get_mem_sym_stats_for (ptr)->num_direct_writes);
+ fprintf (file, ", is dereferenced");
if (pi->value_escapes_p)
fprintf (file, ", its value escapes");
aliases = may_aliases (var);
/* Case 1: |aliases| == 1 */
- if (aliases && bitmap_count_bits (aliases) == 1)
+ if (aliases
+ && bitmap_single_bit_set_p (aliases))
{
tree ali = referenced_var (bitmap_first_set_bit (aliases));
if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
HOST_WIDE_INT offset, size, maxsize;
tree ref;
VEC (tree, heap) *overlaps = NULL;
- subvar_t sv;
- unsigned int len;
+ unsigned int len, i;
+ tree subvar;
+
gcc_assert (symbol_mem_tag (ptr) == NULL_TREE);
gcc_assert (!MTAG_P (var));
if (var_can_have_subvars (ref)
&& (svars = get_subvars_for_var (ref)))
{
- for (sv = svars; sv; sv = sv->next)
+ for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
{
bool exact;
- if (overlap_subvar (offset, maxsize, sv->var, &exact))
- VEC_safe_push (tree, heap, overlaps, sv->var);
+ if (overlap_subvar (offset, maxsize, subvar, &exact))
+ VEC_safe_push (tree, heap, overlaps, subvar);
}
gcc_assert (overlaps != NULL);
}
On mem-ssa branch, the scanning for virtual operands have been
split from the rest of tree-ssa-operands, so it should be much
easier to fix this problem correctly once mem-ssa is merged. */
- for (sv = svars; sv; sv = sv->next)
- VEC_safe_push (tree, heap, overlaps, sv->var);
+ for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
+ VEC_safe_push (tree, heap, overlaps, subvar);
gcc_assert (overlaps != NULL);
}
static tree
create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
- unsigned HOST_WIDE_INT size, alias_set_type alias_set)
+ unsigned HOST_WIDE_INT size, alias_set_type alias_set,
+ bool base_for_components)
{
tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
SFT_OFFSET (subvar) = offset;
SFT_SIZE (subvar) = size;
SFT_ALIAS_SET (subvar) = alias_set;
+ SFT_BASE_FOR_COMPONENTS_P (subvar) = base_for_components;
+ SFT_UNPARTITIONABLE_P (subvar) = false;
+
return subvar;
}
push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL,
TREE_TYPE (var));
- if (VEC_length (fieldoff_s, fieldstack) != 0)
+ /* Make sure to not create SFTs for structs we won't generate variable
+ infos for. See tree-ssa-structalias.c:create_variable_info_for (). */
+ if (VEC_length (fieldoff_s, fieldstack) > 1
+ && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
{
subvar_t *subvars;
fieldoff_s *fo;
/* Otherwise, create the variables. */
subvars = lookup_subvars_for_var (var);
-
+ *subvars = VEC_alloc (tree, gc, VEC_length (fieldoff_s, fieldstack));
+
sort_fieldstack (fieldstack);
- for (i = VEC_length (fieldoff_s, fieldstack);
- VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
+ for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); ++i)
{
- subvar_t sv;
HOST_WIDE_INT fosize;
- tree currfotype;
+ tree currfotype, subvar;
fosize = TREE_INT_CST_LOW (fo->size);
currfotype = fo->type;
/* If this field isn't in the used portion,
or it has the exact same offset and size as the last
- field, skip it. */
-
- if (((fo->offset <= up->minused
- && fo->offset + fosize <= up->minused)
- || fo->offset >= up->maxused)
+ field, skip it. Note that we always need the field at
+ offset 0 so we can properly handle pointers to the
+ structure. */
+
+ if ((fo->offset != 0
+ && ((fo->offset <= up->minused
+ && fo->offset + fosize <= up->minused)
+ || fo->offset >= up->maxused))
|| (fo->offset == lastfooffset
&& fosize == lastfosize
&& currfotype == lastfotype))
continue;
- sv = GGC_NEW (struct subvar);
- sv->next = *subvars;
- sv->var =
- create_sft (var, fo->type, fo->offset, fosize, fo->alias_set);
+ subvar = create_sft (var, fo->type, fo->offset,
+ fosize, fo->alias_set, fo->base_for_components);
+ VEC_quick_push (tree, *subvars, subvar);
if (dump_file)
{
fprintf (dump_file, "structure field tag %s created for var %s",
- get_name (sv->var), get_name (var));
+ get_name (subvar), get_name (var));
fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
- SFT_OFFSET (sv->var));
+ SFT_OFFSET (subvar));
fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
- SFT_SIZE (sv->var));
+ SFT_SIZE (subvar));
fprintf (dump_file, "\n");
}
lastfotype = currfotype;
lastfooffset = fo->offset;
lastfosize = fosize;
- *subvars = sv;
}
/* Once we have created subvars, the original is no longer call
for (i = 0; i < nargs; i++)
{
tree *arg = &CALL_EXPR_ARG (*tp, i);
- if (TREE_CODE (*arg) != ADDR_EXPR)
+ if (TREE_CODE (*arg) == ADDR_EXPR)
find_used_portions (arg, walk_subtrees, NULL);
}
*walk_subtrees = 0;
FOR_EACH_BB (bb)
{
block_stmt_iterator bsi;
+ tree phi;
+
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ use_operand_p use;
+ ssa_op_iter iter;
+
+ FOR_EACH_PHI_ARG (use, phi, iter, SSA_OP_USE)
+ {
+ tree op = USE_FROM_PTR (use);
+ walk_tree_without_duplicates (&op, find_used_portions,
+ NULL);
+ }
+ }
+
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
{
walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
tree sym = referenced_var_lookup (i);
if (get_subvars_for_var (sym))
{
- update=true;
+ update = true;
break;
}
}
tree sym = referenced_var_lookup (i);
if (get_subvars_for_var (sym))
{
- update=true;
+ update = true;
break;
}
}
tree sym = referenced_var_lookup (i);
if (get_subvars_for_var (sym))
{
- update=true;
+ update = true;
break;
}
}