You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
+#include "tree-ssa-structalias.h"
#include "convert.h"
#include "params.h"
+#include "ipa-type-escape.h"
#include "vec.h"
+#include "bitmap.h"
+
+/* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
+ aliasing */
+static bitmap_obstack alias_obstack;
/* 'true' after aliases have been computed (see compute_may_aliases). */
bool aliases_computed_p;
/* Set of variables aliased with VAR. This is the exact same
information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
bitmap form to speed up alias grouping. */
- sbitmap may_aliases;
-};
-
-
-/* Alias information used by compute_may_aliases and its helpers. */
-struct alias_info
-{
- /* SSA names visited while collecting points-to information. If bit I
- is set, it means that SSA variable with version I has already been
- visited. */
- sbitmap ssa_names_visited;
-
- /* Array of SSA_NAME pointers processed by the points-to collector. */
- varray_type processed_ptrs;
-
- /* Variables whose address is still needed. */
- bitmap addresses_needed;
-
- /* ADDRESSABLE_VARS contains all the global variables and locals that
- have had their address taken. */
- struct alias_map_d **addressable_vars;
- size_t num_addressable_vars;
-
- /* POINTERS contains all the _DECL pointers with unique memory tags
- that have been referenced in the program. */
- struct alias_map_d **pointers;
- size_t num_pointers;
-
- /* Number of function calls found in the program. */
- size_t num_calls_found;
-
- /* Number of const/pure function calls found in the program. */
- size_t num_pure_const_calls_found;
-
- /* Array of counters to keep track of how many times each pointer has
- been dereferenced in the program. This is used by the alias grouping
- heuristic in compute_flow_insensitive_aliasing. */
- varray_type num_references;
-
- /* Total number of virtual operands that will be needed to represent
- all the aliases of all the pointers found in the program. */
- long total_alias_vops;
-
- /* Variables that have been written to. */
- bitmap written_vars;
-
- /* Pointers that have been used in an indirect store operation. */
- bitmap dereferenced_ptrs_store;
-
- /* Pointers that have been used in an indirect load operation. */
- bitmap dereferenced_ptrs_load;
+ bitmap may_aliases;
};
unsigned int simple_resolved;
unsigned int tbaa_queries;
unsigned int tbaa_resolved;
+ unsigned int structnoaddress_queries;
+ unsigned int structnoaddress_resolved;
};
/* Local functions. */
static void compute_flow_insensitive_aliasing (struct alias_info *);
static void dump_alias_stats (FILE *);
-static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT);
+static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
static tree create_memory_tag (tree type, bool is_type_tag);
static tree get_tmt_for (tree, struct alias_info *);
static tree get_nmt_for (tree);
static void replace_may_alias (tree, size_t, tree);
static struct alias_info *init_alias_info (void);
static void delete_alias_info (struct alias_info *);
-static void compute_points_to_and_addr_escape (struct alias_info *);
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
-static bool collect_points_to_info_r (tree, tree, void *);
-static bool is_escape_site (tree, struct alias_info *);
-static void add_pointed_to_var (struct alias_info *, tree, tree);
static void create_global_var (void);
-static void collect_points_to_info_for (struct alias_info *, tree);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
static void set_pt_anything (tree ptr);
-static void set_pt_malloc (tree ptr);
/* Global declarations. */
having to keep track of too many V_MAY_DEF expressions at call sites. */
tree global_var;
+DEF_VEC_I(int);
+DEF_VEC_ALLOC_I(int,heap);
+
+/* qsort comparison function to sort type/name tags by DECL_UID.
+ Gives the tag list a stable, sequential ordering. */
+
+static int
+sort_tags_by_id (const void *pa, const void *pb)
+{
+ tree a = *(tree *)pa;
+ tree b = *(tree *)pb;
+
+ /* NOTE(review): assumes DECL_UIDs are small enough that this
+ subtraction cannot overflow -- confirm. */
+ return DECL_UID (a) - DECL_UID (b);
+}
+
+/* Initialize WORKLIST to contain those memory tags that are marked call
+ clobbered. Initialize WORKLIST2 to contain the reasons these
+ memory tags escaped. */
+
+static void
+init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
+ VEC (int, heap) **worklist2)
+{
+ referenced_var_iterator rvi;
+ tree curr;
+
+ /* WORKLIST and WORKLIST2 are kept parallel: entry I of WORKLIST2 is
+ the escape mask for the tag at entry I of WORKLIST. */
+ FOR_EACH_REFERENCED_VAR (curr, rvi)
+ {
+ if (MTAG_P (curr) && is_call_clobbered (curr))
+ {
+ VEC_safe_push (tree, heap, *worklist, curr);
+ VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
+ }
+ }
+}
+
+/* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
+ ALIAS is not already marked call clobbered, and is a memory
+ tag. Non-tag aliases and already-clobbered tags are ignored so the
+ transitive walk in compute_call_clobbered terminates. */
+
+static void
+add_to_worklist (tree alias, VEC (tree, heap) **worklist,
+ VEC (int, heap) **worklist2,
+ int reason)
+{
+ /* Push to both vectors together to preserve the parallel-index
+ invariant between WORKLIST and WORKLIST2. */
+ if (MTAG_P (alias) && !is_call_clobbered (alias))
+ {
+ VEC_safe_push (tree, heap, *worklist, alias);
+ VEC_safe_push (int, heap, *worklist2, reason);
+ }
+}
+
+/* Mark aliases of TAG as call clobbered, and place any tags on the
+ alias list that were not already call clobbered on WORKLIST.
+ WORKLIST2 receives the matching escape reason for each pushed tag. */
+
+static void
+mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
+ VEC (int, heap) **worklist2)
+{
+ unsigned int i;
+ VEC (tree, gc) *ma;
+ tree entry;
+ var_ann_t ta = var_ann (tag);
+
+ /* Only memory tags carry may-alias lists we propagate through. */
+ if (!MTAG_P (tag))
+ return;
+ ma = may_aliases (tag);
+ if (!ma)
+ return;
+
+ for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
+ {
+ /* Unmodifiable variables (e.g. read-only) are never clobbered. */
+ if (!unmodifiable_var_p (entry))
+ {
+ add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
+ mark_call_clobbered (entry, ta->escape_mask);
+ }
+ }
+}
+
+/* Tags containing global vars need to be marked as global.
+ Tags containing call clobbered vars need to be marked as call
+ clobbered. This iterates to a fixed point because tags may appear
+ in the may-aliases lists of other tags. */
+
+static void
+compute_tag_properties (void)
+{
+ referenced_var_iterator rvi;
+ tree tag;
+ bool changed = true;
+ VEC (tree, heap) *taglist = NULL;
+
+ /* Collect all memory tags except structure field tags, which are
+ handled separately. */
+ FOR_EACH_REFERENCED_VAR (tag, rvi)
+ {
+ if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
+ continue;
+ VEC_safe_push (tree, heap, taglist, tag);
+ }
+
+ /* We sort the taglist by DECL_UID, for two reasons.
+ 1. To get a sequential ordering to make the bitmap accesses
+ faster.
+ 2. Because of the way we compute aliases, it's more likely that
+ an earlier tag is included in a later tag, and this will reduce
+ the number of iterations.
+
+ If we had a real tag graph, we would just topo-order it and be
+ done with it. */
+ qsort (VEC_address (tree, taglist),
+ VEC_length (tree, taglist),
+ sizeof (tree),
+ sort_tags_by_id);
+
+ /* Go through each tag not marked as global, and if it aliases
+ global vars, mark it global.
+
+ If the tag contains call clobbered vars, mark it call
+ clobbered.
+
+ This loop iterates because tags may appear in the may-aliases
+ list of other tags when we group. */
+
+ while (changed)
+ {
+ unsigned int k;
+
+ changed = false;
+ for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
+ {
+ VEC (tree, gc) *ma;
+ unsigned int i;
+ tree entry;
+ bool tagcc = is_call_clobbered (tag);
+ bool tagglobal = MTAG_GLOBAL (tag);
+
+ /* Nothing left to learn about this tag. */
+ if (tagcc && tagglobal)
+ continue;
+
+ ma = may_aliases (tag);
+ if (!ma)
+ continue;
+
+ for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
+ {
+ /* Call clobbered entries cause the tag to be marked
+ call clobbered. */
+ if (!tagcc && is_call_clobbered (entry))
+ {
+ mark_call_clobbered (tag, var_ann (entry)->escape_mask);
+ tagcc = true;
+ changed = true;
+ }
+
+ /* Global vars cause the tag to be marked global. */
+ if (!tagglobal && is_global_var (entry))
+ {
+ MTAG_GLOBAL (tag) = true;
+ changed = true;
+ tagglobal = true;
+ }
+
+ /* Early exit once both global and cc are set, since the
+ loop can't do any more than that. */
+ if (tagcc && tagglobal)
+ break;
+ }
+ }
+ }
+ VEC_free (tree, heap, taglist);
+}
+
+/* Set up the initial variable clobbers and globalness.
+ When this function completes, only tags whose aliases need to be
+ clobbered will be set clobbered. Tags clobbered because they
+ contain call clobbered vars are handled in compute_tag_properties. */
+
+static void
+set_initial_properties (struct alias_info *ai)
+{
+ unsigned int i;
+ referenced_var_iterator rvi;
+ tree var;
+
+ /* Globals without subvars are clobbered directly; incoming pointer
+ arguments are treated as escaping via ESCAPE_IS_PARM. */
+ FOR_EACH_REFERENCED_VAR (var, rvi)
+ {
+ if (is_global_var (var)
+ && (!var_can_have_subvars (var)
+ || get_subvars_for_var (var) == NULL))
+ {
+ if (!unmodifiable_var_p (var))
+ mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
+ }
+ else if (TREE_CODE (var) == PARM_DECL
+ && default_def (var)
+ && POINTER_TYPE_P (TREE_TYPE (var)))
+ {
+ tree def = default_def (var);
+ get_ptr_info (def)->value_escapes_p = 1;
+ get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
+ }
+ }
+
+ /* Propagate escape information from each processed pointer to its
+ memory tags and points-to set. */
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
+ {
+ tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
+ struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
+ var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
+
+ if (pi->value_escapes_p)
+ {
+ /* If PTR escapes then its associated memory tags and
+ pointed-to variables are call-clobbered. */
+ if (pi->name_mem_tag)
+ mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
+
+ if (v_ann->type_mem_tag)
+ mark_call_clobbered (v_ann->type_mem_tag, pi->escape_mask);
+
+ if (pi->pt_vars)
+ {
+ bitmap_iterator bi;
+ unsigned int j;
+ EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
+ if (!unmodifiable_var_p (referenced_var (j)))
+ mark_call_clobbered (referenced_var (j), pi->escape_mask);
+ }
+ }
+ /* If the name tag is call clobbered, so is the type tag
+ associated with the base VAR_DECL. */
+ if (pi->name_mem_tag
+ && v_ann->type_mem_tag
+ && is_call_clobbered (pi->name_mem_tag))
+ mark_call_clobbered (v_ann->type_mem_tag, pi->escape_mask);
+
+ /* Name tags and type tags that we don't know where they point
+ to, might point to global memory, and thus, are clobbered.
+
+ FIXME: This is not quite right. They should only be
+ clobbered if value_escapes_p is true, regardless of whether
+ they point to global memory or not.
+ So removing this code and fixing all the bugs would be nice.
+ It is the cause of a bunch of clobbering. */
+ if ((pi->pt_global_mem || pi->pt_anything)
+ && pi->is_dereferenced && pi->name_mem_tag)
+ {
+ mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
+ MTAG_GLOBAL (pi->name_mem_tag) = true;
+ }
+
+ if ((pi->pt_global_mem || pi->pt_anything)
+ && pi->is_dereferenced && v_ann->type_mem_tag)
+ {
+ mark_call_clobbered (v_ann->type_mem_tag, ESCAPE_IS_GLOBAL);
+ MTAG_GLOBAL (v_ann->type_mem_tag) = true;
+ }
+ }
+}
+
+/* This variable is set to true if we are updating the used alone
+ information for TMT's, or are in a pass that is going to break it
+ temporarily. */
+
+bool updating_used_alone;
+
+/* Compute which variables need to be marked call clobbered because
+ their tag is call clobbered, and which tags need to be marked
+ global because they contain global variables.
+
+ Driver: seeds the initial clobbers (set_initial_properties),
+ propagates transitively through may-alias lists via the worklist,
+ then fixes up tag globalness (compute_tag_properties). */
+
+static void
+compute_call_clobbered (struct alias_info *ai)
+{
+ VEC (tree, heap) *worklist = NULL;
+ VEC(int,heap) *worklist2 = NULL;
+
+ set_initial_properties (ai);
+ init_transitive_clobber_worklist (&worklist, &worklist2);
+ while (VEC_length (tree, worklist) != 0)
+ {
+ /* The two worklists are parallel; pop them in lockstep. */
+ tree curr = VEC_pop (tree, worklist);
+ int reason = VEC_pop (int, worklist2);
+
+ mark_call_clobbered (curr, reason);
+ mark_aliases_call_clobbered (curr, &worklist, &worklist2);
+ }
+ VEC_free (tree, heap, worklist);
+ VEC_free (int, heap, worklist2);
+ compute_tag_properties ();
+}
+
+
+/* Recalculate the used_alone information for TMT's . */
+void
+recalculate_used_alone (void)
+{
+ VEC (tree, heap) *calls = NULL;
+ block_stmt_iterator bsi;
+ basic_block bb;
+ tree stmt;
+ size_t i;
+ referenced_var_iterator rvi;
+ tree var;
+
+ /* First, reset all the TMT used alone bits to zero. */
+ updating_used_alone = true;
+ FOR_EACH_REFERENCED_VAR (var, rvi)
+ if (TREE_CODE (var) == TYPE_MEMORY_TAG)
+ TMT_USED_ALONE (var) = 0;
+
+ /* Walk all the statements.
+ Calls get put into a list of statements to update, since we will
+ need to update operands on them if we make any changes.
+ If we see a bare use of a TMT anywhere in a real virtual use or virtual
+ def, mark the TMT as used alone, and for renaming. */
+
+
+ FOR_EACH_BB (bb)
+ {
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ stmt = bsi_stmt (bsi);
+ if (TREE_CODE (stmt) == CALL_EXPR
+ || (TREE_CODE (stmt) == MODIFY_EXPR
+ && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
+ VEC_safe_push (tree, heap, calls, stmt);
+ else
+ {
+ ssa_op_iter iter;
+
+ FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
+ SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS)
+ {
+ tree svar = var;
+
+ if(TREE_CODE (var) == SSA_NAME)
+ svar = SSA_NAME_VAR (var);
+
+ if (TREE_CODE (svar) == TYPE_MEMORY_TAG)
+ {
+ if (!TMT_USED_ALONE (svar))
+ {
+ TMT_USED_ALONE (svar) = true;
+ mark_sym_for_renaming (svar);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ /* Update the operands on all the calls we saw. */
+ if (calls)
+ {
+ for (i = 0; VEC_iterate (tree, calls, i, stmt); i++)
+ update_stmt (stmt);
+ }
+ VEC_free (tree, heap, calls);
+ updating_used_alone = false;
+}
/* Compute may-alias information for every variable referenced in function
FNDECL.
address of V escapes the current function, making V call-clobbered
(i.e., whether &V is stored in a global variable or if its passed as a
function call argument). */
- compute_points_to_and_addr_escape (ai);
+ compute_points_to_sets (ai);
/* Collect all pointers and addressable variables, compute alias sets,
create memory tags for pointers and promote variables whose address is
memory tags. */
compute_flow_insensitive_aliasing (ai);
+ /* Determine if we need to enable alias grouping. */
+ if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
+ group_aliases (ai);
+
+ /* Compute call clobbering information. */
+ compute_call_clobbered (ai);
+
/* If the program has too many call-clobbered variables and/or function
calls, create .GLOBAL_VAR and use it to model call-clobbering
semantics at call sites. This reduces the number of virtual operands
/* Deallocate memory used by aliasing data structures. */
delete_alias_info (ai);
+ updating_used_alone = true;
{
block_stmt_iterator bsi;
basic_block bb;
}
}
}
-
+ recalculate_used_alone ();
+ updating_used_alone = false;
}
+
struct tree_opt_pass pass_may_alias =
{
"alias", /* name */
(ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
static tree
-count_ptr_derefs (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
+count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
struct count_ptr_d *count_p = (struct count_ptr_d *) data;
+ /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
+ pointer 'ptr' is *not* dereferenced, it is simply used to compute
+ the address of 'fld' as 'ptr + offsetof(fld)'. */
+ if (TREE_CODE (*tp) == ADDR_EXPR)
+ {
+ *walk_subtrees = 0;
+ return NULL_TREE;
+ }
+
if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
count_p->count++;
gcc_assert (*num_uses_p >= *num_derefs_p);
}
-
/* Initialize the data structures used for alias analysis. */
static struct alias_info *
init_alias_info (void)
{
struct alias_info *ai;
+ referenced_var_iterator rvi;
+ tree var;
- ai = xcalloc (1, sizeof (struct alias_info));
+ bitmap_obstack_initialize (&alias_obstack);
+ ai = XCNEW (struct alias_info);
ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
sbitmap_zero (ai->ssa_names_visited);
VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
- ai->addresses_needed = BITMAP_ALLOC (NULL);
- VARRAY_UINT_INIT (ai->num_references, num_referenced_vars, "num_references");
- ai->written_vars = BITMAP_ALLOC (NULL);
- ai->dereferenced_ptrs_store = BITMAP_ALLOC (NULL);
- ai->dereferenced_ptrs_load = BITMAP_ALLOC (NULL);
+ ai->written_vars = BITMAP_ALLOC (&alias_obstack);
+ ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
+ ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);
/* If aliases have been computed before, clear existing information. */
if (aliases_computed_p)
bitmap_clear (addressable_vars);
/* Clear flow-insensitive alias information from each symbol. */
- for (i = 0; i < num_referenced_vars; i++)
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
var_ann_t ann = var_ann (var);
-
- ann->is_alias_tag = 0;
+
+ ann->is_aliased = 0;
ann->may_aliases = NULL;
+ NUM_REFERENCES_CLEAR (ann);
/* Since we are about to re-discover call-clobbered
variables, clear the call-clobbered flag. Variables that
a global variable, so we *don't* clear their call clobberedness
just because they are tags, though we will clear it if they
aren't for global variables. */
- if (ann->mem_tag_kind == NAME_TAG
- || ann->mem_tag_kind == TYPE_TAG
+ if (TREE_CODE (var) == NAME_MEMORY_TAG
+ || TREE_CODE (var) == TYPE_MEMORY_TAG
|| !is_global_var (var))
clear_call_clobbered (var);
}
superset of its former points-to set, then a new
tag will need to be created in create_name_tags. */
pi->pt_anything = 0;
- pi->pt_malloc = 0;
pi->pt_null = 0;
pi->value_escapes_p = 0;
pi->is_dereferenced = 0;
delete_alias_info (struct alias_info *ai)
{
size_t i;
+ referenced_var_iterator rvi;
+ tree var;
sbitmap_free (ai->ssa_names_visited);
ai->processed_ptrs = NULL;
- BITMAP_FREE (ai->addresses_needed);
for (i = 0; i < ai->num_addressable_vars; i++)
+ free (ai->addressable_vars[i]);
+
+ FOR_EACH_REFERENCED_VAR(var, rvi)
{
- sbitmap_free (ai->addressable_vars[i]->may_aliases);
- free (ai->addressable_vars[i]);
+ var_ann_t ann = var_ann (var);
+ NUM_REFERENCES_CLEAR (ann);
}
+
free (ai->addressable_vars);
for (i = 0; i < ai->num_pointers; i++)
- {
- sbitmap_free (ai->pointers[i]->may_aliases);
- free (ai->pointers[i]);
- }
+ free (ai->pointers[i]);
free (ai->pointers);
- ai->num_references = NULL;
BITMAP_FREE (ai->written_vars);
BITMAP_FREE (ai->dereferenced_ptrs_store);
BITMAP_FREE (ai->dereferenced_ptrs_load);
-
+ bitmap_obstack_release (&alias_obstack);
free (ai);
-}
-
-/* Walk use-def chains for pointer PTR to determine what variables is PTR
- pointing to. */
-
-static void
-collect_points_to_info_for (struct alias_info *ai, tree ptr)
-{
- gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
-
- if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (ptr)))
- {
- SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (ptr));
- walk_use_def_chains (ptr, collect_points_to_info_r, ai, true);
- VARRAY_PUSH_TREE (ai->processed_ptrs, ptr);
- }
+ delete_points_to_sets ();
}
-
-/* Traverse use-def links for all the pointers in the program to collect
- address escape and points-to information.
-
- This is loosely based on the same idea described in R. Hasti and S.
- Horwitz, ``Using static single assignment form to improve
- flow-insensitive pointer analysis,'' in SIGPLAN Conference on
- Programming Language Design and Implementation, pp. 97-105, 1998. */
-
-static void
-compute_points_to_and_addr_escape (struct alias_info *ai)
-{
- basic_block bb;
- unsigned i;
- tree op;
- ssa_op_iter iter;
-
- timevar_push (TV_TREE_PTA);
-
- FOR_EACH_BB (bb)
- {
- bb_ann_t block_ann = bb_ann (bb);
- block_stmt_iterator si;
-
- for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
- {
- bitmap addr_taken;
- tree stmt = bsi_stmt (si);
- bool stmt_escapes_p = is_escape_site (stmt, ai);
- bitmap_iterator bi;
-
- /* Mark all the variables whose address are taken by the
- statement. Note that this will miss all the addresses taken
- in PHI nodes (those are discovered while following the use-def
- chains). */
- addr_taken = addresses_taken (stmt);
- if (addr_taken)
- EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
- {
- tree var = referenced_var (i);
- bitmap_set_bit (ai->addresses_needed, var_ann (var)->uid);
- if (stmt_escapes_p)
- mark_call_clobbered (var);
- }
-
- if (stmt_escapes_p)
- block_ann->has_escape_site = 1;
-
- FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
- {
- var_ann_t v_ann = var_ann (SSA_NAME_VAR (op));
- struct ptr_info_def *pi;
- bool is_store;
- unsigned num_uses, num_derefs;
-
- /* If the operand's variable may be aliased, keep track
- of how many times we've referenced it. This is used
- for alias grouping in compute_flow_sensitive_aliasing.
- Note that we don't need to grow AI->NUM_REFERENCES
- because we are processing regular variables, not
- memory tags (the array's initial size is set to
- NUM_REFERENCED_VARS). */
- if (may_be_aliased (SSA_NAME_VAR (op)))
- (VARRAY_UINT (ai->num_references, v_ann->uid))++;
-
- if (!POINTER_TYPE_P (TREE_TYPE (op)))
- continue;
-
- collect_points_to_info_for (ai, op);
-
- pi = SSA_NAME_PTR_INFO (op);
- count_uses_and_derefs (op, stmt, &num_uses, &num_derefs,
- &is_store);
-
- if (num_derefs > 0)
- {
- /* Mark OP as dereferenced. In a subsequent pass,
- dereferenced pointers that point to a set of
- variables will be assigned a name tag to alias
- all the variables OP points to. */
- pi->is_dereferenced = 1;
-
- /* Keep track of how many time we've dereferenced each
- pointer. Again, we don't need to grow
- AI->NUM_REFERENCES because we're processing
- existing program variables. */
- (VARRAY_UINT (ai->num_references, v_ann->uid))++;
-
- /* If this is a store operation, mark OP as being
- dereferenced to store, otherwise mark it as being
- dereferenced to load. */
- if (is_store)
- bitmap_set_bit (ai->dereferenced_ptrs_store, v_ann->uid);
- else
- bitmap_set_bit (ai->dereferenced_ptrs_load, v_ann->uid);
- }
-
- if (stmt_escapes_p && num_derefs < num_uses)
- {
- /* If STMT is an escape point and STMT contains at
- least one direct use of OP, then the value of OP
- escapes and so the pointed-to variables need to
- be marked call-clobbered. */
- pi->value_escapes_p = 1;
-
- /* If the statement makes a function call, assume
- that pointer OP will be dereferenced in a store
- operation inside the called function. */
- if (get_call_expr_in (stmt))
- {
- bitmap_set_bit (ai->dereferenced_ptrs_store, v_ann->uid);
- pi->is_dereferenced = 1;
- }
- }
- }
-
- /* Update reference counter for definitions to any
- potentially aliased variable. This is used in the alias
- grouping heuristics. */
- FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
- {
- tree var = SSA_NAME_VAR (op);
- var_ann_t ann = var_ann (var);
- bitmap_set_bit (ai->written_vars, ann->uid);
- if (may_be_aliased (var))
- (VARRAY_UINT (ai->num_references, ann->uid))++;
-
- if (POINTER_TYPE_P (TREE_TYPE (op)))
- collect_points_to_info_for (ai, op);
- }
-
- /* Mark variables in V_MAY_DEF operands as being written to. */
- FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
- {
- tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
- var_ann_t ann = var_ann (var);
- bitmap_set_bit (ai->written_vars, ann->uid);
- }
-
- /* After promoting variables and computing aliasing we will
- need to re-scan most statements. FIXME: Try to minimize the
- number of statements re-scanned. It's not really necessary to
- re-scan *all* statements. */
- mark_stmt_modified (stmt);
- }
- }
-
- timevar_pop (TV_TREE_PTA);
-}
-
-
/* Create name tags for all the pointers that have been dereferenced.
We only create a name tag for a pointer P if P is found to point to
a set of variables (so that we can alias them to *P) or if it is
are assigned the same name tag. */
static void
-create_name_tags (struct alias_info *ai)
+create_name_tags (void)
{
size_t i;
+ VEC (tree, heap) *with_ptvars = NULL;
+ tree ptr;
- for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
+ /* Collect the list of pointers with a non-empty points to set. */
+ for (i = 1; i < num_ssa_names; i++)
{
- tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
- struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
+ tree ptr = ssa_name (i);
+ struct ptr_info_def *pi;
+
+ if (!ptr
+ || !POINTER_TYPE_P (TREE_TYPE (ptr))
+ || !SSA_NAME_PTR_INFO (ptr))
+ continue;
+
+ pi = SSA_NAME_PTR_INFO (ptr);
if (pi->pt_anything || !pi->is_dereferenced)
{
continue;
}
- if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
+ /* Set pt_anything on the pointers without pt_vars filled in so
+ that they are assigned a type tag. */
+
+ if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
+ VEC_safe_push (tree, heap, with_ptvars, ptr);
+ else
+ set_pt_anything (ptr);
+ }
+
+ /* If we didn't find any pointers with pt_vars set, we're done. */
+ if (!with_ptvars)
+ return;
+
+ /* Now go through the pointers with pt_vars, and find a name tag
+ with the same pt_vars as this pointer, or create one if one
+ doesn't exist. */
+ for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
+ {
+ struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
+ size_t j;
+ tree ptr2;
+ tree old_name_tag = pi->name_mem_tag;
+
+ /* If PTR points to a set of variables, check if we don't
+ have another pointer Q with the same points-to set before
+ creating a tag. If so, use Q's tag instead of creating a
+ new one.
+
+ This is important for not creating unnecessary symbols
+ and also for copy propagation. If we ever need to
+ propagate PTR into Q or vice-versa, we would run into
+ problems if they both had different name tags because
+ they would have different SSA version numbers (which
+ would force us to take the name tags in and out of SSA). */
+ for (j = 0; j < i && VEC_iterate (tree, with_ptvars, j, ptr2); j++)
{
- size_t j;
- tree old_name_tag = pi->name_mem_tag;
-
- /* If PTR points to a set of variables, check if we don't
- have another pointer Q with the same points-to set before
- creating a tag. If so, use Q's tag instead of creating a
- new one.
-
- This is important for not creating unnecessary symbols
- and also for copy propagation. If we ever need to
- propagate PTR into Q or vice-versa, we would run into
- problems if they both had different name tags because
- they would have different SSA version numbers (which
- would force us to take the name tags in and out of SSA). */
- for (j = 0; j < i; j++)
+ struct ptr_info_def *qi = SSA_NAME_PTR_INFO (ptr2);
+
+ if (bitmap_equal_p (pi->pt_vars, qi->pt_vars))
{
- tree q = VARRAY_TREE (ai->processed_ptrs, j);
- struct ptr_info_def *qi = SSA_NAME_PTR_INFO (q);
-
- if (qi
- && qi->pt_vars
- && qi->name_mem_tag
- && bitmap_equal_p (pi->pt_vars, qi->pt_vars))
- {
- pi->name_mem_tag = qi->name_mem_tag;
- break;
- }
+ pi->name_mem_tag = qi->name_mem_tag;
+ break;
}
-
- /* If we didn't find a pointer with the same points-to set
- as PTR, create a new name tag if needed. */
- if (pi->name_mem_tag == NULL_TREE)
- pi->name_mem_tag = get_nmt_for (ptr);
-
- /* If the new name tag computed for PTR is different than
- the old name tag that it used to have, then the old tag
- needs to be removed from the IL, so we mark it for
- renaming. */
- if (old_name_tag && old_name_tag != pi->name_mem_tag)
- mark_sym_for_renaming (old_name_tag);
}
- else if (pi->pt_malloc)
- {
- /* Otherwise, create a unique name tag for this pointer. */
- pi->name_mem_tag = get_nmt_for (ptr);
- }
- else
- {
- /* Only pointers that may point to malloc or other variables
- may receive a name tag. If the pointer does not point to
- a known spot, we should use type tags. */
- set_pt_anything (ptr);
- continue;
- }
-
+
+ /* If we didn't find a pointer with the same points-to set
+ as PTR, create a new name tag if needed. */
+ if (pi->name_mem_tag == NULL_TREE)
+ pi->name_mem_tag = get_nmt_for (ptr);
+
+ /* If the new name tag computed for PTR is different than
+ the old name tag that it used to have, then the old tag
+ needs to be removed from the IL, so we mark it for
+ renaming. */
+ if (old_name_tag && old_name_tag != pi->name_mem_tag)
+ mark_sym_for_renaming (old_name_tag);
+
TREE_THIS_VOLATILE (pi->name_mem_tag)
- |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
-
+ |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
+
/* Mark the new name tag for renaming. */
mark_sym_for_renaming (pi->name_mem_tag);
}
-}
+ VEC_free (tree, heap, with_ptvars);
+}
/* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
compute_flow_sensitive_aliasing (struct alias_info *ai)
{
size_t i;
+
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
+ {
+ tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
+ if (!find_what_p_points_to (ptr))
+ set_pt_anything (ptr);
+ }
- create_name_tags (ai);
+ create_name_tags ();
for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
{
var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
bitmap_iterator bi;
- if (pi->value_escapes_p || pi->pt_anything)
- {
- /* If PTR escapes or may point to anything, then its associated
- memory tags and pointed-to variables are call-clobbered. */
- if (pi->name_mem_tag)
- mark_call_clobbered (pi->name_mem_tag);
-
- if (v_ann->type_mem_tag)
- mark_call_clobbered (v_ann->type_mem_tag);
-
- if (pi->pt_vars)
- EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
- {
- mark_call_clobbered (referenced_var (j));
- }
- }
/* Set up aliasing information for PTR's name memory tag (if it has
one). Note that only pointers that have been dereferenced will
add_may_alias (pi->name_mem_tag, referenced_var (j));
add_may_alias (v_ann->type_mem_tag, referenced_var (j));
}
-
- /* If the name tag is call clobbered, so is the type tag
- associated with the base VAR_DECL. */
- if (pi->name_mem_tag
- && v_ann->type_mem_tag
- && is_call_clobbered (pi->name_mem_tag))
- mark_call_clobbered (v_ann->type_mem_tag);
}
}
var_ann_t tag_ann = var_ann (tag);
p_map->total_alias_vops = 0;
- p_map->may_aliases = sbitmap_alloc (num_referenced_vars);
- sbitmap_zero (p_map->may_aliases);
+ p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
for (j = 0; j < ai->num_addressable_vars; j++)
{
So we first check the call_clobbered status of the
tag and variable before querying the bitmap. */
tag_stored_p = is_call_clobbered (tag)
- || bitmap_bit_p (ai->written_vars, tag_ann->uid);
+ || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
var_stored_p = is_call_clobbered (var)
- || bitmap_bit_p (ai->written_vars, v_ann->uid);
+ || bitmap_bit_p (ai->written_vars, DECL_UID (var));
if (!tag_stored_p && !var_stored_p)
continue;
-
- if (may_alias_p (p_map->var, p_map->set, var, v_map->set))
+
+ if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
{
- subvar_t svars;
size_t num_tag_refs, num_var_refs;
- num_tag_refs = VARRAY_UINT (ai->num_references, tag_ann->uid);
- num_var_refs = VARRAY_UINT (ai->num_references, v_ann->uid);
+ num_tag_refs = NUM_REFERENCES (tag_ann);
+ num_var_refs = NUM_REFERENCES (v_ann);
/* Add VAR to TAG's may-aliases set. */
- /* If this is an aggregate, we may have subvariables for it
- that need to be pointed to. */
- if (var_can_have_subvars (var)
- && (svars = get_subvars_for_var (var)))
- {
- subvar_t sv;
+ /* We should never have a var with subvars here, because
+ they shouldn't get into the set of addressable vars */
+ gcc_assert (!var_can_have_subvars (var)
+ || get_subvars_for_var (var) == NULL);
- for (sv = svars; sv; sv = sv->next)
- {
- add_may_alias (tag, sv->var);
- /* Update the bitmap used to represent TAG's alias set
- in case we need to group aliases. */
- SET_BIT (p_map->may_aliases, var_ann (sv->var)->uid);
- }
- }
- else
- {
- add_may_alias (tag, var);
- /* Update the bitmap used to represent TAG's alias set
- in case we need to group aliases. */
- SET_BIT (p_map->may_aliases, var_ann (var)->uid);
- }
+ add_may_alias (tag, var);
+ /* Update the bitmap used to represent TAG's alias set
+ in case we need to group aliases. */
+ bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
/* Update the total number of virtual operands due to
aliasing. Since we are adding one more alias to TAG's
size_t j;
struct alias_map_d *p_map1 = ai->pointers[i];
tree tag1 = var_ann (p_map1->var)->type_mem_tag;
- sbitmap may_aliases1 = p_map1->may_aliases;
+ bitmap may_aliases1 = p_map1->may_aliases;
for (j = i + 1; j < ai->num_pointers; j++)
{
struct alias_map_d *p_map2 = ai->pointers[j];
tree tag2 = var_ann (p_map2->var)->type_mem_tag;
- sbitmap may_aliases2 = p_map2->may_aliases;
+ bitmap may_aliases2 = p_map2->may_aliases;
/* If the pointers may not point to each other, do nothing. */
- if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set))
+ if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
continue;
/* The two pointers may alias each other. If they already have
symbols in common, do nothing. */
- if (sbitmap_any_common_bits (may_aliases1, may_aliases2))
+ if (bitmap_intersect_p (may_aliases1, may_aliases2))
continue;
- if (sbitmap_first_set_bit (may_aliases2) >= 0)
+ if (!bitmap_empty_p (may_aliases2))
{
- size_t k;
+ unsigned int k;
+ bitmap_iterator bi;
/* Add all the aliases for TAG2 into TAG1's alias set.
FIXME, update grouping heuristic counters. */
- EXECUTE_IF_SET_IN_SBITMAP (may_aliases2, 0, k,
- add_may_alias (tag1, referenced_var (k)));
- sbitmap_a_or_b (may_aliases1, may_aliases1, may_aliases2);
+ EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
+ add_may_alias (tag1, referenced_var (k));
+ bitmap_ior_into (may_aliases1, may_aliases2);
}
else
{
/* Since TAG2 does not have any aliases of its own, add
TAG2 itself to the alias set of TAG1. */
add_may_alias (tag1, tag2);
- SET_BIT (may_aliases1, var_ann (tag2)->uid);
+ bitmap_set_bit (may_aliases1, DECL_UID (tag2));
}
}
}
-
+
if (dump_file)
- fprintf (dump_file, "%s: Total number of aliased vops: %ld\n",
+ fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
get_name (current_function_decl),
ai->total_alias_vops);
-
- /* Determine if we need to enable alias grouping. */
- if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
- group_aliases (ai);
}
may-aliases(V2) = { TAG } */
static void
-group_aliases_into (tree tag, sbitmap tag_aliases, struct alias_info *ai)
+group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
{
- size_t i;
+ unsigned int i;
var_ann_t tag_ann = var_ann (tag);
- size_t num_tag_refs = VARRAY_UINT (ai->num_references, tag_ann->uid);
+ size_t num_tag_refs = NUM_REFERENCES (tag_ann);
+ bitmap_iterator bi;
- EXECUTE_IF_SET_IN_SBITMAP (tag_aliases, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
{
tree var = referenced_var (i);
var_ann_t ann = var_ann (var);
/* Make TAG the unique alias of VAR. */
- ann->is_alias_tag = 0;
+ ann->is_aliased = 0;
ann->may_aliases = NULL;
/* Note that VAR and TAG may be the same if the function has no
itself won't be removed. We will merely replace them with
references to TAG. */
ai->total_alias_vops -= num_tag_refs;
- });
+ }
/* We have reduced the number of virtual operands that TAG makes on
behalf of all the variables formerly aliased with it. However,
{
size_t j;
tree tag1 = var_ann (ai->pointers[i]->var)->type_mem_tag;
- sbitmap tag1_aliases = ai->pointers[i]->may_aliases;
+ bitmap tag1_aliases = ai->pointers[i]->may_aliases;
/* Skip tags that have been grouped already. */
if (ai->pointers[i]->grouped_p)
aliases into TAG1. */
for (j = i + 1; j < ai->num_pointers; j++)
{
- sbitmap tag2_aliases = ai->pointers[j]->may_aliases;
+ bitmap tag2_aliases = ai->pointers[j]->may_aliases;
- if (sbitmap_any_common_bits (tag1_aliases, tag2_aliases))
+ if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
{
tree tag2 = var_ann (ai->pointers[j]->var)->type_mem_tag;
- sbitmap_a_or_b (tag1_aliases, tag1_aliases, tag2_aliases);
+ bitmap_ior_into (tag1_aliases, tag2_aliases);
/* TAG2 does not need its aliases anymore. */
- sbitmap_zero (tag2_aliases);
+ bitmap_clear (tag2_aliases);
var_ann (tag2)->may_aliases = NULL;
/* TAG1 is the unique alias of TAG2. */
size_t j;
tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
- varray_type aliases;
+ VEC(tree,gc) *aliases;
+ tree alias;
if (name_tag == NULL_TREE)
continue;
aliases = var_ann (name_tag)->may_aliases;
- for (j = 0; aliases && j < VARRAY_ACTIVE_SIZE (aliases); j++)
+ for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
{
- tree alias = VARRAY_TREE (aliases, j);
var_ann_t ann = var_ann (alias);
- if ((ann->mem_tag_kind == NOT_A_TAG
- || ann->mem_tag_kind == STRUCT_FIELD)
+ if ((!MTAG_P (alias)
+ || TREE_CODE (alias) == STRUCT_FIELD_TAG)
&& ann->may_aliases)
{
tree new_alias;
- gcc_assert (VARRAY_ACTIVE_SIZE (ann->may_aliases) == 1);
+ gcc_assert (VEC_length (tree, ann->may_aliases) == 1);
- new_alias = VARRAY_TREE (ann->may_aliases, 0);
+ new_alias = VEC_index (tree, ann->may_aliases, 0);
replace_may_alias (name_tag, j, new_alias);
}
}
create_alias_map_for (tree var, struct alias_info *ai)
{
struct alias_map_d *alias_map;
- alias_map = xcalloc (1, sizeof (*alias_map));
+ alias_map = XCNEW (struct alias_map_d);
alias_map->var = var;
alias_map->set = get_alias_set (var);
ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
static void
setup_pointers_and_addressables (struct alias_info *ai)
{
- size_t i, n_vars, num_addressable_vars, num_pointers;
+ size_t n_vars, num_addressable_vars, num_pointers;
+ referenced_var_iterator rvi;
+ tree var;
+ VEC (tree, heap) *varvec = NULL;
+ safe_referenced_var_iterator srvi;
/* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
num_addressable_vars = num_pointers = 0;
- for (i = 0; i < num_referenced_vars; i++)
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
-
if (may_be_aliased (var))
num_addressable_vars++;
/* Since we don't keep track of volatile variables, assume that
these pointers are used in indirect store operations. */
if (TREE_THIS_VOLATILE (var))
- bitmap_set_bit (ai->dereferenced_ptrs_store, var_ann (var)->uid);
+ bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
num_pointers++;
}
because some TREE_ADDRESSABLE variables will be marked
non-addressable below and only pointers with unique type tags are
going to be added to POINTERS. */
- ai->addressable_vars = xcalloc (num_addressable_vars,
- sizeof (struct alias_map_d *));
- ai->pointers = xcalloc (num_pointers, sizeof (struct alias_map_d *));
+ ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
+ ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
ai->num_addressable_vars = 0;
ai->num_pointers = 0;
unnecessarily. */
n_vars = num_referenced_vars;
- for (i = 0; i < n_vars; i++)
+ FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
{
- tree var = referenced_var (i);
var_ann_t v_ann = var_ann (var);
subvar_t svars;
Structure fields, on the other hand, have to have some of this
information processed for them, but it's pointless to mark them
non-addressable (since they are fake variables anyway). */
- if (v_ann->mem_tag_kind != NOT_A_TAG
- && v_ann->mem_tag_kind != STRUCT_FIELD)
+ if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
continue;
/* Remove the ADDRESSABLE flag from every addressable variable whose
of ADDR_EXPR constants into INDIRECT_REF expressions and the
removal of dead pointer assignments done by the early scalar
cleanup passes. */
- if (TREE_ADDRESSABLE (var) && v_ann->mem_tag_kind != STRUCT_FIELD)
+ if (TREE_ADDRESSABLE (var))
{
- if (!bitmap_bit_p (ai->addresses_needed, v_ann->uid)
+ if (!bitmap_bit_p (addressable_vars, DECL_UID (var))
&& TREE_CODE (var) != RESULT_DECL
&& !is_global_var (var))
{
to rename VAR into SSA afterwards. */
mark_sym_for_renaming (var);
+ /* If VAR can have sub-variables, and any of its
+ sub-variables has its address taken, then we cannot
+ remove the addressable flag from VAR. */
if (var_can_have_subvars (var)
&& (svars = get_subvars_for_var (var)))
{
for (sv = svars; sv; sv = sv->next)
{
- var_ann_t svann = var_ann (sv->var);
- if (bitmap_bit_p (ai->addresses_needed, svann->uid))
+ if (bitmap_bit_p (addressable_vars, DECL_UID (sv->var)))
okay_to_mark = false;
mark_sym_for_renaming (sv->var);
}
if (okay_to_mark)
mark_non_addressable (var);
}
- else
- {
- /* Add the variable to the set of addressables. Mostly
- used when scanning operands for ASM_EXPRs that
- clobber memory. In those cases, we need to clobber
- all call-clobbered variables and all addressables. */
- bitmap_set_bit (addressable_vars, v_ann->uid);
- if (var_can_have_subvars (var)
- && (svars = get_subvars_for_var (var)))
- {
- subvar_t sv;
- for (sv = svars; sv; sv = sv->next)
- bitmap_set_bit (addressable_vars, var_ann (sv->var)->uid);
- }
-
- }
}
/* Global variables and addressable locals may be aliased. Create an
entry in ADDRESSABLE_VARS for VAR. */
- if (may_be_aliased (var))
+ if (may_be_aliased (var)
+ && (!var_can_have_subvars (var)
+ || get_subvars_for_var (var) == NULL))
{
create_alias_map_for (var, ai);
mark_sym_for_renaming (var);
array and create a type memory tag for them. */
if (POINTER_TYPE_P (TREE_TYPE (var)))
{
- if ((bitmap_bit_p (ai->dereferenced_ptrs_store, v_ann->uid)
- || bitmap_bit_p (ai->dereferenced_ptrs_load, v_ann->uid)))
+ if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
+ || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
{
tree tag;
var_ann_t t_ann;
/* If pointer VAR has been used in a store operation,
then its memory tag must be marked as written-to. */
- if (bitmap_bit_p (ai->dereferenced_ptrs_store, v_ann->uid))
- bitmap_set_bit (ai->written_vars, t_ann->uid);
-
- /* If pointer VAR is a global variable or a PARM_DECL,
- then its memory tag should be considered a global
- variable. */
- if (TREE_CODE (var) == PARM_DECL || is_global_var (var))
- mark_call_clobbered (tag);
+ if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
+ bitmap_set_bit (ai->written_vars, DECL_UID (tag));
/* All the dereferences of pointer VAR count as
references of TAG. Since TAG can be associated with
several pointers, add the dereferences of VAR to the
- TAG. We may need to grow AI->NUM_REFERENCES because
- we have been adding name and type tags. */
- if (t_ann->uid >= VARRAY_SIZE (ai->num_references))
- VARRAY_GROW (ai->num_references, t_ann->uid + 10);
-
- VARRAY_UINT (ai->num_references, t_ann->uid)
- += VARRAY_UINT (ai->num_references, v_ann->uid);
+ TAG. */
+ NUM_REFERENCES_SET (t_ann,
+ NUM_REFERENCES (t_ann)
+ + NUM_REFERENCES (v_ann));
}
else
{
}
}
}
+ VEC_free (tree, heap, varvec);
}
/* Mark all call-clobbered symbols for renaming. Since the initial
rewrite into SSA ignored all call sites, we may need to rename
- .GLOBAL_VAR and the call-clobbered variables. */
+ .GLOBAL_VAR and the call-clobbered variables. */
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
{
tree var = referenced_var (i);
call-clobbered variables. */
if (global_var && var != global_var)
{
- subvar_t svars;
add_may_alias (var, global_var);
- if (var_can_have_subvars (var)
- && (svars = get_subvars_for_var (var)))
- {
- subvar_t sv;
- for (sv = svars; sv; sv = sv->next)
- mark_sym_for_renaming (sv->var);
- }
+ gcc_assert (!get_subvars_for_var (var));
}
mark_sym_for_renaming (var);
static bool
may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
- tree var, HOST_WIDE_INT var_alias_set)
+ tree var, HOST_WIDE_INT var_alias_set,
+ bool alias_set_only)
{
tree mem;
- var_ann_t m_ann;
alias_stats.alias_queries++;
alias_stats.simple_queries++;
return false;
}
- m_ann = var_ann (mem);
-
- gcc_assert (m_ann->mem_tag_kind == TYPE_TAG);
+ gcc_assert (TREE_CODE (mem) == TYPE_MEMORY_TAG);
alias_stats.tbaa_queries++;
- /* If VAR is a pointer with the same alias set as PTR, then dereferencing
- PTR can't possibly affect VAR. Note, that we are specifically testing
- for PTR's alias set here, not its pointed-to type. We also can't
- do this check with relaxed aliasing enabled. */
- if (POINTER_TYPE_P (TREE_TYPE (var))
- && var_alias_set != 0
- && mem_alias_set != 0)
- {
- HOST_WIDE_INT ptr_alias_set = get_alias_set (ptr);
- if (ptr_alias_set == var_alias_set)
- {
- alias_stats.alias_noalias++;
- alias_stats.tbaa_resolved++;
- return false;
- }
- }
-
/* If the alias sets don't conflict then MEM cannot alias VAR. */
if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
{
alias_stats.alias_noalias++;
alias_stats.tbaa_resolved++;
return false;
- }
- alias_stats.alias_mayalias++;
- return true;
-}
-
-
-/* Add ALIAS to the set of variables that may alias VAR. */
-
-static void
-add_may_alias (tree var, tree alias)
-{
- size_t i;
- var_ann_t v_ann = get_var_ann (var);
- var_ann_t a_ann = get_var_ann (alias);
-
- gcc_assert (var != alias);
-
- if (v_ann->may_aliases == NULL)
- VARRAY_TREE_INIT (v_ann->may_aliases, 2, "aliases");
-
- /* Avoid adding duplicates. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (v_ann->may_aliases); i++)
- if (alias == VARRAY_TREE (v_ann->may_aliases, i))
- return;
-
- /* If VAR is a call-clobbered variable, so is its new ALIAS.
- FIXME, call-clobbering should only depend on whether an address
- escapes. It should be independent of aliasing. */
- if (is_call_clobbered (var))
- mark_call_clobbered (alias);
-
- /* Likewise. If ALIAS is call-clobbered, so is VAR. */
- else if (is_call_clobbered (alias))
- mark_call_clobbered (var);
-
- VARRAY_PUSH_TREE (v_ann->may_aliases, alias);
- a_ann->is_alias_tag = 1;
-}
-
-
-/* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
-
-static void
-replace_may_alias (tree var, size_t i, tree new_alias)
-{
- var_ann_t v_ann = var_ann (var);
- VARRAY_TREE (v_ann->may_aliases, i) = new_alias;
-
- /* If VAR is a call-clobbered variable, so is NEW_ALIAS.
- FIXME, call-clobbering should only depend on whether an address
- escapes. It should be independent of aliasing. */
- if (is_call_clobbered (var))
- mark_call_clobbered (new_alias);
-
- /* Likewise. If NEW_ALIAS is call-clobbered, so is VAR. */
- else if (is_call_clobbered (new_alias))
- mark_call_clobbered (var);
-}
-
-
-/* Mark pointer PTR as pointing to an arbitrary memory location. */
-
-static void
-set_pt_anything (tree ptr)
-{
- struct ptr_info_def *pi = get_ptr_info (ptr);
-
- pi->pt_anything = 1;
- pi->pt_malloc = 0;
-
- /* The pointer used to have a name tag, but we now found it pointing
- to an arbitrary location. The name tag needs to be renamed and
- disassociated from PTR. */
- if (pi->name_mem_tag)
- {
- mark_sym_for_renaming (pi->name_mem_tag);
- pi->name_mem_tag = NULL_TREE;
- }
-}
-
-
-/* Mark pointer PTR as pointing to a malloc'd memory area. */
-
-static void
-set_pt_malloc (tree ptr)
-{
- struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
-
- /* If the pointer has already been found to point to arbitrary
- memory locations, it is unsafe to mark it as pointing to malloc. */
- if (pi->pt_anything)
- return;
-
- pi->pt_malloc = 1;
-}
-
-
-/* Given two different pointers DEST and ORIG. Merge the points-to
- information in ORIG into DEST. AI contains all the alias
- information collected up to this point. */
-
-static void
-merge_pointed_to_info (struct alias_info *ai, tree dest, tree orig)
-{
- struct ptr_info_def *dest_pi, *orig_pi;
-
- gcc_assert (dest != orig);
-
- /* Make sure we have points-to information for ORIG. */
- collect_points_to_info_for (ai, orig);
-
- dest_pi = get_ptr_info (dest);
- orig_pi = SSA_NAME_PTR_INFO (orig);
-
- if (orig_pi)
- {
- gcc_assert (orig_pi != dest_pi);
-
- /* Notice that we never merge PT_MALLOC. This attribute is only
- true if the pointer is the result of a malloc() call.
- Otherwise, we can end up in this situation:
-
- P_i = malloc ();
- ...
- P_j = P_i + X;
-
- P_j would be marked as PT_MALLOC, however we currently do not
- handle cases of more than one pointer pointing to the same
- malloc'd area.
-
- FIXME: If the merging comes from an expression that preserves
- the PT_MALLOC attribute (copy assignment, address
- arithmetic), we ought to merge PT_MALLOC, but then both
- pointers would end up getting different name tags because
- create_name_tags is not smart enough to determine that the
- two come from the same malloc call. Copy propagation before
- aliasing should cure this. */
- dest_pi->pt_malloc = 0;
- if (orig_pi->pt_malloc || orig_pi->pt_anything)
- set_pt_anything (dest);
-
- dest_pi->pt_null |= orig_pi->pt_null;
-
- if (!dest_pi->pt_anything
- && orig_pi->pt_vars
- && !bitmap_empty_p (orig_pi->pt_vars))
- {
- if (dest_pi->pt_vars == NULL)
- {
- dest_pi->pt_vars = BITMAP_GGC_ALLOC ();
- bitmap_copy (dest_pi->pt_vars, orig_pi->pt_vars);
- }
- else
- bitmap_ior_into (dest_pi->pt_vars, orig_pi->pt_vars);
- }
- }
- else
- set_pt_anything (dest);
-}
-
-
-/* Add EXPR to the list of expressions pointed-to by PTR. */
-
-static void
-add_pointed_to_expr (struct alias_info *ai, tree ptr, tree expr)
-{
- if (TREE_CODE (expr) == WITH_SIZE_EXPR)
- expr = TREE_OPERAND (expr, 0);
-
- get_ptr_info (ptr);
-
- if (TREE_CODE (expr) == CALL_EXPR
- && (call_expr_flags (expr) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
- {
- /* If EXPR is a malloc-like call, then the area pointed to PTR
- is guaranteed to not alias with anything else. */
- set_pt_malloc (ptr);
- }
- else if (TREE_CODE (expr) == ADDR_EXPR)
- {
- /* Found P_i = ADDR_EXPR */
- add_pointed_to_var (ai, ptr, expr);
- }
- else if (TREE_CODE (expr) == SSA_NAME && POINTER_TYPE_P (TREE_TYPE (expr)))
- {
- /* Found P_i = Q_j. */
- merge_pointed_to_info (ai, ptr, expr);
- }
- else if (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR)
- {
- /* Found P_i = PLUS_EXPR or P_i = MINUS_EXPR */
- tree op0 = TREE_OPERAND (expr, 0);
- tree op1 = TREE_OPERAND (expr, 1);
-
- /* Both operands may be of pointer type. FIXME: Shouldn't
- we just expect PTR + OFFSET always? */
- if (POINTER_TYPE_P (TREE_TYPE (op0))
- && TREE_CODE (op0) != INTEGER_CST)
- {
- if (TREE_CODE (op0) == SSA_NAME)
- merge_pointed_to_info (ai, ptr, op0);
- else if (TREE_CODE (op0) == ADDR_EXPR)
- add_pointed_to_var (ai, ptr, op0);
- else
- set_pt_anything (ptr);
- }
-
- if (POINTER_TYPE_P (TREE_TYPE (op1))
- && TREE_CODE (op1) != INTEGER_CST)
- {
- if (TREE_CODE (op1) == SSA_NAME)
- merge_pointed_to_info (ai, ptr, op1);
- else if (TREE_CODE (op1) == ADDR_EXPR)
- add_pointed_to_var (ai, ptr, op1);
- else
- set_pt_anything (ptr);
- }
-
- /* Neither operand is a pointer? VAR can be pointing anywhere.
- FIXME: Shouldn't we abort here? If we get here, we found
- PTR = INT_CST + INT_CST, which should not be a valid pointer
- expression. */
- if (!(POINTER_TYPE_P (TREE_TYPE (op0))
- && TREE_CODE (op0) != INTEGER_CST)
- && !(POINTER_TYPE_P (TREE_TYPE (op1))
- && TREE_CODE (op1) != INTEGER_CST))
- set_pt_anything (ptr);
- }
- else if (integer_zerop (expr))
- {
- /* EXPR is the NULL pointer. Mark PTR as pointing to NULL. */
- SSA_NAME_PTR_INFO (ptr)->pt_null = 1;
- }
- else
- {
- /* If we can't recognize the expression, assume that PTR may
- point anywhere. */
- set_pt_anything (ptr);
- }
-}
-
-
-/* If VALUE is of the form &DECL, add DECL to the set of variables
- pointed-to by PTR. Otherwise, add VALUE as a pointed-to expression by
- PTR. AI points to the collected alias information. */
-
-static void
-add_pointed_to_var (struct alias_info *ai, tree ptr, tree value)
-{
- struct ptr_info_def *pi = get_ptr_info (ptr);
- tree pt_var = NULL_TREE;
- HOST_WIDE_INT offset, size;
- tree addrop;
- size_t uid;
- tree ref;
- subvar_t svars;
-
- gcc_assert (TREE_CODE (value) == ADDR_EXPR);
-
- addrop = TREE_OPERAND (value, 0);
- if (REFERENCE_CLASS_P (addrop))
- pt_var = get_base_address (addrop);
- else
- pt_var = addrop;
+ }
- /* If this is a component_ref, see if we can get a smaller number of
- variables to take the address of. */
- if (TREE_CODE (addrop) == COMPONENT_REF
- && (ref = okay_component_ref_for_subvars (addrop, &offset ,&size)))
- {
- subvar_t sv;
- svars = get_subvars_for_var (ref);
+ /* If var is a record or union type, ptr cannot point into var
+ unless there is some explicit address operation in the
+ program that can reference a field of the ptr's dereferenced
+ type. This also assumes that the types of both var and ptr are
+ contained within the compilation unit, and that there is no fancy
+ addressing arithmetic associated with any of the types
+ involved. */
- uid = var_ann (pt_var)->uid;
+ if ((mem_alias_set != 0) && (var_alias_set != 0))
+ {
+ tree ptr_type = TREE_TYPE (ptr);
+ tree var_type = TREE_TYPE (var);
- if (pi->pt_vars == NULL)
- pi->pt_vars = BITMAP_GGC_ALLOC ();
- /* If the variable is a global, mark the pointer as pointing to
- global memory (which will make its tag a global variable). */
- if (is_global_var (pt_var))
- pi->pt_global_mem = 1;
-
- for (sv = svars; sv; sv = sv->next)
+ /* The star count is -1 if the type at the end of the pointer_to
+ chain is not a record or union type. */
+ if ((!alias_set_only) &&
+ ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
{
- if (overlap_subvar (offset, size, sv, NULL))
+ int ptr_star_count = 0;
+
+ /* Ipa_type_escape_star_count_of_interesting_type is a little too
+ restrictive for the pointer type, need to allow pointers to
+ primitive types as long as those types cannot be pointers
+ to everything. */
+ while (POINTER_TYPE_P (ptr_type))
+ /* Strip the *'s off. */
{
- bitmap_set_bit (pi->pt_vars, var_ann (sv->var)->uid);
- bitmap_set_bit (ai->addresses_needed, var_ann (sv->var)->uid);
+ ptr_type = TREE_TYPE (ptr_type);
+ ptr_star_count++;
}
- }
- }
- else if (pt_var && SSA_VAR_P (pt_var))
- {
-
- uid = var_ann (pt_var)->uid;
-
- if (pi->pt_vars == NULL)
- pi->pt_vars = BITMAP_GGC_ALLOC ();
-
- /* If this is an aggregate, we may have subvariables for it that need
- to be pointed to. */
- if (var_can_have_subvars (pt_var)
- && (svars = get_subvars_for_var (pt_var)))
- {
- subvar_t sv;
- for (sv = svars; sv; sv = sv->next)
+
+ /* There does not appear to be a better test to see if the
+ pointer type was one of the pointer to everything
+ types. */
+
+ if (ptr_star_count > 0)
+ {
+ alias_stats.structnoaddress_queries++;
+ if (ipa_type_escape_field_does_not_clobber_p (var_type,
+ TREE_TYPE (ptr)))
+ {
+ alias_stats.structnoaddress_resolved++;
+ alias_stats.alias_noalias++;
+ return false;
+ }
+ }
+ else if (ptr_star_count == 0)
{
- uid = var_ann (sv->var)->uid;
- bitmap_set_bit (ai->addresses_needed, uid);
- bitmap_set_bit (pi->pt_vars, uid);
+ /* If ptr_type was not really a pointer type, it cannot
+ alias. */
+ alias_stats.structnoaddress_queries++;
+ alias_stats.structnoaddress_resolved++;
+ alias_stats.alias_noalias++;
+ return false;
}
}
- else
- {
- bitmap_set_bit (ai->addresses_needed, uid);
- bitmap_set_bit (pi->pt_vars, uid);
- }
-
- /* If the variable is a global, mark the pointer as pointing to
- global memory (which will make its tag a global variable). */
- if (is_global_var (pt_var))
- pi->pt_global_mem = 1;
}
-}
+ alias_stats.alias_mayalias++;
+ return true;
+}
-/* Callback for walk_use_def_chains to gather points-to information from the
- SSA web.
-
- VAR is an SSA variable or a GIMPLE expression.
-
- STMT is the statement that generates the SSA variable or, if STMT is a
- PHI_NODE, VAR is one of the PHI arguments.
- DATA is a pointer to a structure of type ALIAS_INFO. */
+/* Add ALIAS to the set of variables that may alias VAR. */
-static bool
-collect_points_to_info_r (tree var, tree stmt, void *data)
+static void
+add_may_alias (tree var, tree alias)
{
- struct alias_info *ai = (struct alias_info *) data;
+ size_t i;
+ var_ann_t v_ann = get_var_ann (var);
+ var_ann_t a_ann = get_var_ann (alias);
+ tree al;
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- fprintf (dump_file, "Visiting use-def links for ");
- print_generic_expr (dump_file, var, dump_flags);
- fprintf (dump_file, "\n");
- }
+ /* Don't allow self-referential aliases. */
+ gcc_assert (var != alias);
- switch (TREE_CODE (stmt))
- {
- case RETURN_EXPR:
- gcc_assert (TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR);
- stmt = TREE_OPERAND (stmt, 0);
- /* FALLTHRU */
+ /* ALIAS must be addressable if it's being added to an alias set. */
+#if 1
+ TREE_ADDRESSABLE (alias) = 1;
+#else
+ gcc_assert (may_be_aliased (alias));
+#endif
- case MODIFY_EXPR:
- {
- tree rhs = TREE_OPERAND (stmt, 1);
- STRIP_NOPS (rhs);
- add_pointed_to_expr (ai, var, rhs);
- break;
- }
+ if (v_ann->may_aliases == NULL)
+ v_ann->may_aliases = VEC_alloc (tree, gc, 2);
- case ASM_EXPR:
- /* Pointers defined by __asm__ statements can point anywhere. */
- set_pt_anything (var);
- break;
+ /* Avoid adding duplicates. */
+ for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
+ if (alias == al)
+ return;
- case NOP_EXPR:
- if (IS_EMPTY_STMT (stmt))
- {
- tree decl = SSA_NAME_VAR (var);
-
- if (TREE_CODE (decl) == PARM_DECL)
- add_pointed_to_expr (ai, var, decl);
- else if (DECL_INITIAL (decl))
- add_pointed_to_expr (ai, var, DECL_INITIAL (decl));
- else
- add_pointed_to_expr (ai, var, decl);
- }
- break;
+ VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
+ a_ann->is_aliased = 1;
+}
- case PHI_NODE:
- {
- /* It STMT is a PHI node, then VAR is one of its arguments. The
- variable that we are analyzing is the LHS of the PHI node. */
- tree lhs = PHI_RESULT (stmt);
- switch (TREE_CODE (var))
- {
- case ADDR_EXPR:
- add_pointed_to_var (ai, lhs, var);
- break;
-
- case SSA_NAME:
- /* Avoid unnecessary merges. */
- if (lhs != var)
- merge_pointed_to_info (ai, lhs, var);
- break;
-
- default:
- gcc_assert (is_gimple_min_invariant (var));
- add_pointed_to_expr (ai, lhs, var);
- break;
- }
- break;
- }
+/* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
- default:
- gcc_unreachable ();
+static void
+replace_may_alias (tree var, size_t i, tree new_alias)
+{
+ var_ann_t v_ann = var_ann (var);
+ VEC_replace (tree, v_ann->may_aliases, i, new_alias);
+}
+
+
+/* Mark pointer PTR as pointing to an arbitrary memory location. */
+
+static void
+set_pt_anything (tree ptr)
+{
+ struct ptr_info_def *pi = get_ptr_info (ptr);
+
+ pi->pt_anything = 1;
+ pi->pt_vars = NULL;
+
+ /* The pointer used to have a name tag, but we now found it pointing
+ to an arbitrary location. The name tag needs to be renamed and
+ disassociated from PTR. */
+ if (pi->name_mem_tag)
+ {
+ mark_sym_for_renaming (pi->name_mem_tag);
+ pi->name_mem_tag = NULL_TREE;
}
-
- return false;
}
3- STMT is an assignment to a non-local variable, or
4- STMT is a return statement.
- AI points to the alias information collected so far. */
+ AI points to the alias information collected so far.
-static bool
+ Return the type of escape site found, if we found one, or NO_ESCAPE
+ if none. */
+
+enum escape_type
is_escape_site (tree stmt, struct alias_info *ai)
{
tree call = get_call_expr_in (stmt);
ai->num_calls_found++;
if (!TREE_SIDE_EFFECTS (call))
- ai->num_pure_const_calls_found++;
+ {
+ ai->num_pure_const_calls_found++;
+ return ESCAPE_TO_PURE_CONST;
+ }
- return true;
+ return ESCAPE_TO_CALL;
}
else if (TREE_CODE (stmt) == ASM_EXPR)
- return true;
+ return ESCAPE_TO_ASM;
else if (TREE_CODE (stmt) == MODIFY_EXPR)
{
tree lhs = TREE_OPERAND (stmt, 0);
/* If we couldn't recognize the LHS of the assignment, assume that it
is a non-local store. */
if (lhs == NULL_TREE)
- return true;
+ return ESCAPE_UNKNOWN;
/* If the RHS is a conversion between a pointer and an integer, the
pointer escapes since we can't track the integer. */
&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
(TREE_OPERAND (stmt, 1), 0)))
&& !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
- return true;
+ return ESCAPE_BAD_CAST;
/* If the LHS is an SSA name, it can't possibly represent a non-local
memory store. */
if (TREE_CODE (lhs) == SSA_NAME)
- return false;
+ return NO_ESCAPE;
/* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
local variables we cannot be sure if it will escape, because we
Midkiff, ``Escape analysis for java,'' in Proceedings of the
Conference on Object-Oriented Programming Systems, Languages, and
Applications (OOPSLA), pp. 1-19, 1999. */
- return true;
+ return ESCAPE_STORED_IN_GLOBAL;
}
else if (TREE_CODE (stmt) == RETURN_EXPR)
- return true;
+ return ESCAPE_TO_RETURN;
- return false;
+ return NO_ESCAPE;
}
+/* Create a new memory tag of type TYPE.
+ Does NOT push it into the current binding. */
+
+static tree
+create_tag_raw (enum tree_code code, tree type, const char *prefix)
+{
+ tree tmp_var;
+ tree new_type;
+
+ /* Make the type of the variable writable. */
+ new_type = build_type_variant (type, 0, 0);
+ TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
+
+ tmp_var = build_decl (code, create_tmp_var_name (prefix),
+ type);
+ /* Make the variable writable. */
+ TREE_READONLY (tmp_var) = 0;
+
+ /* It doesn't start out global. */
+ MTAG_GLOBAL (tmp_var) = 0;
+ TREE_STATIC (tmp_var) = 0;
+ TREE_USED (tmp_var) = 1;
+
+ return tmp_var;
+}
/* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
is considered to represent all the pointers whose pointed-to types are
create_memory_tag (tree type, bool is_type_tag)
{
var_ann_t ann;
- tree tag = create_tmp_var_raw (type, (is_type_tag) ? "TMT" : "NMT");
+ tree tag = create_tag_raw (is_type_tag ? TYPE_MEMORY_TAG : NAME_MEMORY_TAG,
+ type, (is_type_tag) ? "TMT" : "NMT");
/* By default, memory tags are local variables. Alias analysis will
determine whether they should be considered globals. */
DECL_CONTEXT (tag) = current_function_decl;
- /* Memory tags are by definition addressable. This also prevents
- is_gimple_ref frome confusing memory tags with optimizable
- variables. */
+ /* Memory tags are by definition addressable. */
TREE_ADDRESSABLE (tag) = 1;
ann = get_var_ann (tag);
- ann->mem_tag_kind = (is_type_tag) ? TYPE_TAG : NAME_TAG;
ann->type_mem_tag = NULL_TREE;
/* Add the tag to the symbol table. */
if (tag == NULL_TREE)
tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
-
- /* If PTR is a PARM_DECL, it points to a global variable or malloc,
- then its name tag should be considered a global variable. */
- if (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
- || pi->pt_malloc
- || pi->pt_global_mem)
- mark_call_clobbered (tag);
-
return tag;
}
{
struct alias_map_d *curr = ai->pointers[i];
tree curr_tag = var_ann (curr->var)->type_mem_tag;
- if (tag_set == curr->set
- && TYPE_READONLY (tag_type) == TYPE_READONLY (TREE_TYPE (curr_tag)))
+ if (tag_set == curr->set)
{
tag = curr_tag;
break;
/* Add PTR to the POINTERS array. Note that we are not interested in
PTR's alias set. Instead, we cache the alias set for the memory that
PTR points to. */
- alias_map = xcalloc (1, sizeof (*alias_map));
+ alias_map = XCNEW (struct alias_map_d);
alias_map->var = ptr;
alias_map->set = tag_set;
ai->pointers[ai->num_pointers++] = alias_map;
pointed-to type. */
gcc_assert (tag_set == get_alias_set (tag));
- /* If PTR's pointed-to type is read-only, then TAG's type must also
- be read-only. */
- gcc_assert (TYPE_READONLY (tag_type) == TYPE_READONLY (TREE_TYPE (tag)));
-
return tag;
}
TREE_THIS_VOLATILE (global_var) = 0;
TREE_ADDRESSABLE (global_var) = 0;
+ create_var_ann (global_var);
+ mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
add_referenced_tmp_var (global_var);
mark_sym_for_renaming (global_var);
}
alias_stats.tbaa_queries);
fprintf (file, "Total TBAA resolved:\t%u\n",
alias_stats.tbaa_resolved);
+ fprintf (file, "Total non-addressable structure type queries:\t%u\n",
+ alias_stats.structnoaddress_queries);
+ fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
+ alias_stats.structnoaddress_resolved);
}
size_t i;
const char *funcname
= lang_hooks.decl_printable_name (current_function_decl, 2);
+ referenced_var_iterator rvi;
+ tree var;
fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
fprintf (file, "Aliased symbols\n\n");
- for (i = 0; i < num_referenced_vars; i++)
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
if (may_be_aliased (var))
dump_variable (file, var);
}
fprintf (file, "\nDereferenced pointers\n\n");
- for (i = 0; i < num_referenced_vars; i++)
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
var_ann_t ann = var_ann (var);
if (ann->type_mem_tag)
dump_variable (file, var);
}
fprintf (file, "\nType memory tags\n\n");
- for (i = 0; i < num_referenced_vars; i++)
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
- var_ann_t ann = var_ann (var);
- if (ann->mem_tag_kind == TYPE_TAG)
+ if (TREE_CODE (var) == TYPE_MEMORY_TAG)
dump_variable (file, var);
}
}
fprintf (file, "\nName memory tags\n\n");
- for (i = 0; i < num_referenced_vars; i++)
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
- var_ann_t ann = var_ann (var);
- if (ann->mem_tag_kind == NAME_TAG)
+ if (TREE_CODE (var) == NAME_MEMORY_TAG)
dump_variable (file, var);
}
pi = SSA_NAME_PTR_INFO (t);
if (pi == NULL)
{
- pi = ggc_alloc (sizeof (*pi));
+ pi = GGC_NEW (struct ptr_info_def);
memset ((void *)pi, 0, sizeof (*pi));
SSA_NAME_PTR_INFO (t) = pi;
}
if (pi->pt_anything)
fprintf (file, ", points-to anything");
- if (pi->pt_malloc)
- fprintf (file, ", points-to malloc");
-
if (pi->pt_null)
fprintf (file, ", points-to NULL");
{
basic_block bb;
block_stmt_iterator si;
- size_t i;
ssa_op_iter iter;
const char *fname =
lang_hooks.decl_printable_name (current_function_decl, 2);
+ referenced_var_iterator rvi;
+ tree var;
fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
/* First dump points-to information for the default definitions of
pointer variables. This is necessary because default definitions are
not part of the code. */
- for (i = 0; i < num_referenced_vars; i++)
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
if (POINTER_TYPE_P (TREE_TYPE (var)))
{
- var_ann_t ann = var_ann (var);
- if (ann->default_def)
- dump_points_to_info_for (file, ann->default_def);
+ tree def = default_def (var);
+ if (def)
+ dump_points_to_info_for (file, def);
}
}
}
-/* Dump points-to info pointed by PTO into STDERR. */
+/* Dump points-to info pointed to by PTO into STDERR. */
void
debug_points_to_info (void)
void
dump_may_aliases_for (FILE *file, tree var)
{
- varray_type aliases;
+ VEC(tree, gc) *aliases;
if (TREE_CODE (var) == SSA_NAME)
var = SSA_NAME_VAR (var);
if (aliases)
{
size_t i;
+ tree al;
fprintf (file, "{ ");
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
{
- print_generic_expr (file, VARRAY_TREE (aliases, i), dump_flags);
+ print_generic_expr (file, al, dump_flags);
fprintf (file, " ");
}
fprintf (file, "}");
/* Globally visible variables can have their addresses taken by other
translation units. */
- if (DECL_EXTERNAL (var) || TREE_PUBLIC (var))
+
+ if (MTAG_P (var)
+ && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
+ return true;
+ else if (!MTAG_P (var)
+ && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
return true;
/* Automatic variables can't have their addresses escape any other way.
}
+/* Given two symbols return TRUE if one is in the alias set of the other. */
+bool
+is_aliased_with (tree tag, tree sym)
+{
+ size_t i;
+ VEC(tree,gc) *aliases;
+ tree al;
+
+ if (var_ann (sym)->is_aliased)
+ {
+ aliases = var_ann (tag)->may_aliases;
+
+ if (aliases == NULL)
+ return false;
+
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ if (al == sym)
+ return true;
+ }
+ else
+ {
+ aliases = var_ann (sym)->may_aliases;
+
+ if (aliases == NULL)
+ return false;
+
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ if (al == tag)
+ return true;
+ }
+
+ return false;
+}
+
+
/* Add VAR to the list of may-aliases of PTR's type tag. If PTR
doesn't already have a type tag, create one. */
void
add_type_alias (tree ptr, tree var)
{
- varray_type aliases;
- tree tag;
+ VEC(tree, gc) *aliases;
+ tree tag, al;
var_ann_t ann = var_ann (ptr);
subvar_t svars;
+ VEC (tree, heap) *varvec = NULL;
+ unsigned i;
if (ann->type_mem_tag == NULL_TREE)
{
- size_t i;
tree q = NULL_TREE;
tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
HOST_WIDE_INT tag_set = get_alias_set (tag_type);
+ safe_referenced_var_iterator rvi;
/* PTR doesn't have a type tag, create a new one and add VAR to
the new tag's alias set.
whether there is another pointer Q with the same alias set as
PTR. This could be sped up by having type tags associated
with types. */
- for (i = 0; i < num_referenced_vars; i++)
+ FOR_EACH_REFERENCED_VAR_SAFE (q, varvec, rvi)
{
- q = referenced_var (i);
-
if (POINTER_TYPE_P (TREE_TYPE (q))
&& tag_set == get_alias_set (TREE_TYPE (TREE_TYPE (q))))
{
found_tag:
/* If VAR is not already PTR's type tag, add it to the may-alias set
for PTR's type tag. */
- gcc_assert (var_ann (var)->type_mem_tag == NOT_A_TAG);
+ gcc_assert (!MTAG_P (var));
tag = ann->type_mem_tag;
/* If VAR has subvars, add the subvars to the tag instead of the
mark_sym_for_renaming (tag);
if ((aliases = var_ann (tag)->may_aliases) != NULL)
{
- size_t i;
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- mark_sym_for_renaming (VARRAY_TREE (aliases, i));
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ mark_sym_for_renaming (al);
}
/* If we had grouped aliases, VAR may have aliases of its own. Mark
aliases of VAR will need to be updated. */
if ((aliases = var_ann (var)->may_aliases) != NULL)
{
- size_t i;
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- mark_sym_for_renaming (VARRAY_TREE (aliases, i));
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ mark_sym_for_renaming (al);
}
+ VEC_free (tree, heap, varvec);
}
-/* This structure is simply used during pushing fields onto the fieldstack
- to track the offset of the field, since bitpos_of_field gives it relative
- to its immediate containing type, and we want it relative to the ultimate
- containing object. */
-
-typedef struct fieldoff
-{
- tree field;
- HOST_WIDE_INT offset;
-} *fieldoff_t;
-
-DEF_VEC_MALLOC_P(fieldoff_t);
+/* Create a new type tag for PTR.  Construct the may-alias list of this type
+   tag so that it has the same aliasing behavior as VAR.
-/* Return the position, in bits, of FIELD_DECL from the beginning of its
- structure.
- Return -1 if the position is conditional or otherwise non-constant
- integer. */
+ Note, the set of aliases represented by the new type tag are not marked
+ for renaming. */
-static HOST_WIDE_INT
-bitpos_of_field (const tree fdecl)
+void
+new_type_alias (tree ptr, tree var)
{
+ var_ann_t p_ann = var_ann (ptr);
+ tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
+ var_ann_t v_ann = var_ann (var);
+ tree tag;
+ subvar_t svars;
- if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
- || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
- return -1;
+ gcc_assert (p_ann->type_mem_tag == NULL_TREE);
+ gcc_assert (!MTAG_P (var));
- return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
- + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
-}
+ /* Add VAR to the may-alias set of PTR's new type tag. If VAR has
+ subvars, add the subvars to the tag instead of the actual var. */
+ if (var_can_have_subvars (var)
+ && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
-/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
- of TYPE onto fieldstack, recording their offsets along the way.
- OFFSET is used to keep track of the offset in this entire structure, rather
- than just the immediately containing structure. */
+ tag = create_memory_tag (tag_type, true);
+ p_ann->type_mem_tag = tag;
-static void
-push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack,
- HOST_WIDE_INT offset)
-{
- fieldoff_t pair;
- tree field = TYPE_FIELDS (type);
- if (!field)
- return;
- if (var_can_have_subvars (field)
- && TREE_CODE (field) == FIELD_DECL)
- {
- size_t before = VEC_length (fieldoff_t, *fieldstack);
- /* Empty structures may have actual size, like in C++. So see if we
- actually end up pushing a field, and if not, if the size is nonzero,
- push the field onto the stack */
- push_fields_onto_fieldstack (TREE_TYPE (field), fieldstack, offset);
- if (before == VEC_length (fieldoff_t, *fieldstack)
- && DECL_SIZE (field)
- && !integer_zerop (DECL_SIZE (field)))
- {
- pair = xmalloc (sizeof (struct fieldoff));
- pair->field = field;
- pair->offset = offset;
- VEC_safe_push (fieldoff_t, *fieldstack, pair);
- }
- }
- else if (TREE_CODE (field) == FIELD_DECL)
- {
- pair = xmalloc (sizeof (struct fieldoff));
- pair->field = field;
- pair->offset = offset + bitpos_of_field (field);
- VEC_safe_push (fieldoff_t, *fieldstack, pair);
+ for (sv = svars; sv; sv = sv->next)
+ add_may_alias (tag, sv->var);
}
- for (field = TREE_CHAIN (field); field; field = TREE_CHAIN (field))
+ else
{
- if (TREE_CODE (field) != FIELD_DECL)
- continue;
- if (var_can_have_subvars (field))
+ /* The following is based on code in add_stmt_operand to ensure that the
+ same defs/uses/vdefs/vuses will be found after replacing a reference
+ to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
+ is the address of var. */
+ VEC(tree, gc) *aliases = v_ann->may_aliases;
+
+ if ((aliases != NULL)
+ && (VEC_length (tree, aliases) == 1))
{
- size_t before = VEC_length (fieldoff_t, *fieldstack);
- push_fields_onto_fieldstack (TREE_TYPE (field), fieldstack,
- offset + bitpos_of_field (field));
- /* Empty structures may have actual size, like in C++. So see if we
- actually end up pushing a field, and if not, if the size is nonzero,
- push the field onto the stack */
- if (before == VEC_length (fieldoff_t, *fieldstack)
- && DECL_SIZE (field)
- && !integer_zerop (DECL_SIZE (field)))
+ tree ali = VEC_index (tree, aliases, 0);
+
+ if (TREE_CODE (ali) == TYPE_MEMORY_TAG)
{
- pair = xmalloc (sizeof (struct fieldoff));
- pair->field = field;
- pair->offset = offset + bitpos_of_field (field);
- VEC_safe_push (fieldoff_t, *fieldstack, pair);
+ p_ann->type_mem_tag = ali;
+ return;
}
}
+
+ tag = create_memory_tag (tag_type, true);
+ p_ann->type_mem_tag = tag;
+
+ if (aliases == NULL)
+ add_may_alias (tag, var);
else
{
- pair = xmalloc (sizeof (struct fieldoff));
- pair->field = field;
- pair->offset = offset + bitpos_of_field (field);
- VEC_safe_push (fieldoff_t, *fieldstack, pair);
+ unsigned i;
+ tree al;
+
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ add_may_alias (tag, al);
}
- }
+ }
}
+
/* This represents the used range of a variable. */
typedef struct used_part
variable. Implicit uses occur when we can't tell what part we
are referencing, and have to make conservative assumptions. */
bool implicit_uses;
+  /* True if the structure is only written to or has its address taken.  */
+  bool write_only;
} *used_part_t;
/* An array of used_part structures, indexed by variable uid. */
-static used_part_t *used_portions;
+static htab_t used_portions;
+
+struct used_part_map
+{
+ unsigned int uid;
+ used_part_t to;
+};
+
+/* Return true if the uids in the two used part maps are equal.  */
+
+static int
+used_part_map_eq (const void *va, const void *vb)
+{
+ const struct used_part_map *a = (const struct used_part_map *) va;
+ const struct used_part_map *b = (const struct used_part_map *) vb;
+ return (a->uid == b->uid);
+}
+
+/* Hash a used_part_map by its uid.  */
+
+static unsigned int
+used_part_map_hash (const void *item)
+{
+ return ((const struct used_part_map *)item)->uid;
+}
+
+/* Free a used part map element. */
+
+static void
+free_used_part_map (void *item)
+{
+ free (((struct used_part_map *)item)->to);
+ free (item);
+}
+
+/* Lookup a used_part structure for a UID. */
+
+static used_part_t
+up_lookup (unsigned int uid)
+{
+ struct used_part_map *h, in;
+ in.uid = uid;
+ h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
+ if (!h)
+ return NULL;
+ return h->to;
+}
+
+/* Insert the pair UID, TO into the used part hashtable. */
+
+static void
+up_insert (unsigned int uid, used_part_t to)
+{
+ struct used_part_map *h;
+ void **loc;
+
+ h = XNEW (struct used_part_map);
+ h->uid = uid;
+ h->to = to;
+ loc = htab_find_slot_with_hash (used_portions, h,
+ uid, INSERT);
+ if (*loc != NULL)
+ free (*loc);
+ *(struct used_part_map **) loc = h;
+}
+
/* Given a variable uid, UID, get or create the entry in the used portions
table for the variable. */
get_or_create_used_part_for (size_t uid)
{
used_part_t up;
- if (used_portions[uid] == NULL)
+ if ((up = up_lookup (uid)) == NULL)
{
- up = xcalloc (1, sizeof (struct used_part));
+ up = XCNEW (struct used_part);
up->minused = INT_MAX;
up->maxused = 0;
up->explicit_uses = false;
up->implicit_uses = false;
+ up->write_only = true;
}
- else
- up = used_portions[uid];
+
return up;
}
-/* qsort comparison function for two fieldoff_t's PA and PB */
-static int
-fieldoff_compare (const void *pa, const void *pb)
+/* Create and return a structure sub-variable for field type FIELD at
+ offset OFFSET, with size SIZE, of variable VAR. */
+
+static tree
+create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
+ unsigned HOST_WIDE_INT size)
{
- const fieldoff_t foa = *(fieldoff_t *)pa;
- const fieldoff_t fob = *(fieldoff_t *)pb;
- HOST_WIDE_INT foasize, fobsize;
- if (foa->offset != fob->offset)
- return foa->offset - fob->offset;
-
- foasize = TREE_INT_CST_LOW (DECL_SIZE (foa->field));
- fobsize = TREE_INT_CST_LOW (DECL_SIZE (fob->field));
- if (foasize != fobsize)
- return foasize - fobsize;
- return 0;
+ var_ann_t ann;
+ tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
+
+  /* We need to copy the various flags from VAR to SUBVAR, so that
+     the new subvariable is is_global_var iff the original variable was.  */
+ DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
+ MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
+ TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
+ TREE_STATIC (subvar) = TREE_STATIC (var);
+ TREE_READONLY (subvar) = TREE_READONLY (var);
+
+ /* Add the new variable to REFERENCED_VARS. */
+ ann = get_var_ann (subvar);
+ ann->type_mem_tag = NULL;
+ add_referenced_tmp_var (subvar);
+ SFT_PARENT_VAR (subvar) = var;
+ SFT_OFFSET (subvar) = offset;
+ SFT_SIZE (subvar) = size;
+ return subvar;
}
+
/* Given an aggregate VAR, create the subvariables that represent its
fields. */
static void
create_overlap_variables_for (tree var)
{
- VEC(fieldoff_t) *fieldstack = NULL;
+ VEC(fieldoff_s,heap) *fieldstack = NULL;
used_part_t up;
- size_t uid = var_ann (var)->uid;
+ size_t uid = DECL_UID (var);
- if (used_portions[uid] == NULL)
+ up = up_lookup (uid);
+ if (!up
+ || up->write_only)
return;
- up = used_portions[uid];
- push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0);
- if (VEC_length (fieldoff_t, fieldstack) != 0)
+ push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
+ if (VEC_length (fieldoff_s, fieldstack) != 0)
{
subvar_t *subvars;
- fieldoff_t fo;
+ fieldoff_s *fo;
bool notokay = false;
int fieldcount = 0;
int i;
currently don't. Doing so would require some extra changes to
tree-ssa-operands.c. */
- for (i = 0; VEC_iterate (fieldoff_t, fieldstack, i, fo); i++)
+ for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
{
- if (!DECL_SIZE (fo->field)
- || TREE_CODE (DECL_SIZE (fo->field)) != INTEGER_CST
- || TREE_CODE (TREE_TYPE (fo->field)) == ARRAY_TYPE
+ if (!fo->size
+ || TREE_CODE (fo->size) != INTEGER_CST
|| fo->offset < 0)
{
notokay = true;
notokay = true;
}
-
- /* Cleanup after ourselves if we can't create overlap variables. */
+ /* Bail out, if we can't create overlap variables. */
if (notokay)
{
- while (VEC_length (fieldoff_t, fieldstack) != 0)
- {
- fo = VEC_pop (fieldoff_t, fieldstack);
- free (fo);
- }
- VEC_free (fieldoff_t, fieldstack);
+ VEC_free (fieldoff_s, heap, fieldstack);
return;
}
+
/* Otherwise, create the variables. */
subvars = lookup_subvars_for_var (var);
- qsort (VEC_address (fieldoff_t, fieldstack),
- VEC_length (fieldoff_t, fieldstack),
- sizeof (fieldoff_t),
- fieldoff_compare);
+ sort_fieldstack (fieldstack);
- while (VEC_length (fieldoff_t, fieldstack) != 0)
+ for (i = VEC_length (fieldoff_s, fieldstack);
+ VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
{
subvar_t sv;
HOST_WIDE_INT fosize;
- var_ann_t ann;
tree currfotype;
- fo = VEC_pop (fieldoff_t, fieldstack);
- fosize = TREE_INT_CST_LOW (DECL_SIZE (fo->field));
- currfotype = TREE_TYPE (fo->field);
+ fosize = TREE_INT_CST_LOW (fo->size);
+ currfotype = fo->type;
/* If this field isn't in the used portion,
or it has the exact same offset and size as the last
|| (fo->offset == lastfooffset
&& fosize == lastfosize
&& currfotype == lastfotype))
- {
- free (fo);
- continue;
- }
- sv = ggc_alloc (sizeof (struct subvar));
- sv->offset = fo->offset;
- sv->size = fosize;
+ continue;
+ sv = GGC_NEW (struct subvar);
sv->next = *subvars;
- sv->var = create_tmp_var_raw (TREE_TYPE (fo->field), "SFT");
+ sv->var = create_sft (var, fo->type, fo->offset, fosize);
+
if (dump_file)
{
fprintf (dump_file, "structure field tag %s created for var %s",
get_name (sv->var), get_name (var));
fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
- sv->offset);
+ SFT_OFFSET (sv->var));
fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
- sv->size);
+ SFT_SIZE (sv->var));
fprintf (dump_file, "\n");
-
}
- /* We need to copy the various flags from var to sv->var, so that
- they are is_global_var iff the original variable was. */
-
- DECL_EXTERNAL (sv->var) = DECL_EXTERNAL (var);
- TREE_PUBLIC (sv->var) = TREE_PUBLIC (var);
- TREE_STATIC (sv->var) = TREE_STATIC (var);
- TREE_READONLY (sv->var) = TREE_READONLY (var);
-
- /* Like other memory tags, these need to be marked addressable to
- keep is_gimple_reg from thinking they are real. */
- TREE_ADDRESSABLE (sv->var) = 1;
-
- DECL_CONTEXT (sv->var) = DECL_CONTEXT (var);
-
- ann = get_var_ann (sv->var);
- ann->mem_tag_kind = STRUCT_FIELD;
- ann->type_mem_tag = NULL;
- add_referenced_tmp_var (sv->var);
-
lastfotype = currfotype;
lastfooffset = fo->offset;
lastfosize = fosize;
*subvars = sv;
- free (fo);
}
/* Once we have created subvars, the original is no longer call
marking subvars of global variables as call clobbered for us
to start, since they are global as well. */
clear_call_clobbered (var);
-
}
- VEC_free (fieldoff_t, fieldstack);
+ VEC_free (fieldoff_s, heap, fieldstack);
}
entire structure. */
static tree
-find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
+find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
{
switch (TREE_CODE (*tp))
{
+ case MODIFY_EXPR:
+ /* Recurse manually here to track whether the use is in the
+ LHS of an assignment. */
+ find_used_portions (&TREE_OPERAND (*tp, 0), walk_subtrees, tp);
+ return find_used_portions (&TREE_OPERAND (*tp, 1), walk_subtrees, NULL);
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
case COMPONENT_REF:
+ case ARRAY_REF:
{
HOST_WIDE_INT bitsize;
+ HOST_WIDE_INT bitmaxsize;
HOST_WIDE_INT bitpos;
- tree offset;
- enum machine_mode mode;
- int unsignedp;
- int volatilep;
tree ref;
- ref = get_inner_reference (*tp, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
- if (DECL_P (ref) && offset == NULL && bitsize != -1)
- {
- size_t uid = var_ann (ref)->uid;
+ ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
+ if (DECL_P (ref)
+ && var_can_have_subvars (ref)
+ && bitmaxsize != -1)
+ {
+ size_t uid = DECL_UID (ref);
used_part_t up;
up = get_or_create_used_part_for (uid);
if (bitpos <= up->minused)
up->minused = bitpos;
- if ((bitpos + bitsize >= up->maxused))
- up->maxused = bitpos + bitsize;
+ if ((bitpos + bitmaxsize >= up->maxused))
+ up->maxused = bitpos + bitmaxsize;
- up->explicit_uses = true;
- used_portions[uid] = up;
+ if (bitsize == bitmaxsize)
+ up->explicit_uses = true;
+ else
+ up->implicit_uses = true;
+ if (!lhs_p)
+ up->write_only = false;
+ up_insert (uid, up);
*walk_subtrees = 0;
return NULL_TREE;
}
- else if (DECL_P (ref))
- {
- if (DECL_SIZE (ref)
- && var_can_have_subvars (ref)
- && TREE_CODE (DECL_SIZE (ref)) == INTEGER_CST)
- {
- used_part_t up;
- size_t uid = var_ann (ref)->uid;
-
- up = get_or_create_used_part_for (uid);
-
- up->minused = 0;
- up->maxused = TREE_INT_CST_LOW (DECL_SIZE (ref));
-
- up->implicit_uses = true;
+ }
+ break;
+ /* This is here to make sure we mark the entire base variable as used
+ when you take its address. Because our used portion analysis is
+ simple, we aren't looking at casts or pointer arithmetic to see what
+ happens when you take the address. */
+ case ADDR_EXPR:
+ {
+ tree var = get_base_address (TREE_OPERAND (*tp, 0));
- used_portions[uid] = up;
+ if (var
+ && DECL_P (var)
+ && DECL_SIZE (var)
+ && var_can_have_subvars (var)
+ && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
+ {
+ used_part_t up;
+ size_t uid = DECL_UID (var);
+
+ up = get_or_create_used_part_for (uid);
+
+ up->minused = 0;
+ up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
+ up->implicit_uses = true;
- *walk_subtrees = 0;
- return NULL_TREE;
- }
+ up_insert (uid, up);
+ *walk_subtrees = 0;
+ return NULL_TREE;
}
}
break;
case VAR_DECL:
case PARM_DECL:
+ case RESULT_DECL:
{
tree var = *tp;
if (DECL_SIZE (var)
&& TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
{
used_part_t up;
- size_t uid = var_ann (var)->uid;
+ size_t uid = DECL_UID (var);
up = get_or_create_used_part_for (uid);
up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
up->implicit_uses = true;
- used_portions[uid] = up;
+ up_insert (uid, up);
*walk_subtrees = 0;
return NULL_TREE;
}
return NULL_TREE;
}
-/* We are about to create some new referenced variables, and we need the
- before size. */
-
-static size_t old_referenced_vars;
-
-
/* Create structure field variables for structures used in this function. */
static void
create_structure_vars (void)
{
basic_block bb;
- size_t i;
+ safe_referenced_var_iterator rvi;
+ VEC (tree, heap) *varvec = NULL;
+ tree var;
- old_referenced_vars = num_referenced_vars;
- used_portions = xcalloc (num_referenced_vars, sizeof (used_part_t));
+ used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
+ free_used_part_map);
FOR_EACH_BB (bb)
{
NULL);
}
}
- for (i = 0; i < old_referenced_vars; i++)
+ FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
{
- tree var = referenced_var (i);
/* The C++ FE creates vars without DECL_SIZE set, for some reason. */
if (var
&& DECL_SIZE (var)
&& var_can_have_subvars (var)
- && var_ann (var)->mem_tag_kind == NOT_A_TAG
+ && !MTAG_P (var)
&& TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
create_overlap_variables_for (var);
}
- for (i = 0; i < old_referenced_vars; i++)
- free (used_portions[i]);
+ htab_delete (used_portions);
+ VEC_free (tree, heap, varvec);
- free (used_portions);
}
static bool