/* Alias analysis for trees.
- Copyright (C) 2004 Free Software Foundation, Inc.
+ Copyright (C) 2004, 2005 Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
#include "tree-gimple.h"
#include "tree-flow.h"
#include "tree-inline.h"
-#include "tree-alias-common.h"
#include "tree-pass.h"
#include "convert.h"
#include "params.h"
/* SSA names visited while collecting points-to information. If bit I
is set, it means that SSA variable with version I has already been
visited. */
- bitmap ssa_names_visited;
+ sbitmap ssa_names_visited;
/* Array of SSA_NAME pointers processed by the points-to collector. */
varray_type processed_ptrs;
unsigned int simple_resolved;
unsigned int tbaa_queries;
unsigned int tbaa_resolved;
- unsigned int pta_queries;
- unsigned int pta_resolved;
};
static bool collect_points_to_info_r (tree, tree, void *);
static bool is_escape_site (tree, size_t *);
static void add_pointed_to_var (struct alias_info *, tree, tree);
-static void add_pointed_to_expr (tree, tree);
static void create_global_var (void);
static void collect_points_to_info_for (struct alias_info *, tree);
static bool ptr_is_dereferenced_by (tree, tree, bool *);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
-static struct ptr_info_def *get_ptr_info (tree t);
static void set_pt_anything (tree ptr);
static void set_pt_malloc (tree ptr);
The concept of 'escaping' is the same one used in the Java world. When
a pointer or an ADDR_EXPR escapes, it means that it has been exposed
outside of the current function. So, assignment to global variables,
- function arguments and returning a pointer are all escape sites.
+ function arguments and returning a pointer are all escape sites, as are
+ conversions between pointers and integers.
This is where we are currently limited. Since not everything is renamed
into SSA, we lose track of escape properties when a pointer is stashed
foo (int i)
{
- int *p, *q, a, b;
+ int *p, a, b;
if (i > 10)
p = &a;
else
- q = &b;
+ p = &b;
*p = 3;
- *q = 5;
a = b + 2;
return *p;
}
NULL, /* next */
0, /* static_pass_number */
TV_TREE_MAY_ALIAS, /* tv_id */
- PROP_cfg | PROP_ssa | PROP_pta, /* properties_required */
+ PROP_cfg | PROP_ssa, /* properties_required */
PROP_alias, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func | TODO_rename_vars
- | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
+ | TODO_ggc_collect | TODO_verify_ssa
+ | TODO_verify_stmts, /* todo_flags_finish */
+ 0 /* letter */
};
+/* Count the number of calls in the function and conditionally
+ create GLOBAL_VAR. This is performed before translation
+ into SSA (and thus before alias analysis) to avoid compile time
+ and memory utilization explosions in functions with many
+ calls and call clobbered variables. */
+
+static void
+count_calls_and_maybe_create_global_var (void)
+{
+ struct alias_info ai;
+ basic_block bb;
+ bool temp;
+
+ /* Only AI.NUM_CALLS_FOUND is consumed by maybe_create_global_var;
+ zero the whole structure so the remaining fields are inert. */
+ memset (&ai, 0, sizeof (struct alias_info));
+
+ /* First count the number of calls in the IL. */
+ FOR_EACH_BB (bb)
+ {
+ block_stmt_iterator si;
+
+ for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+ {
+ tree stmt = bsi_stmt (si);
+
+ if (get_call_expr_in (stmt) != NULL_TREE)
+ ai.num_calls_found++;
+ }
+ }
+
+ /* If there are no call clobbered variables, then maybe_create_global_var
+ will always create a GLOBAL_VAR. At this point we do not want that
+ behavior. So we turn on one bit in CALL_CLOBBERED_VARs, call
+ maybe_create_global_var, then reset the bit to its original state. */
+ temp = bitmap_bit_p (call_clobbered_vars, 0);
+ bitmap_set_bit (call_clobbered_vars, 0);
+ maybe_create_global_var (&ai);
+ if (!temp)
+ bitmap_clear_bit (call_clobbered_vars, 0);
+}
+
+/* Pass descriptor for count_calls_and_maybe_create_global_var. It
+ only requires a CFG (PROP_cfg) so it can run before the SSA
+ translation and alias analysis passes. */
+struct tree_opt_pass pass_maybe_create_global_var =
+{
+ "maybe_create_global_var", /* name */
+ NULL, /* gate */
+ count_calls_and_maybe_create_global_var, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_TREE_MAY_ALIAS, /* tv_id */
+ PROP_cfg, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
+ 0 /* letter */
+};
/* Initialize the data structures used for alias analysis. */
static bool aliases_computed_p = false;
ai = xcalloc (1, sizeof (struct alias_info));
- ai->ssa_names_visited = BITMAP_XMALLOC ();
+ ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
+ sbitmap_zero (ai->ssa_names_visited);
VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
ai->addresses_needed = BITMAP_XMALLOC ();
VARRAY_UINT_INIT (ai->num_references, num_referenced_vars, "num_references");
/* If aliases have been computed before, clear existing information. */
if (aliases_computed_p)
{
- size_t i;
-
- /* Clear the call-clobbered set. We are going to re-discover
- call-clobbered variables. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
+ unsigned i;
+ basic_block bb;
+
+ /* Make sure that every statement has a valid set of operands.
+ If a statement needs to be scanned for operands while we
+ compute aliases, it may get erroneous operands because all
+ the alias relations are not built at that point.
+ FIXME: This code will become obsolete when operands are not
+ lazily updated. */
+ FOR_EACH_BB (bb)
{
- tree var = referenced_var (i);
-
- /* Variables that are intrinsically call-clobbered (globals,
- local statics, etc) will not be marked by the aliasing
- code, so we can't remove them from CALL_CLOBBERED_VARS. */
- if (!is_call_clobbered (var))
- bitmap_clear_bit (call_clobbered_vars, var_ann (var)->uid);
- });
+ block_stmt_iterator si;
+ for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+ get_stmt_operands (bsi_stmt (si));
+ }
/* Similarly, clear the set of addressable variables. In this
case, we can just clear the set because addressability is
/* Clear flow-insensitive alias information from each symbol. */
for (i = 0; i < num_referenced_vars; i++)
{
- var_ann_t ann = var_ann (referenced_var (i));
+ tree var = referenced_var (i);
+ var_ann_t ann = var_ann (var);
+
ann->is_alias_tag = 0;
ann->may_aliases = NULL;
+
+ /* Since we are about to re-discover call-clobbered
+ variables, clear the call-clobbered flag. Variables that
+ are intrinsically call-clobbered (globals, local statics,
+ etc) will not be marked by the aliasing code, so we can't
+ remove them from CALL_CLOBBERED_VARS. */
+ if (ann->mem_tag_kind != NOT_A_TAG || !is_global_var (var))
+ clear_call_clobbered (var);
}
/* Clear flow-sensitive points-to information from each SSA name. */
{
tree name = ssa_name (i);
- if (!POINTER_TYPE_P (TREE_TYPE (name)))
+ if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
continue;
if (SSA_NAME_PTR_INFO (name))
{
size_t i;
- BITMAP_XFREE (ai->ssa_names_visited);
+ sbitmap_free (ai->ssa_names_visited);
ai->processed_ptrs = NULL;
BITMAP_XFREE (ai->addresses_needed);
static void
collect_points_to_info_for (struct alias_info *ai, tree ptr)
{
-#if defined ENABLE_CHECKING
- if (!POINTER_TYPE_P (TREE_TYPE (ptr)))
- abort ();
-#endif
+ gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
- if (!bitmap_bit_p (ai->ssa_names_visited, SSA_NAME_VERSION (ptr)))
+ if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (ptr)))
{
- bitmap_set_bit (ai->ssa_names_visited, SSA_NAME_VERSION (ptr));
+ SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (ptr));
walk_use_def_chains (ptr, collect_points_to_info_r, ai, true);
VARRAY_PUSH_TREE (ai->processed_ptrs, ptr);
}
/* Helper for ptr_is_dereferenced_by. Called by walk_tree to look for
- INDIRECT_REF nodes for the pointer passed in DATA. */
+ (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
static tree
find_ptr_dereference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
tree ptr = (tree) data;
- if (TREE_CODE (*tp) == INDIRECT_REF
+ if (INDIRECT_REF_P (*tp)
&& TREE_OPERAND (*tp, 0) == ptr)
return *tp;
}
-/* Return true if STMT contains INDIRECT_REF <PTR>. *IS_STORE is set
- to 'true' if the dereference is on the LHS of an assignment. */
+/* Return true if STMT contains (ALIGN/MISALIGNED_)INDIRECT_REF <PTR>.
+ *IS_STORE is set to 'true' if the dereference is on the LHS of an
+ assignment. */
static bool
ptr_is_dereferenced_by (tree ptr, tree stmt, bool *is_store)
return true;
}
}
+ else
+ {
+ /* CALL_EXPRs may also contain pointer dereferences for types
+ that are not GIMPLE register types. If the CALL_EXPR is on
+ the RHS of an assignment, it will be handled by the
+ MODIFY_EXPR handler above. */
+ tree call = get_call_expr_in (stmt);
+ if (call && walk_tree (&call, find_ptr_dereference, ptr, NULL))
+ return true;
+ }
return false;
}
compute_points_to_and_addr_escape (struct alias_info *ai)
{
basic_block bb;
- size_t i;
+ unsigned i;
+ tree op;
+ ssa_op_iter iter;
timevar_push (TV_TREE_PTA);
for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
{
- use_optype uses;
- def_optype defs;
- v_may_def_optype v_may_defs;
- v_must_def_optype v_must_defs;
- stmt_ann_t ann;
bitmap addr_taken;
tree stmt = bsi_stmt (si);
bool stmt_escapes_p = is_escape_site (stmt, &ai->num_calls_found);
+ bitmap_iterator bi;
/* Mark all the variables whose address are taken by the
statement. Note that this will miss all the addresses taken
get_stmt_operands (stmt);
addr_taken = addresses_taken (stmt);
if (addr_taken)
- EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i,
- {
- tree var = referenced_var (i);
- bitmap_set_bit (ai->addresses_needed, var_ann (var)->uid);
- if (stmt_escapes_p)
- mark_call_clobbered (var);
- });
+ EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
+ {
+ tree var = referenced_var (i);
+ bitmap_set_bit (ai->addresses_needed, var_ann (var)->uid);
+ if (stmt_escapes_p)
+ mark_call_clobbered (var);
+ }
if (stmt_escapes_p)
block_ann->has_escape_site = 1;
- /* Special case for silly ADDR_EXPR tricks
- (gcc.c-torture/unsorted/pass.c). If this statement is an
- assignment to a non-pointer variable and the RHS takes the
- address of a variable, assume that the variable on the RHS is
- call-clobbered. We could add the LHS to the list of
- "pointers" and follow it to see if it really escapes, but it's
- not worth the pain. */
- if (addr_taken
- && TREE_CODE (stmt) == MODIFY_EXPR
- && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 0))))
- EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i,
- {
- tree var = referenced_var (i);
- mark_call_clobbered (var);
- });
-
- ann = stmt_ann (stmt);
- uses = USE_OPS (ann);
- for (i = 0; i < NUM_USES (uses); i++)
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
{
- tree op = USE_OP (uses, i);
var_ann_t v_ann = var_ann (SSA_NAME_VAR (op));
struct ptr_info_def *pi;
bool is_store;
/* Update reference counter for definitions to any
potentially aliased variable. This is used in the alias
grouping heuristics. */
- defs = DEF_OPS (ann);
- for (i = 0; i < NUM_DEFS (defs); i++)
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
{
- tree op = DEF_OP (defs, i);
tree var = SSA_NAME_VAR (op);
var_ann_t ann = var_ann (var);
bitmap_set_bit (ai->written_vars, ann->uid);
if (may_be_aliased (var))
(VARRAY_UINT (ai->num_references, ann->uid))++;
+
+ if (POINTER_TYPE_P (TREE_TYPE (op)))
+ collect_points_to_info_for (ai, op);
}
/* Mark variables in V_MAY_DEF operands as being written to. */
- v_may_defs = V_MAY_DEF_OPS (ann);
- for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
{
- tree op = V_MAY_DEF_OP (v_may_defs, i);
tree var = SSA_NAME_VAR (op);
var_ann_t ann = var_ann (var);
bitmap_set_bit (ai->written_vars, ann->uid);
}
- /* Mark variables in V_MUST_DEF operands as being written to. */
- v_must_defs = V_MUST_DEF_OPS (ann);
- for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
- {
- tree op = V_MUST_DEF_OP (v_must_defs, i);
- tree var = SSA_NAME_VAR (op);
- var_ann_t ann = var_ann (var);
- bitmap_set_bit (ai->written_vars, ann->uid);
- }
-
/* After promoting variables and computing aliasing we will
need to re-scan most statements. FIXME: Try to minimize the
number of statements re-scanned. It's not really necessary to
continue;
}
- if (pi->pt_vars
- && bitmap_first_set_bit (pi->pt_vars) >= 0)
+ if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
{
size_t j;
tree old_name_tag = pi->name_mem_tag;
continue;
}
+ TREE_THIS_VOLATILE (pi->name_mem_tag)
+ |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
+
/* Mark the new name tag for renaming. */
bitmap_set_bit (vars_to_rename, var_ann (pi->name_mem_tag)->uid);
}
for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
{
- size_t j;
+ unsigned j;
tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
+ bitmap_iterator bi;
if (pi->value_escapes_p || pi->pt_anything)
{
mark_call_clobbered (v_ann->type_mem_tag);
if (pi->pt_vars)
- EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j,
- mark_call_clobbered (referenced_var (j)));
+ EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
+ {
+ mark_call_clobbered (referenced_var (j));
+ }
}
/* Set up aliasing information for PTR's name memory tag (if it has
one). Note that only pointers that have been dereferenced will
have a name memory tag. */
if (pi->name_mem_tag && pi->pt_vars)
- EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j,
- add_may_alias (pi->name_mem_tag, referenced_var (j)));
+ EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
+ {
+ add_may_alias (pi->name_mem_tag, referenced_var (j));
+ add_may_alias (v_ann->type_mem_tag, referenced_var (j));
+ }
/* If the name tag is call clobbered, so is the type tag
associated with the base VAR_DECL. */
/* Skip memory tags and variables that have never been
written to. We also need to check if the variables are
call-clobbered because they may be overwritten by
- function calls. */
- tag_stored_p = bitmap_bit_p (ai->written_vars, tag_ann->uid)
- || is_call_clobbered (tag);
- var_stored_p = bitmap_bit_p (ai->written_vars, v_ann->uid)
- || is_call_clobbered (var);
+ function calls.
+
+ Note this is effectively randomly accessing elements in
+ the sparse bitset, which can be highly inefficient.
+ So we first check the call_clobbered status of the
+ tag and variable before querying the bitmap. */
+ tag_stored_p = is_call_clobbered (tag)
+ || bitmap_bit_p (ai->written_vars, tag_ann->uid);
+ var_stored_p = is_call_clobbered (var)
+ || bitmap_bit_p (ai->written_vars, v_ann->uid);
if (!tag_stored_p && !var_stored_p)
continue;
}
}
+ /* Since this analysis is based exclusively on symbols, it fails to
+ handle cases where two pointers P and Q have different memory
+ tags with conflicting alias set numbers but no aliased symbols in
+ common.
+
+ For example, suppose that we have two memory tags TMT.1 and TMT.2
+ such that
+
+ may-aliases (TMT.1) = { a }
+ may-aliases (TMT.2) = { b }
+
+ and the alias set number of TMT.1 conflicts with that of TMT.2.
+ Since they don't have symbols in common, loads and stores from
+ TMT.1 and TMT.2 will seem independent of each other, which will
+ lead to the optimizers making invalid transformations (see
+ testsuite/gcc.c-torture/execute/pr15262-[12].c).
+
+ To avoid this problem, we do a final traversal of AI->POINTERS
+ looking for pairs of pointers that have no aliased symbols in
+ common and yet have conflicting alias set numbers. */
+ for (i = 0; i < ai->num_pointers; i++)
+ {
+ size_t j;
+ struct alias_map_d *p_map1 = ai->pointers[i];
+ tree tag1 = var_ann (p_map1->var)->type_mem_tag;
+ sbitmap may_aliases1 = p_map1->may_aliases;
+
+ for (j = i + 1; j < ai->num_pointers; j++)
+ {
+ struct alias_map_d *p_map2 = ai->pointers[j];
+ tree tag2 = var_ann (p_map2->var)->type_mem_tag;
+ sbitmap may_aliases2 = p_map2->may_aliases;
+
+ /* If the pointers may not point to each other, do nothing. */
+ if (!may_alias_p (p_map1->var, p_map1->set, p_map2->var, p_map2->set))
+ continue;
+
+ /* The two pointers may alias each other. If they already have
+ symbols in common, do nothing. */
+ if (sbitmap_any_common_bits (may_aliases1, may_aliases2))
+ continue;
+
+ if (sbitmap_first_set_bit (may_aliases2) >= 0)
+ {
+ size_t k;
+
+ /* Add all the aliases for TAG2 into TAG1's alias set.
+ FIXME, update grouping heuristic counters. */
+ EXECUTE_IF_SET_IN_SBITMAP (may_aliases2, 0, k,
+ add_may_alias (tag1, referenced_var (k)));
+ sbitmap_a_or_b (may_aliases1, may_aliases1, may_aliases2);
+ }
+ else
+ {
+ /* Since TAG2 does not have any aliases of its own, add
+ TAG2 itself to the alias set of TAG1. */
+ add_may_alias (tag1, tag2);
+ }
+ }
+ }
+
if (dump_file)
fprintf (dump_file, "%s: Total number of aliased vops: %ld\n",
get_name (current_function_decl),
group_aliases (struct alias_info *ai)
{
size_t i;
- sbitmap res;
/* Sort the POINTERS array in descending order of contributed
virtual operands. */
qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
total_alias_vops_cmp);
- res = sbitmap_alloc (num_referenced_vars);
-
/* For every pointer in AI->POINTERS, reverse the roles of its tag
and the tag's may-aliases set. */
for (i = 0; i < ai->num_pointers; i++)
{
sbitmap tag2_aliases = ai->pointers[j]->may_aliases;
- sbitmap_a_and_b (res, tag1_aliases, tag2_aliases);
- if (sbitmap_first_set_bit (res) >= 0)
+ if (sbitmap_any_common_bits (tag1_aliases, tag2_aliases))
{
tree tag2 = var_ann (ai->pointers[j]->var)->type_mem_tag;
{
tree new_alias;
-#if defined ENABLE_CHECKING
- if (VARRAY_ACTIVE_SIZE (ann->may_aliases) != 1)
- abort ();
-#endif
+ gcc_assert (VARRAY_ACTIVE_SIZE (ann->may_aliases) == 1);
+
new_alias = VARRAY_TREE (ann->may_aliases, 0);
replace_may_alias (name_tag, j, new_alias);
}
}
}
- sbitmap_free (res);
-
if (dump_file)
fprintf (dump_file,
"%s: Total number of aliased vops after grouping: %ld%s\n",
/* Name memory tags already have flow-sensitive aliasing
information, so they need not be processed by
- compute_may_aliases. Similarly, type memory tags are already
- accounted for when we process their associated pointer. */
+ compute_flow_insensitive_aliasing. Similarly, type memory
+ tags are already accounted for when we process their
+ associated pointer. */
if (v_ann->mem_tag_kind != NOT_A_TAG)
continue;
if (TREE_ADDRESSABLE (var))
{
if (!bitmap_bit_p (ai->addresses_needed, v_ann->uid)
- && v_ann->mem_tag_kind == NOT_A_TAG
+ && TREE_CODE (var) != RESULT_DECL
&& !is_global_var (var))
{
/* The address of VAR is not needed, remove the
}
}
}
-
- /* If we found no addressable variables, but we have more than one
- pointer, we will need to check for conflicts between the
- pointers. Otherwise, we would miss alias relations as in
- testsuite/gcc.dg/tree-ssa/20040319-1.c:
-
- struct bar { int count; int *arr;};
-
- void foo (struct bar *b)
- {
- b->count = 0;
- *(b->arr) = 2;
- if (b->count == 0)
- abort ();
- }
-
- b->count and *(b->arr) could be aliased if b->arr == &b->count.
- To do this, we add all the memory tags for the pointers in
- AI->POINTERS to AI->ADDRESSABLE_VARS, so that
- compute_flow_insensitive_aliasing will naturally compare every
- pointer to every type tag. */
- if (ai->num_addressable_vars == 0
- && ai->num_pointers > 1)
- {
- free (ai->addressable_vars);
- ai->addressable_vars = xcalloc (ai->num_pointers,
- sizeof (struct alias_map_d *));
- ai->num_addressable_vars = 0;
- for (i = 0; i < ai->num_pointers; i++)
- {
- struct alias_map_d *p = ai->pointers[i];
- tree tag = var_ann (p->var)->type_mem_tag;
- create_alias_map_for (tag, ai);
- }
- }
}
static void
maybe_create_global_var (struct alias_info *ai)
{
- size_t i, n_clobbered;
+ unsigned i, n_clobbered;
+ bitmap_iterator bi;
/* No need to create it, if we have one already. */
if (global_var == NULL_TREE)
{
/* Count all the call-clobbered variables. */
n_clobbered = 0;
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, n_clobbered++);
-
- /* Create .GLOBAL_VAR if we have too many call-clobbered
- variables. We also create .GLOBAL_VAR when there no
- call-clobbered variables to prevent code motion
- transformations from re-arranging function calls that may
- have side effects. For instance,
-
- foo ()
- {
- int a = f ();
- g ();
- h (a);
- }
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ {
+ n_clobbered++;
+ }
- There are no call-clobbered variables in foo(), so it would
- be entirely possible for a pass to want to move the call to
- f() after the call to g(). If f() has side effects, that
- would be wrong. Creating .GLOBAL_VAR in this case will
- insert VDEFs for it and prevent such transformations. */
- if (n_clobbered == 0
- || ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD)
+ if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD)
create_global_var ();
}
/* If the function has calls to clobbering functions and .GLOBAL_VAR has
been created, make it an alias for all call-clobbered variables. */
if (global_var)
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
{
tree var = referenced_var (i);
if (var != global_var)
add_may_alias (var, global_var);
bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
}
- });
+ }
}
v_ann = var_ann (var);
m_ann = var_ann (mem);
-#if defined ENABLE_CHECKING
- if (m_ann->mem_tag_kind != TYPE_TAG)
- abort ();
-#endif
+ gcc_assert (m_ann->mem_tag_kind == TYPE_TAG);
alias_stats.tbaa_queries++;
for PTR's alias set here, not its pointed-to type. We also can't
do this check with relaxed aliasing enabled. */
if (POINTER_TYPE_P (TREE_TYPE (var))
- && var_alias_set != 0)
+ && var_alias_set != 0
+ && mem_alias_set != 0)
{
HOST_WIDE_INT ptr_alias_set = get_alias_set (ptr);
if (ptr_alias_set == var_alias_set)
/* If the alias sets don't conflict then MEM cannot alias VAR. */
if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
{
- /* Handle aliases to structure fields. If either VAR or MEM are
- aggregate types, they may not have conflicting types, but one of
- the structures could contain a pointer to the other one.
-
- For instance, given
-
- MEM -> struct P *p;
- VAR -> struct Q *q;
-
- It may happen that '*p' and '*q' can't alias because 'struct P'
- and 'struct Q' have non-conflicting alias sets. However, it could
- happen that one of the fields in 'struct P' is a 'struct Q *' or
- vice-versa.
-
- Therefore, we also need to check if 'struct P' aliases 'struct Q *'
- or 'struct Q' aliases 'struct P *'. Notice, that since GIMPLE
- does not have more than one-level pointers, we don't need to
- recurse into the structures. */
- if (AGGREGATE_TYPE_P (TREE_TYPE (mem))
- || AGGREGATE_TYPE_P (TREE_TYPE (var)))
- {
- tree ptr_to_var;
-
- if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
- ptr_to_var = TYPE_POINTER_TO (TREE_TYPE (TREE_TYPE (var)));
- else
- ptr_to_var = TYPE_POINTER_TO (TREE_TYPE (var));
-
- /* If no pointer-to VAR exists, then MEM can't alias VAR. */
- if (ptr_to_var == NULL_TREE)
- {
- alias_stats.alias_noalias++;
- alias_stats.tbaa_resolved++;
- return false;
- }
-
- /* If MEM doesn't alias a pointer to VAR and VAR doesn't alias
- PTR, then PTR can't alias VAR. */
- if (!alias_sets_conflict_p (mem_alias_set, get_alias_set (ptr_to_var))
- && !alias_sets_conflict_p (var_alias_set, get_alias_set (ptr)))
- {
- alias_stats.alias_noalias++;
- alias_stats.tbaa_resolved++;
- return false;
- }
- }
- else
- {
- alias_stats.alias_noalias++;
- alias_stats.tbaa_resolved++;
- return false;
- }
- }
-
- if (flag_tree_points_to != PTA_NONE)
- alias_stats.pta_queries++;
-
- /* If -ftree-points-to is given, check if PTR may point to VAR. */
- if (flag_tree_points_to == PTA_ANDERSEN
- && !ptr_may_alias_var (ptr, var))
- {
alias_stats.alias_noalias++;
- alias_stats.pta_resolved++;
+ alias_stats.tbaa_resolved++;
return false;
}
var_ann_t v_ann = get_var_ann (var);
var_ann_t a_ann = get_var_ann (alias);
-#if defined ENABLE_CHECKING
- if (var == alias)
- abort ();
-#endif
+ gcc_assert (var != alias);
if (v_ann->may_aliases == NULL)
VARRAY_TREE_INIT (v_ann->may_aliases, 2, "aliases");
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
/* If the pointer has already been found to point to arbitrary
- memory locations, it is unsafe to mark it as pointing to malloc. */
+ memory locations, it is unsafe to mark it as pointing to malloc. */
if (pi->pt_anything)
return;
}
-/* Given two pointers DEST and ORIG. Merge the points-to information in
- ORIG into DEST. AI is as in collect_points_to_info. */
+/* Given two different pointers DEST and ORIG. Merge the points-to
+ information in ORIG into DEST. AI is as in
+ collect_points_to_info. */
static void
merge_pointed_to_info (struct alias_info *ai, tree dest, tree orig)
{
struct ptr_info_def *dest_pi, *orig_pi;
+ gcc_assert (dest != orig);
+
/* Make sure we have points-to information for ORIG. */
collect_points_to_info_for (ai, orig);
...
P_j = P_i + X;
- P_j would be marked as PT_MALLOC, which is wrong because
- PT_MALLOC implies that the pointer may not point to another
- variable.
-
- FIXME 1: Subsequent analysis may determine that P_j
- cannot alias anything else, but we are being conservative
- here.
-
- FIXME 2: If the merging comes from a copy assignment, we
- ought to merge PT_MALLOC, but then both pointers would end up
- getting different name tags because create_name_tags is not
- smart enough to determine that the two come from the same
- malloc call. Copy propagation before aliasing should cure
- this. */
+ P_j would be marked as PT_MALLOC, however we currently do not
+ handle cases of more than one pointer pointing to the same
+ malloc'd area.
+
+ FIXME: If the merging comes from an expression that preserves
+ the PT_MALLOC attribute (copy assignment, address
+ arithmetic), we ought to merge PT_MALLOC, but then both
+ pointers would end up getting different name tags because
+ create_name_tags is not smart enough to determine that the
+ two come from the same malloc call. Copy propagation before
+ aliasing should cure this. */
+ gcc_assert (orig_pi != dest_pi);
+
dest_pi->pt_malloc = 0;
if (orig_pi->pt_malloc || orig_pi->pt_anything)
if (!dest_pi->pt_anything
&& orig_pi->pt_vars
- && bitmap_first_set_bit (orig_pi->pt_vars) >= 0)
+ && !bitmap_empty_p (orig_pi->pt_vars))
{
if (dest_pi->pt_vars == NULL)
{
bitmap_copy (dest_pi->pt_vars, orig_pi->pt_vars);
}
else
- bitmap_a_or_b (dest_pi->pt_vars,
- dest_pi->pt_vars,
- orig_pi->pt_vars);
+ bitmap_ior_into (dest_pi->pt_vars, orig_pi->pt_vars);
}
}
else
}
-/* Add VALUE to the list of expressions pointed-to by PTR. */
+/* Add EXPR to the list of expressions pointed-to by PTR. */
static void
-add_pointed_to_expr (tree ptr, tree value)
+add_pointed_to_expr (struct alias_info *ai, tree ptr, tree expr)
{
- if (TREE_CODE (value) == WITH_SIZE_EXPR)
- value = TREE_OPERAND (value, 0);
-
-#if defined ENABLE_CHECKING
- /* Pointer variables should have been handled by merge_pointed_to_info. */
- if (TREE_CODE (value) == SSA_NAME
- && POINTER_TYPE_P (TREE_TYPE (value)))
- abort ();
-#endif
+ if (TREE_CODE (expr) == WITH_SIZE_EXPR)
+ expr = TREE_OPERAND (expr, 0);
get_ptr_info (ptr);
- /* If VALUE is the result of a malloc-like call, then the area pointed to
- PTR is guaranteed to not alias with anything else. */
- if (TREE_CODE (value) == CALL_EXPR
- && (call_expr_flags (value) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
- set_pt_malloc (ptr);
- else
- set_pt_anything (ptr);
-
- if (dump_file)
+ if (TREE_CODE (expr) == CALL_EXPR
+ && (call_expr_flags (expr) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
{
- struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
+ /* If EXPR is a malloc-like call, then the area pointed to PTR
+ is guaranteed to not alias with anything else. */
+ set_pt_malloc (ptr);
+ }
+ else if (TREE_CODE (expr) == ADDR_EXPR)
+ {
+ /* Found P_i = ADDR_EXPR */
+ add_pointed_to_var (ai, ptr, expr);
+ }
+ else if (TREE_CODE (expr) == SSA_NAME && POINTER_TYPE_P (TREE_TYPE (expr)))
+ {
+ /* Found P_i = Q_j. */
+ merge_pointed_to_info (ai, ptr, expr);
+ }
+ else if (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR)
+ {
+ /* Found P_i = PLUS_EXPR or P_i = MINUS_EXPR */
+ tree op0 = TREE_OPERAND (expr, 0);
+ tree op1 = TREE_OPERAND (expr, 1);
- fprintf (dump_file, "Pointer ");
- print_generic_expr (dump_file, ptr, dump_flags);
- fprintf (dump_file, " points to ");
- if (pi->pt_malloc)
- fprintf (dump_file, "malloc space: ");
- else
- fprintf (dump_file, "an arbitrary address: ");
- print_generic_expr (dump_file, value, dump_flags);
- fprintf (dump_file, "\n");
+ /* Both operands may be of pointer type. FIXME: Shouldn't
+ we just expect PTR + OFFSET always? */
+ if (POINTER_TYPE_P (TREE_TYPE (op0))
+ && TREE_CODE (op0) != INTEGER_CST)
+ {
+ if (TREE_CODE (op0) == SSA_NAME)
+ merge_pointed_to_info (ai, ptr, op0);
+ else if (TREE_CODE (op0) == ADDR_EXPR)
+ add_pointed_to_var (ai, ptr, op0);
+ else
+ set_pt_anything (ptr);
+ }
+
+ if (POINTER_TYPE_P (TREE_TYPE (op1))
+ && TREE_CODE (op1) != INTEGER_CST)
+ {
+ if (TREE_CODE (op1) == SSA_NAME)
+ merge_pointed_to_info (ai, ptr, op1);
+ else if (TREE_CODE (op1) == ADDR_EXPR)
+ add_pointed_to_var (ai, ptr, op1);
+ else
+ set_pt_anything (ptr);
+ }
+
+ /* Neither operand is a pointer? VAR can be pointing anywhere.
+ FIXME: Shouldn't we abort here? If we get here, we found
+ PTR = INT_CST + INT_CST, which should not be a valid pointer
+ expression. */
+ if (!(POINTER_TYPE_P (TREE_TYPE (op0))
+ && TREE_CODE (op0) != INTEGER_CST)
+ && !(POINTER_TYPE_P (TREE_TYPE (op1))
+ && TREE_CODE (op1) != INTEGER_CST))
+ set_pt_anything (ptr);
+ }
+ else
+ {
+ /* If we can't recognize the expression, assume that PTR may
+ point anywhere. */
+ set_pt_anything (ptr);
}
}
tree pt_var;
size_t uid;
-#if defined ENABLE_CHECKING
- if (TREE_CODE (value) != ADDR_EXPR)
- abort ();
-#endif
+ gcc_assert (TREE_CODE (value) == ADDR_EXPR);
pt_var = TREE_OPERAND (value, 0);
- if (TREE_CODE_CLASS (TREE_CODE (pt_var)) == 'r')
+ if (REFERENCE_CLASS_P (pt_var))
pt_var = get_base_address (pt_var);
if (pt_var && SSA_VAR_P (pt_var))
fprintf (dump_file, "\n");
}
- if (TREE_CODE (stmt) == MODIFY_EXPR)
+ switch (TREE_CODE (stmt))
{
- tree rhs = TREE_OPERAND (stmt, 1);
- STRIP_NOPS (rhs);
+ case RETURN_EXPR:
+ if (TREE_CODE (TREE_OPERAND (stmt, 0)) != MODIFY_EXPR)
+ abort ();
+ stmt = TREE_OPERAND (stmt, 0);
+ /* FALLTHRU */
- /* Found P_i = ADDR_EXPR */
- if (TREE_CODE (rhs) == ADDR_EXPR)
- add_pointed_to_var (ai, var, rhs);
+ case MODIFY_EXPR:
+ {
+ tree rhs = TREE_OPERAND (stmt, 1);
+ STRIP_NOPS (rhs);
+ add_pointed_to_expr (ai, var, rhs);
+ break;
+ }
- /* Found P_i = Q_j. */
- else if (TREE_CODE (rhs) == SSA_NAME
- && POINTER_TYPE_P (TREE_TYPE (rhs)))
- merge_pointed_to_info (ai, var, rhs);
+ case ASM_EXPR:
+ /* Pointers defined by __asm__ statements can point anywhere. */
+ set_pt_anything (var);
+ break;
- /* Found P_i = PLUS_EXPR or P_i = MINUS_EXPR */
- else if (TREE_CODE (rhs) == PLUS_EXPR
- || TREE_CODE (rhs) == MINUS_EXPR)
+ case NOP_EXPR:
+ if (IS_EMPTY_STMT (stmt))
{
- tree op0 = TREE_OPERAND (rhs, 0);
- tree op1 = TREE_OPERAND (rhs, 1);
-
- /* Both operands may be of pointer type. FIXME: Shouldn't
- we just expect PTR + OFFSET always? */
- if (POINTER_TYPE_P (TREE_TYPE (op0)))
- {
- if (TREE_CODE (op0) == SSA_NAME)
- merge_pointed_to_info (ai, var, op0);
- else if (TREE_CODE (op0) == ADDR_EXPR)
- add_pointed_to_var (ai, var, op0);
- else
- add_pointed_to_expr (var, op0);
- }
-
- if (POINTER_TYPE_P (TREE_TYPE (op1)))
- {
- if (TREE_CODE (op1) == SSA_NAME)
- merge_pointed_to_info (ai, var, op1);
- else if (TREE_CODE (op1) == ADDR_EXPR)
- add_pointed_to_var (ai, var, op1);
- else
- add_pointed_to_expr (var, op1);
- }
-
- /* Neither operand is a pointer? VAR can be pointing
- anywhere. FIXME: Is this right? If we get here, we
- found PTR = INT_CST + INT_CST. */
- if (!POINTER_TYPE_P (TREE_TYPE (op0))
- && !POINTER_TYPE_P (TREE_TYPE (op1)))
- add_pointed_to_expr (var, rhs);
+ tree decl = SSA_NAME_VAR (var);
+
+ if (TREE_CODE (decl) == PARM_DECL)
+ add_pointed_to_expr (ai, var, decl);
+ else if (DECL_INITIAL (decl))
+ add_pointed_to_expr (ai, var, DECL_INITIAL (decl));
+ else
+ add_pointed_to_expr (ai, var, decl);
}
+ break;
- /* Something else. */
- else
- add_pointed_to_expr (var, rhs);
- }
- else if (TREE_CODE (stmt) == ASM_EXPR)
- {
- /* Pointers defined by __asm__ statements can point anywhere. */
- set_pt_anything (var);
- }
- else if (IS_EMPTY_STMT (stmt))
- {
- tree decl = SSA_NAME_VAR (var);
+ case PHI_NODE:
+ {
+ /* If STMT is a PHI node, then VAR is one of its arguments. The
+ variable that we are analyzing is the LHS of the PHI node. */
+ tree lhs = PHI_RESULT (stmt);
- if (TREE_CODE (decl) == PARM_DECL)
- add_pointed_to_expr (var, decl);
- else if (DECL_INITIAL (decl))
- add_pointed_to_var (ai, var, DECL_INITIAL (decl));
- else
- add_pointed_to_expr (var, decl);
- }
- else if (TREE_CODE (stmt) == PHI_NODE)
- {
- /* It STMT is a PHI node, then VAR is one of its arguments. The
- variable that we are analyzing is the LHS of the PHI node. */
- tree lhs = PHI_RESULT (stmt);
-
- if (TREE_CODE (var) == ADDR_EXPR)
- add_pointed_to_var (ai, lhs, var);
- else if (TREE_CODE (var) == SSA_NAME)
- merge_pointed_to_info (ai, lhs, var);
- else if (is_gimple_min_invariant (var))
- add_pointed_to_expr (lhs, var);
- else
- abort ();
- }
- else
- abort ();
+ switch (TREE_CODE (var))
+ {
+ case ADDR_EXPR:
+ add_pointed_to_var (ai, lhs, var);
+ break;
+
+ case SSA_NAME:
+ /* Avoid unnecessary merges. */
+ if (lhs != var)
+ merge_pointed_to_info (ai, lhs, var);
+ break;
+
+ default:
+ gcc_assert (is_gimple_min_invariant (var));
+ add_pointed_to_expr (ai, lhs, var);
+ break;
+ }
+ break;
+ }
+ default:
+ gcc_unreachable ();
+ }
+
return false;
}
if (lhs == NULL_TREE)
return true;
+ /* If the RHS is a conversion between a pointer and an integer, the
+ pointer escapes since we can't track the integer. */
+ if ((TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
+ || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
+ || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
+ && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
+ (TREE_OPERAND (stmt, 1), 0)))
+ && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
+ return true;
+
/* If the LHS is an SSA name, it can't possibly represent a non-local
memory store. */
if (TREE_CODE (lhs) == SSA_NAME)
determine whether they should be considered globals. */
DECL_CONTEXT (tag) = current_function_decl;
- /* If the pointed-to type is volatile, so is the tag. */
- TREE_THIS_VOLATILE (tag) = TREE_THIS_VOLATILE (type);
-
/* Memory tags are by definition addressable. This also prevents
is_gimple_ref from confusing memory tags with optimizable
variables. */
for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
{
struct alias_map_d *curr = ai->pointers[i];
- if (tag_set == curr->set
- && (flag_tree_points_to == PTA_NONE
- || same_points_to_set (curr->var, ptr)))
+ if (tag_set == curr->set)
{
tag = var_ann (curr->var)->type_mem_tag;
break;
ai->pointers[ai->num_pointers++] = alias_map;
}
-#if defined ENABLE_CHECKING
+ /* If the pointed-to type is volatile, so is the tag. */
+ TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
+
/* Make sure that the type tag has the same alias set as the
pointed-to type. */
- if (tag_set != get_alias_set (tag))
- abort ();
-#endif
-
+ gcc_assert (tag_set == get_alias_set (tag));
return tag;
}
create_global_var (void)
{
global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
- size_type_node);
+ void_type_node);
DECL_ARTIFICIAL (global_var) = 1;
TREE_READONLY (global_var) = 0;
DECL_EXTERNAL (global_var) = 1;
alias_stats.tbaa_queries);
fprintf (file, "Total TBAA resolved:\t%u\n",
alias_stats.tbaa_resolved);
- fprintf (file, "Total PTA queries:\t%u\n",
- alias_stats.pta_queries);
- fprintf (file, "Total PTA resolved:\t%u\n",
- alias_stats.pta_resolved);
}
for (i = 1; i < num_ssa_names; i++)
{
tree ptr = ssa_name (i);
- struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
+ struct ptr_info_def *pi;
+
+ if (ptr == NULL_TREE)
+ continue;
+
+ pi = SSA_NAME_PTR_INFO (ptr);
if (!SSA_NAME_IN_FREE_LIST (ptr)
&& pi
&& pi->name_mem_tag)
/* Return the alias information associated with pointer T. It creates a
new instance if none existed. */
-static struct ptr_info_def *
+struct ptr_info_def *
get_ptr_info (tree t)
{
struct ptr_info_def *pi;
-#if defined ENABLE_CHECKING
- if (!POINTER_TYPE_P (TREE_TYPE (t)))
- abort ();
-#endif
+ gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
pi = SSA_NAME_PTR_INFO (t);
if (pi == NULL)
if (pi->pt_vars)
{
unsigned ix;
+ bitmap_iterator bi;
fprintf (file, ", points-to vars: { ");
- EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix,
- {
- print_generic_expr (file, referenced_var (ix), dump_flags);
- fprintf (file, " ");
- });
+ EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
+ {
+ print_generic_expr (file, referenced_var (ix), dump_flags);
+ fprintf (file, " ");
+ }
fprintf (file, "}");
}
}
basic_block bb;
block_stmt_iterator si;
size_t i;
+ ssa_op_iter iter;
const char *fname =
lang_hooks.decl_printable_name (current_function_decl, 2);
for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
{
- stmt_ann_t ann = stmt_ann (bsi_stmt (si));
- def_optype defs = DEF_OPS (ann);
- if (defs)
- for (i = 0; i < NUM_DEFS (defs); i++)
- if (POINTER_TYPE_P (TREE_TYPE (DEF_OP (defs, i))))
- dump_points_to_info_for (file, DEF_OP (defs, i));
+ tree stmt = bsi_stmt (si);
+ tree def;
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
+ if (POINTER_TYPE_P (TREE_TYPE (def)))
+ dump_points_to_info_for (file, def);
}
}
if (TREE_ADDRESSABLE (var))
return true;
- /* Automatic variables can't have their addresses escape any other way. */
- if (!TREE_STATIC (var))
- return false;
-
/* Globally visible variables can have their addresses taken by other
translation units. */
if (DECL_EXTERNAL (var) || TREE_PUBLIC (var))
return true;
+ /* Automatic variables can't have their addresses escape any other way.
+ This must be after the check for global variables, as extern declarations
+ do not have TREE_STATIC set. */
+ if (!TREE_STATIC (var))
+ return false;
+
/* If we're in unit-at-a-time mode, then we must have seen all occurrences
of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
we can only be sure the variable isn't addressable if it's local to the