/* Alias analysis for trees.
- Copyright (C) 2004 Free Software Foundation, Inc.
+ Copyright (C) 2004, 2005 Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
static void add_pointed_to_var (struct alias_info *, tree, tree);
static void create_global_var (void);
static void collect_points_to_info_for (struct alias_info *, tree);
-static bool ptr_is_dereferenced_by (tree, tree, bool *);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
static void set_pt_anything (tree ptr);
The concept of 'escaping' is the same one used in the Java world. When
a pointer or an ADDR_EXPR escapes, it means that it has been exposed
outside of the current function. So, assignment to global variables,
- function arguments and returning a pointer are all escape sites.
+ function arguments and returning a pointer are all escape sites, as are
+ conversions between pointers and integers.
This is where we are currently limited. Since not everything is renamed
into SSA, we lose track of escape properties when a pointer is stashed
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func | TODO_rename_vars
- | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
+ | TODO_ggc_collect | TODO_verify_ssa
+ | TODO_verify_stmts, /* todo_flags_finish */
0 /* letter */
};
+/* Data structure used to count the number of dereferences to PTR
+ inside an expression.  Passed as the DATA argument to walk_tree's
+ count_ptr_derefs callback. */
+struct count_ptr_d
+{
+ tree ptr;		/* The pointer whose dereferences we are counting.  */
+ unsigned count;	/* Number of (ALIGN/MISALIGNED_)INDIRECT_REFs of PTR seen.  */
+};
+
+
+/* Helper for count_uses_and_derefs. Called by walk_tree to look for
+ (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.
+ DATA points to a struct count_ptr_d whose COUNT field is incremented
+ once for every such node found.  The comparison is by pointer
+ identity (==), which is sufficient because SSA names are shared
+ tree nodes.  Always returns NULL_TREE so that walk_tree visits
+ every node in the expression. */
+
+static tree
+count_ptr_derefs (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
+{
+ struct count_ptr_d *count_p = (struct count_ptr_d *) data;
+
+ if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
+ count_p->count++;
+
+ return NULL_TREE;
+}
+
+
+/* Count the number of direct and indirect uses for pointer PTR in
+ statement STMT. The two counts are stored in *NUM_USES_P and
+ *NUM_DEREFS_P respectively. *IS_STORE is set to 'true' if at
+ least one of those dereferences is a store operation.
+
+ Note that *IS_STORE is set whenever the LHS of STMT is an
+ expression tree, even if no dereference of PTR was actually found
+ in it; callers are expected to test *NUM_DEREFS_P > 0 before
+ relying on *IS_STORE. */
+
+static void
+count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
+ unsigned *num_derefs_p, bool *is_store)
+{
+ ssa_op_iter i;
+ tree use;
+
+ *num_uses_p = 0;
+ *num_derefs_p = 0;
+ *is_store = false;
+
+ /* Find out the total number of uses of PTR in STMT. */
+ FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
+ if (use == ptr)
+ (*num_uses_p)++;
+
+ /* Now count the number of indirect references to PTR. This is
+ truly awful, but we don't have much choice. There are no parent
+ pointers inside INDIRECT_REFs, so an expression like
+ '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
+ find all the indirect and direct uses of x_1 inside. The only
+ shortcut we can take is the fact that GIMPLE only allows
+ INDIRECT_REFs inside the expressions below. */
+ if (TREE_CODE (stmt) == MODIFY_EXPR
+ || (TREE_CODE (stmt) == RETURN_EXPR
+ && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
+ || TREE_CODE (stmt) == ASM_EXPR
+ || TREE_CODE (stmt) == CALL_EXPR)
+ {
+ tree lhs, rhs;
+
+ if (TREE_CODE (stmt) == MODIFY_EXPR)
+ {
+ lhs = TREE_OPERAND (stmt, 0);
+ rhs = TREE_OPERAND (stmt, 1);
+ }
+ else if (TREE_CODE (stmt) == RETURN_EXPR)
+ {
+ /* 'return expr' is represented as RETURN_EXPR wrapping a
+ MODIFY_EXPR of RESULT_DECL = expr; unwrap it.  */
+ tree e = TREE_OPERAND (stmt, 0);
+ lhs = TREE_OPERAND (e, 0);
+ rhs = TREE_OPERAND (e, 1);
+ }
+ else if (TREE_CODE (stmt) == ASM_EXPR)
+ {
+ /* NOTE(review): ASM_OUTPUTS/ASM_INPUTS yield TREE_LIST nodes,
+ which presumably pass the EXPR_P checks below — confirm,
+ otherwise dereferences inside asm operands are never
+ counted.  */
+ lhs = ASM_OUTPUTS (stmt);
+ rhs = ASM_INPUTS (stmt);
+ }
+ else
+ {
+ /* A bare CALL_EXPR: no LHS, walk the whole call for derefs
+ in the argument list.  */
+ lhs = NULL_TREE;
+ rhs = stmt;
+ }
+
+ if (lhs && EXPR_P (lhs))
+ {
+ struct count_ptr_d count;
+ count.ptr = ptr;
+ count.count = 0;
+ walk_tree (&lhs, count_ptr_derefs, &count, NULL);
+ *is_store = true;
+ *num_derefs_p = count.count;
+ }
+
+ if (rhs && EXPR_P (rhs))
+ {
+ struct count_ptr_d count;
+ count.ptr = ptr;
+ count.count = 0;
+ walk_tree (&rhs, count_ptr_derefs, &count, NULL);
+ *num_derefs_p += count.count;
+ }
+ }
+
+ /* Every dereference of PTR in STMT is also a use of PTR, so the
+ deref count can never exceed the use count.  */
+ gcc_assert (*num_uses_p >= *num_derefs_p);
+}
+
+
+/* Count the number of calls in the function and conditionally
+ create GLOBAL_VAR. This is performed before translation
+ into SSA (and thus before alias analysis) to avoid compile time
+ and memory utilization explosions in functions with many
+ calls and call clobbered variables. */
+
+static void
+count_calls_and_maybe_create_global_var (void)
+{
+ /* A throwaway alias_info; only its NUM_CALLS_FOUND field is
+ consumed by maybe_create_global_var below.  */
+ struct alias_info ai;
+ basic_block bb;
+ bool temp;	/* Original state of bit 0 in CALL_CLOBBERED_VARS.  */
+
+ memset (&ai, 0, sizeof (struct alias_info));
+
+ /* First count the number of calls in the IL. */
+ FOR_EACH_BB (bb)
+ {
+ block_stmt_iterator si;
+
+ for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+ {
+ tree stmt = bsi_stmt (si);
+
+ if (get_call_expr_in (stmt) != NULL_TREE)
+ ai.num_calls_found++;
+ }
+ }
+
+ /* If there are no call clobbered variables, then maybe_create_global_var
+ will always create a GLOBAL_VAR. At this point we do not want that
+ behavior. So we turn on one bit in CALL_CLOBBERED_VARs, call
+ maybe_create_global_var, then reset the bit to its original state. */
+ temp = bitmap_bit_p (call_clobbered_vars, 0);
+ bitmap_set_bit (call_clobbered_vars, 0);
+ maybe_create_global_var (&ai);
+ if (!temp)
+ bitmap_clear_bit (call_clobbered_vars, 0);
+}
+
+/* Pass descriptor for the pre-SSA GLOBAL_VAR creation pass.  Runs
+ count_calls_and_maybe_create_global_var unconditionally (no gate)
+ and requires only a CFG.  */
+struct tree_opt_pass pass_maybe_create_global_var =
+{
+ "maybe_create_global_var", /* name */
+ NULL, /* gate */
+ count_calls_and_maybe_create_global_var, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_TREE_MAY_ALIAS, /* tv_id */
+ PROP_cfg, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
+ 0 /* letter */
+};
+
/* Initialize the data structures used for alias analysis. */
static struct alias_info *
if (aliases_computed_p)
{
unsigned i;
- bitmap_iterator bi;
-
- /* Clear the call-clobbered set. We are going to re-discover
- call-clobbered variables. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ basic_block bb;
+
+ /* Make sure that every statement has a valid set of operands.
+ If a statement needs to be scanned for operands while we
+ compute aliases, it may get erroneous operands because all
+ the alias relations are not built at that point.
+ FIXME: This code will become obsolete when operands are not
+ lazily updated. */
+ FOR_EACH_BB (bb)
{
- tree var = referenced_var (i);
-
- /* Variables that are intrinsically call-clobbered (globals,
- local statics, etc) will not be marked by the aliasing
- code, so we can't remove them from CALL_CLOBBERED_VARS. */
- if (!is_call_clobbered (var))
- bitmap_clear_bit (call_clobbered_vars, var_ann (var)->uid);
+ block_stmt_iterator si;
+ for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+ get_stmt_operands (bsi_stmt (si));
}
/* Similarly, clear the set of addressable variables. In this
/* Clear flow-insensitive alias information from each symbol. */
for (i = 0; i < num_referenced_vars; i++)
{
- var_ann_t ann = var_ann (referenced_var (i));
+ tree var = referenced_var (i);
+ var_ann_t ann = var_ann (var);
+
ann->is_alias_tag = 0;
ann->may_aliases = NULL;
+
+ /* Since we are about to re-discover call-clobbered
+ variables, clear the call-clobbered flag. Variables that
+ are intrinsically call-clobbered (globals, local statics,
+ etc) will not be marked by the aliasing code, so we can't
+ remove them from CALL_CLOBBERED_VARS. */
+ if (ann->mem_tag_kind != NOT_A_TAG || !is_global_var (var))
+ clear_call_clobbered (var);
}
/* Clear flow-sensitive points-to information from each SSA name. */
tag will need to be created in create_name_tags. */
pi->pt_anything = 0;
pi->pt_malloc = 0;
+ pi->pt_null = 0;
pi->value_escapes_p = 0;
pi->is_dereferenced = 0;
if (pi->pt_vars)
}
-/* Helper for ptr_is_dereferenced_by. Called by walk_tree to look for
- (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
-
-static tree
-find_ptr_dereference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
-{
- tree ptr = (tree) data;
-
- if (INDIRECT_REF_P (*tp)
- && TREE_OPERAND (*tp, 0) == ptr)
- return *tp;
-
- return NULL_TREE;
-}
-
-
-/* Return true if STMT contains (ALIGN/MISALIGNED_)INDIRECT_REF <PTR>.
- *IS_STORE is set to 'true' if the dereference is on the LHS of an
- assignment. */
-
-static bool
-ptr_is_dereferenced_by (tree ptr, tree stmt, bool *is_store)
-{
- *is_store = false;
-
- if (TREE_CODE (stmt) == MODIFY_EXPR
- || (TREE_CODE (stmt) == RETURN_EXPR
- && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR))
- {
- tree e, lhs, rhs;
-
- e = (TREE_CODE (stmt) == RETURN_EXPR) ? TREE_OPERAND (stmt, 0) : stmt;
- lhs = TREE_OPERAND (e, 0);
- rhs = TREE_OPERAND (e, 1);
-
- if (EXPR_P (lhs)
- && walk_tree (&lhs, find_ptr_dereference, ptr, NULL))
- {
- *is_store = true;
- return true;
- }
- else if (EXPR_P (rhs)
- && walk_tree (&rhs, find_ptr_dereference, ptr, NULL))
- {
- return true;
- }
- }
- else if (TREE_CODE (stmt) == ASM_EXPR)
- {
- if (walk_tree (&ASM_OUTPUTS (stmt), find_ptr_dereference, ptr, NULL)
- || walk_tree (&ASM_CLOBBERS (stmt), find_ptr_dereference, ptr, NULL))
- {
- *is_store = true;
- return true;
- }
- else if (walk_tree (&ASM_INPUTS (stmt), find_ptr_dereference, ptr, NULL))
- {
- return true;
- }
- }
-
- return false;
-}
-
-
/* Traverse use-def links for all the pointers in the program to collect
address escape and points-to information.
if (stmt_escapes_p)
block_ann->has_escape_site = 1;
- /* Special case for silly ADDR_EXPR tricks
- (gcc.c-torture/unsorted/pass.c). If this statement is an
- assignment to a non-pointer variable and the RHS takes the
- address of a variable, assume that the variable on the RHS is
- call-clobbered. We could add the LHS to the list of
- "pointers" and follow it to see if it really escapes, but it's
- not worth the pain. */
- if (addr_taken
- && TREE_CODE (stmt) == MODIFY_EXPR
- && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 0))))
- EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
- {
- tree var = referenced_var (i);
- mark_call_clobbered (var);
- }
-
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
{
var_ann_t v_ann = var_ann (SSA_NAME_VAR (op));
struct ptr_info_def *pi;
bool is_store;
+ unsigned num_uses, num_derefs;
/* If the operand's variable may be aliased, keep track
of how many times we've referenced it. This is used
collect_points_to_info_for (ai, op);
pi = SSA_NAME_PTR_INFO (op);
- if (ptr_is_dereferenced_by (op, stmt, &is_store))
+ count_uses_and_derefs (op, stmt, &num_uses, &num_derefs,
+ &is_store);
+
+ if (num_derefs > 0)
{
/* Mark OP as dereferenced. In a subsequent pass,
dereferenced pointers that point to a set of
else
bitmap_set_bit (ai->dereferenced_ptrs_load, v_ann->uid);
}
- else if (stmt_escapes_p)
+
+ if (stmt_escapes_p && num_derefs < num_uses)
{
- /* Note that even if STMT is an escape point, pointer OP
- will not escape if it is being dereferenced. That's
- why we only check for escape points if OP is not
- dereferenced by STMT. */
+ /* If STMT is an escape point and STMT contains at
+ least one direct use of OP, then the value of OP
+ escapes and so the pointed-to variables need to
+ be marked call-clobbered. */
pi->value_escapes_p = 1;
/* If the statement makes a function call, assume
EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
{
add_may_alias (pi->name_mem_tag, referenced_var (j));
+ add_may_alias (v_ann->type_mem_tag, referenced_var (j));
}
/* If the name tag is call clobbered, so is the type tag
compute_flow_insensitive_aliasing (struct alias_info *ai)
{
size_t i;
- sbitmap res;
/* Initialize counter for the total number of virtual operands that
aliasing will introduce. When AI->TOTAL_ALIAS_VOPS goes beyond the
To avoid this problem, we do a final traversal of AI->POINTERS
looking for pairs of pointers that have no aliased symbols in
common and yet have conflicting alias set numbers. */
- res = sbitmap_alloc (num_referenced_vars);
-
for (i = 0; i < ai->num_pointers; i++)
{
size_t j;
/* The two pointers may alias each other. If they already have
symbols in common, do nothing. */
- sbitmap_a_and_b (res, may_aliases1, may_aliases2);
- if (sbitmap_first_set_bit (res) >= 0)
+ if (sbitmap_any_common_bits (may_aliases1, may_aliases2))
continue;
if (sbitmap_first_set_bit (may_aliases2) >= 0)
}
}
- sbitmap_free (res);
-
if (dump_file)
fprintf (dump_file, "%s: Total number of aliased vops: %ld\n",
get_name (current_function_decl),
group_aliases (struct alias_info *ai)
{
size_t i;
- sbitmap res;
/* Sort the POINTERS array in descending order of contributed
virtual operands. */
qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
total_alias_vops_cmp);
- res = sbitmap_alloc (num_referenced_vars);
-
/* For every pointer in AI->POINTERS, reverse the roles of its tag
and the tag's may-aliases set. */
for (i = 0; i < ai->num_pointers; i++)
{
sbitmap tag2_aliases = ai->pointers[j]->may_aliases;
- sbitmap_a_and_b (res, tag1_aliases, tag2_aliases);
- if (sbitmap_first_set_bit (res) >= 0)
+ if (sbitmap_any_common_bits (tag1_aliases, tag2_aliases))
{
tree tag2 = var_ann (ai->pointers[j]->var)->type_mem_tag;
}
}
- sbitmap_free (res);
-
if (dump_file)
fprintf (dump_file,
"%s: Total number of aliased vops after grouping: %ld%s\n",
if (TREE_ADDRESSABLE (var))
{
if (!bitmap_bit_p (ai->addresses_needed, v_ann->uid)
- && v_ann->mem_tag_kind == NOT_A_TAG
&& TREE_CODE (var) != RESULT_DECL
&& !is_global_var (var))
{
n_clobbered++;
}
- /* Create .GLOBAL_VAR if we have too many call-clobbered
- variables. We also create .GLOBAL_VAR when there no
- call-clobbered variables to prevent code motion
- transformations from re-arranging function calls that may
- have side effects. For instance,
-
- foo ()
- {
- int a = f ();
- g ();
- h (a);
- }
-
- There are no call-clobbered variables in foo(), so it would
- be entirely possible for a pass to want to move the call to
- f() after the call to g(). If f() has side effects, that
- would be wrong. Creating .GLOBAL_VAR in this case will
- insert VDEFs for it and prevent such transformations. */
- if (n_clobbered == 0
- || ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD)
+ if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD)
create_global_var ();
}
if (orig_pi)
{
+ gcc_assert (orig_pi != dest_pi);
+
/* Notice that we never merge PT_MALLOC. This attribute is only
true if the pointer is the result of a malloc() call.
Otherwise, we can end up in this situation:
create_name_tags is not smart enough to determine that the
two come from the same malloc call. Copy propagation before
aliasing should cure this. */
- gcc_assert (orig_pi != dest_pi);
-
dest_pi->pt_malloc = 0;
-
if (orig_pi->pt_malloc || orig_pi->pt_anything)
set_pt_anything (dest);
+ dest_pi->pt_null |= orig_pi->pt_null;
+
if (!dest_pi->pt_anything
&& orig_pi->pt_vars
&& !bitmap_empty_p (orig_pi->pt_vars))
&& TREE_CODE (op1) != INTEGER_CST))
set_pt_anything (ptr);
}
+ else if (integer_zerop (expr))
+ {
+ /* EXPR is the NULL pointer. Mark PTR as pointing to NULL. */
+ SSA_NAME_PTR_INFO (ptr)->pt_null = 1;
+ }
else
{
/* If we can't recognize the expression, assume that PTR may
if (lhs == NULL_TREE)
return true;
+ /* If the RHS is a conversion between a pointer and an integer, the
+ pointer escapes since we can't track the integer. */
+ if ((TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
+ || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
+ || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
+ && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
+ (TREE_OPERAND (stmt, 1), 0)))
+ && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
+ return true;
+
/* If the LHS is an SSA name, it can't possibly represent a non-local
memory store. */
if (TREE_CODE (lhs) == SSA_NAME)
create_global_var (void)
{
global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
- size_type_node);
+ void_type_node);
DECL_ARTIFICIAL (global_var) = 1;
TREE_READONLY (global_var) = 0;
DECL_EXTERNAL (global_var) = 1;
if (pi->pt_malloc)
fprintf (file, ", points-to malloc");
+ if (pi->pt_null)
+ fprintf (file, ", points-to NULL");
+
if (pi->pt_vars)
{
unsigned ix;
return true;
}
-