/* Alias analysis for trees.
- Copyright (C) 2004 Free Software Foundation, Inc.
+ Copyright (C) 2004, 2005 Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
#include "tree-pass.h"
#include "convert.h"
#include "params.h"
+#include "vec.h"
+/* 'true' after aliases have been computed (see compute_may_aliases). */
+bool aliases_computed_p;
/* Structure to map a variable to its alias set and keep track of the
virtual operands that will be needed to represent it. */
/* Number of function calls found in the program. */
size_t num_calls_found;
+ /* Number of const/pure function calls found in the program. */
+ size_t num_pure_const_calls_found;
+
/* Array of counters to keep track of how many times each pointer has
been dereferenced in the program. This is used by the alias grouping
heuristic in compute_flow_insensitive_aliasing. */
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
static bool collect_points_to_info_r (tree, tree, void *);
-static bool is_escape_site (tree, size_t *);
+static bool is_escape_site (tree, struct alias_info *);
static void add_pointed_to_var (struct alias_info *, tree, tree);
static void create_global_var (void);
static void collect_points_to_info_for (struct alias_info *, tree);
-static bool ptr_is_dereferenced_by (tree, tree, bool *);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
static void set_pt_anything (tree ptr);
/* Deallocate memory used by aliasing data structures. */
delete_alias_info (ai);
+
+ {
+ block_stmt_iterator bsi;
+ basic_block bb;
+ FOR_EACH_BB (bb)
+ {
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ update_stmt_if_modified (bsi_stmt (bsi));
+ }
+ }
+ }
+
}
struct tree_opt_pass pass_may_alias =
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func | TODO_rename_vars
- | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
+ | TODO_ggc_collect | TODO_verify_ssa
+ | TODO_verify_stmts, /* todo_flags_finish */
0 /* letter */
};
-/* Count the number of calls in the function and conditionally
- create GLOBAL_VAR. This is performed before translation
- into SSA (and thus before alias analysis) to avoid compile time
- and memory utilization explosions in functions with many
- of calls and call clobbered variables. */
+
+/* Data structure used to count the number of dereferences to PTR
+ inside an expression. */
+struct count_ptr_d
+{
+  /* Pointer being dereferenced; set by the caller before walking.  */
+  tree ptr;
+  /* Running count of (ALIGN/MISALIGNED_)INDIRECT_REF nodes whose
+     operand is PTR.  */
+  unsigned count;
+};
+
+
+/* Helper for count_uses_and_derefs. Called by walk_tree to look for
+ (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
+
+static tree
+count_ptr_derefs (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
+{
+  struct count_ptr_d *count_p = (struct count_ptr_d *) data;
+
+  /* Count only indirect references whose base is exactly the pointer
+     recorded in COUNT_P->PTR.  */
+  if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
+    count_p->count++;
+
+  /* Return NULL_TREE so walk_tree keeps traversing the whole
+     expression instead of stopping at the first match.  */
+  return NULL_TREE;
+}
+
+
+/* Count the number of direct and indirect uses for pointer PTR in
+ statement STMT. The two counts are stored in *NUM_USES_P and
+ *NUM_DEREFS_P respectively. *IS_STORE is set to 'true' if at
+ least one of those dereferences is a store operation. */
static void
-count_calls_and_maybe_create_global_var (void)
+count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
+ unsigned *num_derefs_p, bool *is_store)
{
- struct alias_info ai;
- basic_block bb;
- bool temp;
+ ssa_op_iter i;
+ tree use;
- memset (&ai, 0, sizeof (struct alias_info));
+ *num_uses_p = 0;
+ *num_derefs_p = 0;
+ *is_store = false;
- /* First count the number of calls in the IL. */
- FOR_EACH_BB (bb)
+ /* Find out the total number of uses of PTR in STMT. */
+ FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
+ if (use == ptr)
+ (*num_uses_p)++;
+
+ /* Now count the number of indirect references to PTR. This is
+ truly awful, but we don't have much choice. There are no parent
+ pointers inside INDIRECT_REFs, so an expression like
+ '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
+ find all the indirect and direct uses of x_1 inside. The only
+ shortcut we can take is the fact that GIMPLE only allows
+ INDIRECT_REFs inside the expressions below. */
+ if (TREE_CODE (stmt) == MODIFY_EXPR
+ || (TREE_CODE (stmt) == RETURN_EXPR
+ && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
+ || TREE_CODE (stmt) == ASM_EXPR
+ || TREE_CODE (stmt) == CALL_EXPR)
{
- block_stmt_iterator si;
+ tree lhs, rhs;
- for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
- {
- tree stmt = bsi_stmt (si);
+ if (TREE_CODE (stmt) == MODIFY_EXPR)
+ {
+ lhs = TREE_OPERAND (stmt, 0);
+ rhs = TREE_OPERAND (stmt, 1);
+ }
+ else if (TREE_CODE (stmt) == RETURN_EXPR)
+ {
+ tree e = TREE_OPERAND (stmt, 0);
+ lhs = TREE_OPERAND (e, 0);
+ rhs = TREE_OPERAND (e, 1);
+ }
+ else if (TREE_CODE (stmt) == ASM_EXPR)
+ {
+ lhs = ASM_OUTPUTS (stmt);
+ rhs = ASM_INPUTS (stmt);
+ }
+ else
+ {
+ lhs = NULL_TREE;
+ rhs = stmt;
+ }
- if (get_call_expr_in (stmt) != NULL_TREE)
- ai.num_calls_found++;
+ if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
+ {
+ struct count_ptr_d count;
+ count.ptr = ptr;
+ count.count = 0;
+ walk_tree (&lhs, count_ptr_derefs, &count, NULL);
+ *is_store = true;
+ *num_derefs_p = count.count;
+ }
+
+ if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
+ {
+ struct count_ptr_d count;
+ count.ptr = ptr;
+ count.count = 0;
+ walk_tree (&rhs, count_ptr_derefs, &count, NULL);
+ *num_derefs_p += count.count;
}
}
- /* If there are no call clobbered variables, then maybe_create_global_var
- will always create a GLOBAL_VAR. At this point we do not want that
- behavior. So we turn on one bit in CALL_CLOBBERED_VARs, call
- maybe_create_global_var, then reset the bit to its original state. */
- temp = bitmap_bit_p (call_clobbered_vars, 0);
- bitmap_set_bit (call_clobbered_vars, 0);
- maybe_create_global_var (&ai);
- if (!temp)
- bitmap_clear_bit (call_clobbered_vars, 0);
+ gcc_assert (*num_uses_p >= *num_derefs_p);
}
-struct tree_opt_pass pass_maybe_create_global_var =
-{
- "maybe_create_global_var", /* name */
- NULL, /* gate */
- count_calls_and_maybe_create_global_var, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_TREE_MAY_ALIAS, /* tv_id */
- PROP_cfg, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- 0, /* todo_flags_finish */
- 0 /* letter */
-};
/* Initialize the data structures used for alias analysis. */
init_alias_info (void)
{
struct alias_info *ai;
- static bool aliases_computed_p = false;
ai = xcalloc (1, sizeof (struct alias_info));
ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
sbitmap_zero (ai->ssa_names_visited);
VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
- ai->addresses_needed = BITMAP_XMALLOC ();
+ ai->addresses_needed = BITMAP_ALLOC (NULL);
VARRAY_UINT_INIT (ai->num_references, num_referenced_vars, "num_references");
- ai->written_vars = BITMAP_XMALLOC ();
- ai->dereferenced_ptrs_store = BITMAP_XMALLOC ();
- ai->dereferenced_ptrs_load = BITMAP_XMALLOC ();
+ ai->written_vars = BITMAP_ALLOC (NULL);
+ ai->dereferenced_ptrs_store = BITMAP_ALLOC (NULL);
+ ai->dereferenced_ptrs_load = BITMAP_ALLOC (NULL);
/* If aliases have been computed before, clear existing information. */
if (aliases_computed_p)
{
unsigned i;
- bitmap_iterator bi;
basic_block bb;
/* Make sure that every statement has a valid set of operands.
get_stmt_operands (bsi_stmt (si));
}
- /* Clear the call-clobbered set. We are going to re-discover
- call-clobbered variables. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
-
- /* Variables that are intrinsically call-clobbered (globals,
- local statics, etc) will not be marked by the aliasing
- code, so we can't remove them from CALL_CLOBBERED_VARS. */
- if (!is_call_clobbered (var))
- bitmap_clear_bit (call_clobbered_vars, var_ann (var)->uid);
- }
-
/* Similarly, clear the set of addressable variables. In this
case, we can just clear the set because addressability is
only computed here. */
/* Clear flow-insensitive alias information from each symbol. */
for (i = 0; i < num_referenced_vars; i++)
{
- var_ann_t ann = var_ann (referenced_var (i));
+ tree var = referenced_var (i);
+ var_ann_t ann = var_ann (var);
+
ann->is_alias_tag = 0;
ann->may_aliases = NULL;
+
+ /* Since we are about to re-discover call-clobbered
+ variables, clear the call-clobbered flag. Variables that
+ are intrinsically call-clobbered (globals, local statics,
+ etc) will not be marked by the aliasing code, so we can't
+ remove them from CALL_CLOBBERED_VARS.
+
+ NB: STRUCT_FIELDS are still call clobbered if they are for
+ a global variable, so we *don't* clear their call clobberedness
+ just because they are tags, though we will clear it if they
+ aren't for global variables. */
+ if (ann->mem_tag_kind == NAME_TAG
+ || ann->mem_tag_kind == TYPE_TAG
+ || !is_global_var (var))
+ clear_call_clobbered (var);
}
/* Clear flow-sensitive points-to information from each SSA name. */
tag will need to be created in create_name_tags. */
pi->pt_anything = 0;
pi->pt_malloc = 0;
+ pi->pt_null = 0;
pi->value_escapes_p = 0;
pi->is_dereferenced = 0;
if (pi->pt_vars)
sbitmap_free (ai->ssa_names_visited);
ai->processed_ptrs = NULL;
- BITMAP_XFREE (ai->addresses_needed);
+ BITMAP_FREE (ai->addresses_needed);
for (i = 0; i < ai->num_addressable_vars; i++)
{
free (ai->pointers);
ai->num_references = NULL;
- BITMAP_XFREE (ai->written_vars);
- BITMAP_XFREE (ai->dereferenced_ptrs_store);
- BITMAP_XFREE (ai->dereferenced_ptrs_load);
+ BITMAP_FREE (ai->written_vars);
+ BITMAP_FREE (ai->dereferenced_ptrs_store);
+ BITMAP_FREE (ai->dereferenced_ptrs_load);
free (ai);
}
}
-/* Helper for ptr_is_dereferenced_by. Called by walk_tree to look for
- (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
-
-static tree
-find_ptr_dereference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
-{
- tree ptr = (tree) data;
-
- if (INDIRECT_REF_P (*tp)
- && TREE_OPERAND (*tp, 0) == ptr)
- return *tp;
-
- return NULL_TREE;
-}
-
-
-/* Return true if STMT contains (ALIGN/MISALIGNED_)INDIRECT_REF <PTR>.
- *IS_STORE is set to 'true' if the dereference is on the LHS of an
- assignment. */
-
-static bool
-ptr_is_dereferenced_by (tree ptr, tree stmt, bool *is_store)
-{
- *is_store = false;
-
- if (TREE_CODE (stmt) == MODIFY_EXPR
- || (TREE_CODE (stmt) == RETURN_EXPR
- && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR))
- {
- tree e, lhs, rhs;
-
- e = (TREE_CODE (stmt) == RETURN_EXPR) ? TREE_OPERAND (stmt, 0) : stmt;
- lhs = TREE_OPERAND (e, 0);
- rhs = TREE_OPERAND (e, 1);
-
- if (EXPR_P (lhs)
- && walk_tree (&lhs, find_ptr_dereference, ptr, NULL))
- {
- *is_store = true;
- return true;
- }
- else if (EXPR_P (rhs)
- && walk_tree (&rhs, find_ptr_dereference, ptr, NULL))
- {
- return true;
- }
- }
- else if (TREE_CODE (stmt) == ASM_EXPR)
- {
- if (walk_tree (&ASM_OUTPUTS (stmt), find_ptr_dereference, ptr, NULL)
- || walk_tree (&ASM_CLOBBERS (stmt), find_ptr_dereference, ptr, NULL))
- {
- *is_store = true;
- return true;
- }
- else if (walk_tree (&ASM_INPUTS (stmt), find_ptr_dereference, ptr, NULL))
- {
- return true;
- }
- }
-
- return false;
-}
-
-
/* Traverse use-def links for all the pointers in the program to collect
address escape and points-to information.
{
bitmap addr_taken;
tree stmt = bsi_stmt (si);
- bool stmt_escapes_p = is_escape_site (stmt, &ai->num_calls_found);
+ bool stmt_escapes_p = is_escape_site (stmt, ai);
bitmap_iterator bi;
/* Mark all the variables whose address are taken by the
var_ann_t v_ann = var_ann (SSA_NAME_VAR (op));
struct ptr_info_def *pi;
bool is_store;
+ unsigned num_uses, num_derefs;
/* If the operand's variable may be aliased, keep track
of how many times we've referenced it. This is used
collect_points_to_info_for (ai, op);
pi = SSA_NAME_PTR_INFO (op);
- if (ptr_is_dereferenced_by (op, stmt, &is_store))
+ count_uses_and_derefs (op, stmt, &num_uses, &num_derefs,
+ &is_store);
+
+ if (num_derefs > 0)
{
/* Mark OP as dereferenced. In a subsequent pass,
dereferenced pointers that point to a set of
else
bitmap_set_bit (ai->dereferenced_ptrs_load, v_ann->uid);
}
- else if (stmt_escapes_p)
+
+ if (stmt_escapes_p && num_derefs < num_uses)
{
- /* Note that even if STMT is an escape point, pointer OP
- will not escape if it is being dereferenced. That's
- why we only check for escape points if OP is not
- dereferenced by STMT. */
+ /* If STMT is an escape point and STMT contains at
+ least one direct use of OP, then the value of OP
+ escapes and so the pointed-to variables need to
+ be marked call-clobbered. */
pi->value_escapes_p = 1;
/* If the statement makes a function call, assume
need to re-scan most statements. FIXME: Try to minimize the
number of statements re-scanned. It's not really necessary to
re-scan *all* statements. */
- modify_stmt (stmt);
+ mark_stmt_modified (stmt);
}
}
EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
{
add_may_alias (pi->name_mem_tag, referenced_var (j));
+ add_may_alias (v_ann->type_mem_tag, referenced_var (j));
}
/* If the name tag is call clobbered, so is the type tag
if (may_alias_p (p_map->var, p_map->set, var, v_map->set))
{
+ subvar_t svars;
size_t num_tag_refs, num_var_refs;
num_tag_refs = VARRAY_UINT (ai->num_references, tag_ann->uid);
num_var_refs = VARRAY_UINT (ai->num_references, v_ann->uid);
/* Add VAR to TAG's may-aliases set. */
- add_may_alias (tag, var);
+
+ /* If this is an aggregate, we may have subvariables for it
+ that need to be pointed to. */
+ if (var_can_have_subvars (var)
+ && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
+
+ for (sv = svars; sv; sv = sv->next)
+ {
+ add_may_alias (tag, sv->var);
+ /* Update the bitmap used to represent TAG's alias set
+ in case we need to group aliases. */
+ SET_BIT (p_map->may_aliases, var_ann (sv->var)->uid);
+ }
+ }
+ else
+ {
+ add_may_alias (tag, var);
+ /* Update the bitmap used to represent TAG's alias set
+ in case we need to group aliases. */
+ SET_BIT (p_map->may_aliases, var_ann (var)->uid);
+ }
/* Update the total number of virtual operands due to
aliasing. Since we are adding one more alias to TAG's
ai->total_alias_vops += (num_var_refs + num_tag_refs);
p_map->total_alias_vops += (num_var_refs + num_tag_refs);
- /* Update the bitmap used to represent TAG's alias set
- in case we need to group aliases. */
- SET_BIT (p_map->may_aliases, var_ann (var)->uid);
+
}
}
}
sbitmap may_aliases2 = p_map2->may_aliases;
/* If the pointers may not point to each other, do nothing. */
- if (!may_alias_p (p_map1->var, p_map1->set, p_map2->var, p_map2->set))
+ if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set))
continue;
/* The two pointers may alias each other. If they already have
/* Since TAG2 does not have any aliases of its own, add
TAG2 itself to the alias set of TAG1. */
add_may_alias (tag1, tag2);
+ SET_BIT (may_aliases1, var_ann (tag2)->uid);
}
}
}
tree alias = VARRAY_TREE (aliases, j);
var_ann_t ann = var_ann (alias);
- if (ann->mem_tag_kind == NOT_A_TAG && ann->may_aliases)
+ if ((ann->mem_tag_kind == NOT_A_TAG
+ || ann->mem_tag_kind == STRUCT_FIELD)
+ && ann->may_aliases)
{
tree new_alias;
{
tree var = referenced_var (i);
var_ann_t v_ann = var_ann (var);
+ subvar_t svars;
/* Name memory tags already have flow-sensitive aliasing
information, so they need not be processed by
compute_flow_insensitive_aliasing. Similarly, type memory
tags are already accounted for when we process their
- associated pointer. */
- if (v_ann->mem_tag_kind != NOT_A_TAG)
+ associated pointer.
+
+ Structure fields, on the other hand, have to have some of this
+ information processed for them, but it's pointless to mark them
+ non-addressable (since they are fake variables anyway). */
+ if (v_ann->mem_tag_kind != NOT_A_TAG
+ && v_ann->mem_tag_kind != STRUCT_FIELD)
continue;
/* Remove the ADDRESSABLE flag from every addressable variable whose
of ADDR_EXPR constants into INDIRECT_REF expressions and the
removal of dead pointer assignments done by the early scalar
cleanup passes. */
- if (TREE_ADDRESSABLE (var))
+ if (TREE_ADDRESSABLE (var) && v_ann->mem_tag_kind != STRUCT_FIELD)
{
if (!bitmap_bit_p (ai->addresses_needed, v_ann->uid)
&& TREE_CODE (var) != RESULT_DECL
&& !is_global_var (var))
{
+ bool okay_to_mark = true;
+ /* Since VAR is now a regular GIMPLE register, we will need
+ to rename VAR into SSA afterwards. */
+ bitmap_set_bit (vars_to_rename, v_ann->uid);
+
+ if (var_can_have_subvars (var)
+ && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
+
+ for (sv = svars; sv; sv = sv->next)
+ {
+ var_ann_t svann = var_ann (sv->var);
+ if (bitmap_bit_p (ai->addresses_needed, svann->uid))
+ okay_to_mark = false;
+ bitmap_set_bit (vars_to_rename, svann->uid);
+ }
+ }
/* The address of VAR is not needed, remove the
addressable bit, so that it can be optimized as a
regular variable. */
- mark_non_addressable (var);
+ if (okay_to_mark)
+ mark_non_addressable (var);
- /* Since VAR is now a regular GIMPLE register, we will need
- to rename VAR into SSA afterwards. */
- bitmap_set_bit (vars_to_rename, v_ann->uid);
}
else
{
clobber memory. In those cases, we need to clobber
all call-clobbered variables and all addressables. */
bitmap_set_bit (addressable_vars, v_ann->uid);
+ if (var_can_have_subvars (var)
+ && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ bitmap_set_bit (addressable_vars, var_ann (sv->var)->uid);
+ }
+
}
}
if (may_be_aliased (var))
{
create_alias_map_for (var, ai);
- bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
+ bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
}
/* Add pointer variables that have been dereferenced to the POINTERS
n_clobbered++;
}
- /* Create .GLOBAL_VAR if we have too many call-clobbered
- variables. We also create .GLOBAL_VAR when there no
- call-clobbered variables to prevent code motion
- transformations from re-arranging function calls that may
- have side effects. For instance,
+ /* If the number of virtual operands that would be needed to
+ model all the call-clobbered variables is larger than
+ GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.
- foo ()
- {
- int a = f ();
- g ();
- h (a);
- }
+ Also create .GLOBAL_VAR if there are no call-clobbered
+ variables and the program contains a mixture of pure/const
+ and regular function calls. This is to avoid the problem
+ described in PR 20115:
+
+ int X;
+ int func_pure (void) { return X; }
+ int func_non_pure (int a) { X += a; }
+ int foo ()
+ {
+ int a = func_pure ();
+ func_non_pure (a);
+ a = func_pure ();
+ return a;
+ }
- There are no call-clobbered variables in foo(), so it would
- be entirely possible for a pass to want to move the call to
- f() after the call to g(). If f() has side effects, that
- would be wrong. Creating .GLOBAL_VAR in this case will
- insert VDEFs for it and prevent such transformations. */
- if (n_clobbered == 0
- || ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD)
+ Since foo() has no call-clobbered variables, there is
+ no relationship between the calls to func_pure and
+ func_non_pure. Since func_pure has no side-effects, value
+ numbering optimizations elide the second call to func_pure.
+ So, if we have some pure/const and some regular calls in the
+ program we create .GLOBAL_VAR to avoid missing these
+ relations. */
+ if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
+ || (n_clobbered == 0
+ && ai->num_calls_found > 0
+ && ai->num_pure_const_calls_found > 0
+ && ai->num_calls_found > ai->num_pure_const_calls_found))
create_global_var ();
}
- /* If the function has calls to clobbering functions and .GLOBAL_VAR has
- been created, make it an alias for all call-clobbered variables. */
- if (global_var)
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
- if (var != global_var)
- {
- add_may_alias (var, global_var);
- bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
- }
- }
+ /* Mark all call-clobbered symbols for renaming. Since the initial
+ rewrite into SSA ignored all call sites, we may need to rename
+ .GLOBAL_VAR and the call-clobbered variables. */
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ {
+ tree var = referenced_var (i);
+
+ /* If the function has calls to clobbering functions and
+ .GLOBAL_VAR has been created, make it an alias for all
+ call-clobbered variables. */
+ if (global_var && var != global_var)
+ {
+ subvar_t svars;
+ add_may_alias (var, global_var);
+ if (var_can_have_subvars (var)
+ && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ bitmap_set_bit (vars_to_rename, var_ann (sv->var)->uid);
+ }
+ }
+
+ bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
+ }
}
tree var, HOST_WIDE_INT var_alias_set)
{
tree mem;
- var_ann_t v_ann, m_ann;
+ var_ann_t m_ann;
alias_stats.alias_queries++;
alias_stats.simple_queries++;
return false;
}
- v_ann = var_ann (var);
m_ann = var_ann (mem);
gcc_assert (m_ann->mem_tag_kind == TYPE_TAG);
alias_stats.tbaa_resolved++;
return false;
}
-
alias_stats.alias_mayalias++;
return true;
}
/* Given two different pointers DEST and ORIG. Merge the points-to
- information in ORIG into DEST. AI is as in
- collect_points_to_info. */
+ information in ORIG into DEST. AI contains all the alias
+ information collected up to this point. */
static void
merge_pointed_to_info (struct alias_info *ai, tree dest, tree orig)
if (orig_pi)
{
+ gcc_assert (orig_pi != dest_pi);
+
/* Notice that we never merge PT_MALLOC. This attribute is only
true if the pointer is the result of a malloc() call.
Otherwise, we can end up in this situation:
create_name_tags is not smart enough to determine that the
two come from the same malloc call. Copy propagation before
aliasing should cure this. */
- gcc_assert (orig_pi != dest_pi);
-
dest_pi->pt_malloc = 0;
-
if (orig_pi->pt_malloc || orig_pi->pt_anything)
set_pt_anything (dest);
+ dest_pi->pt_null |= orig_pi->pt_null;
+
if (!dest_pi->pt_anything
&& orig_pi->pt_vars
&& !bitmap_empty_p (orig_pi->pt_vars))
&& TREE_CODE (op1) != INTEGER_CST))
set_pt_anything (ptr);
}
+ else if (integer_zerop (expr))
+ {
+ /* EXPR is the NULL pointer. Mark PTR as pointing to NULL. */
+ SSA_NAME_PTR_INFO (ptr)->pt_null = 1;
+ }
else
{
/* If we can't recognize the expression, assume that PTR may
/* If VALUE is of the form &DECL, add DECL to the set of variables
pointed-to by PTR. Otherwise, add VALUE as a pointed-to expression by
- PTR. AI is as in collect_points_to_info. */
+ PTR. AI points to the collected alias information. */
static void
add_pointed_to_var (struct alias_info *ai, tree ptr, tree value)
{
struct ptr_info_def *pi = get_ptr_info (ptr);
- tree pt_var;
+ tree pt_var = NULL_TREE;
+ HOST_WIDE_INT offset, size;
+ tree addrop;
size_t uid;
+ tree ref;
+ subvar_t svars;
gcc_assert (TREE_CODE (value) == ADDR_EXPR);
- pt_var = TREE_OPERAND (value, 0);
- if (REFERENCE_CLASS_P (pt_var))
- pt_var = get_base_address (pt_var);
+ addrop = TREE_OPERAND (value, 0);
+ if (REFERENCE_CLASS_P (addrop))
+ pt_var = get_base_address (addrop);
+ else
+ pt_var = addrop;
+
+ /* If this is a component_ref, see if we can get a smaller number of
+ variables to take the address of. */
+ if (TREE_CODE (addrop) == COMPONENT_REF
+ && (ref = okay_component_ref_for_subvars (addrop, &offset ,&size)))
+ {
+ subvar_t sv;
+ svars = get_subvars_for_var (ref);
- if (pt_var && SSA_VAR_P (pt_var))
- {
uid = var_ann (pt_var)->uid;
- bitmap_set_bit (ai->addresses_needed, uid);
+
+ if (pi->pt_vars == NULL)
+ pi->pt_vars = BITMAP_GGC_ALLOC ();
+ /* If the variable is a global, mark the pointer as pointing to
+ global memory (which will make its tag a global variable). */
+ if (is_global_var (pt_var))
+ pi->pt_global_mem = 1;
+ for (sv = svars; sv; sv = sv->next)
+ {
+ if (overlap_subvar (offset, size, sv, NULL))
+ {
+ bitmap_set_bit (pi->pt_vars, var_ann (sv->var)->uid);
+ bitmap_set_bit (ai->addresses_needed, var_ann (sv->var)->uid);
+ }
+ }
+ }
+ else if (pt_var && SSA_VAR_P (pt_var))
+ {
+
+ uid = var_ann (pt_var)->uid;
+
if (pi->pt_vars == NULL)
pi->pt_vars = BITMAP_GGC_ALLOC ();
- bitmap_set_bit (pi->pt_vars, uid);
+
+ /* If this is an aggregate, we may have subvariables for it that need
+ to be pointed to. */
+ if (var_can_have_subvars (pt_var)
+ && (svars = get_subvars_for_var (pt_var)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ {
+ uid = var_ann (sv->var)->uid;
+ bitmap_set_bit (ai->addresses_needed, uid);
+ bitmap_set_bit (pi->pt_vars, uid);
+ }
+ }
+ else
+ {
+ bitmap_set_bit (ai->addresses_needed, uid);
+ bitmap_set_bit (pi->pt_vars, uid);
+ }
/* If the variable is a global, mark the pointer as pointing to
global memory (which will make its tag a global variable). */
switch (TREE_CODE (stmt))
{
case RETURN_EXPR:
- if (TREE_CODE (TREE_OPERAND (stmt, 0)) != MODIFY_EXPR)
- abort ();
+ gcc_assert (TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR);
stmt = TREE_OPERAND (stmt, 0);
/* FALLTHRU */
3- STMT is an assignment to a non-local variable, or
4- STMT is a return statement.
- If NUM_CALLS_P is not NULL, the counter is incremented if STMT contains
- a function call. */
+ AI points to the alias information collected so far. */
static bool
-is_escape_site (tree stmt, size_t *num_calls_p)
+is_escape_site (tree stmt, struct alias_info *ai)
{
- if (get_call_expr_in (stmt) != NULL_TREE)
+ tree call = get_call_expr_in (stmt);
+ if (call != NULL_TREE)
{
- if (num_calls_p)
- (*num_calls_p)++;
+ ai->num_calls_found++;
+
+ if (!TREE_SIDE_EFFECTS (call))
+ ai->num_pure_const_calls_found++;
return true;
}
if (pi->pt_malloc)
fprintf (file, ", points-to malloc");
+ if (pi->pt_null)
+ fprintf (file, ", points-to NULL");
+
if (pi->pt_vars)
{
unsigned ix;
return true;
}
+/* This structure is simply used during pushing fields onto the fieldstack
+   to track the offset of the field, since bitpos_of_field gives it relative
+   to its immediate containing type, and we want it relative to the ultimate
+   containing object.  */
+
+typedef struct fieldoff
+{
+  /* The FIELD_DECL being recorded.  */
+  tree field;
+  /* Offset of the field, in bits, from the start of the outermost
+     containing object.  */
+  HOST_WIDE_INT offset;
+} *fieldoff_t;
+
+/* Heap-allocated vector of fieldoff_t.  The fieldoff structures pushed
+   onto it are xmalloc'd and freed individually by the consumers.  */
+DEF_VEC_MALLOC_P(fieldoff_t);
+
+/* Return the position, in bits, of FIELD_DECL from the beginning of its
+   structure.
+   Return -1 if the position is conditional or otherwise non-constant
+   integer.  */
+
+static HOST_WIDE_INT
+bitpos_of_field (const tree fdecl)
+{
+  /* Either component of the field position may be non-constant (for
+     example, for variable-sized or conditionally laid out fields); such
+     positions cannot be represented, so signal the caller with -1.  */
+  if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
+      || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
+    return -1;
+
+  /* DECL_FIELD_OFFSET is counted in units, DECL_FIELD_BIT_OFFSET in
+     bits.  Use BITS_PER_UNIT rather than a hard-coded 8 so that targets
+     where a unit is not an octet are handled correctly.  */
+  return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * BITS_PER_UNIT)
+         + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
+}
+
+/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
+   of TYPE onto fieldstack, recording their offsets along the way.
+   OFFSET is used to keep track of the offset in this entire structure, rather
+   than just the immediately containing structure.  */
+
+static void
+push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack,
+                             HOST_WIDE_INT offset)
+{
+  tree field;
+
+  /* Process every field uniformly.  The previous version special-cased
+     the first TYPE_FIELDS entry with duplicated code and recursed into
+     it at OFFSET instead of OFFSET + bitpos_of_field (FIELD), losing the
+     field's own position; a single loop fixes both problems.  */
+  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+    {
+      fieldoff_t pair;
+
+      /* TYPE_FIELDS may also contain non-field declarations (such as
+         TYPE_DECLs in C++); only real fields contribute entries.  */
+      if (TREE_CODE (field) != FIELD_DECL)
+        continue;
+
+      if (var_can_have_subvars (field))
+        {
+          size_t before = VEC_length (fieldoff_t, *fieldstack);
+
+          push_fields_onto_fieldstack (TREE_TYPE (field), fieldstack,
+                                       offset + bitpos_of_field (field));
+          /* Empty structures may have actual size, like in C++.  So see if
+             we actually end up pushing a field, and if not, if the size is
+             nonzero, push the field onto the stack.  */
+          if (before == VEC_length (fieldoff_t, *fieldstack)
+              && DECL_SIZE (field)
+              && !integer_zerop (DECL_SIZE (field)))
+            {
+              pair = xmalloc (sizeof (struct fieldoff));
+              pair->field = field;
+              pair->offset = offset + bitpos_of_field (field);
+              VEC_safe_push (fieldoff_t, *fieldstack, pair);
+            }
+        }
+      else
+        {
+          pair = xmalloc (sizeof (struct fieldoff));
+          pair->field = field;
+          pair->offset = offset + bitpos_of_field (field);
+          VEC_safe_push (fieldoff_t, *fieldstack, pair);
+        }
+    }
+}
+
+
+/* This represents the used range of a variable.  */
+
+typedef struct used_part
+{
+  /* Lowest bit offset of any recorded use of the variable.  */
+  HOST_WIDE_INT minused;
+  /* One past the highest bit offset of any recorded use (i.e. the range
+     is [minused, maxused)).  */
+  HOST_WIDE_INT maxused;
+} *used_part_t;
+
+/* An array of used_part structures, indexed by variable uid.
+   Entries are xcalloc'd lazily and NULL for variables with no uses.  */
+
+static used_part_t *used_portions;
+
+/* Given a variable uid, UID, get or create the entry in the used portions
+   table for the variable.  A newly created entry is recorded in
+   USED_PORTIONS immediately, so repeated calls return the same structure
+   and the entry cannot leak if a caller forgets to store it back.  */
+
+static used_part_t
+get_or_create_used_part_for (size_t uid)
+{
+  used_part_t up = used_portions[uid];
+
+  if (up == NULL)
+    {
+      up = xcalloc (1, sizeof (struct used_part));
+      /* Start with an empty range: any real use will lower minused and
+         raise maxused.  */
+      up->minused = INT_MAX;
+      up->maxused = 0;
+      /* Record the entry now; callers that also assign it back merely
+         store the same pointer again, which is harmless.  */
+      used_portions[uid] = up;
+    }
+
+  return up;
+}
+
+
+
+/* Given an aggregate VAR, create the subvariables that represent its
+   fields.  Only fields overlapping the used range recorded in
+   USED_PORTIONS get a subvariable; if any field is unrepresentable
+   (unknown or variable size/offset, or an array), no subvariables are
+   created at all.  */
+
+static void
+create_overlap_variables_for (tree var)
+{
+  VEC(fieldoff_t) *fieldstack = NULL;
+  used_part_t up;
+  size_t uid = var_ann (var)->uid;
+
+  /* Nothing to do for variables with no recorded uses.  */
+  if (used_portions[uid] == NULL)
+    return;
+
+  push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0);
+  if (VEC_length (fieldoff_t, fieldstack) != 0)
+    {
+      subvar_t *subvars;
+      fieldoff_t fo;
+      bool notokay = false;
+      int i;
+
+      /* Not all fields have DECL_SIZE set, and those that don't, we don't
+         know their size, and thus, can't handle.
+         The same is true of fields with DECL_SIZE that is not an integer
+         constant (such as variable sized fields).
+         Fields with offsets which are not constant will have an offset < 0
+         We *could* handle fields that are constant sized arrays, but
+         currently don't.  Doing so would require some extra changes to
+         tree-ssa-operands.c.  */
+
+      for (i = 0; VEC_iterate (fieldoff_t, fieldstack, i, fo); i++)
+        {
+          if (!DECL_SIZE (fo->field)
+              || TREE_CODE (DECL_SIZE (fo->field)) != INTEGER_CST
+              || TREE_CODE (TREE_TYPE (fo->field)) == ARRAY_TYPE
+              || fo->offset < 0)
+            {
+              notokay = true;
+              break;
+            }
+        }
+      /* Cleanup after ourselves if we can't create overlap variables.  */
+      if (notokay)
+        {
+          while (VEC_length (fieldoff_t, fieldstack) != 0)
+            {
+              fo = VEC_pop (fieldoff_t, fieldstack);
+              free (fo);
+            }
+          VEC_free (fieldoff_t, fieldstack);
+          return;
+        }
+      /* Otherwise, create the variables.  */
+      subvars = lookup_subvars_for_var (var);
+      up = used_portions[uid];
+
+      while (VEC_length (fieldoff_t, fieldstack) != 0)
+        {
+          subvar_t sv = ggc_alloc (sizeof (struct subvar));
+          HOST_WIDE_INT fosize;
+          var_ann_t ann;
+
+          fo = VEC_pop (fieldoff_t, fieldstack);
+          fosize = TREE_INT_CST_LOW (DECL_SIZE (fo->field));
+
+          /* Skip fields lying entirely outside the used range
+             [minused, maxused); they need no subvariable.  */
+          if ((fo->offset <= up->minused
+               && fo->offset + fosize <= up->minused)
+              || fo->offset >= up->maxused)
+            {
+              free (fo);
+              continue;
+            }
+
+          sv->offset = fo->offset;
+          sv->size = fosize;
+          sv->next = *subvars;
+          sv->var = create_tmp_var_raw (TREE_TYPE (fo->field), "SFT");
+          if (dump_file)
+            {
+              fprintf (dump_file, "structure field tag %s created for var %s",
+                       get_name (sv->var), get_name (var));
+              fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
+                       sv->offset);
+              fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
+                       sv->size);
+              fprintf (dump_file, "\n");
+
+            }
+
+          /* We need to copy the various flags from var to sv->var, so that
+             they are is_global_var iff the original variable was.  */
+
+          DECL_EXTERNAL (sv->var) = DECL_EXTERNAL (var);
+          TREE_PUBLIC (sv->var) = TREE_PUBLIC (var);
+          TREE_STATIC (sv->var) = TREE_STATIC (var);
+          TREE_READONLY (sv->var) = TREE_READONLY (var);
+
+          /* Like other memory tags, these need to be marked addressable to
+             keep is_gimple_reg from thinking they are real.  */
+          TREE_ADDRESSABLE (sv->var) = 1;
+
+          DECL_CONTEXT (sv->var) = DECL_CONTEXT (var);
+
+          ann = get_var_ann (sv->var);
+          ann->mem_tag_kind = STRUCT_FIELD;
+          ann->type_mem_tag = NULL;
+          add_referenced_tmp_var (sv->var);
+
+          *subvars = sv;
+          free (fo);
+        }
+
+      /* Once we have created subvars, the original is no longer call
+         clobbered on its own.  Its call clobbered status depends
+         completely on the call clobbered status of the subvars.
+
+         add_referenced_var in the above loop will take care of
+         marking subvars of global variables as call clobbered for us
+         to start, since they are global as well.  */
+      clear_call_clobbered (var);
+
+    }
+
+  VEC_free (fieldoff_t, fieldstack);
+}
+
+
+/* Find the conservative answer to the question of what portions of what
+   structures are used by this statement.  We assume that if we have a
+   component ref with a known size + offset, that we only need that part
+   of the structure.  For unknown cases, or cases where we do something
+   to the whole structure, we assume we need to create fields for the
+   entire structure.  Callback for walk_tree; DATA is unused.  */
+
+static tree
+find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
+{
+  switch (TREE_CODE (*tp))
+    {
+    case COMPONENT_REF:
+      {
+        HOST_WIDE_INT bitsize;
+        HOST_WIDE_INT bitpos;
+        tree offset;
+        enum machine_mode mode;
+        int unsignedp;
+        int volatilep;
+        tree ref;
+        /* Decompose the reference into its base object plus (when they
+           are constant) a bit position and size.  */
+        ref = get_inner_reference (*tp, &bitsize, &bitpos, &offset, &mode,
+                                   &unsignedp, &volatilep, false);
+        /* Constant position (no variable OFFSET) and known size: record
+           exactly the bit range this access touches.  */
+        if (DECL_P (ref) && offset == NULL && bitsize != -1)
+          {
+            size_t uid = var_ann (ref)->uid;
+            used_part_t up;
+
+            up = get_or_create_used_part_for (uid);
+
+            /* Widen the recorded range to cover this access.  */
+            if (bitpos <= up->minused)
+              up->minused = bitpos;
+            if ((bitpos + bitsize >= up->maxused))
+              up->maxused = bitpos + bitsize;
+
+            used_portions[uid] = up;
+
+            /* The access is fully accounted for; no need to walk into
+               the reference's operands.  */
+            *walk_subtrees = 0;
+            return NULL_TREE;
+          }
+        else if (DECL_P (ref))
+          {
+            /* Variable position or size: conservatively mark the whole
+               declaration as used.  */
+            if (DECL_SIZE (ref)
+                && var_can_have_subvars (ref)
+                && TREE_CODE (DECL_SIZE (ref)) == INTEGER_CST)
+              {
+                used_part_t up;
+                size_t uid = var_ann (ref)->uid;
+
+                up = get_or_create_used_part_for (uid);
+
+                up->minused = 0;
+                up->maxused = TREE_INT_CST_LOW (DECL_SIZE (ref));
+
+                used_portions[uid] = up;
+
+                *walk_subtrees = 0;
+                return NULL_TREE;
+              }
+          }
+      }
+      break;
+    case VAR_DECL:
+    case PARM_DECL:
+      {
+        /* A bare reference to the variable uses the entire object.  */
+        tree var = *tp;
+        if (DECL_SIZE (var)
+            && var_can_have_subvars (var)
+            && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
+          {
+            used_part_t up;
+            size_t uid = var_ann (var)->uid;
+
+            up = get_or_create_used_part_for (uid);
+
+            up->minused = 0;
+            up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
+
+            used_portions[uid] = up;
+            *walk_subtrees = 0;
+            return NULL_TREE;
+          }
+      }
+      break;
+
+    default:
+      break;
+
+    }
+  return NULL_TREE;
+}
+
+/* We are about to create some new referenced variables, and we need the
+   before size.  NOTE(review): within the visible code this is only read
+   and written by create_structure_vars below, so it could likely be a
+   local there -- confirm no other uses elsewhere in the file.  */
+
+static size_t old_referenced_vars;
+
+
+/* Create structure field variables for structures used in this function.
+   First walks every statement recording which bit ranges of which
+   variables are used, then creates overlap (subvariable) entries for the
+   aggregates whose used portions were recorded.  */
+
+static void
+create_structure_vars (void)
+{
+  basic_block bb;
+  size_t i;
+
+  old_referenced_vars = num_referenced_vars;
+  used_portions = xcalloc (num_referenced_vars, sizeof (used_part_t));
+
+  /* Phase 1: record the used portions of every variable.  */
+  FOR_EACH_BB (bb)
+    {
+      block_stmt_iterator bsi;
+      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+        {
+          walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
+                                        find_used_portions,
+                                        NULL);
+        }
+    }
+  /* Phase 2: create subvariables.  Iterate only over the variables that
+     existed before this pass, since create_overlap_variables_for adds
+     new referenced variables (via add_referenced_tmp_var).  */
+  for (i = 0; i < old_referenced_vars; i++)
+    {
+      tree var = referenced_var (i);
+      /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
+      if (var
+          && DECL_SIZE (var)
+          && var_can_have_subvars (var)
+          && var_ann (var)->mem_tag_kind == NOT_A_TAG
+          && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
+        create_overlap_variables_for (var);
+    }
+  /* Entries for unused uids are NULL; free (NULL) is a no-op.  */
+  for (i = 0; i < old_referenced_vars; i++)
+    free (used_portions[i]);
+
+  free (used_portions);
+}
+
+/* Gate for the structure-variable creation pass: run only when the
+   -ftree-salias flag is enabled.  */
+
+static bool
+gate_structure_vars (void)
+{
+  if (flag_tree_salias)
+    return true;
+  return false;
+}
+
+/* Pass descriptor for creating structure field variables; requires the
+   CFG and dumps the function when requested.  */
+
+struct tree_opt_pass pass_create_structure_vars =
+{
+  "salias",		 /* name */
+  gate_structure_vars,	 /* gate */
+  create_structure_vars, /* execute */
+  NULL,			 /* sub */
+  NULL,			 /* next */
+  0,			 /* static_pass_number */
+  0,			 /* tv_id */
+  PROP_cfg,		 /* properties_required */
+  0,			 /* properties_provided */
+  0,			 /* properties_destroyed */
+  0,			 /* todo_flags_start */
+  TODO_dump_func,	 /* todo_flags_finish */
+  0			 /* letter */
+};
+