/* Alias analysis for trees.
- Copyright (C) 2004 Free Software Foundation, Inc.
+ Copyright (C) 2004, 2005 Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
#include "tree-pass.h"
#include "convert.h"
#include "params.h"
+#include "vec.h"
+/* 'true' after aliases have been computed (see compute_may_aliases). */
+bool aliases_computed_p;
/* Structure to map a variable to its alias set and keep track of the
virtual operands that will be needed to represent it. */
/* SSA names visited while collecting points-to information. If bit I
is set, it means that SSA variable with version I has already been
visited. */
- bitmap ssa_names_visited;
+ sbitmap ssa_names_visited;
/* Array of SSA_NAME pointers processed by the points-to collector. */
varray_type processed_ptrs;
/* Number of function calls found in the program. */
size_t num_calls_found;
+ /* Number of const/pure function calls found in the program. */
+ size_t num_pure_const_calls_found;
+
/* Array of counters to keep track of how many times each pointer has
been dereferenced in the program. This is used by the alias grouping
heuristic in compute_flow_insensitive_aliasing. */
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
static bool collect_points_to_info_r (tree, tree, void *);
-static bool is_escape_site (tree, size_t *);
+static bool is_escape_site (tree, struct alias_info *);
static void add_pointed_to_var (struct alias_info *, tree, tree);
-static void add_pointed_to_expr (tree, tree);
static void create_global_var (void);
static void collect_points_to_info_for (struct alias_info *, tree);
-static bool ptr_is_dereferenced_by (tree, tree, bool *);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
-static struct ptr_info_def *get_ptr_info (tree t);
static void set_pt_anything (tree ptr);
static void set_pt_malloc (tree ptr);
The concept of 'escaping' is the same one used in the Java world. When
a pointer or an ADDR_EXPR escapes, it means that it has been exposed
outside of the current function. So, assignment to global variables,
- function arguments and returning a pointer are all escape sites.
+ function arguments and returning a pointer are all escape sites, as are
+ conversions between pointers and integers.
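+
+   For instance, in the illustrative fragment below ('G' stands for
+   any global and 'bar' for any callee), every pointer escapes:
+
+     int *G;
+
+     int *baz (int *p, int *q, int *r)
+     {
+       long l = (long) p;   <-- 'p' escapes: converted to an integer
+       G = q;               <-- 'q' escapes: stored in a global
+       bar (q);             <-- 'q' escapes: passed as a call argument
+       return r;            <-- 'r' escapes: returned to the caller
+     }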
This is where we are currently limited. Since not everything is renamed
into SSA, we lose track of escape properties when a pointer is stashed
foo (int i)
{
- int *p, *q, a, b;
+ int *p, a, b;
if (i > 10)
p = &a;
else
- q = &b;
+ p = &b;
*p = 3;
- *q = 5;
a = b + 2;
return *p;
}
/* Deallocate memory used by aliasing data structures. */
delete_alias_info (ai);
+
+ {
+ block_stmt_iterator bsi;
+ basic_block bb;
+ FOR_EACH_BB (bb)
+ {
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ update_stmt_if_modified (bsi_stmt (bsi));
+ }
+ }
+ }
+
}
struct tree_opt_pass pass_may_alias =
PROP_alias, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_rename_vars
- | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
+ TODO_dump_func | TODO_update_ssa
+ | TODO_ggc_collect | TODO_verify_ssa
+ | TODO_verify_stmts, /* todo_flags_finish */
0 /* letter */
};
+/* Data structure used to count the number of dereferences to PTR
+ inside an expression. */
+struct count_ptr_d
+{
+ tree ptr;
+ unsigned count;
+};
+
+
+/* Helper for count_uses_and_derefs. Called by walk_tree to look for
+ (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
+
+static tree
+count_ptr_derefs (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
+{
+ struct count_ptr_d *count_p = (struct count_ptr_d *) data;
+
+ if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
+ count_p->count++;
+
+ return NULL_TREE;
+}
+
+
+/* Count the number of direct and indirect uses for pointer PTR in
+ statement STMT. The two counts are stored in *NUM_USES_P and
+ *NUM_DEREFS_P respectively.  *IS_STORE is set to 'true' if at
+ least one of those dereferences is a store operation. */
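+
+/* For instance, for '*x_1 = foo (x_1, *x_1)' this should compute
+   *NUM_USES_P == 3, *NUM_DEREFS_P == 2 and *IS_STORE == true:
+   x_1 has three uses, two of them are dereferences, and the
+   dereference on the LHS is a store.  */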
+
+void
+count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
+ unsigned *num_derefs_p, bool *is_store)
+{
+ ssa_op_iter i;
+ tree use;
+
+ *num_uses_p = 0;
+ *num_derefs_p = 0;
+ *is_store = false;
+
+ /* Find out the total number of uses of PTR in STMT. */
+ FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
+ if (use == ptr)
+ (*num_uses_p)++;
+
+ /* Now count the number of indirect references to PTR. This is
+ truly awful, but we don't have much choice. There are no parent
+ pointers inside INDIRECT_REFs, so an expression like
+ '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
+ find all the indirect and direct uses of x_1 inside. The only
+ shortcut we can take is the fact that GIMPLE only allows
+ INDIRECT_REFs inside the expressions below. */
+ if (TREE_CODE (stmt) == MODIFY_EXPR
+ || (TREE_CODE (stmt) == RETURN_EXPR
+ && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
+ || TREE_CODE (stmt) == ASM_EXPR
+ || TREE_CODE (stmt) == CALL_EXPR)
+ {
+ tree lhs, rhs;
+
+ if (TREE_CODE (stmt) == MODIFY_EXPR)
+ {
+ lhs = TREE_OPERAND (stmt, 0);
+ rhs = TREE_OPERAND (stmt, 1);
+ }
+ else if (TREE_CODE (stmt) == RETURN_EXPR)
+ {
+ tree e = TREE_OPERAND (stmt, 0);
+ lhs = TREE_OPERAND (e, 0);
+ rhs = TREE_OPERAND (e, 1);
+ }
+ else if (TREE_CODE (stmt) == ASM_EXPR)
+ {
+ lhs = ASM_OUTPUTS (stmt);
+ rhs = ASM_INPUTS (stmt);
+ }
+ else
+ {
+ lhs = NULL_TREE;
+ rhs = stmt;
+ }
+
+ if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
+ {
+ struct count_ptr_d count;
+ count.ptr = ptr;
+ count.count = 0;
+ walk_tree (&lhs, count_ptr_derefs, &count, NULL);
+ *is_store = true;
+ *num_derefs_p = count.count;
+ }
+
+ if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
+ {
+ struct count_ptr_d count;
+ count.ptr = ptr;
+ count.count = 0;
+ walk_tree (&rhs, count_ptr_derefs, &count, NULL);
+ *num_derefs_p += count.count;
+ }
+ }
+
+ gcc_assert (*num_uses_p >= *num_derefs_p);
+}
+
+
/* Initialize the data structures used for alias analysis. */
static struct alias_info *
init_alias_info (void)
{
struct alias_info *ai;
- static bool aliases_computed_p = false;
ai = xcalloc (1, sizeof (struct alias_info));
- ai->ssa_names_visited = BITMAP_XMALLOC ();
+ ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
+ sbitmap_zero (ai->ssa_names_visited);
VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
- ai->addresses_needed = BITMAP_XMALLOC ();
+ ai->addresses_needed = BITMAP_ALLOC (NULL);
VARRAY_UINT_INIT (ai->num_references, num_referenced_vars, "num_references");
- ai->written_vars = BITMAP_XMALLOC ();
- ai->dereferenced_ptrs_store = BITMAP_XMALLOC ();
- ai->dereferenced_ptrs_load = BITMAP_XMALLOC ();
+ ai->written_vars = BITMAP_ALLOC (NULL);
+ ai->dereferenced_ptrs_store = BITMAP_ALLOC (NULL);
+ ai->dereferenced_ptrs_load = BITMAP_ALLOC (NULL);
/* If aliases have been computed before, clear existing information. */
if (aliases_computed_p)
{
- size_t i;
-
- /* Clear the call-clobbered set. We are going to re-discover
- call-clobbered variables. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
- {
- tree var = referenced_var (i);
-
- /* Variables that are intrinsically call-clobbered (globals,
- local statics, etc) will not be marked by the aliasing
- code, so we can't remove them from CALL_CLOBBERED_VARS. */
- if (!is_call_clobbered (var))
- bitmap_clear_bit (call_clobbered_vars, var_ann (var)->uid);
- });
-
+ unsigned i;
+
/* Similarly, clear the set of addressable variables. In this
case, we can just clear the set because addressability is
only computed here. */
/* Clear flow-insensitive alias information from each symbol. */
for (i = 0; i < num_referenced_vars; i++)
{
- var_ann_t ann = var_ann (referenced_var (i));
+ tree var = referenced_var (i);
+ var_ann_t ann = var_ann (var);
+
ann->is_alias_tag = 0;
ann->may_aliases = NULL;
+
+ /* Since we are about to re-discover call-clobbered
+ variables, clear the call-clobbered flag. Variables that
+ are intrinsically call-clobbered (globals, local statics,
+ etc) will not be marked by the aliasing code, so we can't
+ remove them from CALL_CLOBBERED_VARS.
+
+ NB: STRUCT_FIELDS are still call clobbered if they are for
+ a global variable, so we *don't* clear their call clobberedness
+ just because they are tags, though we will clear it if they
+ aren't for global variables. */
+ if (ann->mem_tag_kind == NAME_TAG
+ || ann->mem_tag_kind == TYPE_TAG
+ || !is_global_var (var))
+ clear_call_clobbered (var);
}
/* Clear flow-sensitive points-to information from each SSA name. */
{
tree name = ssa_name (i);
- if (!POINTER_TYPE_P (TREE_TYPE (name)))
+ if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
continue;
if (SSA_NAME_PTR_INFO (name))
tag will need to be created in create_name_tags. */
pi->pt_anything = 0;
pi->pt_malloc = 0;
+ pi->pt_null = 0;
pi->value_escapes_p = 0;
pi->is_dereferenced = 0;
if (pi->pt_vars)
{
size_t i;
- BITMAP_XFREE (ai->ssa_names_visited);
+ sbitmap_free (ai->ssa_names_visited);
ai->processed_ptrs = NULL;
- BITMAP_XFREE (ai->addresses_needed);
+ BITMAP_FREE (ai->addresses_needed);
for (i = 0; i < ai->num_addressable_vars; i++)
{
free (ai->pointers);
ai->num_references = NULL;
- BITMAP_XFREE (ai->written_vars);
- BITMAP_XFREE (ai->dereferenced_ptrs_store);
- BITMAP_XFREE (ai->dereferenced_ptrs_load);
+ BITMAP_FREE (ai->written_vars);
+ BITMAP_FREE (ai->dereferenced_ptrs_store);
+ BITMAP_FREE (ai->dereferenced_ptrs_load);
free (ai);
}
{
gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
- if (!bitmap_bit_p (ai->ssa_names_visited, SSA_NAME_VERSION (ptr)))
+ if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (ptr)))
{
- bitmap_set_bit (ai->ssa_names_visited, SSA_NAME_VERSION (ptr));
+ SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (ptr));
walk_use_def_chains (ptr, collect_points_to_info_r, ai, true);
VARRAY_PUSH_TREE (ai->processed_ptrs, ptr);
}
}
-/* Helper for ptr_is_dereferenced_by. Called by walk_tree to look for
- INDIRECT_REF nodes for the pointer passed in DATA. */
-
-static tree
-find_ptr_dereference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
-{
- tree ptr = (tree) data;
-
- if (TREE_CODE (*tp) == INDIRECT_REF
- && TREE_OPERAND (*tp, 0) == ptr)
- return *tp;
-
- return NULL_TREE;
-}
-
-
-/* Return true if STMT contains INDIRECT_REF <PTR>. *IS_STORE is set
- to 'true' if the dereference is on the LHS of an assignment. */
-
-static bool
-ptr_is_dereferenced_by (tree ptr, tree stmt, bool *is_store)
-{
- *is_store = false;
-
- if (TREE_CODE (stmt) == MODIFY_EXPR
- || (TREE_CODE (stmt) == RETURN_EXPR
- && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR))
- {
- tree e, lhs, rhs;
-
- e = (TREE_CODE (stmt) == RETURN_EXPR) ? TREE_OPERAND (stmt, 0) : stmt;
- lhs = TREE_OPERAND (e, 0);
- rhs = TREE_OPERAND (e, 1);
-
- if (EXPR_P (lhs)
- && walk_tree (&lhs, find_ptr_dereference, ptr, NULL))
- {
- *is_store = true;
- return true;
- }
- else if (EXPR_P (rhs)
- && walk_tree (&rhs, find_ptr_dereference, ptr, NULL))
- {
- return true;
- }
- }
- else if (TREE_CODE (stmt) == ASM_EXPR)
- {
- if (walk_tree (&ASM_OUTPUTS (stmt), find_ptr_dereference, ptr, NULL)
- || walk_tree (&ASM_CLOBBERS (stmt), find_ptr_dereference, ptr, NULL))
- {
- *is_store = true;
- return true;
- }
- else if (walk_tree (&ASM_INPUTS (stmt), find_ptr_dereference, ptr, NULL))
- {
- return true;
- }
- }
-
- return false;
-}
-
-
/* Traverse use-def links for all the pointers in the program to collect
address escape and points-to information.
compute_points_to_and_addr_escape (struct alias_info *ai)
{
basic_block bb;
- size_t i;
+ unsigned i;
tree op;
ssa_op_iter iter;
{
bitmap addr_taken;
tree stmt = bsi_stmt (si);
- bool stmt_escapes_p = is_escape_site (stmt, &ai->num_calls_found);
+ bool stmt_escapes_p = is_escape_site (stmt, ai);
+ bitmap_iterator bi;
/* Mark all the variables whose address are taken by the
statement. Note that this will miss all the addresses taken
in PHI nodes (those are discovered while following the use-def
chains). */
- get_stmt_operands (stmt);
addr_taken = addresses_taken (stmt);
if (addr_taken)
- EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i,
- {
- tree var = referenced_var (i);
- bitmap_set_bit (ai->addresses_needed, var_ann (var)->uid);
- if (stmt_escapes_p)
- mark_call_clobbered (var);
- });
+ EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
+ {
+ tree var = referenced_var (i);
+ bitmap_set_bit (ai->addresses_needed, var_ann (var)->uid);
+ if (stmt_escapes_p)
+ mark_call_clobbered (var);
+ }
if (stmt_escapes_p)
block_ann->has_escape_site = 1;
- /* Special case for silly ADDR_EXPR tricks
- (gcc.c-torture/unsorted/pass.c). If this statement is an
- assignment to a non-pointer variable and the RHS takes the
- address of a variable, assume that the variable on the RHS is
- call-clobbered. We could add the LHS to the list of
- "pointers" and follow it to see if it really escapes, but it's
- not worth the pain. */
- if (addr_taken
- && TREE_CODE (stmt) == MODIFY_EXPR
- && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 0))))
- EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i,
- {
- tree var = referenced_var (i);
- mark_call_clobbered (var);
- });
-
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
{
var_ann_t v_ann = var_ann (SSA_NAME_VAR (op));
struct ptr_info_def *pi;
bool is_store;
+ unsigned num_uses, num_derefs;
/* If the operand's variable may be aliased, keep track
of how many times we've referenced it. This is used
collect_points_to_info_for (ai, op);
pi = SSA_NAME_PTR_INFO (op);
- if (ptr_is_dereferenced_by (op, stmt, &is_store))
+ count_uses_and_derefs (op, stmt, &num_uses, &num_derefs,
+ &is_store);
+
+ if (num_derefs > 0)
{
/* Mark OP as dereferenced. In a subsequent pass,
dereferenced pointers that point to a set of
else
bitmap_set_bit (ai->dereferenced_ptrs_load, v_ann->uid);
}
- else if (stmt_escapes_p)
+
+ if (stmt_escapes_p && num_derefs < num_uses)
{
- /* Note that even if STMT is an escape point, pointer OP
- will not escape if it is being dereferenced. That's
- why we only check for escape points if OP is not
- dereferenced by STMT. */
+ /* If STMT is an escape point and STMT contains at
+ least one direct use of OP, then the value of OP
+ escapes and so the pointed-to variables need to
+ be marked call-clobbered. */
pi->value_escapes_p = 1;
/* If the statement makes a function call, assume
bitmap_set_bit (ai->written_vars, ann->uid);
if (may_be_aliased (var))
(VARRAY_UINT (ai->num_references, ann->uid))++;
+
+ if (POINTER_TYPE_P (TREE_TYPE (op)))
+ collect_points_to_info_for (ai, op);
}
/* Mark variables in V_MAY_DEF operands as being written to. */
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
{
- tree var = SSA_NAME_VAR (op);
+ tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
var_ann_t ann = var_ann (var);
bitmap_set_bit (ai->written_vars, ann->uid);
}
need to re-scan most statements. FIXME: Try to minimize the
number of statements re-scanned. It's not really necessary to
re-scan *all* statements. */
- modify_stmt (stmt);
+ mark_stmt_modified (stmt);
}
}
continue;
}
- if (pi->pt_vars
- && bitmap_first_set_bit (pi->pt_vars) >= 0)
+ if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
{
size_t j;
tree old_name_tag = pi->name_mem_tag;
needs to be removed from the IL, so we mark it for
renaming. */
if (old_name_tag && old_name_tag != pi->name_mem_tag)
- bitmap_set_bit (vars_to_rename, var_ann (old_name_tag)->uid);
+ mark_sym_for_renaming (old_name_tag);
}
else if (pi->pt_malloc)
{
continue;
}
+ TREE_THIS_VOLATILE (pi->name_mem_tag)
+ |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
+
/* Mark the new name tag for renaming. */
- bitmap_set_bit (vars_to_rename, var_ann (pi->name_mem_tag)->uid);
+ mark_sym_for_renaming (pi->name_mem_tag);
}
}
for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
{
- size_t j;
+ unsigned j;
tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
+ bitmap_iterator bi;
if (pi->value_escapes_p || pi->pt_anything)
{
mark_call_clobbered (v_ann->type_mem_tag);
if (pi->pt_vars)
- EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j,
- mark_call_clobbered (referenced_var (j)));
+ EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
+ {
+ mark_call_clobbered (referenced_var (j));
+ }
}
/* Set up aliasing information for PTR's name memory tag (if it has
one). Note that only pointers that have been dereferenced will
have a name memory tag. */
if (pi->name_mem_tag && pi->pt_vars)
- EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j,
- add_may_alias (pi->name_mem_tag, referenced_var (j)));
+ EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
+ {
+ add_may_alias (pi->name_mem_tag, referenced_var (j));
+ add_may_alias (v_ann->type_mem_tag, referenced_var (j));
+ }
/* If the name tag is call clobbered, so is the type tag
associated with the base VAR_DECL. */
/* Skip memory tags and variables that have never been
written to. We also need to check if the variables are
call-clobbered because they may be overwritten by
- function calls. */
- tag_stored_p = bitmap_bit_p (ai->written_vars, tag_ann->uid)
- || is_call_clobbered (tag);
- var_stored_p = bitmap_bit_p (ai->written_vars, v_ann->uid)
- || is_call_clobbered (var);
+ function calls.
+
+ Note that this amounts to randomly accessing elements of
+ the sparse bitmap, which can be highly inefficient.
+ So we first check the call-clobbered status of the
+ tag and variable before querying the bitmap. */
+ tag_stored_p = is_call_clobbered (tag)
+ || bitmap_bit_p (ai->written_vars, tag_ann->uid);
+ var_stored_p = is_call_clobbered (var)
+ || bitmap_bit_p (ai->written_vars, v_ann->uid);
if (!tag_stored_p && !var_stored_p)
continue;
-
+
if (may_alias_p (p_map->var, p_map->set, var, v_map->set))
{
+ subvar_t svars;
size_t num_tag_refs, num_var_refs;
num_tag_refs = VARRAY_UINT (ai->num_references, tag_ann->uid);
num_var_refs = VARRAY_UINT (ai->num_references, v_ann->uid);
/* Add VAR to TAG's may-aliases set. */
- add_may_alias (tag, var);
+
+ /* If this is an aggregate, we may have subvariables for it
+ that need to be pointed to. */
+ if (var_can_have_subvars (var)
+ && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
+
+ for (sv = svars; sv; sv = sv->next)
+ {
+ add_may_alias (tag, sv->var);
+ /* Update the bitmap used to represent TAG's alias set
+ in case we need to group aliases. */
+ SET_BIT (p_map->may_aliases, var_ann (sv->var)->uid);
+ }
+ }
+ else
+ {
+ add_may_alias (tag, var);
+ /* Update the bitmap used to represent TAG's alias set
+ in case we need to group aliases. */
+ SET_BIT (p_map->may_aliases, var_ann (var)->uid);
+ }
/* Update the total number of virtual operands due to
aliasing. Since we are adding one more alias to TAG's
ai->total_alias_vops += (num_var_refs + num_tag_refs);
p_map->total_alias_vops += (num_var_refs + num_tag_refs);
- /* Update the bitmap used to represent TAG's alias set
- in case we need to group aliases. */
- SET_BIT (p_map->may_aliases, var_ann (var)->uid);
+
+ }
+ }
+ }
+
+ /* Since this analysis is based exclusively on symbols, it fails to
+ handle cases where two pointers P and Q have different memory
+ tags with conflicting alias set numbers but no aliased symbols in
+ common.
+
+ For example, suppose that we have two memory tags TMT.1 and TMT.2
+ such that
+
+ may-aliases (TMT.1) = { a }
+ may-aliases (TMT.2) = { b }
+
+ and the alias set number of TMT.1 conflicts with that of TMT.2.
+ Since they don't have symbols in common, loads and stores from
+ TMT.1 and TMT.2 will seem independent of each other, which will
+ lead to the optimizers making invalid transformations (see
+ testsuite/gcc.c-torture/execute/pr15262-[12].c).
+
+ To avoid this problem, we do a final traversal of AI->POINTERS
+ looking for pairs of pointers that have no aliased symbols in
+ common and yet have conflicting alias set numbers. */
+ for (i = 0; i < ai->num_pointers; i++)
+ {
+ size_t j;
+ struct alias_map_d *p_map1 = ai->pointers[i];
+ tree tag1 = var_ann (p_map1->var)->type_mem_tag;
+ sbitmap may_aliases1 = p_map1->may_aliases;
+
+ for (j = i + 1; j < ai->num_pointers; j++)
+ {
+ struct alias_map_d *p_map2 = ai->pointers[j];
+ tree tag2 = var_ann (p_map2->var)->type_mem_tag;
+ sbitmap may_aliases2 = p_map2->may_aliases;
+
+ /* If the pointers cannot point to each other, do nothing. */
+ if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set))
+ continue;
+
+ /* The two pointers may alias each other. If they already have
+ symbols in common, do nothing. */
+ if (sbitmap_any_common_bits (may_aliases1, may_aliases2))
+ continue;
+
+ if (sbitmap_first_set_bit (may_aliases2) >= 0)
+ {
+ size_t k;
+
+ /* Add all the aliases for TAG2 into TAG1's alias set.
+ FIXME, update grouping heuristic counters. */
+ EXECUTE_IF_SET_IN_SBITMAP (may_aliases2, 0, k,
+ add_may_alias (tag1, referenced_var (k)));
+ sbitmap_a_or_b (may_aliases1, may_aliases1, may_aliases2);
+ }
+ else
+ {
+ /* Since TAG2 does not have any aliases of its own, add
+ TAG2 itself to the alias set of TAG1. */
+ add_may_alias (tag1, tag2);
+ SET_BIT (may_aliases1, var_ann (tag2)->uid);
}
}
}
group_aliases (struct alias_info *ai)
{
size_t i;
- sbitmap res;
/* Sort the POINTERS array in descending order of contributed
virtual operands. */
qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
total_alias_vops_cmp);
- res = sbitmap_alloc (num_referenced_vars);
-
/* For every pointer in AI->POINTERS, reverse the roles of its tag
and the tag's may-aliases set. */
for (i = 0; i < ai->num_pointers; i++)
{
sbitmap tag2_aliases = ai->pointers[j]->may_aliases;
- sbitmap_a_and_b (res, tag1_aliases, tag2_aliases);
- if (sbitmap_first_set_bit (res) >= 0)
+ if (sbitmap_any_common_bits (tag1_aliases, tag2_aliases))
{
tree tag2 = var_ann (ai->pointers[j]->var)->type_mem_tag;
tree alias = VARRAY_TREE (aliases, j);
var_ann_t ann = var_ann (alias);
- if (ann->mem_tag_kind == NOT_A_TAG && ann->may_aliases)
+ if ((ann->mem_tag_kind == NOT_A_TAG
+ || ann->mem_tag_kind == STRUCT_FIELD)
+ && ann->may_aliases)
{
tree new_alias;
}
}
- sbitmap_free (res);
-
if (dump_file)
fprintf (dump_file,
"%s: Total number of aliased vops after grouping: %ld%s\n",
{
tree var = referenced_var (i);
var_ann_t v_ann = var_ann (var);
+ subvar_t svars;
/* Name memory tags already have flow-sensitive aliasing
information, so they need not be processed by
- compute_may_aliases. Similarly, type memory tags are already
- accounted for when we process their associated pointer. */
- if (v_ann->mem_tag_kind != NOT_A_TAG)
+ compute_flow_insensitive_aliasing. Similarly, type memory
+ tags are already accounted for when we process their
+ associated pointer.
+
+ Structure fields, on the other hand, have to have some of this
+ information processed for them, but it's pointless to mark them
+ non-addressable (since they are fake variables anyway). */
+ if (v_ann->mem_tag_kind != NOT_A_TAG
+ && v_ann->mem_tag_kind != STRUCT_FIELD)
continue;
/* Remove the ADDRESSABLE flag from every addressable variable whose
of ADDR_EXPR constants into INDIRECT_REF expressions and the
removal of dead pointer assignments done by the early scalar
cleanup passes. */
- if (TREE_ADDRESSABLE (var))
+ if (TREE_ADDRESSABLE (var) && v_ann->mem_tag_kind != STRUCT_FIELD)
{
if (!bitmap_bit_p (ai->addresses_needed, v_ann->uid)
- && v_ann->mem_tag_kind == NOT_A_TAG
+ && TREE_CODE (var) != RESULT_DECL
&& !is_global_var (var))
{
- /* The address of VAR is not needed, remove the
- addressable bit, so that it can be optimized as a
- regular variable. */
- mark_non_addressable (var);
+ bool okay_to_mark = true;
/* Since VAR is now a regular GIMPLE register, we will need
to rename VAR into SSA afterwards. */
- bitmap_set_bit (vars_to_rename, v_ann->uid);
+ mark_sym_for_renaming (var);
+
+ if (var_can_have_subvars (var)
+ && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
+
+ for (sv = svars; sv; sv = sv->next)
+ {
+ var_ann_t svann = var_ann (sv->var);
+ if (bitmap_bit_p (ai->addresses_needed, svann->uid))
+ okay_to_mark = false;
+ mark_sym_for_renaming (sv->var);
+ }
+ }
+
+ /* The address of VAR is not needed, remove the
+ addressable bit, so that it can be optimized as a
+ regular variable. */
+ if (okay_to_mark)
+ mark_non_addressable (var);
}
else
{
clobber memory. In those cases, we need to clobber
all call-clobbered variables and all addressables. */
bitmap_set_bit (addressable_vars, v_ann->uid);
+ if (var_can_have_subvars (var)
+ && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ bitmap_set_bit (addressable_vars, var_ann (sv->var)->uid);
+ }
+
}
}
if (may_be_aliased (var))
{
create_alias_map_for (var, ai);
- bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
+ mark_sym_for_renaming (var);
}
/* Add pointer variables that have been dereferenced to the POINTERS
afterwards. Note that we cannot do this inside
get_tmt_for because aliasing may run multiple times
and we only create type tags the first time. */
- bitmap_set_bit (vars_to_rename, t_ann->uid);
+ mark_sym_for_renaming (tag);
+
+ /* Similarly, if pointer VAR used to have another type
+ tag, we will need to process it in the renamer to
+ remove the stale virtual operands. */
+ if (v_ann->type_mem_tag)
+ mark_sym_for_renaming (v_ann->type_mem_tag);
/* Associate the tag with pointer VAR. */
v_ann->type_mem_tag = tag;
tree tag = ann->type_mem_tag;
if (tag)
{
- bitmap_set_bit (vars_to_rename, var_ann (tag)->uid);
+ mark_sym_for_renaming (tag);
ann->type_mem_tag = NULL_TREE;
}
}
}
}
-
- /* If we found no addressable variables, but we have more than one
- pointer, we will need to check for conflicts between the
- pointers. Otherwise, we would miss alias relations as in
- testsuite/gcc.dg/tree-ssa/20040319-1.c:
-
- struct bar { int count; int *arr;};
-
- void foo (struct bar *b)
- {
- b->count = 0;
- *(b->arr) = 2;
- if (b->count == 0)
- abort ();
- }
-
- b->count and *(b->arr) could be aliased if b->arr == &b->count.
- To do this, we add all the memory tags for the pointers in
- AI->POINTERS to AI->ADDRESSABLE_VARS, so that
- compute_flow_insensitive_aliasing will naturally compare every
- pointer to every type tag. */
- if (ai->num_addressable_vars == 0
- && ai->num_pointers > 1)
- {
- free (ai->addressable_vars);
- ai->addressable_vars = xcalloc (ai->num_pointers,
- sizeof (struct alias_map_d *));
- ai->num_addressable_vars = 0;
- for (i = 0; i < ai->num_pointers; i++)
- {
- struct alias_map_d *p = ai->pointers[i];
- tree tag = var_ann (p->var)->type_mem_tag;
- create_alias_map_for (tag, ai);
- }
- }
}
static void
maybe_create_global_var (struct alias_info *ai)
{
- size_t i, n_clobbered;
+ unsigned i, n_clobbered;
+ bitmap_iterator bi;
/* No need to create it, if we have one already. */
if (global_var == NULL_TREE)
{
/* Count all the call-clobbered variables. */
n_clobbered = 0;
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, n_clobbered++);
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ {
+ n_clobbered++;
+ }
- /* Create .GLOBAL_VAR if we have too many call-clobbered
- variables. We also create .GLOBAL_VAR when there no
- call-clobbered variables to prevent code motion
- transformations from re-arranging function calls that may
- have side effects. For instance,
+ /* If the number of virtual operands that would be needed to
+ model all the call-clobbered variables is larger than
+ GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.
- foo ()
- {
- int a = f ();
- g ();
- h (a);
- }
+ Also create .GLOBAL_VAR if there are no call-clobbered
+ variables and the program contains a mixture of pure/const
+ and regular function calls. This is to avoid the problem
+ described in PR 20115:
+
+ int X;
+ int func_pure (void) { return X; }
+ void func_non_pure (int a) { X += a; }
+ int foo ()
+ {
+ int a = func_pure ();
+ func_non_pure (a);
+ a = func_pure ();
+ return a;
+ }
- There are no call-clobbered variables in foo(), so it would
- be entirely possible for a pass to want to move the call to
- f() after the call to g(). If f() has side effects, that
- would be wrong. Creating .GLOBAL_VAR in this case will
- insert VDEFs for it and prevent such transformations. */
- if (n_clobbered == 0
- || ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD)
+ Since foo() has no call-clobbered variables, there is
+ no relationship between the calls to func_pure and
+ func_non_pure. Since func_pure has no side-effects, value
+ numbering optimizations elide the second call to func_pure.
+ So, if we have some pure/const and some regular calls in the
+ program we create .GLOBAL_VAR to avoid missing these
+ relations. */
+ if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
+ || (n_clobbered == 0
+ && ai->num_calls_found > 0
+ && ai->num_pure_const_calls_found > 0
+ && ai->num_calls_found > ai->num_pure_const_calls_found))
create_global_var ();
}
- /* If the function has calls to clobbering functions and .GLOBAL_VAR has
- been created, make it an alias for all call-clobbered variables. */
- if (global_var)
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
- {
- tree var = referenced_var (i);
- if (var != global_var)
- {
- add_may_alias (var, global_var);
- bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
- }
- });
+ /* Mark all call-clobbered symbols for renaming. Since the initial
+ rewrite into SSA ignored all call sites, we may need to rename
+ .GLOBAL_VAR and the call-clobbered variables. */
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ {
+ tree var = referenced_var (i);
+
+ /* If the function has calls to clobbering functions and
+ .GLOBAL_VAR has been created, make it an alias for all
+ call-clobbered variables. */
+ if (global_var && var != global_var)
+ {
+ subvar_t svars;
+ add_may_alias (var, global_var);
+ if (var_can_have_subvars (var)
+ && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ mark_sym_for_renaming (sv->var);
+ }
+ }
+
+ mark_sym_for_renaming (var);
+ }
}
tree var, HOST_WIDE_INT var_alias_set)
{
tree mem;
- var_ann_t v_ann, m_ann;
+ var_ann_t m_ann;
alias_stats.alias_queries++;
alias_stats.simple_queries++;
alias_stats.simple_resolved++;
return false;
}
+
+ /* If flag_argument_noalias is > 1 (i.e., -fargument-noalias-global
+ was given), pointer arguments may not point to global variables. */
+ if (flag_argument_noalias > 1 && is_global_var (var)
+ && TREE_CODE (ptr) == PARM_DECL)
+ {
+ alias_stats.alias_noalias++;
+ alias_stats.simple_resolved++;
+ return false;
+ }
+
+ /* If either MEM or VAR is a read-only global and the other one
+ isn't, then PTR cannot point to VAR. */
+ if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
+ || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
+ {
+ alias_stats.alias_noalias++;
+ alias_stats.simple_resolved++;
+ return false;
+ }
- v_ann = var_ann (var);
m_ann = var_ann (mem);
gcc_assert (m_ann->mem_tag_kind == TYPE_TAG);
for PTR's alias set here, not its pointed-to type. We also can't
do this check with relaxed aliasing enabled. */
if (POINTER_TYPE_P (TREE_TYPE (var))
- && var_alias_set != 0)
+ && var_alias_set != 0
+ && mem_alias_set != 0)
{
HOST_WIDE_INT ptr_alias_set = get_alias_set (ptr);
if (ptr_alias_set == var_alias_set)
/* If the alias sets don't conflict then MEM cannot alias VAR. */
if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
{
- /* Handle aliases to structure fields. If either VAR or MEM are
- aggregate types, they may not have conflicting types, but one of
- the structures could contain a pointer to the other one.
-
- For instance, given
-
- MEM -> struct P *p;
- VAR -> struct Q *q;
-
- It may happen that '*p' and '*q' can't alias because 'struct P'
- and 'struct Q' have non-conflicting alias sets. However, it could
- happen that one of the fields in 'struct P' is a 'struct Q *' or
- vice-versa.
-
- Therefore, we also need to check if 'struct P' aliases 'struct Q *'
- or 'struct Q' aliases 'struct P *'. Notice, that since GIMPLE
- does not have more than one-level pointers, we don't need to
- recurse into the structures. */
- if (AGGREGATE_TYPE_P (TREE_TYPE (mem))
- || AGGREGATE_TYPE_P (TREE_TYPE (var)))
- {
- tree ptr_to_var;
-
- if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
- ptr_to_var = TYPE_POINTER_TO (TREE_TYPE (TREE_TYPE (var)));
- else
- ptr_to_var = TYPE_POINTER_TO (TREE_TYPE (var));
-
- /* If no pointer-to VAR exists, then MEM can't alias VAR. */
- if (ptr_to_var == NULL_TREE)
- {
- alias_stats.alias_noalias++;
- alias_stats.tbaa_resolved++;
- return false;
- }
-
- /* If MEM doesn't alias a pointer to VAR and VAR doesn't alias
- PTR, then PTR can't alias VAR. */
- if (!alias_sets_conflict_p (mem_alias_set, get_alias_set (ptr_to_var))
- && !alias_sets_conflict_p (var_alias_set, get_alias_set (ptr)))
- {
- alias_stats.alias_noalias++;
- alias_stats.tbaa_resolved++;
- return false;
- }
- }
- else
- {
- alias_stats.alias_noalias++;
- alias_stats.tbaa_resolved++;
- return false;
- }
+ alias_stats.alias_noalias++;
+ alias_stats.tbaa_resolved++;
+ return false;
}
-
alias_stats.alias_mayalias++;
return true;
}
disassociated from PTR. */
if (pi->name_mem_tag)
{
- bitmap_set_bit (vars_to_rename, var_ann (pi->name_mem_tag)->uid);
+ mark_sym_for_renaming (pi->name_mem_tag);
pi->name_mem_tag = NULL_TREE;
}
}
}
-/* Given two pointers DEST and ORIG. Merge the points-to information in
- ORIG into DEST. AI is as in collect_points_to_info. */
+/* Given two different pointers DEST and ORIG, merge the points-to
+ information from ORIG into DEST. AI contains all the alias
+ information collected up to this point. */
static void
merge_pointed_to_info (struct alias_info *ai, tree dest, tree orig)
{
struct ptr_info_def *dest_pi, *orig_pi;
+ gcc_assert (dest != orig);
+
/* Make sure we have points-to information for ORIG. */
collect_points_to_info_for (ai, orig);
if (orig_pi)
{
+ gcc_assert (orig_pi != dest_pi);
+
/* Notice that we never merge PT_MALLOC. This attribute is only
true if the pointer is the result of a malloc() call.
Otherwise, we can end up in this situation:
...
P_j = P_i + X;
- P_j would be marked as PT_MALLOC, which is wrong because
- PT_MALLOC implies that the pointer may not point to another
- variable.
-
- FIXME 1: Subsequent analysis may determine that P_j
- cannot alias anything else, but we are being conservative
- here.
-
- FIXME 2: If the merging comes from a copy assignment, we
- ought to merge PT_MALLOC, but then both pointers would end up
- getting different name tags because create_name_tags is not
- smart enough to determine that the two come from the same
- malloc call. Copy propagation before aliasing should cure
- this. */
+ P_j would be marked as PT_MALLOC, however we currently do not
+ handle cases of more than one pointer pointing to the same
+ malloc'd area.
+
+ FIXME: If the merging comes from an expression that preserves
+ the PT_MALLOC attribute (copy assignment, address
+ arithmetic), we ought to merge PT_MALLOC, but then both
+ pointers would end up getting different name tags because
+ create_name_tags is not smart enough to determine that the
+ two come from the same malloc call. Copy propagation before
+ aliasing should cure this. */
dest_pi->pt_malloc = 0;
-
if (orig_pi->pt_malloc || orig_pi->pt_anything)
set_pt_anything (dest);
+ dest_pi->pt_null |= orig_pi->pt_null;
+
if (!dest_pi->pt_anything
&& orig_pi->pt_vars
- && bitmap_first_set_bit (orig_pi->pt_vars) >= 0)
+ && !bitmap_empty_p (orig_pi->pt_vars))
{
if (dest_pi->pt_vars == NULL)
{
bitmap_copy (dest_pi->pt_vars, orig_pi->pt_vars);
}
else
- bitmap_a_or_b (dest_pi->pt_vars,
- dest_pi->pt_vars,
- orig_pi->pt_vars);
+ bitmap_ior_into (dest_pi->pt_vars, orig_pi->pt_vars);
}
}
else
}
-/* Add VALUE to the list of expressions pointed-to by PTR. */
+/* Add EXPR to the list of expressions pointed-to by PTR. */
static void
-add_pointed_to_expr (tree ptr, tree value)
+add_pointed_to_expr (struct alias_info *ai, tree ptr, tree expr)
{
- if (TREE_CODE (value) == WITH_SIZE_EXPR)
- value = TREE_OPERAND (value, 0);
-
- /* Pointer variables should have been handled by merge_pointed_to_info. */
- gcc_assert (TREE_CODE (value) != SSA_NAME
- || !POINTER_TYPE_P (TREE_TYPE (value)));
+ if (TREE_CODE (expr) == WITH_SIZE_EXPR)
+ expr = TREE_OPERAND (expr, 0);
get_ptr_info (ptr);
- /* If VALUE is the result of a malloc-like call, then the area pointed to
- PTR is guaranteed to not alias with anything else. */
- if (TREE_CODE (value) == CALL_EXPR
- && (call_expr_flags (value) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
- set_pt_malloc (ptr);
- else
- set_pt_anything (ptr);
-
- if (dump_file)
+ if (TREE_CODE (expr) == CALL_EXPR
+ && (call_expr_flags (expr) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
{
- struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
+ /* If EXPR is a malloc-like call, then the area pointed to by PTR
+ is guaranteed not to alias anything else. */
+ set_pt_malloc (ptr);
+ }
+ else if (TREE_CODE (expr) == ADDR_EXPR)
+ {
+ /* Found P_i = ADDR_EXPR */
+ add_pointed_to_var (ai, ptr, expr);
+ }
+ else if (TREE_CODE (expr) == SSA_NAME && POINTER_TYPE_P (TREE_TYPE (expr)))
+ {
+ /* Found P_i = Q_j. */
+ merge_pointed_to_info (ai, ptr, expr);
+ }
+ else if (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR)
+ {
+ /* Found P_i = PLUS_EXPR or P_i = MINUS_EXPR */
+ tree op0 = TREE_OPERAND (expr, 0);
+ tree op1 = TREE_OPERAND (expr, 1);
+
+ /* Both operands may be of pointer type. FIXME: Shouldn't
+ we just expect PTR + OFFSET always? */
+ if (POINTER_TYPE_P (TREE_TYPE (op0))
+ && TREE_CODE (op0) != INTEGER_CST)
+ {
+ if (TREE_CODE (op0) == SSA_NAME)
+ merge_pointed_to_info (ai, ptr, op0);
+ else if (TREE_CODE (op0) == ADDR_EXPR)
+ add_pointed_to_var (ai, ptr, op0);
+ else
+ set_pt_anything (ptr);
+ }
- fprintf (dump_file, "Pointer ");
- print_generic_expr (dump_file, ptr, dump_flags);
- fprintf (dump_file, " points to ");
- if (pi->pt_malloc)
- fprintf (dump_file, "malloc space: ");
- else
- fprintf (dump_file, "an arbitrary address: ");
- print_generic_expr (dump_file, value, dump_flags);
- fprintf (dump_file, "\n");
+ if (POINTER_TYPE_P (TREE_TYPE (op1))
+ && TREE_CODE (op1) != INTEGER_CST)
+ {
+ if (TREE_CODE (op1) == SSA_NAME)
+ merge_pointed_to_info (ai, ptr, op1);
+ else if (TREE_CODE (op1) == ADDR_EXPR)
+ add_pointed_to_var (ai, ptr, op1);
+ else
+ set_pt_anything (ptr);
+ }
+
+ /* Neither operand is a pointer? Then PTR can point anywhere.
+ FIXME: Shouldn't we be asserting here? If we get here, we found
+ PTR = INT_CST + INT_CST, which should not be a valid pointer
+ expression. */
+ if (!(POINTER_TYPE_P (TREE_TYPE (op0))
+ && TREE_CODE (op0) != INTEGER_CST)
+ && !(POINTER_TYPE_P (TREE_TYPE (op1))
+ && TREE_CODE (op1) != INTEGER_CST))
+ set_pt_anything (ptr);
+ }
+ else if (integer_zerop (expr))
+ {
+ /* EXPR is the NULL pointer. Mark PTR as pointing to NULL. */
+ SSA_NAME_PTR_INFO (ptr)->pt_null = 1;
+ }
+ else
+ {
+ /* If we can't recognize the expression, assume that PTR may
+ point anywhere. */
+ set_pt_anything (ptr);
}
}
/* If VALUE is of the form &DECL, add DECL to the set of variables
pointed-to by PTR. Otherwise, add VALUE as a pointed-to expression by
- PTR. AI is as in collect_points_to_info. */
+ PTR. AI points to the collected alias information. */
static void
add_pointed_to_var (struct alias_info *ai, tree ptr, tree value)
{
struct ptr_info_def *pi = get_ptr_info (ptr);
- tree pt_var;
+ tree pt_var = NULL_TREE;
+ HOST_WIDE_INT offset, size;
+ tree addrop;
size_t uid;
+ tree ref;
+ subvar_t svars;
gcc_assert (TREE_CODE (value) == ADDR_EXPR);
- pt_var = TREE_OPERAND (value, 0);
- if (TREE_CODE_CLASS (TREE_CODE (pt_var)) == 'r')
- pt_var = get_base_address (pt_var);
+ addrop = TREE_OPERAND (value, 0);
+ if (REFERENCE_CLASS_P (addrop))
+ pt_var = get_base_address (addrop);
+ else
+ pt_var = addrop;
+
+ /* If this is a COMPONENT_REF, see if we can get a smaller number of
+ variables to take the address of. */
+ if (TREE_CODE (addrop) == COMPONENT_REF
+ && (ref = okay_component_ref_for_subvars (addrop, &offset, &size)))
+ {
+ subvar_t sv;
+ svars = get_subvars_for_var (ref);
- if (pt_var && SSA_VAR_P (pt_var))
- {
uid = var_ann (pt_var)->uid;
- bitmap_set_bit (ai->addresses_needed, uid);
+
+ if (pi->pt_vars == NULL)
+ pi->pt_vars = BITMAP_GGC_ALLOC ();
+ /* If the variable is a global, mark the pointer as pointing to
+ global memory (which will make its tag a global variable). */
+ if (is_global_var (pt_var))
+ pi->pt_global_mem = 1;
+ for (sv = svars; sv; sv = sv->next)
+ {
+ if (overlap_subvar (offset, size, sv, NULL))
+ {
+ bitmap_set_bit (pi->pt_vars, var_ann (sv->var)->uid);
+ bitmap_set_bit (ai->addresses_needed, var_ann (sv->var)->uid);
+ }
+ }
+ }
+ else if (pt_var && SSA_VAR_P (pt_var))
+ {
+
+ uid = var_ann (pt_var)->uid;
+
if (pi->pt_vars == NULL)
pi->pt_vars = BITMAP_GGC_ALLOC ();
- bitmap_set_bit (pi->pt_vars, uid);
+
+ /* If this is an aggregate, we may have subvariables for it that need
+ to be pointed to. */
+ if (var_can_have_subvars (pt_var)
+ && (svars = get_subvars_for_var (pt_var)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ {
+ uid = var_ann (sv->var)->uid;
+ bitmap_set_bit (ai->addresses_needed, uid);
+ bitmap_set_bit (pi->pt_vars, uid);
+ }
+ }
+ else
+ {
+ bitmap_set_bit (ai->addresses_needed, uid);
+ bitmap_set_bit (pi->pt_vars, uid);
+ }
/* If the variable is a global, mark the pointer as pointing to
global memory (which will make its tag a global variable). */
switch (TREE_CODE (stmt))
{
+ case RETURN_EXPR:
+ gcc_assert (TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR);
+ stmt = TREE_OPERAND (stmt, 0);
+ /* FALLTHRU */
+
case MODIFY_EXPR:
{
tree rhs = TREE_OPERAND (stmt, 1);
STRIP_NOPS (rhs);
-
- /* Found P_i = ADDR_EXPR */
- if (TREE_CODE (rhs) == ADDR_EXPR)
- add_pointed_to_var (ai, var, rhs);
-
- /* Found P_i = Q_j. */
- else if (TREE_CODE (rhs) == SSA_NAME
- && POINTER_TYPE_P (TREE_TYPE (rhs)))
- merge_pointed_to_info (ai, var, rhs);
-
- /* Found P_i = PLUS_EXPR or P_i = MINUS_EXPR */
- else if (TREE_CODE (rhs) == PLUS_EXPR
- || TREE_CODE (rhs) == MINUS_EXPR)
- {
- tree op0 = TREE_OPERAND (rhs, 0);
- tree op1 = TREE_OPERAND (rhs, 1);
-
- /* Both operands may be of pointer type. FIXME: Shouldn't
- we just expect PTR + OFFSET always? */
- if (POINTER_TYPE_P (TREE_TYPE (op0)))
- {
- if (TREE_CODE (op0) == SSA_NAME)
- merge_pointed_to_info (ai, var, op0);
- else if (TREE_CODE (op0) == ADDR_EXPR)
- add_pointed_to_var (ai, var, op0);
- else
- add_pointed_to_expr (var, op0);
- }
-
- if (POINTER_TYPE_P (TREE_TYPE (op1)))
- {
- if (TREE_CODE (op1) == SSA_NAME)
- merge_pointed_to_info (ai, var, op1);
- else if (TREE_CODE (op1) == ADDR_EXPR)
- add_pointed_to_var (ai, var, op1);
- else
- add_pointed_to_expr (var, op1);
- }
-
- /* Neither operand is a pointer? VAR can be pointing
- anywhere. FIXME: Is this right? If we get here, we
- found PTR = INT_CST + INT_CST. */
- if (!POINTER_TYPE_P (TREE_TYPE (op0))
- && !POINTER_TYPE_P (TREE_TYPE (op1)))
- add_pointed_to_expr (var, rhs);
- }
-
- /* Something else. */
- else
- add_pointed_to_expr (var, rhs);
+ add_pointed_to_expr (ai, var, rhs);
break;
}
+
case ASM_EXPR:
/* Pointers defined by __asm__ statements can point anywhere. */
set_pt_anything (var);
tree decl = SSA_NAME_VAR (var);
if (TREE_CODE (decl) == PARM_DECL)
- add_pointed_to_expr (var, decl);
+ add_pointed_to_expr (ai, var, decl);
else if (DECL_INITIAL (decl))
- add_pointed_to_var (ai, var, DECL_INITIAL (decl));
+ add_pointed_to_expr (ai, var, DECL_INITIAL (decl));
else
- add_pointed_to_expr (var, decl);
+ add_pointed_to_expr (ai, var, decl);
}
break;
+
case PHI_NODE:
{
/* If STMT is a PHI node, then VAR is one of its arguments. The
break;
case SSA_NAME:
- merge_pointed_to_info (ai, lhs, var);
+ /* Avoid unnecessary merges. */
+ if (lhs != var)
+ merge_pointed_to_info (ai, lhs, var);
break;
default:
gcc_assert (is_gimple_min_invariant (var));
- add_pointed_to_expr (lhs, var);
+ add_pointed_to_expr (ai, lhs, var);
break;
}
break;
}
+
default:
gcc_unreachable ();
}
3- STMT is an assignment to a non-local variable, or
4- STMT is a return statement.
- If NUM_CALLS_P is not NULL, the counter is incremented if STMT contains
- a function call. */
+ AI points to the alias information collected so far. */
static bool
-is_escape_site (tree stmt, size_t *num_calls_p)
+is_escape_site (tree stmt, struct alias_info *ai)
{
- if (get_call_expr_in (stmt) != NULL_TREE)
+ tree call = get_call_expr_in (stmt);
+ if (call != NULL_TREE)
{
- if (num_calls_p)
- (*num_calls_p)++;
+ ai->num_calls_found++;
+
+ if (!TREE_SIDE_EFFECTS (call))
+ ai->num_pure_const_calls_found++;
return true;
}
if (lhs == NULL_TREE)
return true;
+ /* If the RHS is a conversion between a pointer and an integer, the
+ pointer escapes since we can't track the integer (e.g., in
+ 'i_1 = (long) p_2' the value of p_2 escapes). */
+ if ((TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
+ || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
+ || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
+ && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
+ (TREE_OPERAND (stmt, 1), 0)))
+ && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
+ return true;
+
/* If the LHS is an SSA name, it can't possibly represent a non-local
memory store. */
if (TREE_CODE (lhs) == SSA_NAME)
determine whether they should be considered globals. */
DECL_CONTEXT (tag) = current_function_decl;
- /* If the pointed-to type is volatile, so is the tag. */
- TREE_THIS_VOLATILE (tag) = TREE_THIS_VOLATILE (type);
-
/* Memory tags are by definition addressable. This also prevents
is_gimple_ref from confusing memory tags with optimizable
variables. */
for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
{
struct alias_map_d *curr = ai->pointers[i];
- if (tag_set == curr->set)
+ tree curr_tag = var_ann (curr->var)->type_mem_tag;
+ if (tag_set == curr->set
+ && TYPE_READONLY (tag_type) == TYPE_READONLY (TREE_TYPE (curr_tag)))
{
- tag = var_ann (curr->var)->type_mem_tag;
+ tag = curr_tag;
break;
}
}
ai->pointers[ai->num_pointers++] = alias_map;
}
+ /* If the pointed-to type is volatile, so is the tag. */
+ TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
+
/* Make sure that the type tag has the same alias set as the
pointed-to type. */
gcc_assert (tag_set == get_alias_set (tag));
+ /* If PTR's pointed-to type is read-only, then TAG's type must also
+ be read-only. */
+ gcc_assert (TYPE_READONLY (tag_type) == TYPE_READONLY (TREE_TYPE (tag)));
+
return tag;
}
create_global_var (void)
{
global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
- size_type_node);
+ void_type_node);
DECL_ARTIFICIAL (global_var) = 1;
TREE_READONLY (global_var) = 0;
DECL_EXTERNAL (global_var) = 1;
TREE_ADDRESSABLE (global_var) = 0;
add_referenced_tmp_var (global_var);
- bitmap_set_bit (vars_to_rename, var_ann (global_var)->uid);
+ mark_sym_for_renaming (global_var);
}
for (i = 1; i < num_ssa_names; i++)
{
tree ptr = ssa_name (i);
- struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
+ struct ptr_info_def *pi;
+
+ if (ptr == NULL_TREE)
+ continue;
+
+ pi = SSA_NAME_PTR_INFO (ptr);
if (!SSA_NAME_IN_FREE_LIST (ptr)
&& pi
&& pi->name_mem_tag)
/* Return the alias information associated with pointer T. It creates a
new instance if none existed. */
-static struct ptr_info_def *
+struct ptr_info_def *
get_ptr_info (tree t)
{
struct ptr_info_def *pi;
if (pi->pt_malloc)
fprintf (file, ", points-to malloc");
+ if (pi->pt_null)
+ fprintf (file, ", points-to NULL");
+
if (pi->pt_vars)
{
unsigned ix;
+ bitmap_iterator bi;
fprintf (file, ", points-to vars: { ");
- EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix,
- {
- print_generic_expr (file, referenced_var (ix), dump_flags);
- fprintf (file, " ");
- });
+ EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
+ {
+ print_generic_expr (file, referenced_var (ix), dump_flags);
+ fprintf (file, " ");
+ }
fprintf (file, "}");
}
}
return true;
}
+
+/* Add VAR to the list of may-aliases of PTR's type tag. If PTR
+ doesn't already have a type tag, create one. */
+
+void
+add_type_alias (tree ptr, tree var)
+{
+ varray_type aliases;
+ tree tag;
+ var_ann_t ann = var_ann (ptr);
+ subvar_t svars;
+
+ if (ann->type_mem_tag == NULL_TREE)
+ {
+ size_t i;
+ tree q = NULL_TREE;
+ tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
+ HOST_WIDE_INT tag_set = get_alias_set (tag_type);
+
+ /* PTR doesn't have a type tag, create a new one and add VAR to
+ the new tag's alias set.
+
+ FIXME: This is slower than necessary. We need to determine
+ whether there is another pointer Q with the same alias set as
+ PTR. This could be sped up by having type tags associated
+ with types. */
+ for (i = 0; i < num_referenced_vars; i++)
+ {
+ q = referenced_var (i);
+
+ if (POINTER_TYPE_P (TREE_TYPE (q))
+ && tag_set == get_alias_set (TREE_TYPE (TREE_TYPE (q))))
+ {
+ /* Found another pointer Q with the same alias set as
+ PTR's pointed-to type. If Q has a type tag, use
+ it. Otherwise, create a new memory tag for PTR. */
+ var_ann_t ann1 = var_ann (q);
+ if (ann1->type_mem_tag)
+ ann->type_mem_tag = ann1->type_mem_tag;
+ else
+ ann->type_mem_tag = create_memory_tag (tag_type, true);
+ goto found_tag;
+ }
+ }
+
+ /* Couldn't find any other pointer with a type tag we could use.
+ Create a new memory tag for PTR. */
+ ann->type_mem_tag = create_memory_tag (tag_type, true);
+ }
+
+found_tag:
+ /* If VAR is not already PTR's type tag, add it to the may-alias set
+ for PTR's type tag. */
+ gcc_assert (var_ann (var)->mem_tag_kind == NOT_A_TAG);
+ tag = ann->type_mem_tag;
+
+ /* If VAR has subvars, add the subvars to the tag instead of the
+ actual var. */
+ if (var_can_have_subvars (var)
+ && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ add_may_alias (tag, sv->var);
+ }
+ else
+ add_may_alias (tag, var);
+
+ /* TAG and its set of aliases need to be marked for renaming. */
+ mark_sym_for_renaming (tag);
+ if ((aliases = var_ann (tag)->may_aliases) != NULL)
+ {
+ size_t i;
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
+ mark_sym_for_renaming (VARRAY_TREE (aliases, i));
+ }
+
+ /* If we had grouped aliases, VAR may have aliases of its own. Mark
+ them for renaming as well. Other statements referencing the
+ aliases of VAR will need to be updated. */
+ if ((aliases = var_ann (var)->may_aliases) != NULL)
+ {
+ size_t i;
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
+ mark_sym_for_renaming (VARRAY_TREE (aliases, i));
+ }
+}
+
+
+/* This structure is simply used while pushing fields onto the fieldstack
+ to track the offset of each field, since bitpos_of_field gives it relative
+ to the immediate containing type, and we want it relative to the ultimate
+ containing object. */
+
+typedef struct fieldoff
+{
+ tree field;
+ HOST_WIDE_INT offset;
+} fieldoff_s;
+
+DEF_VEC_O (fieldoff_s);
+DEF_VEC_ALLOC_O(fieldoff_s,heap);
+
+/* Return the position, in bits, of FDECL from the beginning of its
+ structure.
+ Return -1 if the position is conditional or otherwise not a
+ constant integer. */
+
+static HOST_WIDE_INT
+bitpos_of_field (const tree fdecl)
+{
+ if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
+ || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
+ return -1;
+
+ return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
+ + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
+}
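+
+/* For instance, for 'struct S { int a; int b; };' on a target where
+   'int' is 32 bits wide, this should return 0 for the FIELD_DECL of
+   'a' and 32 for that of 'b'.  */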
+
+/* Given a TYPE and a vector of field offsets FIELDSTACK, push all the fields
+ of TYPE onto FIELDSTACK, recording their offsets along the way.
+ OFFSET is used to keep track of the offset in this entire structure, rather
+ than just the immediately containing structure. Returns the number
+ of fields pushed. */
+
+static int
+push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
+ HOST_WIDE_INT offset)
+{
+ tree field;
+ int count = 0;
+
+ for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ if (TREE_CODE (field) == FIELD_DECL)
+ {
+ bool push = false;
+
+ if (!var_can_have_subvars (field))
+ push = true;
+ else if (!(push_fields_onto_fieldstack
+ (TREE_TYPE (field), fieldstack,
+ offset + bitpos_of_field (field)))
+ && DECL_SIZE (field)
+ && !integer_zerop (DECL_SIZE (field)))
+ /* Empty structures may have actual size, like in C++. So,
+ if we did not push any subfields and the size is
+ nonzero, push the field itself onto the stack. */
+ push = true;
+
+ if (push)
+ {
+ fieldoff_s *pair;
+
+ pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
+ pair->field = field;
+ pair->offset = offset + bitpos_of_field (field);
+ count++;
+ }
+ }
+ return count;
+}
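+
+/* For instance, assuming a 32-bit 'int', for
+
+     struct A { int x; struct { int y; int z; } in; };
+
+   the fields pushed are 'x' at offset 0, 'y' at offset 32 and 'z' at
+   offset 64; the inner structure itself is not pushed because its
+   subfields were.  */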
+
+
+/* This represents the used range of a variable. */
+
+typedef struct used_part
+{
+ HOST_WIDE_INT minused;
+ HOST_WIDE_INT maxused;
+ /* True if we have an explicit use/def of some portion of this variable,
+ even if it is all of it, e.g., 'a.b = 5' or 'temp = a.b'. */
+ bool explicit_uses;
+ /* True if we have an implicit use/def of some portion of this
+ variable. Implicit uses occur when we can't tell what part we
+ are referencing, and have to make conservative assumptions. */
+ bool implicit_uses;
+} *used_part_t;
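+
+/* For instance, 'temp = a.b' records an explicit use covering just
+   the bits of field 'b', whereas passing all of 'a' to a function
+   records an implicit use of every bit of 'a'.  */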
+
+/* An array of used_part structures, indexed by variable uid. */
+
+static used_part_t *used_portions;
+
+/* Given a variable uid, UID, get or create the entry in the used portions
+ table for the variable. */
+
+static used_part_t
+get_or_create_used_part_for (size_t uid)
+{
+ used_part_t up;
+ if (used_portions[uid] == NULL)
+ {
+ up = xcalloc (1, sizeof (struct used_part));
+ up->minused = INT_MAX;
+ up->maxused = 0;
+ up->explicit_uses = false;
+ up->implicit_uses = false;
+ }
+ else
+ up = used_portions[uid];
+ return up;
+}
+
+/* qsort comparison function for two fieldoff_s structures PA and PB. */
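+
+/* Sorting with this comparator orders the fields by increasing offset
+   and, at equal offsets, by increasing size.  Duplicate entries (same
+   offset, size and type) thus become adjacent, which lets
+   create_overlap_variables_for skip them by remembering just the
+   previous field.  */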
+
+static int
+fieldoff_compare (const void *pa, const void *pb)
+{
+ const fieldoff_s *foa = (const fieldoff_s *)pa;
+ const fieldoff_s *fob = (const fieldoff_s *)pb;
+ HOST_WIDE_INT foasize, fobsize;
+
+ if (foa->offset != fob->offset)
+ return foa->offset - fob->offset;
+
+ foasize = TREE_INT_CST_LOW (DECL_SIZE (foa->field));
+ fobsize = TREE_INT_CST_LOW (DECL_SIZE (fob->field));
+ return foasize - fobsize;
+}
+
+/* Given an aggregate VAR, create the subvariables that represent its
+ fields. */
+
+static void
+create_overlap_variables_for (tree var)
+{
+ VEC(fieldoff_s,heap) *fieldstack = NULL;
+ used_part_t up;
+ size_t uid = var_ann (var)->uid;
+
+ if (used_portions[uid] == NULL)
+ return;
+
+ up = used_portions[uid];
+ push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0);
+ if (VEC_length (fieldoff_s, fieldstack) != 0)
+ {
+ subvar_t *subvars;
+ fieldoff_s *fo;
+ bool notokay = false;
+ int fieldcount = 0;
+ int i;
+ HOST_WIDE_INT lastfooffset = -1;
+ HOST_WIDE_INT lastfosize = -1;
+ tree lastfotype = NULL_TREE;
+
+ /* Not all fields have DECL_SIZE set; for those that don't, we
+ don't know their size and thus can't handle them.
+ The same is true of fields whose DECL_SIZE is not an integer
+ constant (such as variable-sized fields).
+ Fields whose offset is not constant will have an offset < 0.
+ We *could* handle fields that are constant-sized arrays, but
+ currently don't. Doing so would require some extra changes to
+ tree-ssa-operands.c. */
+
+ for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
+ {
+ if (!DECL_SIZE (fo->field)
+ || TREE_CODE (DECL_SIZE (fo->field)) != INTEGER_CST
+ || TREE_CODE (TREE_TYPE (fo->field)) == ARRAY_TYPE
+ || fo->offset < 0)
+ {
+ notokay = true;
+ break;
+ }
+ fieldcount++;
+ }
+
+ /* The current heuristic we use is as follows:
+ If the variable has no used portions in this function, no
+ structure vars are created for it.
+ Otherwise,
+ If the variable has fewer than SALIAS_MAX_IMPLICIT_FIELDS fields,
+ we always create structure vars for it.
+ If the variable has SALIAS_MAX_IMPLICIT_FIELDS fields or more,
+ and some explicit uses, we create structure vars for it.
+ If the variable has SALIAS_MAX_IMPLICIT_FIELDS fields or more,
+ and no explicit uses, we do not create structure vars for it. */
+
+ if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
+ && !up->explicit_uses)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Variable ");
+ print_generic_expr (dump_file, var, 0);
+ fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
+ }
+ notokay = true;
+ }
+
+ /* Bail out, if we can't create overlap variables. */
+ if (notokay)
+ {
+ VEC_free (fieldoff_s, heap, fieldstack);
+ return;
+ }
+
+ /* Otherwise, create the variables. */
+ subvars = lookup_subvars_for_var (var);
+
+ qsort (VEC_address (fieldoff_s, fieldstack),
+ VEC_length (fieldoff_s, fieldstack),
+ sizeof (fieldoff_s),
+ fieldoff_compare);
+
+ for (i = VEC_length (fieldoff_s, fieldstack);
+ VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
+ {
+ subvar_t sv;
+ HOST_WIDE_INT fosize;
+ var_ann_t ann;
+ tree currfotype;
+
+ fosize = TREE_INT_CST_LOW (DECL_SIZE (fo->field));
+ currfotype = TREE_TYPE (fo->field);
+
+ /* If this field isn't in the used portion,
+ or it has the exact same offset, size and type as the last
+ field, skip it. */
+
+ if (((fo->offset <= up->minused
+ && fo->offset + fosize <= up->minused)
+ || fo->offset >= up->maxused)
+ || (fo->offset == lastfooffset
+ && fosize == lastfosize
+ && currfotype == lastfotype))
+ continue;
+ sv = ggc_alloc (sizeof (struct subvar));
+ sv->offset = fo->offset;
+ sv->size = fosize;
+ sv->next = *subvars;
+ sv->var = create_tmp_var_raw (TREE_TYPE (fo->field), "SFT");
+ if (dump_file)
+ {
+ fprintf (dump_file, "structure field tag %s created for var %s",
+ get_name (sv->var), get_name (var));
+ fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
+ sv->offset);
+ fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
+ sv->size);
+ fprintf (dump_file, "\n");
+ }
+
+ /* We need to copy the various flags from VAR to SV->VAR, so that
+ SV->VAR is considered a global variable iff the original one was. */
+
+ DECL_EXTERNAL (sv->var) = DECL_EXTERNAL (var);
+ TREE_PUBLIC (sv->var) = TREE_PUBLIC (var);
+ TREE_STATIC (sv->var) = TREE_STATIC (var);
+ TREE_READONLY (sv->var) = TREE_READONLY (var);
+
+ /* Like other memory tags, these need to be marked addressable to
+ keep is_gimple_reg from thinking they are real. */
+ TREE_ADDRESSABLE (sv->var) = 1;
+
+ DECL_CONTEXT (sv->var) = DECL_CONTEXT (var);
+
+ ann = get_var_ann (sv->var);
+ ann->mem_tag_kind = STRUCT_FIELD;
+ ann->type_mem_tag = NULL;
+ add_referenced_tmp_var (sv->var);
+
+ lastfotype = currfotype;
+ lastfooffset = fo->offset;
+ lastfosize = fosize;
+ *subvars = sv;
+ }
+
+ /* Once we have created subvars, the original is no longer call
+ clobbered on its own. Its call clobbered status depends
+ completely on the call clobbered status of the subvars.
+
+ add_referenced_tmp_var in the above loop will take care of
+ marking subvars of global variables as call clobbered for us
+ to start, since they are global as well. */
+ clear_call_clobbered (var);
+ }
+
+ VEC_free (fieldoff_s, heap, fieldstack);
+}
+
+
+/* Find the conservative answer to the question of what portions of what
+ structures are used by this statement. We assume that if we have a
+ component ref with a known size + offset, that we only need that part
+ of the structure. For unknown cases, or cases where we do something
+ to the whole structure, we assume we need to create fields for the
+ entire structure. */
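+
+/* For instance, given 'x.a = foo (x)' for a structure 'x', this
+   should record an explicit use of the bits of field 'a' and an
+   implicit use of all of 'x'.  */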
+
+static tree
+find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
+{
+ switch (TREE_CODE (*tp))
+ {
+ case COMPONENT_REF:
+ {
+ HOST_WIDE_INT bitsize;
+ HOST_WIDE_INT bitpos;
+ tree offset;
+ enum machine_mode mode;
+ int unsignedp;
+ int volatilep;
+ tree ref;
+ ref = get_inner_reference (*tp, &bitsize, &bitpos, &offset, &mode,
+ &unsignedp, &volatilep, false);
+ if (DECL_P (ref) && offset == NULL && bitsize != -1)
+ {
+ size_t uid = var_ann (ref)->uid;
+ used_part_t up;
+
+ up = get_or_create_used_part_for (uid);
+
+ if (bitpos <= up->minused)
+ up->minused = bitpos;
+ if (bitpos + bitsize >= up->maxused)
+ up->maxused = bitpos + bitsize;
+
+ up->explicit_uses = true;
+ used_portions[uid] = up;
+
+ *walk_subtrees = 0;
+ return NULL_TREE;
+ }
+ else if (DECL_P (ref))
+ {
+ if (DECL_SIZE (ref)
+ && var_can_have_subvars (ref)
+ && TREE_CODE (DECL_SIZE (ref)) == INTEGER_CST)
+ {
+ used_part_t up;
+ size_t uid = var_ann (ref)->uid;
+
+ up = get_or_create_used_part_for (uid);
+
+ up->minused = 0;
+ up->maxused = TREE_INT_CST_LOW (DECL_SIZE (ref));
+
+ up->implicit_uses = true;
+
+ used_portions[uid] = up;
+
+ *walk_subtrees = 0;
+ return NULL_TREE;
+ }
+ }
+ }
+ break;
+ case VAR_DECL:
+ case PARM_DECL:
+ {
+ tree var = *tp;
+ if (DECL_SIZE (var)
+ && var_can_have_subvars (var)
+ && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
+ {
+ used_part_t up;
+ size_t uid = var_ann (var)->uid;
+
+ up = get_or_create_used_part_for (uid);
+
+ up->minused = 0;
+ up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
+ up->implicit_uses = true;
+
+ used_portions[uid] = up;
+ *walk_subtrees = 0;
+ return NULL_TREE;
+ }
+ }
+ break;
+
+ default:
+ break;
+
+ }
+ return NULL_TREE;
+}
+
+/* We are about to create some new referenced variables, and we need
+ to remember how many referenced variables there were before. */
+
+static size_t old_referenced_vars;
+
+
+/* Create structure field variables for structures used in this function. */
+
+static void
+create_structure_vars (void)
+{
+ basic_block bb;
+ size_t i;
+
+ old_referenced_vars = num_referenced_vars;
+ used_portions = xcalloc (num_referenced_vars, sizeof (used_part_t));
+
+ FOR_EACH_BB (bb)
+ {
+ block_stmt_iterator bsi;
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
+ find_used_portions,
+ NULL);
+ }
+ }
+ for (i = 0; i < old_referenced_vars; i++)
+ {
+ tree var = referenced_var (i);
+ /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
+ if (var
+ && DECL_SIZE (var)
+ && var_can_have_subvars (var)
+ && var_ann (var)->mem_tag_kind == NOT_A_TAG
+ && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
+ create_overlap_variables_for (var);
+ }
+ for (i = 0; i < old_referenced_vars; i++)
+ free (used_portions[i]);
+
+ free (used_portions);
+}
+
+static bool
+gate_structure_vars (void)
+{
+ return flag_tree_salias != 0;
+}
+
+struct tree_opt_pass pass_create_structure_vars =
+{
+ "salias", /* name */
+ gate_structure_vars, /* gate */
+ create_structure_vars, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ PROP_cfg, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
+};