/* Tree based points-to analysis
- Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
+ Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
Contributed by Daniel Berlin <dberlin@dberlin.org>
This file is part of GCC.
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
-#include "errors.h"
-#include "diagnostic.h"
#include "tree.h"
-#include "c-common.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "varray.h"
-#include "c-tree.h"
+#include "diagnostic.h"
+#include "toplev.h"
#include "gimple.h"
#include "hashtab.h"
#include "function.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "params.h"
-#include "tree-ssa-structalias.h"
#include "cgraph.h"
#include "alias.h"
#include "pointer-set.h"
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
+struct constraint;
+typedef struct constraint *constraint_t;
+
DEF_VEC_P(constraint_t);
DEF_VEC_ALLOC_P(constraint_t,heap);
/* True if this is a variable created by the constraint analysis, such as
heap variables and constraints we had to break up. */
- unsigned int is_artificial_var:1;
+ unsigned int is_artificial_var : 1;
/* True if this is a special variable whose solution set should not be
changed. */
- unsigned int is_special_var:1;
+ unsigned int is_special_var : 1;
/* True for variables whose size is not known or variable. */
- unsigned int is_unknown_size_var:1;
+ unsigned int is_unknown_size_var : 1;
/* True for (sub-)fields that represent a whole variable. */
unsigned int is_full_var : 1;
/* True if this is a heap variable. */
- unsigned int is_heap_var:1;
+ unsigned int is_heap_var : 1;
+
+ /* True if this is a variable tracking a restrict pointer source. */
+ unsigned int is_restrict_var : 1;
- /* True if we may not use TBAA to prune references to this
- variable. This is used for C++ placement new. */
- unsigned int no_tbaa_pruning : 1;
+ /* True if this field may contain pointers. */
+ unsigned int may_have_pointers : 1;
- /* Variable id this was collapsed to due to type unsafety. Zero if
- this variable was not collapsed. This should be unused completely
- after build_succ_graph, or something is broken. */
- unsigned int collapsed_to;
+ /* True if this represents a global variable. */
+ unsigned int is_global_var : 1;
/* A link to the variable for the next field in this structure. */
struct variable_info *next;
typedef struct variable_info *varinfo_t;
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
+static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
+ unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
/* Pool of variable info structures. */
return VEC_index (varinfo_t, varmap, n);
}
-/* Return the varmap element N, following the collapsed_to link. */
-
-static inline varinfo_t
-get_varinfo_fc (unsigned int n)
-{
- varinfo_t v = VEC_index (varinfo_t, varmap, n);
-
- if (v->collapsed_to != 0)
- return get_varinfo (v->collapsed_to);
- return v;
-}
-
/* Static IDs for the special variables. */
enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
- escaped_id = 3, nonlocal_id = 4, callused_id = 5, integer_id = 6 };
-
-/* Variable that represents the unknown pointer. */
-static varinfo_t var_anything;
-static tree anything_tree;
-
-/* Variable that represents the NULL pointer. */
-static varinfo_t var_nothing;
-static tree nothing_tree;
+ escaped_id = 3, nonlocal_id = 4, callused_id = 5,
+ storedanything_id = 6, integer_id = 7 };
-/* Variable that represents read only memory. */
-static varinfo_t var_readonly;
-static tree readonly_tree;
-
-/* Variable that represents escaped memory. */
-static varinfo_t var_escaped;
-static tree escaped_tree;
-
-/* Variable that represents nonlocal memory. */
-static varinfo_t var_nonlocal;
-static tree nonlocal_tree;
+struct GTY(()) heapvar_map {
+ struct tree_map map;
+ unsigned HOST_WIDE_INT offset;
+};
-/* Variable that represents call-used memory. */
-static varinfo_t var_callused;
-static tree callused_tree;
+/* Equality function for heapvar_map entries: two entries are equal
+ when both the source tree and the access offset match. */
+
+static int
+heapvar_map_eq (const void *p1, const void *p2)
+{
+ const struct heapvar_map *h1 = (const struct heapvar_map *)p1;
+ const struct heapvar_map *h2 = (const struct heapvar_map *)p2;
+ return (h1->map.base.from == h2->map.base.from
+ && h1->offset == h2->offset);
+}
-/* Variable that represents integers. This is used for when people do things
- like &0->a.b. */
-static varinfo_t var_integer;
-static tree integer_tree;
+/* Hash function for heapvar_map entry H, mixing the offset into the
+ pointer hash of the source tree. Must stay consistent with
+ heapvar_map_eq. */
+
+static unsigned int
+heapvar_map_hash (struct heapvar_map *h)
+{
+ return iterative_hash_host_wide_int (h->offset,
+ htab_hash_pointer (h->map.base.from));
+}
/* Lookup a heap var for FROM, and return it if we find one. */
static tree
-heapvar_lookup (tree from)
+heapvar_lookup (tree from, unsigned HOST_WIDE_INT offset)
{
- struct tree_map *h, in;
- in.base.from = from;
-
- h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
- htab_hash_pointer (from));
+ struct heapvar_map *h, in;
+ in.map.base.from = from;
+ in.offset = offset;
+ h = (struct heapvar_map *) htab_find_with_hash (heapvar_for_stmt, &in,
+ heapvar_map_hash (&in));
if (h)
- return h->to;
+ return h->map.to;
return NULL_TREE;
}
hashtable. */
static void
-heapvar_insert (tree from, tree to)
+heapvar_insert (tree from, unsigned HOST_WIDE_INT offset, tree to)
{
- struct tree_map *h;
+ struct heapvar_map *h;
void **loc;
- h = GGC_NEW (struct tree_map);
- h->hash = htab_hash_pointer (from);
- h->base.from = from;
- h->to = to;
- loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
- *(struct tree_map **) loc = h;
+ h = GGC_NEW (struct heapvar_map);
+ h->map.base.from = from;
+ h->offset = offset;
+ h->map.hash = heapvar_map_hash (h);
+ h->map.to = to;
+ loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->map.hash, INSERT);
+ gcc_assert (*loc == NULL);
+ *(struct heapvar_map **) loc = h;
}
/* Return a new variable info structure consisting for a variable
- named NAME, and using constraint graph node NODE. */
+ named NAME, and using constraint graph node NODE. Append it
+ to the vector of variable info structures. */
static varinfo_t
-new_var_info (tree t, unsigned int id, const char *name)
+new_var_info (tree t, const char *name)
{
+ unsigned index = VEC_length (varinfo_t, varmap);
varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
- tree var;
- ret->id = id;
+ ret->id = index;
ret->name = name;
ret->decl = t;
- ret->is_artificial_var = false;
- ret->is_heap_var = false;
+ /* Vars without decl are artificial and do not have sub-variables. */
+ ret->is_artificial_var = (t == NULL_TREE);
ret->is_special_var = false;
ret->is_unknown_size_var = false;
- ret->is_full_var = false;
- var = t;
- if (TREE_CODE (var) == SSA_NAME)
- var = SSA_NAME_VAR (var);
- ret->no_tbaa_pruning = (DECL_P (var)
- && POINTER_TYPE_P (TREE_TYPE (var))
- && DECL_NO_TBAA_P (var));
+ ret->is_full_var = (t == NULL_TREE);
+ ret->is_heap_var = false;
+ ret->is_restrict_var = false;
+ ret->may_have_pointers = true;
+ ret->is_global_var = (t == NULL_TREE);
+ if (t && DECL_P (t))
+ ret->is_global_var = is_global_var (t);
ret->solution = BITMAP_ALLOC (&pta_obstack);
ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
ret->next = NULL;
- ret->collapsed_to = 0;
+
+ VEC_safe_push (varinfo_t, heap, varmap, ret);
+
return ret;
}
IOW, in a deref constraint, we would deref, get the result set,
then add OFFSET to each member. */
- unsigned HOST_WIDE_INT offset;
+ HOST_WIDE_INT offset;
};
+/* Use 0x8000... as special unknown offset. */
+#define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
+
typedef struct constraint_expr ce_s;
DEF_VEC_O(ce_s);
DEF_VEC_ALLOC_O(ce_s, heap);
taken. Used for variable substitution. */
bitmap address_taken;
- /* True if points_to bitmap for this node is stored in the hash
- table. */
- sbitmap pt_used;
-
- /* Number of incoming edges remaining to be processed by pointer
- equivalence.
- Used for variable substitution. */
- unsigned int *number_incoming;
-
-
/* Vector of complex constraints for each graph node. Complex
constraints are those involving dereferences or offsets that are
not 0. */
/* Print out constraint C to FILE. */
-void
+static void
dump_constraint (FILE *file, constraint_t c)
{
if (c->lhs.type == ADDRESSOF)
fprintf (file, "&");
else if (c->lhs.type == DEREF)
fprintf (file, "*");
- fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
- if (c->lhs.offset != 0)
+ fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
+ if (c->lhs.offset == UNKNOWN_OFFSET)
+ fprintf (file, " + UNKNOWN");
+ else if (c->lhs.offset != 0)
fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
fprintf (file, " = ");
if (c->rhs.type == ADDRESSOF)
fprintf (file, "&");
else if (c->rhs.type == DEREF)
fprintf (file, "*");
- fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
- if (c->rhs.offset != 0)
+ fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
+ if (c->rhs.offset == UNKNOWN_OFFSET)
+ fprintf (file, " + UNKNOWN");
+ else if (c->rhs.offset != 0)
fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
fprintf (file, "\n");
}
+
+void debug_constraint (constraint_t);
+void debug_constraints (void);
+void debug_constraint_graph (void);
+void debug_solution_for_var (unsigned int);
+void debug_sa_points_to_info (void);
+
/* Print out constraint C to stderr. */
void
/* Print out all constraints to FILE */
-void
+static void
dump_constraints (FILE *file)
{
int i;
complex with an offset, e.g: a = b + 8, then the label is "+".
Otherwise the edge has no label. */
-void
+static void
dump_constraint_edge (FILE *file, constraint_t c)
{
if (c->rhs.type != ADDRESSOF)
{
- const char *src = get_varinfo_fc (c->rhs.var)->name;
- const char *dst = get_varinfo_fc (c->lhs.var)->name;
+ const char *src = get_varinfo (c->rhs.var)->name;
+ const char *dst = get_varinfo (c->lhs.var)->name;
fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
/* Due to preprocessing of constraints, instructions like *a = *b are
illegal; thus, we do not have to handle such cases. */
/* Print the constraint graph in dot format. */
-void
+static void
dump_constraint_graph (FILE *file)
{
unsigned int i=0, size;
size = size < graph->size ? size : graph->size;
for (i = 0; i < size; i++)
{
- const char *name = get_varinfo_fc (graph->rep[i])->name;
+ const char *name = get_varinfo (graph->rep[i])->name;
fprintf (file, " \"%s\" ;\n", name);
}
}
}
+/* Expands the solution in SET to all sub-fields of variables included.
+ Union the expanded result into RESULT. RESULT and SET may be the
+ same bitmap. */
+
+static void
+solution_set_expand (bitmap result, bitmap set)
+{
+ bitmap_iterator bi;
+ bitmap vars = NULL;
+ unsigned j;
+
+ /* In a first pass record all variables whose sub-fields we need
+ to add. This avoids quadratic behavior. */
+ EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
+ {
+ varinfo_t v = get_varinfo (j);
+ if (v->is_artificial_var
+ || v->is_full_var)
+ continue;
+ /* Map back to the head of the field list for the decl. */
+ v = lookup_vi_for_tree (v->decl);
+ if (vars == NULL)
+ vars = BITMAP_ALLOC (NULL);
+ bitmap_set_bit (vars, v->id);
+ }
+
+ /* In the second pass add all fields of each recorded variable
+ to the result. */
+ if (vars != NULL)
+ {
+ EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
+ {
+ varinfo_t v = get_varinfo (j);
+ /* Walk the whole field chain via the next links. */
+ for (; v != NULL; v = v->next)
+ bitmap_set_bit (result, v->id);
+ }
+ BITMAP_FREE (vars);
+ }
+}
+
/* Take a solution set SET, add OFFSET to each member of the set, and
overwrite SET with the result when done. */
static void
-solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
+solution_set_add (bitmap set, HOST_WIDE_INT offset)
{
bitmap result = BITMAP_ALLOC (&iteration_obstack);
unsigned int i;
bitmap_iterator bi;
+ /* If the offset is unknown we have to expand the solution to
+ all subfields. */
+ if (offset == UNKNOWN_OFFSET)
+ {
+ solution_set_expand (set, set);
+ return;
+ }
+
EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
{
varinfo_t vi = get_varinfo (i);
else
{
unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
- varinfo_t v = first_vi_for_offset (vi, fieldoffset);
- /* If the result is outside of the variable use the last field. */
- if (!v)
- {
- v = vi;
- while (v->next != NULL)
- v = v->next;
- }
- bitmap_set_bit (result, v->id);
+
+ /* If the offset makes the pointer point to before the
+ variable use offset zero for the field lookup. */
+ if (offset < 0
+ && fieldoffset > vi->offset)
+ fieldoffset = 0;
+
+ if (offset != 0)
+ vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
+
+ bitmap_set_bit (result, vi->id);
/* If the result is not exactly at fieldoffset include the next
field as well. See get_constraint_for_ptr_offset for more
rationale. */
- if (v->offset != fieldoffset
- && v->next != NULL)
- bitmap_set_bit (result, v->next->id);
+ if (vi->offset != fieldoffset
+ && vi->next != NULL)
+ bitmap_set_bit (result, vi->next->id);
}
}
process. */
static bool
-set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
+set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
{
if (inc == 0)
return bitmap_ior_into (to, from);
graph->points_to = XCNEWVEC (bitmap, graph->size);
graph->eq_rep = XNEWVEC (int, graph->size);
graph->direct_nodes = sbitmap_alloc (graph->size);
- graph->pt_used = sbitmap_alloc (graph->size);
graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
- graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
sbitmap_zero (graph->direct_nodes);
- sbitmap_zero (graph->pt_used);
for (j = 0; j < FIRST_REF_NODE; j++)
{
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
- unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
- unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
+ unsigned int lhsvar = lhs.var;
+ unsigned int rhsvar = rhs.var;
if (lhs.type == DEREF)
{
}
else if (rhs.type == ADDRESSOF)
{
+ varinfo_t v;
+
/* x = &y */
if (graph->points_to[lhsvar] == NULL)
graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
/* Implicitly, *x = y */
add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
+ /* All related variables are no longer direct nodes. */
RESET_BIT (graph->direct_nodes, rhsvar);
+ v = get_varinfo (rhsvar);
+ if (!v->is_full_var)
+ {
+ v = lookup_vi_for_tree (v->decl);
+ do
+ {
+ RESET_BIT (graph->direct_nodes, v->id);
+ v = v->next;
+ }
+ while (v != NULL);
+ }
bitmap_set_bit (graph->address_taken, rhsvar);
}
else if (lhsvar > anything_id
static void
build_succ_graph (void)
{
- int i;
+ unsigned i, t;
constraint_t c;
for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
lhs = c->lhs;
rhs = c->rhs;
- lhsvar = find (get_varinfo_fc (lhs.var)->id);
- rhsvar = find (get_varinfo_fc (rhs.var)->id);
+ lhsvar = find (lhs.var);
+ rhsvar = find (rhs.var);
if (lhs.type == DEREF)
{
else if (rhs.type == ADDRESSOF)
{
/* x = &y */
- gcc_assert (find (get_varinfo_fc (rhs.var)->id)
- == get_varinfo_fc (rhs.var)->id);
+ gcc_assert (find (rhs.var) == rhs.var);
bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
}
else if (lhsvar > anything_id
add_graph_edge (graph, lhsvar, rhsvar);
}
}
+
+ /* Add edges from STOREDANYTHING to all non-direct nodes. */
+ t = find (storedanything_id);
+ for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
+ {
+ if (!TEST_BIT (graph->direct_nodes, i))
+ add_graph_edge (graph, find (i), t);
+ }
}
merge_graph_nodes (graph, to, from);
merge_node_constraints (graph, to, from);
- if (get_varinfo (from)->no_tbaa_pruning)
- get_varinfo (to)->no_tbaa_pruning = true;
-
/* Mark TO as changed if FROM was changed. If TO was already marked
as changed, decrease the changed count. */
VEC_safe_push (unsigned, heap, ti->topo_order, n);
}
-/* Return true if variable N + OFFSET is a legal field of N. */
-
-static bool
-type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
-{
- varinfo_t ninfo = get_varinfo (n);
-
- /* For things we've globbed to single variables, any offset into the
- variable acts like the entire variable, so that it becomes offset
- 0. */
- if (ninfo->is_special_var
- || ninfo->is_artificial_var
- || ninfo->is_unknown_size_var
- || ninfo->is_full_var)
- {
- *offset = 0;
- return true;
- }
- return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
-}
-
-/* Process a constraint C that represents x = *y, using DELTA as the
- starting solution. */
+/* Process a constraint C that represents x = *(y + off), using DELTA as the
+ starting solution for y. */
static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
bitmap sol = get_varinfo (lhs)->solution;
unsigned int j;
bitmap_iterator bi;
+ HOST_WIDE_INT roffset = c->rhs.offset;
+ /* Our IL does not allow this. */
+ gcc_assert (c->lhs.offset == 0);
+
+ /* If the solution of Y contains anything it is good enough to transfer
+ this to the LHS. */
if (bitmap_bit_p (delta, anything_id))
{
flag |= bitmap_set_bit (sol, anything_id);
goto done;
}
- /* For x = *ESCAPED and x = *CALLUSED we want to compute the
- reachability set of the rhs var. As a pointer to a sub-field
- of a variable can also reach all other fields of the variable
- we simply have to expand the solution to contain all sub-fields
- if one sub-field is contained. */
- if (c->rhs.var == escaped_id
- || c->rhs.var == callused_id)
+ /* If we do not know at which offset the rhs is dereferenced, compute
+ the reachability set of DELTA, conservatively assuming it is
+ dereferenced at all valid offsets. */
+ if (roffset == UNKNOWN_OFFSET)
{
- bitmap vars = NULL;
- /* In a first pass record all variables we need to add all
- sub-fields off. This avoids quadratic behavior. */
- EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
- {
- varinfo_t v = get_varinfo (j);
- if (v->is_full_var)
- continue;
-
- v = lookup_vi_for_tree (v->decl);
- if (v->next != NULL)
- {
- if (vars == NULL)
- vars = BITMAP_ALLOC (NULL);
- bitmap_set_bit (vars, v->id);
- }
- }
- /* In the second pass now do the addition to the solution and
- to speed up solving add it to the delta as well. */
- if (vars != NULL)
- {
- EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
- {
- varinfo_t v = get_varinfo (j);
- for (; v != NULL; v = v->next)
- {
- if (bitmap_set_bit (sol, v->id))
- {
- flag = true;
- bitmap_set_bit (delta, v->id);
- }
- }
- }
- BITMAP_FREE (vars);
- }
+ solution_set_expand (delta, delta);
+ /* No further offset processing is necessary. */
+ roffset = 0;
}
/* For each variable j in delta (Sol(y)), add
an edge in the graph from j to x, and union Sol(j) into Sol(x). */
EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
{
- unsigned HOST_WIDE_INT roffset = c->rhs.offset;
- if (type_safe (j, &roffset))
- {
- varinfo_t v;
- unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
- unsigned int t;
+ varinfo_t v = get_varinfo (j);
+ HOST_WIDE_INT fieldoffset = v->offset + roffset;
+ unsigned int t;
+
+ if (v->is_full_var)
+ fieldoffset = v->offset;
+ else if (roffset != 0)
+ v = first_vi_for_offset (v, fieldoffset);
+ /* If the access is outside of the variable we can ignore it. */
+ if (!v)
+ continue;
- v = first_vi_for_offset (get_varinfo (j), fieldoffset);
- /* If the access is outside of the variable we can ignore it. */
- if (!v)
- continue;
+ do
+ {
t = find (v->id);
/* Adding edges from the special vars is pointless.
if (get_varinfo (t)->is_special_var)
flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
/* Merging the solution from ESCAPED needlessly increases
- the set. Use ESCAPED as representative instead.
- Same for CALLUSED. */
- else if (get_varinfo (t)->id == escaped_id
- || get_varinfo (t)->id == callused_id)
- flag |= bitmap_set_bit (sol, get_varinfo (t)->id);
+ the set. Use ESCAPED as representative instead. */
+ else if (v->id == escaped_id)
+ flag |= bitmap_set_bit (sol, escaped_id);
else if (add_graph_edge (graph, lhs, t))
flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
+
+ /* If the variable is not exactly at the requested offset
+ we have to include the next one. */
+ if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
+ || v->next == NULL)
+ break;
+
+ v = v->next;
+ fieldoffset = v->offset;
}
+ while (1);
}
done:
}
}
-/* Process a constraint C that represents *x = y. */
+/* Process a constraint C that represents *(x + off) = y using DELTA
+ as the starting solution for x. */
static void
do_ds_constraint (constraint_t c, bitmap delta)
bitmap sol = get_varinfo (rhs)->solution;
unsigned int j;
bitmap_iterator bi;
+ HOST_WIDE_INT loff = c->lhs.offset;
- if (bitmap_bit_p (sol, anything_id))
- {
- EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
- {
- varinfo_t jvi = get_varinfo (j);
- unsigned int t;
- unsigned int loff = c->lhs.offset;
- unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
- varinfo_t v;
-
- v = get_varinfo (j);
- if (!v->is_full_var)
- {
- v = first_vi_for_offset (v, fieldoffset);
- /* If the access is outside of the variable we can ignore it. */
- if (!v)
- continue;
- }
- t = find (v->id);
-
- if (bitmap_set_bit (get_varinfo (t)->solution, anything_id)
- && !TEST_BIT (changed, t))
- {
- SET_BIT (changed, t);
- changed_count++;
- }
- }
- return;
- }
+ /* Our IL does not allow this. */
+ gcc_assert (c->rhs.offset == 0);
+
+ /* If the solution of y contains ANYTHING simply use the ANYTHING
+ solution. This avoids needlessly increasing the points-to sets. */
+ if (bitmap_bit_p (sol, anything_id))
+ sol = get_varinfo (find (anything_id))->solution;
+
+ /* If the solution for x contains ANYTHING we have to merge the
+ solution of y into all pointer variables which we do via
+ STOREDANYTHING. */
+ if (bitmap_bit_p (delta, anything_id))
+ {
+ unsigned t = find (storedanything_id);
+ if (add_graph_edge (graph, t, rhs))
+ {
+ if (bitmap_ior_into (get_varinfo (t)->solution, sol))
+ {
+ if (!TEST_BIT (changed, t))
+ {
+ SET_BIT (changed, t);
+ changed_count++;
+ }
+ }
+ }
+ return;
+ }
+
+ /* If we do not know at which offset the lhs is dereferenced, compute
+ the reachability set of DELTA, conservatively assuming it is
+ dereferenced at all valid offsets. */
+ if (loff == UNKNOWN_OFFSET)
+ {
+ solution_set_expand (delta, delta);
+ loff = 0;
+ }
/* For each member j of delta (Sol(x)), add an edge from y to j and
union Sol(y) into Sol(j) */
EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
{
- unsigned HOST_WIDE_INT loff = c->lhs.offset;
- if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
+ varinfo_t v = get_varinfo (j);
+ unsigned int t;
+ HOST_WIDE_INT fieldoffset = v->offset + loff;
+
+ /* If v is a global variable then this is an escape point. */
+ if (v->is_global_var)
{
- varinfo_t v;
- unsigned int t;
- unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
- bitmap tmp;
+ t = find (escaped_id);
+ if (add_graph_edge (graph, t, rhs)
+ && bitmap_ior_into (get_varinfo (t)->solution, sol)
+ && !TEST_BIT (changed, t))
+ {
+ SET_BIT (changed, t);
+ changed_count++;
+ }
+ }
- v = first_vi_for_offset (get_varinfo (j), fieldoffset);
- /* If the access is outside of the variable we can ignore it. */
- if (!v)
- continue;
- t = find (v->id);
- tmp = get_varinfo (t)->solution;
+ if (v->is_special_var)
+ continue;
- if (set_union_with_increment (tmp, sol, 0))
+ if (v->is_full_var)
+ fieldoffset = v->offset;
+ else if (loff != 0)
+ v = first_vi_for_offset (v, fieldoffset);
+ /* If the access is outside of the variable we can ignore it. */
+ if (!v)
+ continue;
+
+ do
+ {
+ if (v->may_have_pointers)
{
- get_varinfo (t)->solution = tmp;
- if (t == rhs)
- sol = get_varinfo (rhs)->solution;
- if (!TEST_BIT (changed, t))
+ t = find (v->id);
+ if (add_graph_edge (graph, t, rhs)
+ && bitmap_ior_into (get_varinfo (t)->solution, sol)
+ && !TEST_BIT (changed, t))
{
SET_BIT (changed, t);
changed_count++;
}
}
+
+ /* If the variable is not exactly at the requested offset
+ we have to include the next one. */
+ if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
+ || v->next == NULL)
+ break;
+
+ v = v->next;
+ fieldoffset = v->offset;
}
+ while (1);
}
}
typedef struct equiv_class_label
{
+ hashval_t hashcode;
unsigned int equivalence_class;
bitmap labels;
- hashval_t hashcode;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
{
const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
- return bitmap_equal_p (eql1->labels, eql2->labels);
+ return (eql1->hashcode == eql2->hashcode
+ && bitmap_equal_p (eql1->labels, eql2->labels));
}
/* Lookup a equivalence class in TABLE by the bitmap of LABELS it
bitmap_ior_into (graph->points_to[n],
graph->points_to[w]);
}
- EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
- {
- unsigned int rep = si->node_mapping[i];
- graph->number_incoming[rep]++;
- }
}
SET_BIT (si->deleted, n);
}
/* Skip unused edges */
if (w == n || graph->pointer_label[w] == 0)
- {
- graph->number_incoming[w]--;
- continue;
- }
+ continue;
+
if (graph->points_to[w])
bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
-
- /* If all incoming edges to w have been processed and
- graph->points_to[w] was not stored in the hash table, we can
- free it. */
- graph->number_incoming[w]--;
- if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
- {
- BITMAP_FREE (graph->points_to[w]);
- }
}
/* Indirect nodes get fresh variables. */
if (!TEST_BIT (graph->direct_nodes, n))
graph->points_to[n]);
if (!label)
{
- SET_BIT (graph->pt_used, n);
label = pointer_equiv_class++;
equiv_class_add (pointer_equiv_class_table,
label, graph->points_to[n]);
free (graph->loc_label);
free (graph->pointed_by);
free (graph->points_to);
- free (graph->number_incoming);
free (graph->eq_rep);
sbitmap_free (graph->direct_nodes);
- sbitmap_free (graph->pt_used);
htab_delete (pointer_equiv_class_table);
htab_delete (location_equiv_class_table);
bitmap_obstack_release (&iteration_obstack);
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
- unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
- unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
+ unsigned int lhsvar = find (lhs.var);
+ unsigned int rhsvar = find (rhs.var);
unsigned int lhsnode, rhsnode;
unsigned int lhslabel, rhslabel;
solution_empty = bitmap_empty_p (solution);
- if (!solution_empty
- /* Do not propagate the ESCAPED/CALLUSED solutions. */
- && i != escaped_id
- && i != callused_id)
+ if (!solution_empty)
{
bitmap_iterator bi;
+ unsigned eff_escaped_id = find (escaped_id);
/* Propagate solution to all successors. */
EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
if (to == i)
continue;
- flag = set_union_with_increment (tmp, pts, 0);
+ /* If we propagate from ESCAPED use ESCAPED as
+ placeholder. */
+ if (i == eff_escaped_id)
+ flag = bitmap_set_bit (tmp, escaped_id);
+ else
+ flag = set_union_with_increment (tmp, pts, 0);
if (flag)
{
return (varinfo_t) *slot;
}
-/* Get a constraint expression for a new temporary variable. */
+/* Get a scalar constraint expression for a new temporary variable. */
static struct constraint_expr
-get_constraint_exp_for_temp (tree t)
+new_scalar_tmp_constraint_exp (const char *name)
{
- struct constraint_expr cexpr;
+ struct constraint_expr tmp;
+ varinfo_t vi;
- gcc_assert (SSA_VAR_P (t));
+ vi = new_var_info (NULL_TREE, name);
+ vi->offset = 0;
+ vi->size = -1;
+ vi->fullsize = -1;
+ vi->is_full_var = 1;
- cexpr.type = SCALAR;
- cexpr.var = get_vi_for_tree (t)->id;
- cexpr.offset = 0;
+ tmp.var = vi->id;
+ tmp.type = SCALAR;
+ tmp.offset = 0;
- return cexpr;
+ return tmp;
}
/* Get a constraint expression vector from an SSA_VAR_P node.
gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
- /* ANYTHING == ANYTHING is pointless. */
- if (lhs.var == anything_id && rhs.var == anything_id)
- return;
+ /* If we didn't get any useful constraint from the lhs we get
+ &ANYTHING as fallback from get_constraint_for. Deal with
+ it here by turning it into *ANYTHING. */
+ if (lhs.type == ADDRESSOF
+ && lhs.var == anything_id)
+ lhs.type = DEREF;
+
+ /* ADDRESSOF on the lhs is invalid. */
+ gcc_assert (lhs.type != ADDRESSOF);
- /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
- else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
- {
- rhs = t->lhs;
- t->lhs = t->rhs;
- t->rhs = rhs;
- process_constraint (t);
- }
/* This can happen in our IR with things like n->a = *p */
- else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
+ if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
{
/* Split into tmp = *rhs, *lhs = tmp */
- tree rhsdecl = get_varinfo (rhs.var)->decl;
- tree pointertype = TREE_TYPE (rhsdecl);
- tree pointedtotype = TREE_TYPE (pointertype);
- tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
- struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
-
+ struct constraint_expr tmplhs;
+ tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
process_constraint (new_constraint (tmplhs, rhs));
process_constraint (new_constraint (lhs, tmplhs));
}
else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
{
/* Split into tmp = &rhs, *lhs = tmp */
- tree rhsdecl = get_varinfo (rhs.var)->decl;
- tree pointertype = TREE_TYPE (rhsdecl);
- tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
- struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
-
+ struct constraint_expr tmplhs;
+ tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
process_constraint (new_constraint (tmplhs, rhs));
process_constraint (new_constraint (lhs, tmplhs));
}
}
}
+/* Return true if T is a type that could contain pointers. */
+
+static bool
+type_could_have_pointers (tree type)
+{
+ if (POINTER_TYPE_P (type))
+ return true;
+
+ /* An array can contain pointers if its element type can. */
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ return type_could_have_pointers (TREE_TYPE (type));
+
+ /* Conservatively assume any aggregate may contain pointers. */
+ return AGGREGATE_TYPE_P (type);
+}
+
/* Return true if T is a variable of a type that could contain
pointers. */
static bool
could_have_pointers (tree t)
{
- tree type = TREE_TYPE (t);
-
- if (POINTER_TYPE_P (type)
- || AGGREGATE_TYPE_P (type)
- || TREE_CODE (type) == COMPLEX_TYPE)
- return true;
-
- return false;
+ return type_could_have_pointers (TREE_TYPE (t));
}
/* Return the position, in bits, of FIELD_DECL from the beginning of its
{
struct constraint_expr *c;
unsigned int j, n;
- unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
+ HOST_WIDE_INT rhsunitoffset, rhsoffset;
/* If we do not do field-sensitive PTA adding offsets to pointers
does not change the points-to solution. */
/* If the offset is not a non-negative integer constant that fits
in a HOST_WIDE_INT, we have to fall back to a conservative
solution which includes all sub-fields of all pointed-to
- variables of ptr.
- ??? As we do not have the ability to express this, fall back
- to anything. */
- if (!host_integerp (offset, 1))
- {
- struct constraint_expr temp;
- temp.var = anything_id;
- temp.type = SCALAR;
- temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &temp);
- return;
- }
-
- /* Make sure the bit-offset also fits. */
- rhsunitoffset = TREE_INT_CST_LOW (offset);
- rhsoffset = rhsunitoffset * BITS_PER_UNIT;
- if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
+ variables of ptr. */
+ if (offset == NULL_TREE
+ || !host_integerp (offset, 0))
+ rhsoffset = UNKNOWN_OFFSET;
+ else
{
- struct constraint_expr temp;
- temp.var = anything_id;
- temp.type = SCALAR;
- temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &temp);
- return;
+ /* Make sure the bit-offset also fits. */
+ rhsunitoffset = TREE_INT_CST_LOW (offset);
+ rhsoffset = rhsunitoffset * BITS_PER_UNIT;
+ if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
+ rhsoffset = UNKNOWN_OFFSET;
}
get_constraint_for (ptr, results);
curr = get_varinfo (c->var);
if (c->type == ADDRESSOF
- && !curr->is_full_var)
+ /* If this varinfo represents a full variable just use it. */
+ && curr->is_full_var)
+ c->offset = 0;
+ else if (c->type == ADDRESSOF
+ /* If we do not know the offset add all subfields. */
+ && rhsoffset == UNKNOWN_OFFSET)
+ {
+ varinfo_t temp = lookup_vi_for_tree (curr->decl);
+ do
+ {
+ struct constraint_expr c2;
+ c2.var = temp->id;
+ c2.type = ADDRESSOF;
+ c2.offset = 0;
+ if (c2.var != c->var)
+ VEC_safe_push (ce_s, heap, *results, &c2);
+ temp = temp->next;
+ }
+ while (temp);
+ }
+ else if (c->type == ADDRESSOF)
{
- varinfo_t temp, curr = get_varinfo (c->var);
+ varinfo_t temp;
+ unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
/* Search the sub-field which overlaps with the
- pointed-to offset. As we deal with positive offsets
- only, we can start the search from the current variable. */
- temp = first_vi_for_offset (curr, curr->offset + rhsoffset);
-
- /* If the result is outside of the variable we have to provide
- a conservative result, as the variable is still reachable
- from the resulting pointer (even though it technically
- cannot point to anything). The last sub-field is such
- a conservative result.
+ pointed-to offset. If the result is outside of the variable
+ we have to provide a conservative result, as the variable is
+ still reachable from the resulting pointer (even though it
+ technically cannot point to anything). The last and first
+ sub-fields are such conservative results.
??? If we always had a sub-field for &object + 1 then
we could represent this in a more precise way. */
- if (temp == NULL)
- {
- temp = curr;
- while (temp->next != NULL)
- temp = temp->next;
- continue;
- }
+ if (rhsoffset < 0
+ && curr->offset < offset)
+ offset = 0;
+ temp = first_or_preceding_vi_for_offset (curr, offset);
/* If the found variable is not exactly at the pointed to
result, we have to include the next variable in the
solution as well. Otherwise two increments by offset / 2
do not result in the same or a conservative superset
solution. */
- if (temp->offset != curr->offset + rhsoffset
+ if (temp->offset != offset
&& temp->next != NULL)
{
struct constraint_expr c2;
c->var = temp->id;
c->offset = 0;
}
- else if (c->type == ADDRESSOF
- /* If this varinfo represents a full variable just use it. */
- && curr->is_full_var)
- c->offset = 0;
else
c->offset = rhsoffset;
}
gcc_assert (VEC_length (ce_s, *results) == 1);
result = VEC_last (ce_s, *results);
- /* This can also happen due to weird offsetof type macros. */
- if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
- result->type = SCALAR;
-
if (result->type == SCALAR
&& get_varinfo (result->var)->is_full_var)
/* For single-field vars do not bother about the offset. */
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Access to past the end of variable, ignoring\n");
}
- else if (bitmaxsize == -1)
+ else if (result->type == DEREF)
+ {
+ /* If we do not know exactly where the access goes say so. Note
+ that only for non-structure accesses we know that we access
+ at most one subfield of any variable. */
+ if (bitpos == -1
+ || bitsize != bitmaxsize
+ || AGGREGATE_TYPE_P (TREE_TYPE (orig_t)))
+ result->offset = UNKNOWN_OFFSET;
+ else
+ result->offset = bitpos;
+ }
+ else if (result->type == ADDRESSOF)
{
- /* We can't handle DEREF constraints with unknown size, we'll
- get the wrong answer. Punt and return anything. */
+ /* We can end up here for component references on a
+ VIEW_CONVERT_EXPR <>(&foobar). */
+ result->type = SCALAR;
result->var = anything_id;
result->offset = 0;
}
else
- result->offset = bitpos;
+ gcc_unreachable ();
}
c->type = SCALAR;
else if (c->type == DEREF)
{
- tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
- struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
+ struct constraint_expr tmplhs;
+ tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
process_constraint (new_constraint (tmplhs, *c));
c->var = tmplhs.var;
}
}
}
+static void get_constraint_for_1 (tree, VEC (ce_s, heap) **, bool);
+
+/* Given a tree T, return the constraint expression for taking the
+ address of it. */
+
+static void
+get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
+{
+ struct constraint_expr *c;
+ unsigned int i;
+
+ /* Collect the constraints for T itself, with address_p set so the
+ callee knows the result is used in an address context. */
+ get_constraint_for_1 (t, results, true);
+
+ for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
+ {
+ /* Taking the address of a dereference cancels out (&*p is p),
+ so a DEREF result collapses to the plain pointer value;
+ everything else becomes an ADDRESSOF constraint. */
+ if (c->type == DEREF)
+ c->type = SCALAR;
+ else
+ c->type = ADDRESSOF;
+ }
+}
+
/* Given a tree T, return the constraint expression for it. */
static void
happens below, since it will fall into the default case. The only
case we know something about an integer treated like a pointer is
when it is the NULL pointer, and then we just say it points to
- NULL. */
- if (TREE_CODE (t) == INTEGER_CST
- && integer_zerop (t))
+ NULL.
+
+ Do not do that if -fno-delete-null-pointer-checks though, because
+ in that case *NULL does not fail, so it _should_ alias *anything.
+ It is not worth adding a new option or renaming the existing one,
+ since this case is relatively obscure. */
+ if (flag_delete_null_pointer_checks
+ && ((TREE_CODE (t) == INTEGER_CST
+ && integer_zerop (t))
+ /* The only valid CONSTRUCTORs in gimple with pointer typed
+ elements are zero-initializers. */
+ || TREE_CODE (t) == CONSTRUCTOR))
{
temp.var = nothing_id;
temp.type = ADDRESSOF;
switch (TREE_CODE (t))
{
case ADDR_EXPR:
- {
- struct constraint_expr *c;
- unsigned int i;
- tree exp = TREE_OPERAND (t, 0);
-
- get_constraint_for_1 (exp, results, true);
-
- for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
- {
- if (c->type == DEREF)
- c->type = SCALAR;
- else
- c->type = ADDRESSOF;
- }
- return;
- }
- break;
+ get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
+ return;
default:;
}
break;
case COMPONENT_REF:
get_constraint_for_component_ref (t, results, address_p);
return;
+ case VIEW_CONVERT_EXPR:
+ get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
+ return;
+ /* We are missing handling for TARGET_MEM_REF here. */
default:;
}
break;
get_constraint_for_1 (t, results, false);
}
-/* Handle the structure copy case where we have a simple structure copy
- between LHS and RHS that is of SIZE (in bits)
- For each field of the lhs variable (lhsfield)
- For each field of the rhs variable at lhsfield.offset (rhsfield)
- add the constraint lhsfield = rhsfield
+/* Efficiently generates constraints from all entries in *RHSC to all
+ entries in *LHSC. */
- If we fail due to some kind of type unsafety or other thing we
- can't handle, return false. We expect the caller to collapse the
- variable in that case. */
+static void
+process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc)
+{
+ struct constraint_expr *lhsp, *rhsp;
+ unsigned i, j;
-static bool
-do_simple_structure_copy (const struct constraint_expr lhs,
- const struct constraint_expr rhs,
- const unsigned HOST_WIDE_INT size)
-{
- varinfo_t p = get_varinfo (lhs.var);
- unsigned HOST_WIDE_INT pstart, last;
- pstart = p->offset;
- last = p->offset + size;
- for (; p && p->offset < last; p = p->next)
+ if (VEC_length (ce_s, lhsc) <= 1
+ || VEC_length (ce_s, rhsc) <= 1)
{
- varinfo_t q;
- struct constraint_expr templhs = lhs;
- struct constraint_expr temprhs = rhs;
- unsigned HOST_WIDE_INT fieldoffset;
-
- templhs.var = p->id;
- q = get_varinfo (temprhs.var);
- fieldoffset = p->offset - pstart;
- q = first_vi_for_offset (q, q->offset + fieldoffset);
- if (!q)
- return false;
- temprhs.var = q->id;
- process_constraint (new_constraint (templhs, temprhs));
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j)
+ process_constraint (new_constraint (*lhsp, *rhsp));
+ }
+ else
+ {
+ struct constraint_expr tmp;
+ tmp = new_scalar_tmp_constraint_exp ("allalltmp");
+ for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ process_constraint (new_constraint (tmp, *rhsp));
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ process_constraint (new_constraint (*lhsp, tmp));
}
- return true;
}
-
-/* Handle the structure copy case where we have a structure copy between a
- aggregate on the LHS and a dereference of a pointer on the RHS
- that is of SIZE (in bits)
-
- For each field of the lhs variable (lhsfield)
- rhs.offset = lhsfield->offset
- add the constraint lhsfield = rhs
-*/
+/* Handle aggregate copies by expanding into copies of the respective
+ fields of the structures. */
static void
-do_rhs_deref_structure_copy (const struct constraint_expr lhs,
- const struct constraint_expr rhs,
- const unsigned HOST_WIDE_INT size)
+do_structure_copy (tree lhsop, tree rhsop)
{
- varinfo_t p = get_varinfo (lhs.var);
- unsigned HOST_WIDE_INT pstart,last;
- pstart = p->offset;
- last = p->offset + size;
-
- for (; p && p->offset < last; p = p->next)
+ struct constraint_expr *lhsp, *rhsp;
+ VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
+ unsigned j;
+
+ get_constraint_for (lhsop, &lhsc);
+ get_constraint_for (rhsop, &rhsc);
+ lhsp = VEC_index (ce_s, lhsc, 0);
+ rhsp = VEC_index (ce_s, rhsc, 0);
+ if (lhsp->type == DEREF
+ || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
+ || rhsp->type == DEREF)
+ process_all_all_constraints (lhsc, rhsc);
+ else if (lhsp->type == SCALAR
+ && (rhsp->type == SCALAR
+ || rhsp->type == ADDRESSOF))
{
- varinfo_t q;
- struct constraint_expr templhs = lhs;
- struct constraint_expr temprhs = rhs;
- unsigned HOST_WIDE_INT fieldoffset;
-
-
- if (templhs.type == SCALAR)
- templhs.var = p->id;
- else
- templhs.offset = p->offset;
-
- q = get_varinfo (temprhs.var);
- fieldoffset = p->offset - pstart;
- temprhs.offset += fieldoffset;
- process_constraint (new_constraint (templhs, temprhs));
- }
-}
+ tree lhsbase, rhsbase;
+ HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
+ HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
+ unsigned k = 0;
+ lhsbase = get_ref_base_and_extent (lhsop, &lhsoffset,
+ &lhssize, &lhsmaxsize);
+ rhsbase = get_ref_base_and_extent (rhsop, &rhsoffset,
+ &rhssize, &rhsmaxsize);
+ for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
+ {
+ varinfo_t lhsv, rhsv;
+ rhsp = VEC_index (ce_s, rhsc, k);
+ lhsv = get_varinfo (lhsp->var);
+ rhsv = get_varinfo (rhsp->var);
+ if (lhsv->may_have_pointers
+ && ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
+ rhsv->offset + lhsoffset, rhsv->size))
+ process_constraint (new_constraint (*lhsp, *rhsp));
+ if (lhsv->offset + rhsoffset + lhsv->size
+ > rhsv->offset + lhsoffset + rhsv->size)
+ {
+ ++k;
+ if (k >= VEC_length (ce_s, rhsc))
+ break;
+ }
+ else
+ ++j;
+ }
+ }
+ else
+ gcc_unreachable ();
-/* Handle the structure copy case where we have a structure copy
- between an aggregate on the RHS and a dereference of a pointer on
- the LHS that is of SIZE (in bits)
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
+}
- For each field of the rhs variable (rhsfield)
- lhs.offset = rhsfield->offset
- add the constraint lhs = rhsfield
-*/
+/* Create a constraint ID = OP. */
static void
-do_lhs_deref_structure_copy (const struct constraint_expr lhs,
- const struct constraint_expr rhs,
- const unsigned HOST_WIDE_INT size)
+make_constraint_to (unsigned id, tree op)
{
- varinfo_t p = get_varinfo (rhs.var);
- unsigned HOST_WIDE_INT pstart,last;
- pstart = p->offset;
- last = p->offset + size;
-
- for (; p && p->offset < last; p = p->next)
- {
- varinfo_t q;
- struct constraint_expr templhs = lhs;
- struct constraint_expr temprhs = rhs;
- unsigned HOST_WIDE_INT fieldoffset;
-
+ VEC(ce_s, heap) *rhsc = NULL;
+ struct constraint_expr *c;
+ struct constraint_expr includes;
+ unsigned int j;
- if (temprhs.type == SCALAR)
- temprhs.var = p->id;
- else
- temprhs.offset = p->offset;
+ includes.var = id;
+ includes.offset = 0;
+ includes.type = SCALAR;
- q = get_varinfo (templhs.var);
- fieldoffset = p->offset - pstart;
- templhs.offset += fieldoffset;
- process_constraint (new_constraint (templhs, temprhs));
- }
+ get_constraint_for (op, &rhsc);
+ for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
+ process_constraint (new_constraint (includes, *c));
+ VEC_free (ce_s, heap, rhsc);
}
-/* Sometimes, frontends like to give us bad type information. This
- function will collapse all the fields from VAR to the end of VAR,
- into VAR, so that we treat those fields as a single variable.
- We return the variable they were collapsed into. */
+/* Create a constraint ID = &FROM. */
-static unsigned int
-collapse_rest_of_var (unsigned int var)
+static void
+make_constraint_from (varinfo_t vi, int from)
{
- varinfo_t currvar = get_varinfo (var);
- varinfo_t field;
-
- for (field = currvar->next; field; field = field->next)
- {
- if (dump_file)
- fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
- field->name, currvar->name);
-
- gcc_assert (field->collapsed_to == 0);
- field->collapsed_to = currvar->id;
- }
+ struct constraint_expr lhs, rhs;
- currvar->next = NULL;
- currvar->size = currvar->fullsize - currvar->offset;
+ lhs.var = vi->id;
+ lhs.offset = 0;
+ lhs.type = SCALAR;
- return currvar->id;
+ rhs.var = from;
+ rhs.offset = 0;
+ rhs.type = ADDRESSOF;
+ process_constraint (new_constraint (lhs, rhs));
}
-/* Handle aggregate copies by expanding into copies of the respective
- fields of the structures. */
+/* Create a constraint ID = FROM. */
static void
-do_structure_copy (tree lhsop, tree rhsop)
+make_copy_constraint (varinfo_t vi, int from)
{
- struct constraint_expr lhs, rhs, tmp;
- VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
- varinfo_t p;
- unsigned HOST_WIDE_INT lhssize;
- unsigned HOST_WIDE_INT rhssize;
-
- /* Pretend we are taking the address of the constraint exprs.
- We deal with walking the sub-fields ourselves. */
- get_constraint_for_1 (lhsop, &lhsc, true);
- get_constraint_for_1 (rhsop, &rhsc, true);
- gcc_assert (VEC_length (ce_s, lhsc) == 1);
- gcc_assert (VEC_length (ce_s, rhsc) == 1);
- lhs = *(VEC_last (ce_s, lhsc));
- rhs = *(VEC_last (ce_s, rhsc));
-
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ struct constraint_expr lhs, rhs;
- /* If we have special var = x, swap it around. */
- if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
- {
- tmp = lhs;
- lhs = rhs;
- rhs = tmp;
- }
+ lhs.var = vi->id;
+ lhs.offset = 0;
+ lhs.type = SCALAR;
- /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
- possible it's something we could handle. However, most cases falling
- into this are dealing with transparent unions, which are slightly
- weird. */
- if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
- {
- rhs.type = ADDRESSOF;
- rhs.var = anything_id;
- }
+ rhs.var = from;
+ rhs.offset = 0;
+ rhs.type = SCALAR;
+ process_constraint (new_constraint (lhs, rhs));
+}
- /* If the RHS is a special var, or an addressof, set all the LHS fields to
- that special var. */
- if (rhs.var <= integer_id)
- {
- for (p = get_varinfo (lhs.var); p; p = p->next)
- {
- struct constraint_expr templhs = lhs;
- struct constraint_expr temprhs = rhs;
+/* Make constraints necessary to make OP escape. */
- if (templhs.type == SCALAR )
- templhs.var = p->id;
- else
- templhs.offset += p->offset;
- process_constraint (new_constraint (templhs, temprhs));
- }
- }
- else
- {
- tree rhstype = TREE_TYPE (rhsop);
- tree lhstype = TREE_TYPE (lhsop);
- tree rhstypesize;
- tree lhstypesize;
-
- lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
- rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
-
- /* If we have a variably sized types on the rhs or lhs, and a deref
- constraint, add the constraint, lhsconstraint = &ANYTHING.
- This is conservatively correct because either the lhs is an unknown
- sized var (if the constraint is SCALAR), or the lhs is a DEREF
- constraint, and every variable it can point to must be unknown sized
- anyway, so we don't need to worry about fields at all. */
- if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
- || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
- {
- rhs.var = anything_id;
- rhs.type = ADDRESSOF;
- rhs.offset = 0;
- process_constraint (new_constraint (lhs, rhs));
- return;
- }
+static void
+make_escape_constraint (tree op)
+{
+ make_constraint_to (escaped_id, op);
+}
- /* The size only really matters insofar as we don't set more or less of
- the variable. If we hit an unknown size var, the size should be the
- whole darn thing. */
- if (get_varinfo (rhs.var)->is_unknown_size_var)
- rhssize = ~0;
- else
- rhssize = TREE_INT_CST_LOW (rhstypesize);
+/* Create a new artificial heap variable with NAME and make a
+ constraint from it to LHS. Return the created variable. */
- if (get_varinfo (lhs.var)->is_unknown_size_var)
- lhssize = ~0;
- else
- lhssize = TREE_INT_CST_LOW (lhstypesize);
+static varinfo_t
+make_constraint_from_heapvar (varinfo_t lhs, const char *name)
+{
+ varinfo_t vi;
+ tree heapvar = heapvar_lookup (lhs->decl, lhs->offset);
+ if (heapvar == NULL_TREE)
+ {
+ var_ann_t ann;
+ heapvar = create_tmp_var_raw (ptr_type_node, name);
+ DECL_EXTERNAL (heapvar) = 1;
- if (rhs.type == SCALAR && lhs.type == SCALAR)
- {
- if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
- {
- lhs.var = collapse_rest_of_var (lhs.var);
- rhs.var = collapse_rest_of_var (rhs.var);
- lhs.offset = 0;
- rhs.offset = 0;
- lhs.type = SCALAR;
- rhs.type = SCALAR;
- process_constraint (new_constraint (lhs, rhs));
- }
- }
- else if (lhs.type != DEREF && rhs.type == DEREF)
- do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
- else if (lhs.type == DEREF && rhs.type != DEREF)
- do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
- else
- {
- tree pointedtotype = lhstype;
- tree tmpvar;
+ heapvar_insert (lhs->decl, lhs->offset, heapvar);
- gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
- tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
- do_structure_copy (tmpvar, rhsop);
- do_structure_copy (lhsop, tmpvar);
- }
+ ann = get_var_ann (heapvar);
+ ann->is_heapvar = 1;
}
-}
-/* Create a constraint ID = OP. */
+ /* For global vars we need to add a heapvar to the list of referenced
+ vars of a different function than it was created for originally. */
+ if (gimple_referenced_vars (cfun))
+ add_referenced_var (heapvar);
-static void
-make_constraint_to (unsigned id, tree op)
-{
- VEC(ce_s, heap) *rhsc = NULL;
- struct constraint_expr *c;
- struct constraint_expr includes;
- unsigned int j;
+ vi = new_var_info (heapvar, name);
+ vi->is_artificial_var = true;
+ vi->is_heap_var = true;
+ vi->is_unknown_size_var = true;
+ vi->offset = 0;
+ vi->fullsize = ~0;
+ vi->size = ~0;
+ vi->is_full_var = true;
+ insert_vi_for_tree (heapvar, vi);
- includes.var = id;
- includes.offset = 0;
- includes.type = SCALAR;
+ make_constraint_from (lhs, vi->id);
- get_constraint_for (op, &rhsc);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
- process_constraint (new_constraint (includes, *c));
- VEC_free (ce_s, heap, rhsc);
+ return vi;
}
-/* Make constraints necessary to make OP escape. */
+/* Create a new artificial heap variable with NAME and make a
+ constraint from it to LHS. Set flags according to a tag used
+ for tracking restrict pointers. */
static void
-make_escape_constraint (tree op)
+make_constraint_from_restrict (varinfo_t lhs, const char *name)
{
- make_constraint_to (escaped_id, op);
+ varinfo_t vi;
+ vi = make_constraint_from_heapvar (lhs, name);
+ vi->is_restrict_var = 1;
+ vi->is_global_var = 0;
+ vi->is_special_var = 1;
+ vi->may_have_pointers = 0;
}
/* For non-IPA mode, generate constraints necessary for a call on the
RHS. */
static void
-handle_rhs_call (gimple stmt)
+handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
{
+ struct constraint_expr rhsc;
unsigned i;
for (i = 0; i < gimple_call_num_args (stmt); ++i)
/* The static chain escapes as well. */
if (gimple_call_chain (stmt))
make_escape_constraint (gimple_call_chain (stmt));
+
+ /* And if we applied NRV the address of the return slot escapes as well. */
+ if (gimple_call_return_slot_opt_p (stmt)
+ && gimple_call_lhs (stmt) != NULL_TREE
+ && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
+ {
+ VEC(ce_s, heap) *tmpc = NULL;
+ struct constraint_expr lhsc, *c;
+ get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
+ lhsc.var = escaped_id;
+ lhsc.offset = 0;
+ lhsc.type = SCALAR;
+ for (i = 0; VEC_iterate (ce_s, tmpc, i, c); ++i)
+ process_constraint (new_constraint (lhsc, *c));
+ VEC_free(ce_s, heap, tmpc);
+ }
+
+ /* Regular functions return nonlocal memory. */
+ rhsc.var = nonlocal_id;
+ rhsc.offset = 0;
+ rhsc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call
the LHS point to global and escaped variables. */
static void
-handle_lhs_call (tree lhs, int flags)
+handle_lhs_call (tree lhs, int flags, VEC(ce_s, heap) *rhsc)
{
VEC(ce_s, heap) *lhsc = NULL;
- struct constraint_expr rhsc;
- unsigned int j;
- struct constraint_expr *lhsp;
get_constraint_for (lhs, &lhsc);
if (flags & ECF_MALLOC)
{
- tree heapvar = heapvar_lookup (lhs);
varinfo_t vi;
-
- if (heapvar == NULL)
- {
- heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
- DECL_EXTERNAL (heapvar) = 1;
- get_var_ann (heapvar)->is_heapvar = 1;
- if (gimple_referenced_vars (cfun))
- add_referenced_var (heapvar);
- heapvar_insert (lhs, heapvar);
- }
-
- rhsc.var = create_variable_info_for (heapvar,
- alias_get_name (heapvar));
- vi = get_varinfo (rhsc.var);
- vi->is_artificial_var = 1;
- vi->is_heap_var = 1;
- rhsc.type = ADDRESSOF;
- rhsc.offset = 0;
+ vi = make_constraint_from_heapvar (get_vi_for_tree (lhs), "HEAP");
+ /* We delay marking allocated storage global until we know if
+ it escapes. */
+ DECL_EXTERNAL (vi->decl) = 0;
+ vi->is_global_var = 0;
}
- else
+ else if (VEC_length (ce_s, rhsc) > 0)
{
- rhsc.var = escaped_id;
- rhsc.offset = 0;
- rhsc.type = ADDRESSOF;
+ /* If the store is to a global decl make sure to
+ add proper escape constraints. */
+ lhs = get_base_address (lhs);
+ if (lhs
+ && DECL_P (lhs)
+ && is_global_var (lhs))
+ {
+ struct constraint_expr tmpc;
+ tmpc.var = escaped_id;
+ tmpc.offset = 0;
+ tmpc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, lhsc, &tmpc);
+ }
+ process_all_all_constraints (lhsc, rhsc);
}
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
- process_constraint (new_constraint (*lhsp, rhsc));
VEC_free (ce_s, heap, lhsc);
}
const function that returns a pointer in the statement STMT. */
static void
-handle_const_call (gimple stmt)
+handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
{
- tree lhs = gimple_call_lhs (stmt);
- VEC(ce_s, heap) *lhsc = NULL;
struct constraint_expr rhsc;
- unsigned int j, k;
- struct constraint_expr *lhsp;
- tree tmpvar;
- struct constraint_expr tmpc;
-
- get_constraint_for (lhs, &lhsc);
+ unsigned int k;
- /* If this is a nested function then it can return anything. */
+ /* Treat nested const functions the same as pure functions as far
+ as the static chain is concerned. */
if (gimple_call_chain (stmt))
{
- rhsc.var = anything_id;
+ make_constraint_to (callused_id, gimple_call_chain (stmt));
+ rhsc.var = callused_id;
rhsc.offset = 0;
- rhsc.type = ADDRESSOF;
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
- process_constraint (new_constraint (*lhsp, rhsc));
- VEC_free (ce_s, heap, lhsc);
- return;
+ rhsc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
}
- /* We always use a temporary here, otherwise we end up with a quadratic
- amount of constraints for
- large_struct = const_call (large_struct);
- in field-sensitive PTA. */
- tmpvar = create_tmp_var_raw (ptr_type_node, "consttmp");
- tmpc = get_constraint_exp_for_temp (tmpvar);
-
- /* May return addresses of globals. */
- rhsc.var = nonlocal_id;
- rhsc.offset = 0;
- rhsc.type = ADDRESSOF;
- process_constraint (new_constraint (tmpc, rhsc));
-
/* May return arguments. */
for (k = 0; k < gimple_call_num_args (stmt); ++k)
{
if (could_have_pointers (arg))
{
VEC(ce_s, heap) *argc = NULL;
+ unsigned i;
struct constraint_expr *argp;
- int i;
-
get_constraint_for (arg, &argc);
- for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
- process_constraint (new_constraint (tmpc, *argp));
- VEC_free (ce_s, heap, argc);
+ for (i = 0; VEC_iterate (ce_s, argc, i, argp); ++i)
+ VEC_safe_push (ce_s, heap, *results, argp);
+ VEC_free(ce_s, heap, argc);
}
}
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
- process_constraint (new_constraint (*lhsp, tmpc));
-
- VEC_free (ce_s, heap, lhsc);
+ /* May return addresses of globals. */
+ rhsc.var = nonlocal_id;
+ rhsc.offset = 0;
+ rhsc.type = ADDRESSOF;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call to a
pure function in statement STMT. */
static void
-handle_pure_call (gimple stmt)
+handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
{
+ struct constraint_expr rhsc;
unsigned i;
+ bool need_callused = false;
/* Memory reached from pointer arguments is call-used. */
for (i = 0; i < gimple_call_num_args (stmt); ++i)
tree arg = gimple_call_arg (stmt, i);
if (could_have_pointers (arg))
- make_constraint_to (callused_id, arg);
+ {
+ make_constraint_to (callused_id, arg);
+ need_callused = true;
+ }
}
/* The static chain is used as well. */
if (gimple_call_chain (stmt))
- make_constraint_to (callused_id, gimple_call_chain (stmt));
-
- /* If the call returns a pointer it may point to reachable memory
- from the arguments. Not so for malloc functions though. */
- if (gimple_call_lhs (stmt)
- && could_have_pointers (gimple_call_lhs (stmt))
- && !(gimple_call_flags (stmt) & ECF_MALLOC))
{
- tree lhs = gimple_call_lhs (stmt);
- VEC(ce_s, heap) *lhsc = NULL;
- struct constraint_expr rhsc;
- struct constraint_expr *lhsp;
- unsigned j;
-
- get_constraint_for (lhs, &lhsc);
-
- /* If this is a nested function then it can return anything. */
- if (gimple_call_chain (stmt))
- {
- rhsc.var = anything_id;
- rhsc.offset = 0;
- rhsc.type = ADDRESSOF;
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
- process_constraint (new_constraint (*lhsp, rhsc));
- VEC_free (ce_s, heap, lhsc);
- return;
- }
+ make_constraint_to (callused_id, gimple_call_chain (stmt));
+ need_callused = true;
+ }
- /* Else just add the call-used memory here. Escaped variables
- and globals will be dealt with in handle_lhs_call. */
+ /* Pure functions may return callused and nonlocal memory. */
+ if (need_callused)
+ {
rhsc.var = callused_id;
rhsc.offset = 0;
- rhsc.type = ADDRESSOF;
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
- process_constraint (new_constraint (*lhsp, rhsc));
- VEC_free (ce_s, heap, lhsc);
+ rhsc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
}
+ rhsc.var = nonlocal_id;
+ rhsc.offset = 0;
+ rhsc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
}
/* Walk statement T setting up aliasing constraints according to the
VEC(ce_s, heap) *lhsc = NULL;
VEC(ce_s, heap) *rhsc = NULL;
struct constraint_expr *c;
- enum escape_type stmt_escape_type;
/* Now build constraints expressions. */
if (gimple_code (t) == GIMPLE_PHI)
pointer passed by address. */
else if (is_gimple_call (t))
{
+ tree fndecl;
+ if ((fndecl = gimple_call_fndecl (t)) != NULL_TREE
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+ /* ??? All builtins that are handled here need to be handled
+ in the alias-oracle query functions explicitly! */
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ /* All the following functions return a pointer to the same object
+ as their first argument points to. The functions do not add
+ to the ESCAPED solution. The functions make the first argument
+ pointed to memory point to what the second argument pointed to
+ memory points to. */
+ case BUILT_IN_STRCPY:
+ case BUILT_IN_STRNCPY:
+ case BUILT_IN_BCOPY:
+ case BUILT_IN_MEMCPY:
+ case BUILT_IN_MEMMOVE:
+ case BUILT_IN_MEMPCPY:
+ case BUILT_IN_STPCPY:
+ case BUILT_IN_STPNCPY:
+ case BUILT_IN_STRCAT:
+ case BUILT_IN_STRNCAT:
+ {
+ tree res = gimple_call_lhs (t);
+ tree dest = gimple_call_arg (t, 0);
+ tree src = gimple_call_arg (t, 1);
+ if (res != NULL_TREE)
+ {
+ get_constraint_for (res, &lhsc);
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
+ else
+ get_constraint_for (dest, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
+ }
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
+ get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
+ do_deref (&lhsc);
+ do_deref (&rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
+ return;
+ }
+ case BUILT_IN_MEMSET:
+ {
+ tree res = gimple_call_lhs (t);
+ tree dest = gimple_call_arg (t, 0);
+ unsigned i;
+ ce_s *lhsp;
+ struct constraint_expr ac;
+ if (res != NULL_TREE)
+ {
+ get_constraint_for (res, &lhsc);
+ get_constraint_for (dest, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
+ }
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
+ do_deref (&lhsc);
+ if (flag_delete_null_pointer_checks
+ && integer_zerop (gimple_call_arg (t, 1)))
+ {
+ ac.type = ADDRESSOF;
+ ac.var = nothing_id;
+ }
+ else
+ {
+ ac.type = SCALAR;
+ ac.var = integer_id;
+ }
+ ac.offset = 0;
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ process_constraint (new_constraint (*lhsp, ac));
+ VEC_free (ce_s, heap, lhsc);
+ return;
+ }
+ /* All the following functions do not return pointers, do not
+ modify the points-to sets of memory reachable from their
+ arguments and do not add to the ESCAPED solution. */
+ case BUILT_IN_SINCOS:
+ case BUILT_IN_SINCOSF:
+ case BUILT_IN_SINCOSL:
+ case BUILT_IN_FREXP:
+ case BUILT_IN_FREXPF:
+ case BUILT_IN_FREXPL:
+ case BUILT_IN_GAMMA_R:
+ case BUILT_IN_GAMMAF_R:
+ case BUILT_IN_GAMMAL_R:
+ case BUILT_IN_LGAMMA_R:
+ case BUILT_IN_LGAMMAF_R:
+ case BUILT_IN_LGAMMAL_R:
+ case BUILT_IN_MODF:
+ case BUILT_IN_MODFF:
+ case BUILT_IN_MODFL:
+ case BUILT_IN_REMQUO:
+ case BUILT_IN_REMQUOF:
+ case BUILT_IN_REMQUOL:
+ case BUILT_IN_FREE:
+ return;
+ /* printf-style functions may have hooks to set pointers to
+ point to somewhere into the generated string. Leave them
+ for a later exercise... */
+ default:
+ /* Fallthru to general call handling. */;
+ }
if (!in_ipa_mode)
{
+ VEC(ce_s, heap) *rhsc = NULL;
int flags = gimple_call_flags (t);
/* Const functions can return their arguments and addresses
of global memory but not of escaped memory. */
- if (flags & ECF_CONST)
+ if (flags & (ECF_CONST|ECF_NOVOPS))
{
if (gimple_call_lhs (t)
&& could_have_pointers (gimple_call_lhs (t)))
- handle_const_call (t);
+ handle_const_call (t, &rhsc);
}
/* Pure functions can return addresses in and of memory
reachable from their arguments, but they are not an escape
point for reachable memory of their arguments. */
- else if (flags & ECF_PURE)
- {
- handle_pure_call (t);
- if (gimple_call_lhs (t)
- && could_have_pointers (gimple_call_lhs (t)))
- handle_lhs_call (gimple_call_lhs (t), flags);
- }
+ else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
+ handle_pure_call (t, &rhsc);
else
- {
- handle_rhs_call (t);
- if (gimple_call_lhs (t)
- && could_have_pointers (gimple_call_lhs (t)))
- handle_lhs_call (gimple_call_lhs (t), flags);
- }
+ handle_rhs_call (t, &rhsc);
+ if (gimple_call_lhs (t)
+ && could_have_pointers (gimple_call_lhs (t)))
+ handle_lhs_call (gimple_call_lhs (t), flags, rhsc);
+ VEC_free (ce_s, heap, rhsc);
}
else
{
do_structure_copy (lhsop, rhsop);
else
{
- unsigned int j;
struct constraint_expr temp;
get_constraint_for (lhsop, &lhsc);
temp.offset = 0;
VEC_safe_push (ce_s, heap, rhsc, &temp);
}
- for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
- {
- struct constraint_expr *c2;
- unsigned int k;
-
- for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
- process_constraint (new_constraint (*c, *c2));
- }
+ process_all_all_constraints (lhsc, rhsc);
}
+ /* If there is a store to a global variable the rhs escapes. */
+ if ((lhsop = get_base_address (lhsop)) != NULL_TREE
+ && DECL_P (lhsop)
+ && is_global_var (lhsop))
+ make_escape_constraint (rhsop);
+ /* If this is a conversion of a non-restrict pointer to a
+ restrict pointer track it with a new heapvar. */
+ else if (gimple_assign_cast_p (t)
+ && POINTER_TYPE_P (TREE_TYPE (rhsop))
+ && POINTER_TYPE_P (TREE_TYPE (lhsop))
+ && !TYPE_RESTRICT (TREE_TYPE (rhsop))
+ && TYPE_RESTRICT (TREE_TYPE (lhsop)))
+ make_constraint_from_restrict (get_vi_for_tree (lhsop),
+ "CAST_RESTRICT");
}
- else if (gimple_code (t) == GIMPLE_CHANGE_DYNAMIC_TYPE)
- {
- unsigned int j;
-
- get_constraint_for (gimple_cdt_location (t), &lhsc);
- for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
- get_varinfo (c->var)->no_tbaa_pruning = true;
- }
-
- stmt_escape_type = is_escape_site (t);
- if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
+ /* For conversions of pointers to non-pointers the pointer escapes. */
+ else if (gimple_assign_cast_p (t)
+ && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (t)))
+ && !POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (t))))
{
- gcc_assert (is_gimple_assign (t));
- if (gimple_assign_rhs_code (t) == ADDR_EXPR)
- {
- tree rhs = gimple_assign_rhs1 (t);
- tree base = get_base_address (TREE_OPERAND (rhs, 0));
- if (base
- && (!DECL_P (base)
- || !is_global_var (base)))
- make_escape_constraint (rhs);
- }
- else if (get_gimple_rhs_class (gimple_assign_rhs_code (t))
- == GIMPLE_SINGLE_RHS)
- {
- if (could_have_pointers (gimple_assign_rhs1 (t)))
- make_escape_constraint (gimple_assign_rhs1 (t));
- }
- else
- gcc_unreachable ();
+ make_escape_constraint (gimple_assign_rhs1 (t));
}
- else if (stmt_escape_type == ESCAPE_BAD_CAST)
+ /* Handle escapes through return. */
+ else if (gimple_code (t) == GIMPLE_RETURN
+ && gimple_return_retval (t) != NULL_TREE
+ && could_have_pointers (gimple_return_retval (t)))
{
- gcc_assert (is_gimple_assign (t));
- gcc_assert (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
- || gimple_assign_rhs_code (t) == VIEW_CONVERT_EXPR);
- make_escape_constraint (gimple_assign_rhs1 (t));
+ make_escape_constraint (gimple_return_retval (t));
}
- else if (stmt_escape_type == ESCAPE_TO_ASM)
+ /* Handle asms conservatively by adding escape constraints to everything. */
+ else if (gimple_code (t) == GIMPLE_ASM)
{
- unsigned i;
- for (i = 0; i < gimple_asm_noutputs (t); ++i)
+ unsigned i, noutputs;
+ const char **oconstraints;
+ const char *constraint;
+ bool allows_mem, allows_reg, is_inout;
+
+ noutputs = gimple_asm_noutputs (t);
+ oconstraints = XALLOCAVEC (const char *, noutputs);
+
+ for (i = 0; i < noutputs; ++i)
{
- tree op = TREE_VALUE (gimple_asm_output_op (t, i));
+ tree link = gimple_asm_output_op (t, i);
+ tree op = TREE_VALUE (link);
+
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ oconstraints[i] = constraint;
+ parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
+ &allows_reg, &is_inout);
+
+ /* A memory constraint makes the address of the operand escape. */
+ if (!allows_reg && allows_mem)
+ make_escape_constraint (build_fold_addr_expr (op));
+
+ /* The asm may read global memory, so outputs may point to
+ any global memory. */
if (op && could_have_pointers (op))
- /* Strictly we'd only need the constraints from ESCAPED and
- NONLOCAL. */
- make_escape_constraint (op);
+ {
+ VEC(ce_s, heap) *lhsc = NULL;
+ struct constraint_expr rhsc, *lhsp;
+ unsigned j;
+ get_constraint_for (op, &lhsc);
+ rhsc.var = nonlocal_id;
+ rhsc.offset = 0;
+ rhsc.type = SCALAR;
+ for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
+ process_constraint (new_constraint (*lhsp, rhsc));
+ VEC_free (ce_s, heap, lhsc);
+ }
}
for (i = 0; i < gimple_asm_ninputs (t); ++i)
{
- tree op = TREE_VALUE (gimple_asm_input_op (t, i));
- if (op && could_have_pointers (op))
- /* Strictly we'd only need the constraint to ESCAPED. */
+ tree link = gimple_asm_input_op (t, i);
+ tree op = TREE_VALUE (link);
+
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+
+ parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
+ &allows_mem, &allows_reg);
+
+ /* A memory constraint makes the address of the operand escape. */
+ if (!allows_reg && allows_mem)
+ make_escape_constraint (build_fold_addr_expr (op));
+ /* Strictly we'd only need the constraint to ESCAPED if
+ the asm clobbers memory, otherwise using CALLUSED
+ would be enough. */
+ else if (op && could_have_pointers (op))
make_escape_constraint (op);
}
}
- /* After promoting variables and computing aliasing we will
- need to re-scan most statements. FIXME: Try to minimize the
- number of statements re-scanned. It's not really necessary to
- re-scan *all* statements. */
- if (!in_ipa_mode)
- gimple_set_modified (origt, true);
VEC_free (ce_s, heap, rhsc);
VEC_free (ce_s, heap, lhsc);
}
/* Find the first varinfo in the same variable as START that overlaps with
- OFFSET.
- Effectively, walk the chain of fields for the variable START to find the
- first field that overlaps with OFFSET.
- Return NULL if we can't find one. */
+ OFFSET. Return NULL if we can't find one. */
static varinfo_t
first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
{
- varinfo_t curr = start;
- while (curr)
+ /* If the offset is outside of the variable, bail out. */
+ if (offset >= start->fullsize)
+ return NULL;
+
+ /* If we cannot reach offset from start, lookup the first field
+ and start from there. */
+ if (start->offset > offset)
+ start = lookup_vi_for_tree (start->decl);
+
+ while (start)
{
/* We may not find a variable in the field list with the actual
offset when when we have glommed a structure to a variable.
In that case, however, offset should still be within the size
of the variable. */
- if (offset >= curr->offset && offset < (curr->offset + curr->size))
- return curr;
- curr = curr->next;
+ if (offset >= start->offset
+ && offset < (start->offset + start->size))
+ return start;
+
+ start = start->next;
}
+
return NULL;
}
+/* Find the first varinfo in the same variable as START that overlaps with
+ OFFSET. If there is no such varinfo the varinfo directly preceding
+ OFFSET is returned. */
+
+static varinfo_t
+first_or_preceding_vi_for_offset (varinfo_t start,
+ unsigned HOST_WIDE_INT offset)
+{
+ /* If we cannot reach offset from start, lookup the first field
+ and start from there. */
+ if (start->offset > offset)
+ start = lookup_vi_for_tree (start->decl);
+
+ /* We may not find a variable in the field list with the actual
+ offset when we have glommed a structure to a variable.
+ In that case, however, offset should still be within the size
+ of the variable.
+ If we get beyond the offset we look for, return the field
+ directly preceding it, which may be the last field. */
+ while (start->next
+ && offset >= start->offset
+ && !(offset < (start->offset + start->size)))
+ start = start->next;
+
+ return start;
+}
+
/* Insert the varinfo FIELD into the field list for BASE, at the front
of the list. */
unsigned has_unknown_size : 1;
unsigned may_have_pointers : 1;
+
+ unsigned only_restrict_pointers : 1;
};
typedef struct fieldoff fieldoff_s;
return false;
/* Non decls or memory tags can never have subvars. */
- if (!DECL_P (v) || MTAG_P (v))
+ if (!DECL_P (v))
return false;
/* Aggregates without overlapping fields can have subvars. */
else
pair->size = -1;
pair->may_have_pointers = could_have_pointers (field);
+ pair->only_restrict_pointers
+ = (!has_unknown_size
+ && POINTER_TYPE_P (TREE_TYPE (field))
+ && TYPE_RESTRICT (TREE_TYPE (field)));
count++;
}
}
return count;
}
-/* Create a constraint ID = &FROM. */
-
-static void
-make_constraint_from (varinfo_t vi, int from)
-{
- struct constraint_expr lhs, rhs;
-
- lhs.var = vi->id;
- lhs.offset = 0;
- lhs.type = SCALAR;
-
- rhs.var = from;
- rhs.offset = 0;
- rhs.type = ADDRESSOF;
- process_constraint (new_constraint (lhs, rhs));
-}
-
/* Count the number of arguments DECL has, and set IS_VARARGS to true
if it is a varargs function. */
static unsigned int
create_function_info_for (tree decl, const char *name)
{
- unsigned int index = VEC_length (varinfo_t, varmap);
varinfo_t vi;
tree arg;
unsigned int i;
/* Create the variable info. */
- vi = new_var_info (decl, index, name);
- vi->decl = decl;
+ vi = new_var_info (decl, name);
vi->offset = 0;
vi->size = 1;
vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
insert_vi_for_tree (vi->decl, vi);
- VEC_safe_push (varinfo_t, heap, varmap, vi);
stats.total_vars++;
vi->fullsize = ~0;
vi->size = ~0;
vi->is_unknown_size_var = true;
- return index;
+ return vi->id;
}
-
arg = DECL_ARGUMENTS (decl);
/* Set up variables for each argument. */
varinfo_t argvi;
const char *newname;
char *tempname;
- unsigned int newindex;
tree argdecl = decl;
if (arg)
argdecl = arg;
- newindex = VEC_length (varinfo_t, varmap);
asprintf (&tempname, "%s.arg%d", name, i-1);
newname = ggc_strdup (tempname);
free (tempname);
- argvi = new_var_info (argdecl, newindex, newname);
- argvi->decl = argdecl;
- VEC_safe_push (varinfo_t, heap, varmap, argvi);
+ argvi = new_var_info (argdecl, newname);
argvi->offset = i;
argvi->size = 1;
argvi->is_full_var = true;
varinfo_t resultvi;
const char *newname;
char *tempname;
- unsigned int newindex;
tree resultdecl = decl;
vi->fullsize ++;
if (DECL_RESULT (decl))
resultdecl = DECL_RESULT (decl);
- newindex = VEC_length (varinfo_t, varmap);
asprintf (&tempname, "%s.result", name);
newname = ggc_strdup (tempname);
free (tempname);
- resultvi = new_var_info (resultdecl, newindex, newname);
- resultvi->decl = resultdecl;
- VEC_safe_push (varinfo_t, heap, varmap, resultvi);
+ resultvi = new_var_info (resultdecl, newname);
resultvi->offset = i;
resultvi->size = 1;
resultvi->fullsize = vi->fullsize;
if (DECL_RESULT (decl))
insert_vi_for_tree (DECL_RESULT (decl), resultvi);
}
- return index;
+
+ return vi->id;
}
static unsigned int
create_variable_info_for (tree decl, const char *name)
{
- unsigned int index = VEC_length (varinfo_t, varmap);
varinfo_t vi;
tree decl_type = TREE_TYPE (decl);
tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
- bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
VEC (fieldoff_s,heap) *fieldstack = NULL;
if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
/* If the variable doesn't have subvars, we may end up needing to
sort the field list and create fake variables for all the
fields. */
- vi = new_var_info (decl, index, name);
- vi->decl = decl;
+ vi = new_var_info (decl, name);
vi->offset = 0;
+ vi->may_have_pointers = could_have_pointers (decl);
if (!declsize
|| !host_integerp (declsize, 1))
{
}
insert_vi_for_tree (vi->decl, vi);
- VEC_safe_push (varinfo_t, heap, varmap, vi);
- if (is_global && (!flag_whole_program || !in_ipa_mode)
- && could_have_pointers (decl))
- make_constraint_from (vi, escaped_id);
+ if (vi->is_global_var
+ && (!flag_whole_program || !in_ipa_mode)
+ && vi->may_have_pointers)
+ {
+ if (POINTER_TYPE_P (TREE_TYPE (decl))
+ && TYPE_RESTRICT (TREE_TYPE (decl)))
+ make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
+ make_copy_constraint (vi, nonlocal_id);
+ }
stats.total_vars++;
if (use_field_sensitive
&& VEC_length (fieldoff_s, fieldstack) > 1
&& VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
{
- unsigned int newindex = VEC_length (varinfo_t, varmap);
fieldoff_s *fo = NULL;
bool notokay = false;
unsigned int i;
vi->size = ~0;
vi->is_full_var = true;
VEC_free (fieldoff_s, heap, fieldstack);
- return index;
+ return vi->id;
}
vi->size = fo->size;
vi->offset = fo->offset;
+ vi->may_have_pointers = fo->may_have_pointers;
for (i = VEC_length (fieldoff_s, fieldstack) - 1;
i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
i--)
const char *newname = "NULL";
char *tempname;
- newindex = VEC_length (varinfo_t, varmap);
if (dump_file)
{
asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
newname = ggc_strdup (tempname);
free (tempname);
}
- newvi = new_var_info (decl, newindex, newname);
+ newvi = new_var_info (decl, newname);
newvi->offset = fo->offset;
newvi->size = fo->size;
newvi->fullsize = vi->fullsize;
+ newvi->may_have_pointers = fo->may_have_pointers;
insert_into_field_list (vi, newvi);
- VEC_safe_push (varinfo_t, heap, varmap, newvi);
- if (is_global && (!flag_whole_program || !in_ipa_mode)
- && fo->may_have_pointers)
- make_constraint_from (newvi, escaped_id);
+ if (newvi->is_global_var
+ && (!flag_whole_program || !in_ipa_mode)
+ && newvi->may_have_pointers)
+ {
+ if (fo->only_restrict_pointers)
+ make_constraint_from_restrict (newvi, "GLOBAL_RESTRICT");
+ make_copy_constraint (newvi, nonlocal_id);
+ }
stats.total_vars++;
}
VEC_free (fieldoff_s, heap, fieldstack);
- return index;
+ return vi->id;
}
/* Print out the points-to solution for VAR to FILE. */
-void
+static void
dump_solution_for_var (FILE *file, unsigned int var)
{
varinfo_t vi = get_varinfo (var);
{
fprintf (file, "%s ", get_varinfo (i)->name);
}
- fprintf (file, "}");
- if (vi->no_tbaa_pruning)
- fprintf (file, " no-tbaa-pruning");
- fprintf (file, "\n");
+ fprintf (file, "}\n");
}
}
intra_create_variable_infos (void)
{
tree t;
- struct constraint_expr lhs, rhs;
/* For each incoming pointer argument arg, create the constraint ARG
= NONLOCAL or a dummy variable if flag_argument_noalias is set. */
if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
{
varinfo_t vi;
- tree heapvar = heapvar_lookup (t);
+ var_ann_t ann;
- lhs.offset = 0;
- lhs.type = SCALAR;
- lhs.var = get_vi_for_tree (t)->id;
-
- if (heapvar == NULL_TREE)
+ vi = make_constraint_from_heapvar (get_vi_for_tree (t),
+ "PARM_NOALIAS");
+ ann = get_var_ann (vi->decl);
+ if (flag_argument_noalias == 1)
{
- var_ann_t ann;
- heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
- "PARM_NOALIAS");
- DECL_EXTERNAL (heapvar) = 1;
- if (gimple_referenced_vars (cfun))
- add_referenced_var (heapvar);
-
- heapvar_insert (t, heapvar);
-
- ann = get_var_ann (heapvar);
- if (flag_argument_noalias == 1)
- ann->noalias_state = NO_ALIAS;
- else if (flag_argument_noalias == 2)
- ann->noalias_state = NO_ALIAS_GLOBAL;
- else if (flag_argument_noalias == 3)
- ann->noalias_state = NO_ALIAS_ANYTHING;
- else
- gcc_unreachable ();
+ ann->noalias_state = NO_ALIAS;
+ make_copy_constraint (vi, nonlocal_id);
}
-
- vi = get_vi_for_tree (heapvar);
- vi->is_artificial_var = 1;
- vi->is_heap_var = 1;
- rhs.var = vi->id;
- rhs.type = ADDRESSOF;
- rhs.offset = 0;
- for (p = get_varinfo (lhs.var); p; p = p->next)
+ else if (flag_argument_noalias == 2)
{
- struct constraint_expr temp = lhs;
- temp.var = p->id;
- process_constraint (new_constraint (temp, rhs));
+ ann->noalias_state = NO_ALIAS_GLOBAL;
+ make_constraint_from (vi, vi->id);
}
+ else if (flag_argument_noalias == 3)
+ {
+ ann->noalias_state = NO_ALIAS_ANYTHING;
+ make_constraint_from (vi, vi->id);
+ }
+ else
+ gcc_unreachable ();
}
else
{
for (p = arg_vi; p; p = p->next)
make_constraint_from (p, nonlocal_id);
}
+ if (POINTER_TYPE_P (TREE_TYPE (t))
+ && TYPE_RESTRICT (TREE_TYPE (t)))
+ make_constraint_from_restrict (get_vi_for_tree (t), "PARM_RESTRICT");
+ }
+
+ /* Add a constraint for a result decl that is passed by reference. */
+ if (DECL_RESULT (cfun->decl)
+ && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
+ {
+ varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
+
+ for (p = result_vi; p; p = p->next)
+ make_constraint_from (p, nonlocal_id);
+ }
+
+ /* Add a constraint for the incoming static chain parameter. */
+ if (cfun->static_chain_decl != NULL_TREE)
+ {
+ varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
+
+ for (p = chain_vi; p; p = p->next)
+ make_constraint_from (p, nonlocal_id);
}
}
}
-/* Set bits in INTO corresponding to the variable uids in solution set
- FROM, which came from variable PTR.
- For variables that are actually dereferenced, we also use type
- based alias analysis to prune the points-to sets.
- IS_DEREFED is true if PTR was directly dereferenced, which we use to
- help determine whether we are we are allowed to prune using TBAA.
- If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
- the from set. */
+/* Set bits in INTO corresponding to the variable uids in solution set FROM. */
-static void
-set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
- bool no_tbaa_pruning)
+static void
+set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
{
unsigned int i;
bitmap_iterator bi;
- gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
-
EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
{
varinfo_t vi = get_varinfo (i);
|| TREE_CODE (vi->decl) == PARM_DECL
|| TREE_CODE (vi->decl) == RESULT_DECL)
{
- /* Just add VI->DECL to the alias set.
- Don't type prune artificial vars or points-to sets
- for pointers that have not been dereferenced or with
- type-based pruning disabled. */
- if (vi->is_artificial_var
- || !is_derefed
- || no_tbaa_pruning)
- bitmap_set_bit (into, DECL_UID (vi->decl));
- else
- {
- alias_set_type var_alias_set, mem_alias_set;
- var_alias_set = get_alias_set (vi->decl);
- mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
- if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set,
- vi->decl, var_alias_set, true))
- bitmap_set_bit (into, DECL_UID (vi->decl));
- }
+ /* Add the decl to the points-to set. Note that the points-to
+ set contains global variables. */
+ bitmap_set_bit (into, DECL_UID (vi->decl));
+ if (vi->is_global_var)
+ pt->vars_contains_global = true;
}
}
}
static bool have_alias_info = false;
-/* Given a pointer variable P, fill in its points-to set, or return
- false if we can't.
- Rather than return false for variables that point-to anything, we
- instead find the corresponding SMT, and merge in its aliases. In
- addition to these aliases, we also set the bits for the SMT's
- themselves and their subsets, as SMT's are still in use by
- non-SSA_NAME's, and pruning may eliminate every one of their
- aliases. In such a case, if we did not include the right set of
- SMT's in the points-to set of the variable, we'd end up with
- statements that do not conflict but should. */
+/* Compute the points-to solution *PT for the variable VI. */
-bool
+static void
+find_what_var_points_to (varinfo_t vi, struct pt_solution *pt)
+{
+ unsigned int i;
+ bitmap_iterator bi;
+ bitmap finished_solution;
+ bitmap result;
+
+ memset (pt, 0, sizeof (struct pt_solution));
+
+ /* This variable may have been collapsed, let's get the real
+ variable. */
+ vi = get_varinfo (find (vi->id));
+
+ /* Translate artificial variables into SSA_NAME_PTR_INFO
+ attributes. */
+ EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
+ {
+ /* This inner VI intentionally shadows the function parameter;
+ it refers to the member of the solution set being visited. */
+ varinfo_t vi = get_varinfo (i);
+
+ if (vi->is_artificial_var)
+ {
+ if (vi->id == nothing_id)
+ pt->null = 1;
+ else if (vi->id == escaped_id)
+ pt->escaped = 1;
+ else if (vi->id == callused_id)
+ gcc_unreachable ();
+ else if (vi->id == nonlocal_id)
+ pt->nonlocal = 1;
+ else if (vi->is_heap_var)
+ /* We represent heapvars in the points-to set properly. */
+ ;
+ else if (vi->id == readonly_id)
+ /* Nobody cares. */
+ ;
+ else if (vi->id == anything_id
+ || vi->id == integer_id)
+ pt->anything = 1;
+ }
+ if (vi->is_restrict_var)
+ pt->vars_contains_restrict = true;
+ }
+
+ /* Instead of doing extra work, simply do not create
+ elaborate points-to information for pt_anything pointers. */
+ if (pt->anything
+ && (vi->is_artificial_var
+ || !pt->vars_contains_restrict))
+ return;
+
+ /* Share the final set of variables when possible. */
+ finished_solution = BITMAP_GGC_ALLOC ();
+ stats.points_to_sets_created++;
+
+ set_uids_in_ptset (finished_solution, vi->solution, pt);
+ result = shared_bitmap_lookup (finished_solution);
+ /* If an identical set was already interned, reuse it and drop
+ the one we just built; otherwise intern ours. */
+ if (!result)
+ {
+ shared_bitmap_add (finished_solution);
+ pt->vars = finished_solution;
+ }
+ else
+ {
+ pt->vars = result;
+ bitmap_clear (finished_solution);
+ }
+}
+
+/* Given a pointer variable P, fill in its points-to set. */
+
+static void
find_what_p_points_to (tree p)
{
+ struct ptr_info_def *pi;
tree lookup_p = p;
varinfo_t vi;
- if (!have_alias_info)
- return false;
-
/* For parameters, get at the points-to set for the actual parm
decl. */
if (TREE_CODE (p) == SSA_NAME
lookup_p = SSA_NAME_VAR (p);
vi = lookup_vi_for_tree (lookup_p);
- if (vi)
- {
- if (vi->is_artificial_var)
- return false;
+ if (!vi)
+ return;
- /* See if this is a field or a structure. */
- if (vi->size != vi->fullsize)
- {
- /* Nothing currently asks about structure fields directly,
- but when they do, we need code here to hand back the
- points-to set. */
- return false;
- }
- else
- {
- struct ptr_info_def *pi = get_ptr_info (p);
- unsigned int i;
- bitmap_iterator bi;
- bool was_pt_anything = false;
- bitmap finished_solution;
- bitmap result;
+ pi = get_ptr_info (p);
+ find_what_var_points_to (vi, &pi->pt);
+}
- if (!pi->memory_tag_needed)
- return false;
- /* This variable may have been collapsed, let's get the real
- variable. */
- vi = get_varinfo (find (vi->id));
+/* Query statistics for points-to solutions. */
- /* Translate artificial variables into SSA_NAME_PTR_INFO
- attributes. */
- EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
- {
- varinfo_t vi = get_varinfo (i);
+static struct {
+ unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
+ unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
+ unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
+ unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
+} pta_stats;
- if (vi->is_artificial_var)
- {
- /* FIXME. READONLY should be handled better so that
- flow insensitive aliasing can disregard writable
- aliases. */
- if (vi->id == nothing_id)
- pi->pt_null = 1;
- else if (vi->id == anything_id
- || vi->id == nonlocal_id
- || vi->id == escaped_id
- || vi->id == callused_id)
- was_pt_anything = 1;
- else if (vi->id == readonly_id)
- was_pt_anything = 1;
- else if (vi->id == integer_id)
- was_pt_anything = 1;
- else if (vi->is_heap_var)
- pi->pt_global_mem = 1;
- }
- }
+/* Dump the points-to query statistics gathered in pta_stats to
+ stream S. */
+
+void
+dump_pta_stats (FILE *s)
+{
+ fprintf (s, "\nPTA query stats:\n");
+ fprintf (s, " pt_solution_includes: "
+ HOST_WIDE_INT_PRINT_DEC" disambiguations, "
+ HOST_WIDE_INT_PRINT_DEC" queries\n",
+ pta_stats.pt_solution_includes_no_alias,
+ pta_stats.pt_solution_includes_no_alias
+ + pta_stats.pt_solution_includes_may_alias);
+ fprintf (s, " pt_solutions_intersect: "
+ HOST_WIDE_INT_PRINT_DEC" disambiguations, "
+ HOST_WIDE_INT_PRINT_DEC" queries\n",
+ pta_stats.pt_solutions_intersect_no_alias,
+ pta_stats.pt_solutions_intersect_no_alias
+ + pta_stats.pt_solutions_intersect_may_alias);
+}
- /* Instead of doing extra work, simply do not create
- points-to information for pt_anything pointers. This
- will cause the operand scanner to fall back to the
- type-based SMT and its aliases. Which is the best
- we could do here for the points-to set as well. */
- if (was_pt_anything)
- return false;
- /* Share the final set of variables when possible. */
- finished_solution = BITMAP_GGC_ALLOC ();
- stats.points_to_sets_created++;
+/* Reset the points-to solution *PT to a conservative default
+ (point to anything). */
- set_uids_in_ptset (p, finished_solution, vi->solution,
- pi->is_dereferenced,
- vi->no_tbaa_pruning);
- result = shared_bitmap_lookup (finished_solution);
+void
+pt_solution_reset (struct pt_solution *pt)
+{
+ /* Clear every flag and the vars bitmap, then mark the solution as
+ pointing to anything -- the conservative default. */
+ memset (pt, 0, sizeof (struct pt_solution));
+ pt->anything = true;
+}
- if (!result)
- {
- shared_bitmap_add (finished_solution);
- pi->pt_vars = finished_solution;
- }
- else
- {
- pi->pt_vars = result;
- bitmap_clear (finished_solution);
- }
+/* Set the points-to solution *PT to point only to the variables
+ in VARS. */
- if (bitmap_empty_p (pi->pt_vars))
- pi->pt_vars = NULL;
+void
+pt_solution_set (struct pt_solution *pt, bitmap vars)
+{
+ bitmap_iterator bi;
+ unsigned i;
- return true;
+ memset (pt, 0, sizeof (struct pt_solution));
+ pt->vars = vars;
+ EXECUTE_IF_SET_IN_BITMAP (vars, 0, i, bi)
+ {
+ tree var = referenced_var_lookup (i);
+ if (is_global_var (var))
+ {
+ pt->vars_contains_global = true;
+ break;
}
}
+}
- return false;
+/* Return true if the points-to solution *PT is empty. */
+
+static bool
+pt_solution_empty_p (struct pt_solution *pt)
+{
+ if (pt->anything
+ || pt->nonlocal)
+ return false;
+
+ if (pt->vars
+ && !bitmap_empty_p (pt->vars))
+ return false;
+
+ /* If the solution includes ESCAPED, check if that is empty. */
+ if (pt->escaped
+ && !pt_solution_empty_p (&cfun->gimple_df->escaped))
+ return false;
+
+ return true;
}
-/* Mark the ESCAPED solution as call clobbered. Returns false if
- pt_anything escaped which needs all locals that have their address
- taken marked call clobbered as well. */
+/* Return true if the points-to solution *PT includes global memory. */
bool
-clobber_what_escaped (void)
+pt_solution_includes_global (struct pt_solution *pt)
{
- varinfo_t vi;
- unsigned int i;
- bitmap_iterator bi;
+ if (pt->anything
+ || pt->nonlocal
+ || pt->vars_contains_global)
+ return true;
- if (!have_alias_info)
- return false;
+ if (pt->escaped)
+ return pt_solution_includes_global (&cfun->gimple_df->escaped);
- /* This variable may have been collapsed, let's get the real
- variable for escaped_id. */
- vi = get_varinfo (find (escaped_id));
+ return false;
+}
- /* If call-used memory escapes we need to include it in the
- set of escaped variables. This can happen if a pure
- function returns a pointer and this pointer escapes. */
- if (bitmap_bit_p (vi->solution, callused_id))
- {
- varinfo_t cu_vi = get_varinfo (find (callused_id));
- bitmap_ior_into (vi->solution, cu_vi->solution);
- }
+/* Return true if the points-to solution *PT includes the variable
+ declaration DECL. */
- /* Mark variables in the solution call-clobbered. */
- EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
- {
- varinfo_t vi = get_varinfo (i);
+static bool
+pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
+{
+ if (pt->anything)
+ return true;
- if (vi->is_artificial_var)
- {
- /* nothing_id and readonly_id do not cause any
- call clobber ops. For anything_id and integer_id
- we need to clobber all addressable vars. */
- if (vi->id == anything_id
- || vi->id == integer_id)
- return false;
- }
+ if (pt->nonlocal
+ && is_global_var (decl))
+ return true;
- /* Only artificial heap-vars are further interesting. */
- if (vi->is_artificial_var && !vi->is_heap_var)
- continue;
+ if (pt->vars
+ && bitmap_bit_p (pt->vars, DECL_UID (decl)))
+ return true;
- if ((TREE_CODE (vi->decl) == VAR_DECL
- || TREE_CODE (vi->decl) == PARM_DECL
- || TREE_CODE (vi->decl) == RESULT_DECL)
- && !unmodifiable_var_p (vi->decl))
- mark_call_clobbered (vi->decl, ESCAPE_TO_CALL);
- }
+ /* If the solution includes ESCAPED, check it. */
+ if (pt->escaped
+ && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
+ return true;
- return true;
+ return false;
}
-/* Compute the call-used variables. */
-
-void
-compute_call_used_vars (void)
+bool
+pt_solution_includes (struct pt_solution *pt, const_tree decl)
{
- varinfo_t vi;
- unsigned int i;
- bitmap_iterator bi;
- bool has_anything_id = false;
+ bool res = pt_solution_includes_1 (pt, decl);
+ if (res)
+ ++pta_stats.pt_solution_includes_may_alias;
+ else
+ ++pta_stats.pt_solution_includes_no_alias;
+ return res;
+}
- if (!have_alias_info)
- return;
+/* Return true if both points-to solutions PT1 and PT2 have a non-empty
+ intersection. */
- /* This variable may have been collapsed, let's get the real
- variable for escaped_id. */
- vi = get_varinfo (find (callused_id));
+static bool
+pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
+{
+ if (pt1->anything || pt2->anything)
+ return true;
- /* Mark variables in the solution call-clobbered. */
- EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
+ /* If either points to unknown global memory and the other points to
+ any global memory they alias. */
+ if ((pt1->nonlocal
+ && (pt2->nonlocal
+ || pt2->vars_contains_global))
+ || (pt2->nonlocal
+ && pt1->vars_contains_global))
+ return true;
+
+ /* Check the escaped solution if required. */
+ if ((pt1->escaped || pt2->escaped)
+ && !pt_solution_empty_p (&cfun->gimple_df->escaped))
{
- varinfo_t vi = get_varinfo (i);
+ /* If both point to escaped memory and that solution
+ is not empty they alias. */
+ if (pt1->escaped && pt2->escaped)
+ return true;
- if (vi->is_artificial_var)
- {
- /* For anything_id and integer_id we need to make
- all local addressable vars call-used. */
- if (vi->id == anything_id
- || vi->id == integer_id)
- has_anything_id = true;
- }
+ /* If either points to escaped memory see if the escaped solution
+ intersects with the other. */
+ if ((pt1->escaped
+ && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
+ || (pt2->escaped
+ && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
+ return true;
+ }
- /* Only artificial heap-vars are further interesting. */
- if (vi->is_artificial_var && !vi->is_heap_var)
- continue;
+ /* Now both pointers alias if their points-to solution intersects. */
+ return (pt1->vars
+ && pt2->vars
+ && bitmap_intersect_p (pt1->vars, pt2->vars));
+}
- if ((TREE_CODE (vi->decl) == VAR_DECL
- || TREE_CODE (vi->decl) == PARM_DECL
- || TREE_CODE (vi->decl) == RESULT_DECL)
- && !unmodifiable_var_p (vi->decl))
- bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl));
+/* Wrapper around pt_solutions_intersect_1 that additionally updates
+ the PTA query statistics. */
+
+bool
+pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
+{
+ bool res = pt_solutions_intersect_1 (pt1, pt2);
+ if (res)
+ ++pta_stats.pt_solutions_intersect_may_alias;
+ else
+ ++pta_stats.pt_solutions_intersect_no_alias;
+ return res;
+}
+
+/* Return true if both points-to solutions PT1 and PT2 for two restrict
+ qualified pointers are possibly based on the same pointer. */
+
+bool
+pt_solutions_same_restrict_base (struct pt_solution *pt1,
+ struct pt_solution *pt2)
+{
+ /* If we deal with points-to solutions of two restrict qualified
+ pointers solely rely on the pointed-to variable bitmap intersection.
+ For two pointers that are based on each other the bitmaps will
+ intersect. */
+ if (pt1->vars_contains_restrict
+ && pt2->vars_contains_restrict)
+ {
+ gcc_assert (pt1->vars && pt2->vars);
+ return bitmap_intersect_p (pt1->vars, pt2->vars);
}
- /* If anything is call-used, add all addressable locals to the set. */
- if (has_anything_id)
- bitmap_ior_into (gimple_call_used_vars (cfun),
- gimple_addressable_vars (cfun));
+ return true;
}
/* Dump points-to information to OUTFILE. */
-void
+static void
dump_sa_points_to_info (FILE *outfile)
{
unsigned int i;
init_base_vars (void)
{
struct constraint_expr lhs, rhs;
+ varinfo_t var_anything;
+ varinfo_t var_nothing;
+ varinfo_t var_readonly;
+ varinfo_t var_escaped;
+ varinfo_t var_nonlocal;
+ varinfo_t var_callused;
+ varinfo_t var_storedanything;
+ varinfo_t var_integer;
/* Create the NULL variable, used to represent that a variable points
to NULL. */
- nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
- var_nothing = new_var_info (nothing_tree, nothing_id, "NULL");
- insert_vi_for_tree (nothing_tree, var_nothing);
+ var_nothing = new_var_info (NULL_TREE, "NULL");
+ gcc_assert (var_nothing->id == nothing_id);
var_nothing->is_artificial_var = 1;
var_nothing->offset = 0;
var_nothing->size = ~0;
var_nothing->fullsize = ~0;
var_nothing->is_special_var = 1;
- VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
/* Create the ANYTHING variable, used to represent that a variable
points to some unknown piece of memory. */
- anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
- var_anything = new_var_info (anything_tree, anything_id, "ANYTHING");
- insert_vi_for_tree (anything_tree, var_anything);
+ var_anything = new_var_info (NULL_TREE, "ANYTHING");
+ gcc_assert (var_anything->id == anything_id);
var_anything->is_artificial_var = 1;
var_anything->size = ~0;
var_anything->offset = 0;
/* Anything points to anything. This makes deref constraints just
work in the presence of linked list and other p = *p type loops,
by saying that *ANYTHING = ANYTHING. */
- VEC_safe_push (varinfo_t, heap, varmap, var_anything);
lhs.type = SCALAR;
lhs.var = anything_id;
lhs.offset = 0;
/* Create the READONLY variable, used to represent that a variable
points to readonly memory. */
- readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
- var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY");
+ var_readonly = new_var_info (NULL_TREE, "READONLY");
+ gcc_assert (var_readonly->id == readonly_id);
var_readonly->is_artificial_var = 1;
var_readonly->offset = 0;
var_readonly->size = ~0;
var_readonly->fullsize = ~0;
var_readonly->next = NULL;
var_readonly->is_special_var = 1;
- insert_vi_for_tree (readonly_tree, var_readonly);
- VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
/* readonly memory points to anything, in order to make deref
easier. In reality, it points to anything the particular
/* Create the ESCAPED variable, used to represent the set of escaped
memory. */
- escaped_tree = create_tmp_var_raw (void_type_node, "ESCAPED");
- var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED");
- insert_vi_for_tree (escaped_tree, var_escaped);
+ var_escaped = new_var_info (NULL_TREE, "ESCAPED");
+ gcc_assert (var_escaped->id == escaped_id);
var_escaped->is_artificial_var = 1;
var_escaped->offset = 0;
var_escaped->size = ~0;
var_escaped->fullsize = ~0;
var_escaped->is_special_var = 0;
- VEC_safe_push (varinfo_t, heap, varmap, var_escaped);
- gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped);
+
+ /* Create the NONLOCAL variable, used to represent the set of nonlocal
+ memory. */
+ var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
+ gcc_assert (var_nonlocal->id == nonlocal_id);
+ var_nonlocal->is_artificial_var = 1;
+ var_nonlocal->offset = 0;
+ var_nonlocal->size = ~0;
+ var_nonlocal->fullsize = ~0;
+ var_nonlocal->is_special_var = 1;
/* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
lhs.type = SCALAR;
rhs.offset = 0;
process_constraint (new_constraint (lhs, rhs));
- /* Create the NONLOCAL variable, used to represent the set of nonlocal
- memory. */
- nonlocal_tree = create_tmp_var_raw (void_type_node, "NONLOCAL");
- var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL");
- insert_vi_for_tree (nonlocal_tree, var_nonlocal);
- var_nonlocal->is_artificial_var = 1;
- var_nonlocal->offset = 0;
- var_nonlocal->size = ~0;
- var_nonlocal->fullsize = ~0;
- var_nonlocal->is_special_var = 1;
- VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal);
+ /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
+ whole variable escapes. */
+ lhs.type = SCALAR;
+ lhs.var = escaped_id;
+ lhs.offset = 0;
+ rhs.type = SCALAR;
+ rhs.var = escaped_id;
+ rhs.offset = UNKNOWN_OFFSET;
+ process_constraint (new_constraint (lhs, rhs));
+
+ /* *ESCAPED = NONLOCAL. This is true because we have to assume
+ everything pointed to by escaped points to what global memory can
+ point to. */
+ lhs.type = DEREF;
+ lhs.var = escaped_id;
+ lhs.offset = 0;
+ rhs.type = SCALAR;
+ rhs.var = nonlocal_id;
+ rhs.offset = 0;
+ process_constraint (new_constraint (lhs, rhs));
- /* Nonlocal memory points to escaped (which includes nonlocal),
- in order to make deref easier. */
+ /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
+ global memory may point to global memory and escaped memory. */
lhs.type = SCALAR;
lhs.var = nonlocal_id;
lhs.offset = 0;
rhs.type = ADDRESSOF;
+ rhs.var = nonlocal_id;
+ rhs.offset = 0;
+ process_constraint (new_constraint (lhs, rhs));
+ rhs.type = ADDRESSOF;
rhs.var = escaped_id;
rhs.offset = 0;
process_constraint (new_constraint (lhs, rhs));
/* Create the CALLUSED variable, used to represent the set of call-used
memory. */
- callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED");
- var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
- insert_vi_for_tree (callused_tree, var_callused);
+ var_callused = new_var_info (NULL_TREE, "CALLUSED");
+ gcc_assert (var_callused->id == callused_id);
var_callused->is_artificial_var = 1;
var_callused->offset = 0;
var_callused->size = ~0;
var_callused->fullsize = ~0;
var_callused->is_special_var = 0;
- VEC_safe_push (varinfo_t, heap, varmap, var_callused);
/* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc. */
lhs.type = SCALAR;
rhs.offset = 0;
process_constraint (new_constraint (lhs, rhs));
+ /* CALLUSED = CALLUSED + UNKNOWN, because if a sub-field is call-used the
+ whole variable is call-used. */
+ lhs.type = SCALAR;
+ lhs.var = callused_id;
+ lhs.offset = 0;
+ rhs.type = SCALAR;
+ rhs.var = callused_id;
+ rhs.offset = UNKNOWN_OFFSET;
+ process_constraint (new_constraint (lhs, rhs));
+
+ /* Create the STOREDANYTHING variable, used to represent the set of
+ variables stored to *ANYTHING. */
+ var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
+ gcc_assert (var_storedanything->id == storedanything_id);
+ var_storedanything->is_artificial_var = 1;
+ var_storedanything->offset = 0;
+ var_storedanything->size = ~0;
+ var_storedanything->fullsize = ~0;
+ var_storedanything->is_special_var = 0;
+
/* Create the INTEGER variable, used to represent that a variable points
- to an INTEGER. */
- integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
- var_integer = new_var_info (integer_tree, integer_id, "INTEGER");
- insert_vi_for_tree (integer_tree, var_integer);
+ to what an INTEGER "points to". */
+ var_integer = new_var_info (NULL_TREE, "INTEGER");
+ gcc_assert (var_integer->id == integer_id);
var_integer->is_artificial_var = 1;
var_integer->size = ~0;
var_integer->fullsize = ~0;
var_integer->offset = 0;
var_integer->next = NULL;
var_integer->is_special_var = 1;
- VEC_safe_push (varinfo_t, heap, varmap, var_integer);
/* INTEGER = ANYTHING, because we don't know where a dereference of
a random integer will point to. */
rhs.var = anything_id;
rhs.offset = 0;
process_constraint (new_constraint (lhs, rhs));
-
- /* *ESCAPED = &ESCAPED. This is true because we have to assume
- everything pointed to by escaped can also point to escaped. */
- lhs.type = DEREF;
- lhs.var = escaped_id;
- lhs.offset = 0;
- rhs.type = ADDRESSOF;
- rhs.var = escaped_id;
- rhs.offset = 0;
- process_constraint (new_constraint (lhs, rhs));
-
- /* *ESCAPED = &NONLOCAL. This is true because we have to assume
- everything pointed to by escaped can also point to nonlocal. */
- lhs.type = DEREF;
- lhs.var = escaped_id;
- lhs.offset = 0;
- rhs.type = ADDRESSOF;
- rhs.var = nonlocal_id;
- rhs.offset = 0;
- process_constraint (new_constraint (lhs, rhs));
}
/* Initialize things necessary to perform PTA */
bitmap_obstack_release (&predbitmap_obstack);
}
-/* Compute the set of variables we can't TBAA prune. */
+/* Initialize the heapvar for statement mapping. */
static void
-compute_tbaa_pruning (void)
+init_alias_heapvars (void)
{
- unsigned int size = VEC_length (varinfo_t, varmap);
- unsigned int i;
- bool any;
-
- changed_count = 0;
- changed = sbitmap_alloc (size);
- sbitmap_zero (changed);
-
- /* Mark all initial no_tbaa_pruning nodes as changed. */
- any = false;
- for (i = 0; i < size; ++i)
- {
- varinfo_t ivi = get_varinfo (i);
-
- if (find (i) == i && ivi->no_tbaa_pruning)
- {
- any = true;
- if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
- || VEC_length (constraint_t, graph->complex[i]) > 0)
- {
- SET_BIT (changed, i);
- ++changed_count;
- }
- }
- }
-
- while (changed_count > 0)
- {
- struct topo_info *ti = init_topo_info ();
- ++stats.iterations;
-
- compute_topo_order (graph, ti);
-
- while (VEC_length (unsigned, ti->topo_order) != 0)
- {
- bitmap_iterator bi;
-
- i = VEC_pop (unsigned, ti->topo_order);
-
- /* If this variable is not a representative, skip it. */
- if (find (i) != i)
- continue;
-
- /* If the node has changed, we need to process the complex
- constraints and outgoing edges again. */
- if (TEST_BIT (changed, i))
- {
- unsigned int j;
- constraint_t c;
- VEC(constraint_t,heap) *complex = graph->complex[i];
-
- RESET_BIT (changed, i);
- --changed_count;
-
- /* Process the complex copy constraints. */
- for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
- {
- if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
- {
- varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
-
- if (!lhsvi->no_tbaa_pruning)
- {
- lhsvi->no_tbaa_pruning = true;
- if (!TEST_BIT (changed, lhsvi->id))
- {
- SET_BIT (changed, lhsvi->id);
- ++changed_count;
- }
- }
- }
- }
-
- /* Propagate to all successors. */
- EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
- {
- unsigned int to = find (j);
- varinfo_t tovi = get_varinfo (to);
-
- /* Don't propagate to ourselves. */
- if (to == i)
- continue;
-
- if (!tovi->no_tbaa_pruning)
- {
- tovi->no_tbaa_pruning = true;
- if (!TEST_BIT (changed, to))
- {
- SET_BIT (changed, to);
- ++changed_count;
- }
- }
- }
- }
- }
-
- free_topo_info (ti);
- }
-
- sbitmap_free (changed);
-
- if (any)
- {
- for (i = 0; i < size; ++i)
- {
- varinfo_t ivi = get_varinfo (i);
- varinfo_t ivip = get_varinfo (find (i));
-
- if (ivip->no_tbaa_pruning)
- {
- tree var = ivi->decl;
-
- if (TREE_CODE (var) == SSA_NAME)
- var = SSA_NAME_VAR (var);
+ if (!heapvar_for_stmt)
+ heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, heapvar_map_eq,
+ NULL);
+}
- if (POINTER_TYPE_P (TREE_TYPE (var)))
- {
- DECL_NO_TBAA_P (var) = 1;
+/* Delete the heapvar for statement mapping. */
- /* Tell the RTL layer that this pointer can alias
- anything. */
- DECL_POINTER_ALIAS_SET (var) = 0;
- }
- }
- }
- }
+void
+delete_alias_heapvars (void)
+{
+ /* Guard against the table never having been created (no PTA run).  */
+ if (heapvar_for_stmt)
+ htab_delete (heapvar_for_stmt);
+ /* Clear the global so init_alias_heapvars can recreate the table.  */
+ heapvar_for_stmt = NULL;
+}
/* Create points-to sets for the current function. See the comments
at the start of the file for an algorithmic overview. */
-void
+static void
compute_points_to_sets (void)
{
struct scc_info *si;
basic_block bb;
+ unsigned i;
+ varinfo_t vi;
timevar_push (TV_TREE_PTA);
find_func_aliases (phi);
}
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
find_func_aliases (stmt);
-
- /* The information in GIMPLE_CHANGE_DYNAMIC_TYPE statements
- has now been captured, and we can remove them. */
- if (gimple_code (stmt) == GIMPLE_CHANGE_DYNAMIC_TYPE)
- gsi_remove (&gsi, true);
- else
- gsi_next (&gsi);
}
}
-
if (dump_file)
{
fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
fprintf (dump_file, "Rewriting constraints and unifying "
"variables\n");
rewrite_constraints (graph, si);
- free_var_substitution_info (si);
build_succ_graph ();
+ free_var_substitution_info (si);
if (dump_file && (dump_flags & TDF_GRAPH))
dump_constraint_graph (dump_file);
solve_graph (graph);
- compute_tbaa_pruning ();
-
if (dump_file)
dump_sa_points_to_info (dump_file);
- have_alias_info = true;
+ /* Compute the points-to sets for ESCAPED and CALLUSED used for
+ call-clobber analysis. */
+ find_what_var_points_to (get_varinfo (escaped_id),
+ &cfun->gimple_df->escaped);
+ find_what_var_points_to (get_varinfo (callused_id),
+ &cfun->gimple_df->callused);
+
+ /* Make sure the ESCAPED solution (which is used as placeholder in
+ other solutions) does not reference itself. This simplifies
+ points-to solution queries. */
+ cfun->gimple_df->escaped.escaped = 0;
+
+ /* Mark escaped HEAP variables as global. */
+ for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); ++i)
+ if (vi->is_heap_var
+ && !vi->is_restrict_var
+ && !vi->is_global_var)
+ DECL_EXTERNAL (vi->decl) = vi->is_global_var
+ = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
+
+ /* Compute the points-to sets for pointer SSA_NAMEs. */
+ for (i = 0; i < num_ssa_names; ++i)
+ {
+ tree ptr = ssa_name (i);
+ if (ptr
+ && POINTER_TYPE_P (TREE_TYPE (ptr)))
+ find_what_p_points_to (ptr);
+ }
timevar_pop (TV_TREE_PTA);
+
+ have_alias_info = true;
}
/* Delete created points-to sets. */
-void
+static void
delete_points_to_sets (void)
{
unsigned int i;
have_alias_info = false;
}
+
+/* Compute points-to information for every SSA_NAME pointer in the
+ current function and compute the transitive closure of escaped
+ variables to re-initialize the call-clobber states of local variables. */
+
+unsigned int
+compute_may_aliases (void)
+{
+ /* For each pointer P_i, determine the sets of variables that P_i may
+ point-to. Compute the reachability set of escaped and call-used
+ variables. */
+ compute_points_to_sets ();
+
+ /* Debugging dumps. */
+ if (dump_file)
+ {
+ dump_alias_info (dump_file);
+
+ if (dump_flags & TDF_DETAILS)
+ dump_referenced_vars (dump_file);
+ }
+
+ /* Deallocate memory used by aliasing data structures and the internal
+ points-to solution. */
+ delete_points_to_sets ();
+
+ /* Points-to analysis must not leave the function needing an SSA
+ update.  */
+ gcc_assert (!need_ssa_update_p (cfun));
+
+ /* No extra TODO flags for the pass manager.  */
+ return 0;
+}
+
+/* Gate function: run the points-to passes only when points-to analysis
+   is enabled (flag_tree_pta).  */
+
+static bool
+gate_tree_pta (void)
+{
+ return flag_tree_pta;
+}
+
+/* A dummy pass to cause points-to information to be computed via
+   TODO_rebuild_alias.  The execute hook is NULL on purpose: the pass
+   manager's TODO machinery performs the actual work.  */
+
+struct gimple_opt_pass pass_build_alias =
+{
+ {
+ GIMPLE_PASS,
+ "alias", /* name */
+ gate_tree_pta, /* gate */
+ NULL, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_NONE, /* tv_id */
+ PROP_cfg | PROP_ssa, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
+ }
+};
+
+/* A dummy pass to cause points-to information to be computed via
+   TODO_rebuild_alias.  Identical in structure to pass_build_alias apart
+   from the name; presumably the early-optimization variant — confirm
+   against its position in passes.c.  */
+
+struct gimple_opt_pass pass_build_ealias =
+{
+ {
+ GIMPLE_PASS,
+ "ealias", /* name */
+ gate_tree_pta, /* gate */
+ NULL, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_NONE, /* tv_id */
+ PROP_cfg | PROP_ssa, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
+ }
+};
+
+
/* Return true if we should execute IPA PTA. */
static bool
gate_ipa_pta (void)
for (node = cgraph_nodes; node; node = node->next)
{
- if (!node->analyzed || cgraph_is_master_clone (node))
- {
- unsigned int varid;
+ unsigned int varid;
- varid = create_function_info_for (node->decl,
- cgraph_node_name (node));
- if (node->local.externally_visible)
- {
- varinfo_t fi = get_varinfo (varid);
- for (; fi; fi = fi->next)
- make_constraint_from (fi, anything_id);
- }
+ varid = create_function_info_for (node->decl,
+ cgraph_node_name (node));
+ if (node->local.externally_visible)
+ {
+ varinfo_t fi = get_varinfo (varid);
+ for (; fi; fi = fi->next)
+ make_constraint_from (fi, anything_id);
}
}
for (node = cgraph_nodes; node; node = node->next)
{
- if (node->analyzed && cgraph_is_master_clone (node))
+ if (node->analyzed)
{
struct function *func = DECL_STRUCT_FUNCTION (node->decl);
basic_block bb;
build_pred_graph ();
si = perform_var_substitution (graph);
rewrite_constraints (graph, si);
- free_var_substitution_info (si);
build_succ_graph ();
+ free_var_substitution_info (si);
move_complex_constraints (graph);
unite_pointer_equivalences (graph);
find_indirect_cycles (graph);
}
};
-/* Initialize the heapvar for statement mapping. */
-void
-init_alias_heapvars (void)
-{
- if (!heapvar_for_stmt)
- heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
- NULL);
-}
-
-void
-delete_alias_heapvars (void)
-{
- htab_delete (heapvar_for_stmt);
- heapvar_for_stmt = NULL;
-}
#include "gt-tree-ssa-structalias.h"