/* Tree based points-to analysis
- Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
+ Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
+ Free Software Foundation, Inc.
Contributed by Daniel Berlin <dberlin@dberlin.org>
This file is part of GCC.
#include "obstack.h"
#include "bitmap.h"
#include "flags.h"
-#include "rtl.h"
-#include "tm_p.h"
-#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "tree.h"
-#include "c-common.h"
#include "tree-flow.h"
#include "tree-inline.h"
-#include "varray.h"
-#include "c-tree.h"
-#include "diagnostic.h"
+#include "diagnostic-core.h"
#include "toplev.h"
#include "gimple.h"
#include "hashtab.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "params.h"
-#include "tree-ssa-structalias.h"
#include "cgraph.h"
#include "alias.h"
#include "pointer-set.h"
TODO: We could handle unions, but to be honest, it's probably not
worth the pain or slowdown. */
-static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
+/* IPA-PTA optimizations possible.
+
+ When the indirect function called is ANYTHING we can add disambiguation
+ based on the function signatures (or simply the parameter count which
+ is the varinfo size). We also do not need to consider functions that
+ do not have their address taken.
+
+ The is_global_var bit which marks escape points is overly conservative
+ in IPA mode. Split it to is_escape_point and is_global_var - only
+ externally visible globals are escape points in IPA mode. This is
+ also needed to fix the pt_solution_includes_global predicate
+ (and thus ptr_deref_may_alias_global_p).
+
+ The way we introduce DECL_PT_UID to avoid fixing up all points-to
+ sets in the translation unit when we copy a DECL during inlining
+ pessimizes precision. The advantage is that the DECL_PT_UID keeps
+ compile-time and memory usage overhead low - the points-to sets
+ do not grow or get unshared as they would during a fixup phase.
+ An alternative solution is to delay IPA PTA until after all
+ inlining transformations have been applied.
+
+ The way we propagate clobber/use information isn't optimized.
+ It should use a new complex constraint that properly filters
+ out local variables of the callee (though that would make
+ the sets invalid after inlining). OTOH we might as well
+ admit defeat to WHOPR and simply do all the clobber/use analysis
+ and propagation after PTA finished but before we threw away
+ points-to information for memory variables. WHOPR and PTA
+ do not play along well anyway - the whole constraint solving
+ would need to be done in WPA phase and it will be very interesting
+ to apply the results to local SSA names during LTRANS phase.
+
+ We probably should compute a per-function unit-ESCAPE solution
+ propagating it simply like the clobber / uses solutions. The
+   solution can go alongside the non-IPA escaped solution and be
+ used to query which vars escape the unit through a function.
+
+ We never put function decls in points-to sets so we do not
+ keep the set of called functions for indirect calls.
+
+ And probably more. */
+static GTY ((if_marked ("tree_map_marked_p"), param_is (struct heapvar_map)))
htab_t heapvar_for_stmt;
static bool use_field_sensitive = true;
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
+struct constraint;
+typedef struct constraint *constraint_t;
+
DEF_VEC_P(constraint_t);
DEF_VEC_ALLOC_P(constraint_t,heap);
/* True if this is a variable created by the constraint analysis, such as
heap variables and constraints we had to break up. */
- unsigned int is_artificial_var:1;
+ unsigned int is_artificial_var : 1;
/* True if this is a special variable whose solution set should not be
changed. */
- unsigned int is_special_var:1;
+ unsigned int is_special_var : 1;
/* True for variables whose size is not known or variable. */
- unsigned int is_unknown_size_var:1;
+ unsigned int is_unknown_size_var : 1;
/* True for (sub-)fields that represent a whole variable. */
unsigned int is_full_var : 1;
/* True if this is a heap variable. */
- unsigned int is_heap_var:1;
+ unsigned int is_heap_var : 1;
- /* True if we may not use TBAA to prune references to this
- variable. This is used for C++ placement new. */
- unsigned int no_tbaa_pruning : 1;
+ /* True if this is a variable tracking a restrict pointer source. */
+ unsigned int is_restrict_var : 1;
/* True if this field may contain pointers. */
unsigned int may_have_pointers : 1;
- /* Variable id this was collapsed to due to type unsafety. Zero if
- this variable was not collapsed. This should be unused completely
- after build_succ_graph, or something is broken. */
- unsigned int collapsed_to;
+ /* True if this field has only restrict qualified pointers. */
+ unsigned int only_restrict_pointers : 1;
+
+ /* True if this represents a global variable. */
+ unsigned int is_global_var : 1;
+
+  /* True if this represents an IPA function info. */
+ unsigned int is_fn_info : 1;
/* A link to the variable for the next field in this structure. */
struct variable_info *next;
typedef struct variable_info *varinfo_t;
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
+static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
+ unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
/* Pool of variable info structures. */
return VEC_index (varinfo_t, varmap, n);
}
-/* Return the varmap element N, following the collapsed_to link. */
-
-static inline varinfo_t
-get_varinfo_fc (unsigned int n)
-{
- varinfo_t v = VEC_index (varinfo_t, varmap, n);
-
- if (v->collapsed_to != 0)
- return get_varinfo (v->collapsed_to);
- return v;
-}
-
/* Static IDs for the special variables. */
enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
- escaped_id = 3, nonlocal_id = 4, callused_id = 5,
- storedanything_id = 6, integer_id = 7 };
-
-/* Variable that represents the unknown pointer. */
-static varinfo_t var_anything;
-static tree anything_tree;
-
-/* Variable that represents the NULL pointer. */
-static varinfo_t var_nothing;
-static tree nothing_tree;
-
-/* Variable that represents read only memory. */
-static varinfo_t var_readonly;
-static tree readonly_tree;
+ escaped_id = 3, nonlocal_id = 4,
+ storedanything_id = 5, integer_id = 6 };
-/* Variable that represents escaped memory. */
-static varinfo_t var_escaped;
-static tree escaped_tree;
-
-/* Variable that represents nonlocal memory. */
-static varinfo_t var_nonlocal;
-static tree nonlocal_tree;
-
-/* Variable that represents call-used memory. */
-static varinfo_t var_callused;
-static tree callused_tree;
+struct GTY(()) heapvar_map {
+ struct tree_map map;
+ unsigned HOST_WIDE_INT offset;
+};
-/* Variable that represents variables that are stored to anything. */
-static varinfo_t var_storedanything;
-static tree storedanything_tree;
+/* hashtab equality function for struct heapvar_map entries P1 and P2:
+   two entries are equal when they map from the same tree at the same
+   offset.  Must agree with heapvar_map_hash below.  */
+static int
+heapvar_map_eq (const void *p1, const void *p2)
+{
+  const struct heapvar_map *h1 = (const struct heapvar_map *)p1;
+  const struct heapvar_map *h2 = (const struct heapvar_map *)p2;
+  return (h1->map.base.from == h2->map.base.from
+	  && h1->offset == h2->offset);
+}
-/* Variable that represents integers. This is used for when people do things
- like &0->a.b. */
-static varinfo_t var_integer;
-static tree integer_tree;
+/* hashtab hash function for a heapvar_map H: combine the offset with
+   the pointer hash of the tree the entry maps from.  */
+static unsigned int
+heapvar_map_hash (struct heapvar_map *h)
+{
+  return iterative_hash_host_wide_int (h->offset,
+				       htab_hash_pointer (h->map.base.from));
+}
/* Lookup a heap var for FROM, and return it if we find one. */
static tree
-heapvar_lookup (tree from)
+heapvar_lookup (tree from, unsigned HOST_WIDE_INT offset)
{
- struct tree_map *h, in;
- in.base.from = from;
-
- h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
- htab_hash_pointer (from));
+ struct heapvar_map *h, in;
+ in.map.base.from = from;
+ in.offset = offset;
+ h = (struct heapvar_map *) htab_find_with_hash (heapvar_for_stmt, &in,
+ heapvar_map_hash (&in));
if (h)
- return h->to;
+ return h->map.to;
return NULL_TREE;
}
hashtable. */
static void
-heapvar_insert (tree from, tree to)
+heapvar_insert (tree from, unsigned HOST_WIDE_INT offset, tree to)
{
- struct tree_map *h;
+ struct heapvar_map *h;
void **loc;
- h = GGC_NEW (struct tree_map);
- h->hash = htab_hash_pointer (from);
- h->base.from = from;
- h->to = to;
- loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
- *(struct tree_map **) loc = h;
+ h = ggc_alloc_heapvar_map ();
+ h->map.base.from = from;
+ h->offset = offset;
+ h->map.hash = heapvar_map_hash (h);
+ h->map.to = to;
+ loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->map.hash, INSERT);
+ gcc_assert (*loc == NULL);
+ *(struct heapvar_map **) loc = h;
}
/* Return a new variable info structure consisting for a variable
- named NAME, and using constraint graph node NODE. */
+ named NAME, and using constraint graph node NODE. Append it
+ to the vector of variable info structures. */
static varinfo_t
-new_var_info (tree t, unsigned int id, const char *name)
+new_var_info (tree t, const char *name)
{
+ unsigned index = VEC_length (varinfo_t, varmap);
varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
- tree var;
- ret->id = id;
+ ret->id = index;
ret->name = name;
ret->decl = t;
- ret->is_artificial_var = false;
- ret->is_heap_var = false;
+ /* Vars without decl are artificial and do not have sub-variables. */
+ ret->is_artificial_var = (t == NULL_TREE);
ret->is_special_var = false;
ret->is_unknown_size_var = false;
- ret->is_full_var = false;
+ ret->is_full_var = (t == NULL_TREE);
+ ret->is_heap_var = false;
+ ret->is_restrict_var = false;
ret->may_have_pointers = true;
- var = t;
- if (TREE_CODE (var) == SSA_NAME)
- var = SSA_NAME_VAR (var);
- ret->no_tbaa_pruning = (DECL_P (var)
- && POINTER_TYPE_P (TREE_TYPE (var))
- && DECL_NO_TBAA_P (var));
+ ret->only_restrict_pointers = false;
+ ret->is_global_var = (t == NULL_TREE);
+ ret->is_fn_info = false;
+ if (t && DECL_P (t))
+ ret->is_global_var = is_global_var (t);
ret->solution = BITMAP_ALLOC (&pta_obstack);
ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
ret->next = NULL;
- ret->collapsed_to = 0;
+
+ stats.total_vars++;
+
+ VEC_safe_push (varinfo_t, heap, varmap, ret);
+
return ret;
}
+
+/* A map mapping call statements to per-stmt variables for uses
+   and clobbers specific to the call.  */
+struct pointer_map_t *call_stmt_vars;
+
+/* Lookup or create the variable for the call statement CALL.
+   Creates a pair of varinfos: the uses variable (returned, at
+   offset 0) with the clobbers variable chained after it via ->next
+   (at offset 1), so lookup_call_clobber_vi can reach the latter
+   from the former.  */
+
+static varinfo_t
+get_call_vi (gimple call)
+{
+  void **slot_p;
+  varinfo_t vi, vi2;
+
+  /* pointer_map_insert creates the slot if CALL was not seen yet.  */
+  slot_p = pointer_map_insert (call_stmt_vars, call);
+  if (*slot_p)
+    return (varinfo_t) *slot_p;
+
+  /* The uses variable occupies the first unit of the pair.  */
+  vi = new_var_info (NULL_TREE, "CALLUSED");
+  vi->offset = 0;
+  vi->size = 1;
+  vi->fullsize = 2;
+  vi->is_full_var = true;
+
+  /* The clobbers variable occupies the second unit and is linked
+     as the "next field" of the uses variable.  */
+  vi->next = vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
+  vi2->offset = 1;
+  vi2->size = 1;
+  vi2->fullsize = 2;
+  vi2->is_full_var = true;
+
+  /* Cache the uses variable; the clobbers one is found via ->next.  */
+  *slot_p = (void *) vi;
+  return vi;
+}
+
+/* Lookup the variable for the call statement CALL representing
+   the uses.  Returns NULL if there is nothing special about this call,
+   i.e. if get_call_vi was never invoked for it.  */
+
+static varinfo_t
+lookup_call_use_vi (gimple call)
+{
+  void **slot_p;
+
+  /* Pure lookup - unlike pointer_map_insert this does not create
+     a slot for an unseen CALL.  */
+  slot_p = pointer_map_contains (call_stmt_vars, call);
+  if (slot_p)
+    return (varinfo_t) *slot_p;
+
+  return NULL;
+}
+
+/* Lookup the variable for the call statement CALL representing
+   the clobbers.  Returns NULL if there is nothing special about this call.
+   The clobbers variable is chained directly after the uses variable
+   (see get_call_vi), hence the ->next access.  */
+
+static varinfo_t
+lookup_call_clobber_vi (gimple call)
+{
+  varinfo_t uses = lookup_call_use_vi (call);
+  if (!uses)
+    return NULL;
+
+  return uses->next;
+}
+
+/* Lookup or create the variable for the call statement CALL representing
+   the uses.  The uses variable is the head of the pair created by
+   get_call_vi.  */
+
+static varinfo_t
+get_call_use_vi (gimple call)
+{
+  return get_call_vi (call);
+}
+
+/* Lookup or create the variable for the call statement CALL representing
+   the clobbers, which get_call_vi chains after the uses variable.
+   ATTRIBUTE_UNUSED as not all configurations reference it yet.  */
+
+static varinfo_t ATTRIBUTE_UNUSED
+get_call_clobber_vi (gimple call)
+{
+  return get_call_vi (call)->next;
+}
+
+
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
/* An expression that appears in a constraint. */
IOW, in a deref constraint, we would deref, get the result set,
then add OFFSET to each member. */
- unsigned HOST_WIDE_INT offset;
+ HOST_WIDE_INT offset;
};
+/* Use 0x8000... as special unknown offset. */
+#define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
+
typedef struct constraint_expr ce_s;
DEF_VEC_O(ce_s);
DEF_VEC_ALLOC_O(ce_s, heap);
static VEC(constraint_t,heap) *constraints;
static alloc_pool constraint_pool;
-
-DEF_VEC_I(int);
-DEF_VEC_ALLOC_I(int, heap);
-
/* The constraint graph is represented as an array of bitmaps
containing successor nodes. */
/* Print out constraint C to FILE. */
-void
+static void
dump_constraint (FILE *file, constraint_t c)
{
if (c->lhs.type == ADDRESSOF)
fprintf (file, "&");
else if (c->lhs.type == DEREF)
fprintf (file, "*");
- fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
- if (c->lhs.offset != 0)
+ fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
+ if (c->lhs.offset == UNKNOWN_OFFSET)
+ fprintf (file, " + UNKNOWN");
+ else if (c->lhs.offset != 0)
fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
fprintf (file, " = ");
if (c->rhs.type == ADDRESSOF)
fprintf (file, "&");
else if (c->rhs.type == DEREF)
fprintf (file, "*");
- fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
- if (c->rhs.offset != 0)
+ fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
+ if (c->rhs.offset == UNKNOWN_OFFSET)
+ fprintf (file, " + UNKNOWN");
+ else if (c->rhs.offset != 0)
fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
fprintf (file, "\n");
}
+
+void debug_constraint (constraint_t);
+void debug_constraints (void);
+void debug_constraint_graph (void);
+void debug_solution_for_var (unsigned int);
+void debug_sa_points_to_info (void);
+
/* Print out constraint C to stderr. */
-void
+DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
dump_constraint (stderr, c);
/* Print out all constraints to FILE */
-void
-dump_constraints (FILE *file)
+static void
+dump_constraints (FILE *file, int from)
{
int i;
constraint_t c;
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ for (i = from; VEC_iterate (constraint_t, constraints, i, c); i++)
dump_constraint (file, c);
}
/* Print out all constraints to stderr. */
-void
+DEBUG_FUNCTION void
debug_constraints (void)
{
- dump_constraints (stderr);
+ dump_constraints (stderr, 0);
}
/* Print out to FILE the edge in the constraint graph that is created by
complex with an offset, e.g: a = b + 8, then the label is "+".
Otherwise the edge has no label. */
-void
+static void
dump_constraint_edge (FILE *file, constraint_t c)
{
if (c->rhs.type != ADDRESSOF)
{
- const char *src = get_varinfo_fc (c->rhs.var)->name;
- const char *dst = get_varinfo_fc (c->lhs.var)->name;
+ const char *src = get_varinfo (c->rhs.var)->name;
+ const char *dst = get_varinfo (c->lhs.var)->name;
fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
/* Due to preprocessing of constraints, instructions like *a = *b are
illegal; thus, we do not have to handle such cases. */
/* Print the constraint graph in dot format. */
-void
+static void
dump_constraint_graph (FILE *file)
{
unsigned int i=0, size;
/* Print the constraints used to produce the constraint graph. The
constraints will be printed as comments in the dot file: */
fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
- dump_constraints (file);
+ dump_constraints (file, 0);
fprintf (file, "*/\n");
/* Prints the header of the dot file: */
size = size < graph->size ? size : graph->size;
for (i = 0; i < size; i++)
{
- const char *name = get_varinfo_fc (graph->rep[i])->name;
+ const char *name = get_varinfo (graph->rep[i])->name;
fprintf (file, " \"%s\" ;\n", name);
}
/* Print out the constraint graph to stderr. */
-void
+DEBUG_FUNCTION void
debug_constraint_graph (void)
{
dump_constraint_graph (stderr);
}
}
+/* Expands the solution in SET to all sub-fields of variables included.
+   Union the expanded result into RESULT.  RESULT and SET may be the
+   same bitmap.  */
+
+static void
+solution_set_expand (bitmap result, bitmap set)
+{
+  bitmap_iterator bi;
+  bitmap vars = NULL;
+  unsigned j;
+
+  /* In a first pass record all variables we need to add all
+     sub-fields of.  This avoids quadratic behavior.  */
+  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
+    {
+      varinfo_t v = get_varinfo (j);
+      /* Artificial and single-field variables have no sub-fields
+	 to expand to.  */
+      if (v->is_artificial_var
+	  || v->is_full_var)
+	continue;
+      /* Record the head variable of the field chain, via its decl.  */
+      v = lookup_vi_for_tree (v->decl);
+      if (vars == NULL)
+	vars = BITMAP_ALLOC (NULL);
+      bitmap_set_bit (vars, v->id);
+    }
+
+  /* In the second pass union all fields of each recorded variable,
+     walking its ->next chain, into RESULT.  */
+  if (vars != NULL)
+    {
+      EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
+	{
+	  varinfo_t v = get_varinfo (j);
+	  for (; v != NULL; v = v->next)
+	    bitmap_set_bit (result, v->id);
+	}
+      BITMAP_FREE (vars);
+    }
+}
+
/* Take a solution set SET, add OFFSET to each member of the set, and
overwrite SET with the result when done. */
static void
-solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
+solution_set_add (bitmap set, HOST_WIDE_INT offset)
{
bitmap result = BITMAP_ALLOC (&iteration_obstack);
unsigned int i;
bitmap_iterator bi;
+ /* If the offset is unknown we have to expand the solution to
+ all subfields. */
+ if (offset == UNKNOWN_OFFSET)
+ {
+ solution_set_expand (set, set);
+ return;
+ }
+
EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
{
varinfo_t vi = get_varinfo (i);
else
{
unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
- varinfo_t v = first_vi_for_offset (vi, fieldoffset);
- /* If the result is outside of the variable use the last field. */
- if (!v)
- {
- v = vi;
- while (v->next != NULL)
- v = v->next;
- }
- bitmap_set_bit (result, v->id);
+
+ /* If the offset makes the pointer point to before the
+ variable use offset zero for the field lookup. */
+ if (offset < 0
+ && fieldoffset > vi->offset)
+ fieldoffset = 0;
+
+ if (offset != 0)
+ vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
+
+ bitmap_set_bit (result, vi->id);
/* If the result is not exactly at fieldoffset include the next
field as well. See get_constraint_for_ptr_offset for more
rationale. */
- if (v->offset != fieldoffset
- && v->next != NULL)
- bitmap_set_bit (result, v->next->id);
+ if (vi->offset != fieldoffset
+ && vi->next != NULL)
+ bitmap_set_bit (result, vi->next->id);
}
}
process. */
static bool
-set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
+set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
{
if (inc == 0)
return bitmap_ior_into (to, from);
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
- unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
- unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
+ unsigned int lhsvar = lhs.var;
+ unsigned int rhsvar = rhs.var;
if (lhs.type == DEREF)
{
/* All related variables are no longer direct nodes. */
RESET_BIT (graph->direct_nodes, rhsvar);
- v = get_varinfo (rhsvar);
- if (!v->is_full_var)
- {
- v = lookup_vi_for_tree (v->decl);
- do
- {
- RESET_BIT (graph->direct_nodes, v->id);
- v = v->next;
- }
- while (v != NULL);
- }
+ v = get_varinfo (rhsvar);
+ if (!v->is_full_var)
+ {
+ v = lookup_vi_for_tree (v->decl);
+ do
+ {
+ RESET_BIT (graph->direct_nodes, v->id);
+ v = v->next;
+ }
+ while (v != NULL);
+ }
bitmap_set_bit (graph->address_taken, rhsvar);
}
else if (lhsvar > anything_id
lhs = c->lhs;
rhs = c->rhs;
- lhsvar = find (get_varinfo_fc (lhs.var)->id);
- rhsvar = find (get_varinfo_fc (rhs.var)->id);
+ lhsvar = find (lhs.var);
+ rhsvar = find (rhs.var);
if (lhs.type == DEREF)
{
else if (rhs.type == ADDRESSOF)
{
/* x = &y */
- gcc_assert (find (get_varinfo_fc (rhs.var)->id)
- == get_varinfo_fc (rhs.var)->id);
+ gcc_assert (find (rhs.var) == rhs.var);
bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
}
else if (lhsvar > anything_id
}
}
- /* Add edges from STOREDANYTHING to all non-direct nodes. */
+ /* Add edges from STOREDANYTHING to all non-direct nodes that can
+ receive pointers. */
t = find (storedanything_id);
for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
{
- if (!TEST_BIT (graph->direct_nodes, i))
+ if (!TEST_BIT (graph->direct_nodes, i)
+ && get_varinfo (i)->may_have_pointers)
add_graph_edge (graph, find (i), t);
}
+
+ /* Everything stored to ANYTHING also potentially escapes. */
+ add_graph_edge (graph, find (escaped_id), t);
}
static unsigned int changed_count;
static sbitmap changed;
-DEF_VEC_I(unsigned);
-DEF_VEC_ALLOC_I(unsigned,heap);
-
-
/* Strongly Connected Component visitation info. */
struct scc_info
&& si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
{
bitmap scc = BITMAP_ALLOC (NULL);
- bool have_ref_node = n >= FIRST_REF_NODE;
unsigned int lowest_node;
bitmap_iterator bi;
unsigned int w = VEC_pop (unsigned, si->scc_stack);
bitmap_set_bit (scc, w);
- if (w >= FIRST_REF_NODE)
- have_ref_node = true;
}
lowest_node = bitmap_first_set_bit (scc);
merge_graph_nodes (graph, to, from);
merge_node_constraints (graph, to, from);
- if (get_varinfo (from)->no_tbaa_pruning)
- get_varinfo (to)->no_tbaa_pruning = true;
-
/* Mark TO as changed if FROM was changed. If TO was already marked
as changed, decrease the changed count. */
changed_count++;
}
}
-
+
BITMAP_FREE (get_varinfo (from)->solution);
BITMAP_FREE (get_varinfo (from)->oldsolution);
-
+
if (stats.iterations > 0)
{
BITMAP_FREE (get_varinfo (to)->oldsolution);
VEC_safe_push (unsigned, heap, ti->topo_order, n);
}
-/* Return true if variable N + OFFSET is a legal field of N. */
-
-static bool
-type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
-{
- varinfo_t ninfo = get_varinfo (n);
-
- /* For things we've globbed to single variables, any offset into the
- variable acts like the entire variable, so that it becomes offset
- 0. */
- if (ninfo->is_special_var
- || ninfo->is_artificial_var
- || ninfo->is_unknown_size_var
- || ninfo->is_full_var)
- {
- *offset = 0;
- return true;
- }
- return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
-}
-
-/* Process a constraint C that represents x = *y, using DELTA as the
- starting solution. */
+/* Process a constraint C that represents x = *(y + off), using DELTA as the
+ starting solution for y. */
static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
bitmap sol = get_varinfo (lhs)->solution;
unsigned int j;
bitmap_iterator bi;
+ HOST_WIDE_INT roffset = c->rhs.offset;
- /* For x = *ESCAPED and x = *CALLUSED we want to compute the
- reachability set of the rhs var. As a pointer to a sub-field
- of a variable can also reach all other fields of the variable
- we simply have to expand the solution to contain all sub-fields
- if one sub-field is contained. */
- if (c->rhs.var == escaped_id
- || c->rhs.var == callused_id)
- {
- bitmap vars = NULL;
- /* In a first pass record all variables we need to add all
- sub-fields off. This avoids quadratic behavior. */
- EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
- {
- varinfo_t v = get_varinfo (j);
- if (v->is_full_var)
- continue;
-
- v = lookup_vi_for_tree (v->decl);
- if (v->next != NULL)
- {
- if (vars == NULL)
- vars = BITMAP_ALLOC (NULL);
- bitmap_set_bit (vars, v->id);
- }
- }
- /* In the second pass now do the addition to the solution and
- to speed up solving add it to the delta as well. */
- if (vars != NULL)
- {
- EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
- {
- varinfo_t v = get_varinfo (j);
- for (; v != NULL; v = v->next)
- {
- if (bitmap_set_bit (sol, v->id))
- {
- flag = true;
- bitmap_set_bit (delta, v->id);
- }
- }
- }
- BITMAP_FREE (vars);
- }
- }
+ /* Our IL does not allow this. */
+ gcc_assert (c->lhs.offset == 0);
+ /* If the solution of Y contains anything it is good enough to transfer
+ this to the LHS. */
if (bitmap_bit_p (delta, anything_id))
{
flag |= bitmap_set_bit (sol, anything_id);
goto done;
}
+  /* If we do not know at which offset the rhs is dereferenced, compute
+ the reachability set of DELTA, conservatively assuming it is
+ dereferenced at all valid offsets. */
+ if (roffset == UNKNOWN_OFFSET)
+ {
+ solution_set_expand (delta, delta);
+ /* No further offset processing is necessary. */
+ roffset = 0;
+ }
+
/* For each variable j in delta (Sol(y)), add
an edge in the graph from j to x, and union Sol(j) into Sol(x). */
EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
{
- unsigned HOST_WIDE_INT roffset = c->rhs.offset;
- if (type_safe (j, &roffset))
- {
- varinfo_t v;
- unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
- unsigned int t;
+ varinfo_t v = get_varinfo (j);
+ HOST_WIDE_INT fieldoffset = v->offset + roffset;
+ unsigned int t;
+
+ if (v->is_full_var)
+ fieldoffset = v->offset;
+ else if (roffset != 0)
+ v = first_vi_for_offset (v, fieldoffset);
+ /* If the access is outside of the variable we can ignore it. */
+ if (!v)
+ continue;
- v = first_vi_for_offset (get_varinfo (j), fieldoffset);
- /* If the access is outside of the variable we can ignore it. */
- if (!v)
- continue;
+ do
+ {
t = find (v->id);
/* Adding edges from the special vars is pointless.
if (get_varinfo (t)->is_special_var)
flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
/* Merging the solution from ESCAPED needlessly increases
- the set. Use ESCAPED as representative instead.
- Same for CALLUSED. */
- else if (get_varinfo (t)->id == escaped_id
- || get_varinfo (t)->id == callused_id)
- flag |= bitmap_set_bit (sol, get_varinfo (t)->id);
- else if (add_graph_edge (graph, lhs, t))
+ the set. Use ESCAPED as representative instead. */
+ else if (v->id == escaped_id)
+ flag |= bitmap_set_bit (sol, escaped_id);
+ else if (v->may_have_pointers
+ && add_graph_edge (graph, lhs, t))
flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
+
+ /* If the variable is not exactly at the requested offset
+ we have to include the next one. */
+ if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
+ || v->next == NULL)
+ break;
+
+ v = v->next;
+ fieldoffset = v->offset;
}
+ while (1);
}
done:
}
}
-/* Process a constraint C that represents *x = y. */
+/* Process a constraint C that represents *(x + off) = y using DELTA
+ as the starting solution for x. */
static void
do_ds_constraint (constraint_t c, bitmap delta)
bitmap sol = get_varinfo (rhs)->solution;
unsigned int j;
bitmap_iterator bi;
+ HOST_WIDE_INT loff = c->lhs.offset;
+ bool escaped_p = false;
/* Our IL does not allow this. */
gcc_assert (c->rhs.offset == 0);
return;
}
+  /* If we do not know at which offset the rhs is dereferenced, compute
+ the reachability set of DELTA, conservatively assuming it is
+ dereferenced at all valid offsets. */
+ if (loff == UNKNOWN_OFFSET)
+ {
+ solution_set_expand (delta, delta);
+ loff = 0;
+ }
+
/* For each member j of delta (Sol(x)), add an edge from y to j and
union Sol(y) into Sol(j) */
EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
{
- unsigned HOST_WIDE_INT loff = c->lhs.offset;
- if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
- {
- varinfo_t v;
- unsigned int t;
- unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
-
- v = first_vi_for_offset (get_varinfo (j), fieldoffset);
- /* If the access is outside of the variable we can ignore it. */
- if (!v)
- continue;
+ varinfo_t v = get_varinfo (j);
+ unsigned int t;
+ HOST_WIDE_INT fieldoffset = v->offset + loff;
+
+ if (v->is_full_var)
+ fieldoffset = v->offset;
+ else if (loff != 0)
+ v = first_vi_for_offset (v, fieldoffset);
+ /* If the access is outside of the variable we can ignore it. */
+ if (!v)
+ continue;
+ do
+ {
if (v->may_have_pointers)
{
- t = find (v->id);
- if (add_graph_edge (graph, t, rhs))
+ /* If v is a global variable then this is an escape point. */
+ if (v->is_global_var
+ && !escaped_p)
{
- if (bitmap_ior_into (get_varinfo (t)->solution, sol))
+ t = find (escaped_id);
+ if (add_graph_edge (graph, t, rhs)
+ && bitmap_ior_into (get_varinfo (t)->solution, sol)
+ && !TEST_BIT (changed, t))
{
- if (t == rhs)
- sol = get_varinfo (rhs)->solution;
- if (!TEST_BIT (changed, t))
- {
- SET_BIT (changed, t);
- changed_count++;
- }
+ SET_BIT (changed, t);
+ changed_count++;
}
+ /* Enough to let rhs escape once. */
+ escaped_p = true;
+ }
+
+ if (v->is_special_var)
+ break;
+
+ t = find (v->id);
+ if (add_graph_edge (graph, t, rhs)
+ && bitmap_ior_into (get_varinfo (t)->solution, sol)
+ && !TEST_BIT (changed, t))
+ {
+ SET_BIT (changed, t);
+ changed_count++;
}
}
+
+ /* If the variable is not exactly at the requested offset
+ we have to include the next one. */
+ if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
+ || v->next == NULL)
+ break;
+
+ v = v->next;
+ fieldoffset = v->offset;
}
+ while (1);
}
}
typedef struct equiv_class_label
{
+ hashval_t hashcode;
unsigned int equivalence_class;
bitmap labels;
- hashval_t hashcode;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
{
const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
- return bitmap_equal_p (eql1->labels, eql2->labels);
+ return (eql1->hashcode == eql2->hashcode
+ && bitmap_equal_p (eql1->labels, eql2->labels));
}
/* Lookup a equivalence class in TABLE by the bitmap of LABELS it
if (label)
{
int label_rep = graph->pe_rep[label];
-
+
if (label_rep == -1)
continue;
-
+
label_rep = find (label_rep);
if (label_rep >= 0 && unite (label_rep, find (i)))
unify_nodes (graph, label_rep, i, false);
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
- unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
- unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
+ unsigned int lhsvar = find (lhs.var);
+ unsigned int rhsvar = find (rhs.var);
unsigned int lhsnode, rhsnode;
unsigned int lhslabel, rhslabel;
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
-
+
fprintf (dump_file, "%s is a non-pointer variable,"
"ignoring constraint:",
get_varinfo (lhs.var)->name);
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
-
+
fprintf (dump_file, "%s is a non-pointer variable,"
"ignoring constraint:",
get_varinfo (rhs.var)->name);
solution_empty = bitmap_empty_p (solution);
- if (!solution_empty
- /* Do not propagate the ESCAPED/CALLUSED solutions. */
- && i != escaped_id
- && i != callused_id)
+ if (!solution_empty)
{
bitmap_iterator bi;
+ unsigned eff_escaped_id = find (escaped_id);
/* Propagate solution to all successors. */
EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
if (to == i)
continue;
- flag = set_union_with_increment (tmp, pts, 0);
+ /* If we propagate from ESCAPED use ESCAPED as
+ placeholder. */
+ if (i == eff_escaped_id)
+ flag = bitmap_set_bit (tmp, escaped_id);
+ else
+ flag = set_union_with_increment (tmp, pts, 0);
if (flag)
{
static const char *
alias_get_name (tree decl)
{
- const char *res = get_name (decl);
+ const char *res;
char *temp;
int num_printed = 0;
+ if (DECL_ASSEMBLER_NAME_SET_P (decl))
+ res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
+ else
+ res= get_name (decl);
if (res != NULL)
return res;
return (varinfo_t) *slot;
}
-/* Get a constraint expression for a new temporary variable. */
+/* Get a scalar constraint expression for a new temporary variable. */
static struct constraint_expr
-get_constraint_exp_for_temp (tree t)
+new_scalar_tmp_constraint_exp (const char *name)
{
- struct constraint_expr cexpr;
+ struct constraint_expr tmp;
+ varinfo_t vi;
- gcc_assert (SSA_VAR_P (t));
+ vi = new_var_info (NULL_TREE, name);
+ vi->offset = 0;
+ vi->size = -1;
+ vi->fullsize = -1;
+ vi->is_full_var = 1;
- cexpr.type = SCALAR;
- cexpr.var = get_vi_for_tree (t)->id;
- cexpr.offset = 0;
+ tmp.var = vi->id;
+ tmp.type = SCALAR;
+ tmp.offset = 0;
- return cexpr;
+ return tmp;
}
/* Get a constraint expression vector from an SSA_VAR_P node.
/* For parameters, get at the points-to set for the actual parm
decl. */
if (TREE_CODE (t) == SSA_NAME
- && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
+ && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
&& SSA_NAME_IS_DEFAULT_DEF (t))
{
get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
/* If we are not taking the address of the constraint expr, add all
sub-fiels of the variable as well. */
- if (!address_p)
+ if (!address_p
+ && !vi->is_full_var)
{
for (; vi; vi = vi->next)
{
gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
- /* ANYTHING == ANYTHING is pointless. */
- if (lhs.var == anything_id && rhs.var == anything_id)
+ /* If we didn't get any useful constraint from the lhs we get
+ &ANYTHING as fallback from get_constraint_for. Deal with
+ it here by turning it into *ANYTHING. */
+ if (lhs.type == ADDRESSOF
+ && lhs.var == anything_id)
+ lhs.type = DEREF;
+
+ /* ADDRESSOF on the lhs is invalid. */
+ gcc_assert (lhs.type != ADDRESSOF);
+
+ /* We shouldn't add constraints from things that cannot have pointers.
+ It's not completely trivial to avoid in the callers, so do it here. */
+ if (rhs.type != ADDRESSOF
+ && !get_varinfo (rhs.var)->may_have_pointers)
+ return;
+
+ /* Likewise adding to the solution of a non-pointer var isn't useful. */
+ if (!get_varinfo (lhs.var)->may_have_pointers)
return;
- /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
- else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
- {
- rhs = t->lhs;
- t->lhs = t->rhs;
- t->rhs = rhs;
- process_constraint (t);
- }
/* This can happen in our IR with things like n->a = *p */
- else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
+ if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
{
/* Split into tmp = *rhs, *lhs = tmp */
- tree rhsdecl = get_varinfo (rhs.var)->decl;
- tree pointertype = TREE_TYPE (rhsdecl);
- tree pointedtotype = TREE_TYPE (pointertype);
- tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
- struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
-
+ struct constraint_expr tmplhs;
+ tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
process_constraint (new_constraint (tmplhs, rhs));
process_constraint (new_constraint (lhs, tmplhs));
}
else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
{
/* Split into tmp = &rhs, *lhs = tmp */
- tree rhsdecl = get_varinfo (rhs.var)->decl;
- tree pointertype = TREE_TYPE (rhsdecl);
- tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
- struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
-
+ struct constraint_expr tmplhs;
+ tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
process_constraint (new_constraint (tmplhs, rhs));
process_constraint (new_constraint (lhs, tmplhs));
}
if (TREE_CODE (type) == ARRAY_TYPE)
return type_could_have_pointers (TREE_TYPE (type));
+ /* A function or method can consume pointers.
+ ??? We could be more precise here. */
+ if (TREE_CODE (type) == FUNCTION_TYPE
+ || TREE_CODE (type) == METHOD_TYPE)
+ return true;
+
return AGGREGATE_TYPE_P (type);
}
static bool
could_have_pointers (tree t)
{
- return type_could_have_pointers (TREE_TYPE (t));
+ return (((TREE_CODE (t) == VAR_DECL
+ || TREE_CODE (t) == PARM_DECL
+ || TREE_CODE (t) == RESULT_DECL)
+ && (TREE_PUBLIC (t) || DECL_EXTERNAL (t) || TREE_ADDRESSABLE (t)))
+ || type_could_have_pointers (TREE_TYPE (t)));
}
/* Return the position, in bits, of FIELD_DECL from the beginning of its
get_constraint_for_ptr_offset (tree ptr, tree offset,
VEC (ce_s, heap) **results)
{
- struct constraint_expr *c;
+ struct constraint_expr c;
unsigned int j, n;
- unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
+ HOST_WIDE_INT rhsunitoffset, rhsoffset;
/* If we do not do field-sensitive PTA adding offsets to pointers
does not change the points-to solution. */
/* If the offset is not a non-negative integer constant that fits
in a HOST_WIDE_INT, we have to fall back to a conservative
solution which includes all sub-fields of all pointed-to
- variables of ptr.
- ??? As we do not have the ability to express this, fall back
- to anything. */
- if (!host_integerp (offset, 1))
- {
- struct constraint_expr temp;
- temp.var = anything_id;
- temp.type = SCALAR;
- temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &temp);
- return;
- }
-
- /* Make sure the bit-offset also fits. */
- rhsunitoffset = TREE_INT_CST_LOW (offset);
- rhsoffset = rhsunitoffset * BITS_PER_UNIT;
- if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
+ variables of ptr. */
+ if (offset == NULL_TREE
+ || !host_integerp (offset, 0))
+ rhsoffset = UNKNOWN_OFFSET;
+ else
{
- struct constraint_expr temp;
- temp.var = anything_id;
- temp.type = SCALAR;
- temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &temp);
- return;
+ /* Make sure the bit-offset also fits. */
+ rhsunitoffset = TREE_INT_CST_LOW (offset);
+ rhsoffset = rhsunitoffset * BITS_PER_UNIT;
+ if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
+ rhsoffset = UNKNOWN_OFFSET;
}
get_constraint_for (ptr, results);
for (j = 0; j < n; j++)
{
varinfo_t curr;
- c = VEC_index (ce_s, *results, j);
- curr = get_varinfo (c->var);
-
- if (c->type == ADDRESSOF
- && !curr->is_full_var)
+ c = *VEC_index (ce_s, *results, j);
+ curr = get_varinfo (c.var);
+
+ if (c.type == ADDRESSOF
+ /* If this varinfo represents a full variable just use it. */
+ && curr->is_full_var)
+ c.offset = 0;
+ else if (c.type == ADDRESSOF
+ /* If we do not know the offset add all subfields. */
+ && rhsoffset == UNKNOWN_OFFSET)
+ {
+ varinfo_t temp = lookup_vi_for_tree (curr->decl);
+ do
+ {
+ struct constraint_expr c2;
+ c2.var = temp->id;
+ c2.type = ADDRESSOF;
+ c2.offset = 0;
+ if (c2.var != c.var)
+ VEC_safe_push (ce_s, heap, *results, &c2);
+ temp = temp->next;
+ }
+ while (temp);
+ }
+ else if (c.type == ADDRESSOF)
{
- varinfo_t temp, curr = get_varinfo (c->var);
+ varinfo_t temp;
+ unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
/* Search the sub-field which overlaps with the
- pointed-to offset. As we deal with positive offsets
- only, we can start the search from the current variable. */
- temp = first_vi_for_offset (curr, curr->offset + rhsoffset);
-
- /* If the result is outside of the variable we have to provide
- a conservative result, as the variable is still reachable
- from the resulting pointer (even though it technically
- cannot point to anything). The last sub-field is such
- a conservative result.
+ pointed-to offset. If the result is outside of the variable
+ we have to provide a conservative result, as the variable is
+ still reachable from the resulting pointer (even though it
+ technically cannot point to anything). The last and first
+ sub-fields are such conservative results.
??? If we always had a sub-field for &object + 1 then
we could represent this in a more precise way. */
- if (temp == NULL)
- {
- temp = curr;
- while (temp->next != NULL)
- temp = temp->next;
- continue;
- }
+ if (rhsoffset < 0
+ && curr->offset < offset)
+ offset = 0;
+ temp = first_or_preceding_vi_for_offset (curr, offset);
/* If the found variable is not exactly at the pointed to
result, we have to include the next variable in the
solution as well. Otherwise two increments by offset / 2
do not result in the same or a conservative superset
solution. */
- if (temp->offset != curr->offset + rhsoffset
+ if (temp->offset != offset
&& temp->next != NULL)
{
struct constraint_expr c2;
c2.offset = 0;
VEC_safe_push (ce_s, heap, *results, &c2);
}
- c->var = temp->id;
- c->offset = 0;
+ c.var = temp->id;
+ c.offset = 0;
}
- else if (c->type == ADDRESSOF
- /* If this varinfo represents a full variable just use it. */
- && curr->is_full_var)
- c->offset = 0;
else
- c->offset = rhsoffset;
+ c.offset = rhsoffset;
+
+ VEC_replace (ce_s, *results, j, &c);
}
}
/* Some people like to do cute things like take the address of
&0->a.b */
forzero = t;
- while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
+ while (handled_component_p (forzero)
+ || INDIRECT_REF_P (forzero)
+ || TREE_CODE (forzero) == MEM_REF)
forzero = TREE_OPERAND (forzero, 0);
if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
gcc_assert (VEC_length (ce_s, *results) == 1);
result = VEC_last (ce_s, *results);
- /* This can also happen due to weird offsetof type macros. */
- if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
- result->type = SCALAR;
-
if (result->type == SCALAR
&& get_varinfo (result->var)->is_full_var)
/* For single-field vars do not bother about the offset. */
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Access to past the end of variable, ignoring\n");
}
- else if (bitmaxsize == -1)
+ else if (result->type == DEREF)
+ {
+ /* If we do not know exactly where the access goes say so. Note
+ that only for non-structure accesses we know that we access
+ at most one subfield of any variable. */
+ if (bitpos == -1
+ || bitsize != bitmaxsize
+ || AGGREGATE_TYPE_P (TREE_TYPE (orig_t)))
+ result->offset = UNKNOWN_OFFSET;
+ else
+ result->offset = bitpos;
+ }
+ else if (result->type == ADDRESSOF)
{
- /* We can't handle DEREF constraints with unknown size, we'll
- get the wrong answer. Punt and return anything. */
+ /* We can end up here for component references on a
+ VIEW_CONVERT_EXPR <>(&foobar). */
+ result->type = SCALAR;
result->var = anything_id;
result->offset = 0;
}
else
- result->offset = bitpos;
+ gcc_unreachable ();
}
c->type = SCALAR;
else if (c->type == DEREF)
{
- tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
- struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
+ struct constraint_expr tmplhs;
+ tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
process_constraint (new_constraint (tmplhs, *c));
c->var = tmplhs.var;
}
}
}
+static void get_constraint_for_1 (tree, VEC (ce_s, heap) **, bool);
+
+/* Given a tree T, return the constraint expression for taking the
+ address of it.  The constraints for T are computed with address_p
+ set and each result is then adjusted: a DEREF collapses to a
+ SCALAR (&*p is p), anything else becomes an ADDRESSOF. */
+
+static void
+get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
+{
+ struct constraint_expr *c;
+ unsigned int i;
+
+ get_constraint_for_1 (t, results, true);
+
+ for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
+ {
+ if (c->type == DEREF)
+ c->type = SCALAR;
+ else
+ c->type = ADDRESSOF;
+ }
+}
+
/* Given a tree T, return the constraint expression for it. */
static void
in that case *NULL does not fail, so it _should_ alias *anything.
It is not worth adding a new option or renaming the existing one,
since this case is relatively obscure. */
- if (flag_delete_null_pointer_checks
- && TREE_CODE (t) == INTEGER_CST
- && integer_zerop (t))
+ if ((TREE_CODE (t) == INTEGER_CST
+ && integer_zerop (t))
+ /* The only valid CONSTRUCTORs in gimple with pointer typed
+ elements are zero-initializers. But in IPA mode we also
+ process global initializers, so verify at least. */
+ || (TREE_CODE (t) == CONSTRUCTOR
+ && CONSTRUCTOR_NELTS (t) == 0))
{
- temp.var = nothing_id;
+ if (flag_delete_null_pointer_checks)
+ temp.var = nothing_id;
+ else
+ temp.var = anything_id;
temp.type = ADDRESSOF;
temp.offset = 0;
VEC_safe_push (ce_s, heap, *results, &temp);
switch (TREE_CODE (t))
{
case ADDR_EXPR:
- {
- struct constraint_expr *c;
- unsigned int i;
- tree exp = TREE_OPERAND (t, 0);
-
- get_constraint_for_1 (exp, results, true);
-
- for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
- {
- if (c->type == DEREF)
- c->type = SCALAR;
- else
- c->type = ADDRESSOF;
- }
- return;
- }
- break;
+ get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
+ return;
default:;
}
break;
{
switch (TREE_CODE (t))
{
- case INDIRECT_REF:
+ case MEM_REF:
{
- get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
+ get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
+ TREE_OPERAND (t, 1), results);
do_deref (results);
return;
}
case COMPONENT_REF:
get_constraint_for_component_ref (t, results, address_p);
return;
+ case VIEW_CONVERT_EXPR:
+ get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
+ return;
+ /* We are missing handling for TARGET_MEM_REF here. */
default:;
}
break;
get_constraint_for_ssa_var (t, results, address_p);
return;
}
+ case CONSTRUCTOR:
+ {
+ unsigned int i;
+ tree val;
+ VEC (ce_s, heap) *tmp = NULL;
+ FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
+ {
+ struct constraint_expr *rhsp;
+ unsigned j;
+ get_constraint_for_1 (val, &tmp, address_p);
+ for (j = 0; VEC_iterate (ce_s, tmp, j, rhsp); ++j)
+ VEC_safe_push (ce_s, heap, *results, rhsp);
+ VEC_truncate (ce_s, tmp, 0);
+ }
+ VEC_free (ce_s, heap, tmp);
+ /* We do not know whether the constructor was complete,
+ so technically we have to add &NOTHING or &ANYTHING
+ like we do for an empty constructor as well. */
+ return;
+ }
default:;
}
break;
get_constraint_for_1 (t, results, false);
}
-/* Handle the structure copy case where we have a simple structure copy
- between LHS and RHS that is of SIZE (in bits)
-
- For each field of the lhs variable (lhsfield)
- For each field of the rhs variable at lhsfield.offset (rhsfield)
- add the constraint lhsfield = rhsfield
-
- If we fail due to some kind of type unsafety or other thing we
- can't handle, return false. We expect the caller to collapse the
- variable in that case. */
-
-static bool
-do_simple_structure_copy (const struct constraint_expr lhs,
- const struct constraint_expr rhs,
- const unsigned HOST_WIDE_INT size)
-{
- varinfo_t p = get_varinfo (lhs.var);
- unsigned HOST_WIDE_INT pstart, last;
- pstart = p->offset;
- last = p->offset + size;
- for (; p && p->offset < last; p = p->next)
- {
- varinfo_t q;
- struct constraint_expr templhs = lhs;
- struct constraint_expr temprhs = rhs;
- unsigned HOST_WIDE_INT fieldoffset;
-
- templhs.var = p->id;
- q = get_varinfo (temprhs.var);
- fieldoffset = p->offset - pstart;
- q = first_vi_for_offset (q, q->offset + fieldoffset);
- if (!q)
- return false;
- temprhs.var = q->id;
- process_constraint (new_constraint (templhs, temprhs));
- }
- return true;
-}
-
-
-/* Handle the structure copy case where we have a structure copy between a
- aggregate on the LHS and a dereference of a pointer on the RHS
- that is of SIZE (in bits)
-
- For each field of the lhs variable (lhsfield)
- rhs.offset = lhsfield->offset
- add the constraint lhsfield = rhs
-*/
-
-static void
-do_rhs_deref_structure_copy (const struct constraint_expr lhs,
- const struct constraint_expr rhs,
- const unsigned HOST_WIDE_INT size)
-{
- varinfo_t p = get_varinfo (lhs.var);
- unsigned HOST_WIDE_INT pstart,last;
- pstart = p->offset;
- last = p->offset + size;
-
- for (; p && p->offset < last; p = p->next)
- {
- varinfo_t q;
- struct constraint_expr templhs = lhs;
- struct constraint_expr temprhs = rhs;
- unsigned HOST_WIDE_INT fieldoffset;
-
-
- if (templhs.type == SCALAR)
- templhs.var = p->id;
- else
- templhs.offset = p->offset;
-
- q = get_varinfo (temprhs.var);
- fieldoffset = p->offset - pstart;
- temprhs.offset += fieldoffset;
- process_constraint (new_constraint (templhs, temprhs));
- }
-}
-
-/* Handle the structure copy case where we have a structure copy
- between an aggregate on the RHS and a dereference of a pointer on
- the LHS that is of SIZE (in bits)
- For each field of the rhs variable (rhsfield)
- lhs.offset = rhsfield->offset
- add the constraint lhs = rhsfield
-*/
+/* Efficiently generates constraints from all entries in *RHSC to all
+ entries in *LHSC. */
static void
-do_lhs_deref_structure_copy (const struct constraint_expr lhs,
- const struct constraint_expr rhs,
- const unsigned HOST_WIDE_INT size)
+process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc)
{
- varinfo_t p = get_varinfo (rhs.var);
- unsigned HOST_WIDE_INT pstart,last;
- pstart = p->offset;
- last = p->offset + size;
+ struct constraint_expr *lhsp, *rhsp;
+ unsigned i, j;
- for (; p && p->offset < last; p = p->next)
+ if (VEC_length (ce_s, lhsc) <= 1
+ || VEC_length (ce_s, rhsc) <= 1)
{
- varinfo_t q;
- struct constraint_expr templhs = lhs;
- struct constraint_expr temprhs = rhs;
- unsigned HOST_WIDE_INT fieldoffset;
-
-
- if (temprhs.type == SCALAR)
- temprhs.var = p->id;
- else
- temprhs.offset = p->offset;
-
- q = get_varinfo (templhs.var);
- fieldoffset = p->offset - pstart;
- templhs.offset += fieldoffset;
- process_constraint (new_constraint (templhs, temprhs));
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j)
+ process_constraint (new_constraint (*lhsp, *rhsp));
}
-}
-
-/* Sometimes, frontends like to give us bad type information. This
- function will collapse all the fields from VAR to the end of VAR,
- into VAR, so that we treat those fields as a single variable.
- We return the variable they were collapsed into. */
-
-static unsigned int
-collapse_rest_of_var (unsigned int var)
-{
- varinfo_t currvar = get_varinfo (var);
- varinfo_t field;
-
- for (field = currvar->next; field; field = field->next)
+ else
{
- if (dump_file)
- fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
- field->name, currvar->name);
-
- gcc_assert (field->collapsed_to == 0);
- field->collapsed_to = currvar->id;
+ struct constraint_expr tmp;
+ tmp = new_scalar_tmp_constraint_exp ("allalltmp");
+ for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ process_constraint (new_constraint (tmp, *rhsp));
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ process_constraint (new_constraint (*lhsp, tmp));
}
-
- currvar->next = NULL;
- currvar->size = currvar->fullsize - currvar->offset;
-
- return currvar->id;
}
/* Handle aggregate copies by expanding into copies of the respective
static void
do_structure_copy (tree lhsop, tree rhsop)
{
- struct constraint_expr lhs, rhs, tmp;
+ struct constraint_expr *lhsp, *rhsp;
VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
- varinfo_t p;
- unsigned HOST_WIDE_INT lhssize;
- unsigned HOST_WIDE_INT rhssize;
-
- /* Pretend we are taking the address of the constraint exprs.
- We deal with walking the sub-fields ourselves. */
- get_constraint_for_1 (lhsop, &lhsc, true);
- get_constraint_for_1 (rhsop, &rhsc, true);
- gcc_assert (VEC_length (ce_s, lhsc) == 1);
- gcc_assert (VEC_length (ce_s, rhsc) == 1);
- lhs = *(VEC_last (ce_s, lhsc));
- rhs = *(VEC_last (ce_s, rhsc));
-
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
-
- /* If we have special var = x, swap it around. */
- if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
- {
- tmp = lhs;
- lhs = rhs;
- rhs = tmp;
- }
-
- /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
- possible it's something we could handle. However, most cases falling
- into this are dealing with transparent unions, which are slightly
- weird. */
- if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
+ unsigned j;
+
+ get_constraint_for (lhsop, &lhsc);
+ get_constraint_for (rhsop, &rhsc);
+ lhsp = VEC_index (ce_s, lhsc, 0);
+ rhsp = VEC_index (ce_s, rhsc, 0);
+ if (lhsp->type == DEREF
+ || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
+ || rhsp->type == DEREF)
{
- rhs.type = ADDRESSOF;
- rhs.var = anything_id;
- }
-
- /* If the RHS is a special var, or an addressof, set all the LHS fields to
- that special var. */
- if (rhs.var <= integer_id)
- {
- for (p = get_varinfo (lhs.var); p; p = p->next)
+ if (lhsp->type == DEREF)
{
- struct constraint_expr templhs = lhs;
- struct constraint_expr temprhs = rhs;
-
- if (templhs.type == SCALAR )
- templhs.var = p->id;
- else
- templhs.offset += p->offset;
- process_constraint (new_constraint (templhs, temprhs));
+ gcc_assert (VEC_length (ce_s, lhsc) == 1);
+ lhsp->offset = UNKNOWN_OFFSET;
}
- }
- else
- {
- tree rhstype = TREE_TYPE (rhsop);
- tree lhstype = TREE_TYPE (lhsop);
- tree rhstypesize;
- tree lhstypesize;
-
- lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
- rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
-
- /* If we have a variably sized types on the rhs or lhs, and a deref
- constraint, add the constraint, lhsconstraint = &ANYTHING.
- This is conservatively correct because either the lhs is an unknown
- sized var (if the constraint is SCALAR), or the lhs is a DEREF
- constraint, and every variable it can point to must be unknown sized
- anyway, so we don't need to worry about fields at all. */
- if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
- || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
+ if (rhsp->type == DEREF)
{
- rhs.var = anything_id;
- rhs.type = ADDRESSOF;
- rhs.offset = 0;
- process_constraint (new_constraint (lhs, rhs));
- return;
+ gcc_assert (VEC_length (ce_s, rhsc) == 1);
+ rhsp->offset = UNKNOWN_OFFSET;
}
-
- /* The size only really matters insofar as we don't set more or less of
- the variable. If we hit an unknown size var, the size should be the
- whole darn thing. */
- if (get_varinfo (rhs.var)->is_unknown_size_var)
- rhssize = ~0;
- else
- rhssize = TREE_INT_CST_LOW (rhstypesize);
-
- if (get_varinfo (lhs.var)->is_unknown_size_var)
- lhssize = ~0;
- else
- lhssize = TREE_INT_CST_LOW (lhstypesize);
-
-
- if (rhs.type == SCALAR && lhs.type == SCALAR)
+ process_all_all_constraints (lhsc, rhsc);
+ }
+ else if (lhsp->type == SCALAR
+ && (rhsp->type == SCALAR
+ || rhsp->type == ADDRESSOF))
+ {
+ HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
+ HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
+ unsigned k = 0;
+ get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
+ get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
+ for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
{
- if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
+ varinfo_t lhsv, rhsv;
+ rhsp = VEC_index (ce_s, rhsc, k);
+ lhsv = get_varinfo (lhsp->var);
+ rhsv = get_varinfo (rhsp->var);
+ if (lhsv->may_have_pointers
+ && ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
+ rhsv->offset + lhsoffset, rhsv->size))
+ process_constraint (new_constraint (*lhsp, *rhsp));
+ if (lhsv->offset + rhsoffset + lhsv->size
+ > rhsv->offset + lhsoffset + rhsv->size)
{
- lhs.var = collapse_rest_of_var (get_varinfo_fc (lhs.var)->id);
- rhs.var = collapse_rest_of_var (get_varinfo_fc (rhs.var)->id);
- lhs.offset = 0;
- rhs.offset = 0;
- lhs.type = SCALAR;
- rhs.type = SCALAR;
- process_constraint (new_constraint (lhs, rhs));
+ ++k;
+ if (k >= VEC_length (ce_s, rhsc))
+ break;
}
- }
- else if (lhs.type != DEREF && rhs.type == DEREF)
- do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
- else if (lhs.type == DEREF && rhs.type != DEREF)
- do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
- else
- {
- tree pointedtotype = lhstype;
- tree tmpvar;
-
- gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
- tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
- do_structure_copy (tmpvar, rhsop);
- do_structure_copy (lhsop, tmpvar);
+ else
+ ++j;
}
}
+ else
+ gcc_unreachable ();
+
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
}
/* Create a constraint ID = OP. */
VEC_free (ce_s, heap, rhsc);
}
+/* Create a constraint ID = &FROM, i.e. add the variable with id FROM
+ directly to the points-to solution of VI. */
+
+static void
+make_constraint_from (varinfo_t vi, int from)
+{
+ struct constraint_expr lhs, rhs;
+
+ /* LHS is the solution of VI itself.  */
+ lhs.var = vi->id;
+ lhs.offset = 0;
+ lhs.type = SCALAR;
+
+ /* RHS is the address of FROM, so FROM is added as a member.  */
+ rhs.var = from;
+ rhs.offset = 0;
+ rhs.type = ADDRESSOF;
+ process_constraint (new_constraint (lhs, rhs));
+}
+
+/* Create a constraint ID = FROM, i.e. make the points-to solution of
+ VI include everything the variable with id FROM points to. */
+
+static void
+make_copy_constraint (varinfo_t vi, int from)
+{
+ struct constraint_expr lhs, rhs;
+
+ lhs.var = vi->id;
+ lhs.offset = 0;
+ lhs.type = SCALAR;
+
+ /* Unlike make_constraint_from this copies FROM's solution rather
+ than adding FROM itself (RHS is SCALAR, not ADDRESSOF).  */
+ rhs.var = from;
+ rhs.offset = 0;
+ rhs.type = SCALAR;
+ process_constraint (new_constraint (lhs, rhs));
+}
+
/* Make constraints necessary to make OP escape. */
static void
make_constraint_to (escaped_id, op);
}
+/* Add constraints so that the solution of VI is transitively closed:
+ VI points to everything reachable through what it already points
+ to (VAR = *VAR), at any offset within those objects
+ (VAR = VAR + UNKNOWN). */
+
+static void
+make_transitive_closure_constraints (varinfo_t vi)
+{
+ struct constraint_expr lhs, rhs;
+
+ /* VAR = *VAR; */
+ lhs.type = SCALAR;
+ lhs.var = vi->id;
+ lhs.offset = 0;
+ rhs.type = DEREF;
+ rhs.var = vi->id;
+ rhs.offset = 0;
+ process_constraint (new_constraint (lhs, rhs));
+
+ /* VAR = VAR + UNKNOWN; */
+ lhs.type = SCALAR;
+ lhs.var = vi->id;
+ lhs.offset = 0;
+ rhs.type = SCALAR;
+ rhs.var = vi->id;
+ rhs.offset = UNKNOWN_OFFSET;
+ process_constraint (new_constraint (lhs, rhs));
+}
+
+/* Create a new artificial heap variable with NAME.
+ The underlying decl is cached per (LHS->decl, LHS->offset) via
+ heapvar_lookup/heapvar_insert, so repeated calls for the same
+ location reuse one decl.  Return the created variable. */
+
+static varinfo_t
+make_heapvar_for (varinfo_t lhs, const char *name)
+{
+ varinfo_t vi;
+ tree heapvar = heapvar_lookup (lhs->decl, lhs->offset);
+
+ if (heapvar == NULL_TREE)
+ {
+ var_ann_t ann;
+ heapvar = create_tmp_var_raw (ptr_type_node, name);
+ /* NOTE(review): marked DECL_EXTERNAL, presumably so the decl is
+ treated as not locally defined — confirm against callers that
+ clear it again (e.g. the NOALIAS handling).  */
+ DECL_EXTERNAL (heapvar) = 1;
+
+ heapvar_insert (lhs->decl, lhs->offset, heapvar);
+
+ ann = get_var_ann (heapvar);
+ ann->is_heapvar = 1;
+ }
+
+ /* For global vars we need to add a heapvar to the list of referenced
+ vars of a different function than it was created for originally. */
+ if (cfun && gimple_referenced_vars (cfun))
+ add_referenced_var (heapvar);
+
+ /* The heap variable represents an allocation of unknown size,
+ hence size/fullsize of ~0 and a single full-variable varinfo.  */
+ vi = new_var_info (heapvar, name);
+ vi->is_artificial_var = true;
+ vi->is_heap_var = true;
+ vi->is_unknown_size_var = true;
+ vi->offset = 0;
+ vi->fullsize = ~0;
+ vi->size = ~0;
+ vi->is_full_var = true;
+ insert_vi_for_tree (heapvar, vi);
+
+ return vi;
+}
+
+/* Create a new artificial heap variable with NAME and make a
+ constraint from it to LHS (LHS = &heapvar).  Return the created
+ variable so callers can adjust its flags. */
+
+static varinfo_t
+make_constraint_from_heapvar (varinfo_t lhs, const char *name)
+{
+ varinfo_t vi = make_heapvar_for (lhs, name);
+ make_constraint_from (lhs, vi->id);
+
+ return vi;
+}
+
+/* Create a new artificial heap variable with NAME and make a
+ constraint from it to LHS. Set flags according to a tag used
+ for tracking restrict pointers: the tag is local, special and
+ itself pointer-free, overriding the heapvar defaults. */
+
+static void
+make_constraint_from_restrict (varinfo_t lhs, const char *name)
+{
+ varinfo_t vi;
+ vi = make_constraint_from_heapvar (lhs, name);
+ vi->is_restrict_var = 1;
+ vi->is_global_var = 0;
+ vi->is_special_var = 1;
+ vi->may_have_pointers = 0;
+}
+
+/* In IPA mode there are varinfos for different aspects of each
+ function designator. One for the points-to set of the return
+ value, one for the variables that are clobbered by the function,
+ one for its uses and one for each parameter (including a single
+ glob for remaining variadic arguments). */
+
+enum { fi_clobbers = 1, fi_uses = 2,
+ fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
+
+/* Get a constraint for the requested part of a function designator FI
+ when operating in IPA mode.  PART is one of the fi_* offsets above
+ (or fi_parm_base + N for parameter N). */
+
+static struct constraint_expr
+get_function_part_constraint (varinfo_t fi, unsigned part)
+{
+ struct constraint_expr c;
+
+ gcc_assert (in_ipa_mode);
+
+ if (fi->id == anything_id)
+ {
+ /* ??? We probably should have a ANYFN special variable. */
+ c.var = anything_id;
+ c.offset = 0;
+ c.type = SCALAR;
+ }
+ else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
+ {
+ /* A known function: look up the sub-varinfo at offset PART;
+ fall back to anything if the function has no such part.  */
+ varinfo_t ai = first_vi_for_offset (fi, part);
+ if (ai)
+ c.var = ai->id;
+ else
+ c.var = anything_id;
+ c.offset = 0;
+ c.type = SCALAR;
+ }
+ else
+ {
+ /* FI does not designate a known FUNCTION_DECL; defer the part
+ selection to solving time via a DEREF at offset PART.  */
+ c.var = fi->id;
+ c.offset = part;
+ c.type = DEREF;
+ }
+
+ return c;
+}
+
/* For non-IPA mode, generate constraints necessary for a call on the
RHS. */
static void
-handle_rhs_call (gimple stmt)
+handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
{
+ struct constraint_expr rhsc;
unsigned i;
+ bool returns_uses = false;
for (i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree arg = gimple_call_arg (stmt, i);
+ int flags = gimple_call_arg_flags (stmt, i);
- /* Find those pointers being passed, and make sure they end up
- pointing to anything. */
- if (could_have_pointers (arg))
+ /* If the argument is not used or it does not contain pointers
+ we can ignore it. */
+ if ((flags & EAF_UNUSED)
+ || !could_have_pointers (arg))
+ continue;
+
+ /* As we compute ESCAPED context-insensitive we do not gain
+ any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
+ set. The argument would still get clobbered through the
+ escape solution.
+ ??? We might get away with less (and more precise) constraints
+ if using a temporary for transitively closing things. */
+ if ((flags & EAF_NOCLOBBER)
+ && (flags & EAF_NOESCAPE))
+ {
+ varinfo_t uses = get_call_use_vi (stmt);
+ if (!(flags & EAF_DIRECT))
+ make_transitive_closure_constraints (uses);
+ make_constraint_to (uses->id, arg);
+ returns_uses = true;
+ }
+ else if (flags & EAF_NOESCAPE)
+ {
+ varinfo_t uses = get_call_use_vi (stmt);
+ varinfo_t clobbers = get_call_clobber_vi (stmt);
+ if (!(flags & EAF_DIRECT))
+ {
+ make_transitive_closure_constraints (uses);
+ make_transitive_closure_constraints (clobbers);
+ }
+ make_constraint_to (uses->id, arg);
+ make_constraint_to (clobbers->id, arg);
+ returns_uses = true;
+ }
+ else
make_escape_constraint (arg);
}
+ /* If we added to the calls uses solution make sure we account for
+ pointers to it to be returned. */
+ if (returns_uses)
+ {
+ rhsc.var = get_call_use_vi (stmt)->id;
+ rhsc.offset = 0;
+ rhsc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
+ }
+
/* The static chain escapes as well. */
if (gimple_call_chain (stmt))
make_escape_constraint (gimple_call_chain (stmt));
+
+ /* And if we applied NRV the address of the return slot escapes as well. */
+ if (gimple_call_return_slot_opt_p (stmt)
+ && gimple_call_lhs (stmt) != NULL_TREE
+ && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
+ {
+ VEC(ce_s, heap) *tmpc = NULL;
+ struct constraint_expr lhsc, *c;
+ get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
+ lhsc.var = escaped_id;
+ lhsc.offset = 0;
+ lhsc.type = SCALAR;
+ for (i = 0; VEC_iterate (ce_s, tmpc, i, c); ++i)
+ process_constraint (new_constraint (lhsc, *c));
+ VEC_free(ce_s, heap, tmpc);
+ }
+
+ /* Regular functions return nonlocal memory. */
+ rhsc.var = nonlocal_id;
+ rhsc.offset = 0;
+ rhsc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call
the LHS point to global and escaped variables. */
static void
-handle_lhs_call (tree lhs, int flags)
+handle_lhs_call (gimple stmt, tree lhs, int flags, VEC(ce_s, heap) *rhsc,
+ tree fndecl)
{
VEC(ce_s, heap) *lhsc = NULL;
- struct constraint_expr rhsc;
- unsigned int j;
- struct constraint_expr *lhsp;
get_constraint_for (lhs, &lhsc);
-
- if (flags & ECF_MALLOC)
+ /* If the store is to a global decl make sure to
+ add proper escape constraints. */
+ lhs = get_base_address (lhs);
+ if (lhs
+ && DECL_P (lhs)
+ && is_global_var (lhs))
{
- tree heapvar = heapvar_lookup (lhs);
- varinfo_t vi;
-
- if (heapvar == NULL)
- {
- heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
- DECL_EXTERNAL (heapvar) = 1;
- get_var_ann (heapvar)->is_heapvar = 1;
- if (gimple_referenced_vars (cfun))
- add_referenced_var (heapvar);
- heapvar_insert (lhs, heapvar);
- }
+ struct constraint_expr tmpc;
+ tmpc.var = escaped_id;
+ tmpc.offset = 0;
+ tmpc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, lhsc, &tmpc);
+ }
- rhsc.var = create_variable_info_for (heapvar,
- alias_get_name (heapvar));
- vi = get_varinfo (rhsc.var);
- vi->is_artificial_var = 1;
- vi->is_heap_var = 1;
- vi->is_unknown_size_var = true;
- vi->fullsize = ~0;
- vi->size = ~0;
- rhsc.type = ADDRESSOF;
- rhsc.offset = 0;
+ /* If the call returns an argument unmodified override the rhs
+ constraints. */
+ flags = gimple_call_return_flags (stmt);
+ if (flags & ERF_RETURNS_ARG
+ && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
+ {
+ tree arg;
+ rhsc = NULL;
+ arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
+ get_constraint_for (arg, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, rhsc);
}
- else
+ else if (flags & ERF_NOALIAS)
{
- rhsc.var = escaped_id;
- rhsc.offset = 0;
- rhsc.type = ADDRESSOF;
+ varinfo_t vi;
+ struct constraint_expr tmpc;
+ rhsc = NULL;
+ vi = make_heapvar_for (get_vi_for_tree (lhs), "HEAP");
+ /* We delay marking allocated storage global until we know if
+ it escapes. */
+ DECL_EXTERNAL (vi->decl) = 0;
+ vi->is_global_var = 0;
+ /* If this is not a real malloc call assume the memory was
+ initialized and thus may point to global memory. All
+ builtin functions with the malloc attribute behave in a sane way. */
+ if (!fndecl
+ || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
+ make_constraint_from (vi, nonlocal_id);
+ tmpc.var = vi->id;
+ tmpc.offset = 0;
+ tmpc.type = ADDRESSOF;
+ VEC_safe_push (ce_s, heap, rhsc, &tmpc);
}
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
- process_constraint (new_constraint (*lhsp, rhsc));
+
+ process_all_all_constraints (lhsc, rhsc);
+
VEC_free (ce_s, heap, lhsc);
}
const function that returns a pointer in the statement STMT. */
static void
-handle_const_call (gimple stmt)
+handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
{
- tree lhs = gimple_call_lhs (stmt);
- VEC(ce_s, heap) *lhsc = NULL;
struct constraint_expr rhsc;
- unsigned int j, k;
- struct constraint_expr *lhsp;
- tree tmpvar;
- struct constraint_expr tmpc;
-
- get_constraint_for (lhs, &lhsc);
+ unsigned int k;
- /* If this is a nested function then it can return anything. */
+ /* Treat nested const functions the same as pure functions as far
+ as the static chain is concerned. */
if (gimple_call_chain (stmt))
{
- rhsc.var = anything_id;
+ varinfo_t uses = get_call_use_vi (stmt);
+ make_transitive_closure_constraints (uses);
+ make_constraint_to (uses->id, gimple_call_chain (stmt));
+ rhsc.var = uses->id;
rhsc.offset = 0;
- rhsc.type = ADDRESSOF;
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
- process_constraint (new_constraint (*lhsp, rhsc));
- VEC_free (ce_s, heap, lhsc);
- return;
+ rhsc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
}
- /* We always use a temporary here, otherwise we end up with a quadratic
- amount of constraints for
- large_struct = const_call (large_struct);
- in field-sensitive PTA. */
- tmpvar = create_tmp_var_raw (ptr_type_node, "consttmp");
- tmpc = get_constraint_exp_for_temp (tmpvar);
-
- /* May return addresses of globals. */
- rhsc.var = nonlocal_id;
- rhsc.offset = 0;
- rhsc.type = ADDRESSOF;
- process_constraint (new_constraint (tmpc, rhsc));
-
/* May return arguments. */
for (k = 0; k < gimple_call_num_args (stmt); ++k)
{
if (could_have_pointers (arg))
{
VEC(ce_s, heap) *argc = NULL;
+ unsigned i;
struct constraint_expr *argp;
- int i;
-
get_constraint_for (arg, &argc);
- for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
- process_constraint (new_constraint (tmpc, *argp));
- VEC_free (ce_s, heap, argc);
+ for (i = 0; VEC_iterate (ce_s, argc, i, argp); ++i)
+ VEC_safe_push (ce_s, heap, *results, argp);
+ VEC_free(ce_s, heap, argc);
}
}
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
- process_constraint (new_constraint (*lhsp, tmpc));
-
- VEC_free (ce_s, heap, lhsc);
+ /* May return addresses of globals. */
+ rhsc.var = nonlocal_id;
+ rhsc.offset = 0;
+ rhsc.type = ADDRESSOF;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call to a
pure function in statement STMT. */
static void
-handle_pure_call (gimple stmt)
+handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
{
+ struct constraint_expr rhsc;
unsigned i;
+ varinfo_t uses = NULL;
/* Memory reached from pointer arguments is call-used. */
for (i = 0; i < gimple_call_num_args (stmt); ++i)
tree arg = gimple_call_arg (stmt, i);
if (could_have_pointers (arg))
- make_constraint_to (callused_id, arg);
+ {
+ if (!uses)
+ {
+ uses = get_call_use_vi (stmt);
+ make_transitive_closure_constraints (uses);
+ }
+ make_constraint_to (uses->id, arg);
+ }
}
/* The static chain is used as well. */
if (gimple_call_chain (stmt))
- make_constraint_to (callused_id, gimple_call_chain (stmt));
-
- /* If the call returns a pointer it may point to reachable memory
- from the arguments. Not so for malloc functions though. */
- if (gimple_call_lhs (stmt)
- && could_have_pointers (gimple_call_lhs (stmt))
- && !(gimple_call_flags (stmt) & ECF_MALLOC))
{
- tree lhs = gimple_call_lhs (stmt);
- VEC(ce_s, heap) *lhsc = NULL;
- struct constraint_expr rhsc;
- struct constraint_expr *lhsp;
- unsigned j;
-
- get_constraint_for (lhs, &lhsc);
-
- /* If this is a nested function then it can return anything. */
- if (gimple_call_chain (stmt))
+ if (!uses)
{
- rhsc.var = anything_id;
- rhsc.offset = 0;
- rhsc.type = ADDRESSOF;
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
- process_constraint (new_constraint (*lhsp, rhsc));
- VEC_free (ce_s, heap, lhsc);
- return;
+ uses = get_call_use_vi (stmt);
+ make_transitive_closure_constraints (uses);
}
+ make_constraint_to (uses->id, gimple_call_chain (stmt));
+ }
- /* Else just add the call-used memory here. Escaped variables
- and globals will be dealt with in handle_lhs_call. */
- rhsc.var = callused_id;
+ /* Pure functions may return call-used and nonlocal memory. */
+ if (uses)
+ {
+ rhsc.var = uses->id;
rhsc.offset = 0;
- rhsc.type = ADDRESSOF;
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
- process_constraint (new_constraint (*lhsp, rhsc));
- VEC_free (ce_s, heap, lhsc);
+ rhsc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
}
+ rhsc.var = nonlocal_id;
+ rhsc.offset = 0;
+ rhsc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
}
-/* Walk statement T setting up aliasing constraints according to the
- references found in T. This function is the main part of the
- constraint builder. AI points to auxiliary alias information used
+
+/* Return the varinfo for the callee of CALL. */
+
+static varinfo_t
+get_fi_for_callee (gimple call)
+{
+ tree decl;
+
+ /* If we can directly resolve the function being called, do so.
+ Otherwise, it must be some sort of indirect expression that
+ we should still be able to handle. */
+ decl = gimple_call_fndecl (call);
+ if (decl)
+ return get_vi_for_tree (decl);
+
+ decl = gimple_call_fn (call);
+ /* The function can be either an SSA name pointer or,
+ worse, an OBJ_TYPE_REF. In this case we have no
+ clue and should be getting ANYFN (well, ANYTHING for now). */
+ if (TREE_CODE (decl) == SSA_NAME)
+ {
+ if (TREE_CODE (decl) == SSA_NAME
+ && (TREE_CODE (SSA_NAME_VAR (decl)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (decl)) == RESULT_DECL)
+ && SSA_NAME_IS_DEFAULT_DEF (decl))
+ decl = SSA_NAME_VAR (decl);
+ return get_vi_for_tree (decl);
+ }
+ else if (TREE_CODE (decl) == INTEGER_CST
+ || TREE_CODE (decl) == OBJ_TYPE_REF)
+ return get_varinfo (anything_id);
+ else
+ gcc_unreachable ();
+}
+
+/* Walk statement T setting up aliasing constraints according to the
+ references found in T. This function is the main part of the
+ constraint builder. AI points to auxiliary alias information used
when building alias sets and computing alias grouping heuristics. */
static void
VEC(ce_s, heap) *lhsc = NULL;
VEC(ce_s, heap) *rhsc = NULL;
struct constraint_expr *c;
- enum escape_type stmt_escape_type;
+ varinfo_t fi;
/* Now build constraints expressions. */
if (gimple_code (t) == GIMPLE_PHI)
get_constraint_for (gimple_phi_result (t), &lhsc);
for (i = 0; i < gimple_phi_num_args (t); i++)
{
- tree rhstype;
tree strippedrhs = PHI_ARG_DEF (t, i);
STRIP_NOPS (strippedrhs);
- rhstype = TREE_TYPE (strippedrhs);
get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
pointer passed by address. */
else if (is_gimple_call (t))
{
- if (!in_ipa_mode)
+ tree fndecl = gimple_call_fndecl (t);
+ if (fndecl != NULL_TREE
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+ /* ??? All builtins that are handled here need to be handled
+ in the alias-oracle query functions explicitly! */
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ /* All the following functions return a pointer to the same object
+ as their first argument points to. The functions do not add
+ to the ESCAPED solution. The functions make the first argument
+ pointed to memory point to what the second argument pointed to
+ memory points to. */
+ case BUILT_IN_STRCPY:
+ case BUILT_IN_STRNCPY:
+ case BUILT_IN_BCOPY:
+ case BUILT_IN_MEMCPY:
+ case BUILT_IN_MEMMOVE:
+ case BUILT_IN_MEMPCPY:
+ case BUILT_IN_STPCPY:
+ case BUILT_IN_STPNCPY:
+ case BUILT_IN_STRCAT:
+ case BUILT_IN_STRNCAT:
+ {
+ tree res = gimple_call_lhs (t);
+ tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
+ == BUILT_IN_BCOPY ? 1 : 0));
+ tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
+ == BUILT_IN_BCOPY ? 0 : 1));
+ if (res != NULL_TREE)
+ {
+ get_constraint_for (res, &lhsc);
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
+ else
+ get_constraint_for (dest, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
+ }
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
+ get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
+ do_deref (&lhsc);
+ do_deref (&rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
+ return;
+ }
+ case BUILT_IN_MEMSET:
+ {
+ tree res = gimple_call_lhs (t);
+ tree dest = gimple_call_arg (t, 0);
+ unsigned i;
+ ce_s *lhsp;
+ struct constraint_expr ac;
+ if (res != NULL_TREE)
+ {
+ get_constraint_for (res, &lhsc);
+ get_constraint_for (dest, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
+ }
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
+ do_deref (&lhsc);
+ if (flag_delete_null_pointer_checks
+ && integer_zerop (gimple_call_arg (t, 1)))
+ {
+ ac.type = ADDRESSOF;
+ ac.var = nothing_id;
+ }
+ else
+ {
+ ac.type = SCALAR;
+ ac.var = integer_id;
+ }
+ ac.offset = 0;
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ process_constraint (new_constraint (*lhsp, ac));
+ VEC_free (ce_s, heap, lhsc);
+ return;
+ }
+ /* All the following functions do not return pointers, do not
+ modify the points-to sets of memory reachable from their
+ arguments and do not add to the ESCAPED solution. */
+ case BUILT_IN_SINCOS:
+ case BUILT_IN_SINCOSF:
+ case BUILT_IN_SINCOSL:
+ case BUILT_IN_FREXP:
+ case BUILT_IN_FREXPF:
+ case BUILT_IN_FREXPL:
+ case BUILT_IN_GAMMA_R:
+ case BUILT_IN_GAMMAF_R:
+ case BUILT_IN_GAMMAL_R:
+ case BUILT_IN_LGAMMA_R:
+ case BUILT_IN_LGAMMAF_R:
+ case BUILT_IN_LGAMMAL_R:
+ case BUILT_IN_MODF:
+ case BUILT_IN_MODFF:
+ case BUILT_IN_MODFL:
+ case BUILT_IN_REMQUO:
+ case BUILT_IN_REMQUOF:
+ case BUILT_IN_REMQUOL:
+ case BUILT_IN_FREE:
+ return;
+ /* Trampolines are special - they set up passing the static
+ frame. */
+ case BUILT_IN_INIT_TRAMPOLINE:
+ {
+ tree tramp = gimple_call_arg (t, 0);
+ tree nfunc = gimple_call_arg (t, 1);
+ tree frame = gimple_call_arg (t, 2);
+ unsigned i;
+ struct constraint_expr lhs, *rhsp;
+ if (in_ipa_mode)
+ {
+ varinfo_t nfi = NULL;
+ gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
+ nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
+ if (nfi)
+ {
+ lhs = get_function_part_constraint (nfi, fi_static_chain);
+ get_constraint_for (frame, &rhsc);
+ for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ process_constraint (new_constraint (lhs, *rhsp));
+ VEC_free (ce_s, heap, rhsc);
+
+ /* Make the frame point to the function for
+ the trampoline adjustment call. */
+ get_constraint_for (tramp, &lhsc);
+ do_deref (&lhsc);
+ get_constraint_for (nfunc, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+
+ return;
+ }
+ }
+ /* Else fallthru to generic handling which will let
+ the frame escape. */
+ break;
+ }
+ case BUILT_IN_ADJUST_TRAMPOLINE:
+ {
+ tree tramp = gimple_call_arg (t, 0);
+ tree res = gimple_call_lhs (t);
+ if (in_ipa_mode && res)
+ {
+ get_constraint_for (res, &lhsc);
+ get_constraint_for (tramp, &rhsc);
+ do_deref (&rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ }
+ return;
+ }
+ /* Variadic argument handling needs to be handled in IPA
+ mode as well. */
+ case BUILT_IN_VA_START:
+ {
+ if (in_ipa_mode)
+ {
+ tree valist = gimple_call_arg (t, 0);
+ struct constraint_expr rhs, *lhsp;
+ unsigned i;
+ /* The va_list gets access to pointers in variadic
+ arguments. */
+ fi = lookup_vi_for_tree (cfun->decl);
+ gcc_assert (fi != NULL);
+ get_constraint_for (valist, &lhsc);
+ do_deref (&lhsc);
+ rhs = get_function_part_constraint (fi, ~0);
+ rhs.type = ADDRESSOF;
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ process_constraint (new_constraint (*lhsp, rhs));
+ VEC_free (ce_s, heap, lhsc);
+ /* va_list is clobbered. */
+ make_constraint_to (get_call_clobber_vi (t)->id, valist);
+ return;
+ }
+ break;
+ }
+ /* va_end doesn't have any effect that matters. */
+ case BUILT_IN_VA_END:
+ return;
+ /* Alternate return. Simply give up for now. */
+ case BUILT_IN_RETURN:
+ {
+ fi = NULL;
+ if (!in_ipa_mode
+ || !(fi = get_vi_for_tree (cfun->decl)))
+ make_constraint_from (get_varinfo (escaped_id), anything_id);
+ else if (in_ipa_mode
+ && fi != NULL)
+ {
+ struct constraint_expr lhs, rhs;
+ lhs = get_function_part_constraint (fi, fi_result);
+ rhs.var = anything_id;
+ rhs.offset = 0;
+ rhs.type = SCALAR;
+ process_constraint (new_constraint (lhs, rhs));
+ }
+ return;
+ }
+ /* printf-style functions may have hooks to set pointers to
+ point to somewhere into the generated string. Leave them
+ for a later exercise... */
+ default:
+ /* Fallthru to general call handling. */;
+ }
+ if (!in_ipa_mode
+ || (fndecl
+ && (!(fi = lookup_vi_for_tree (fndecl))
+ || !fi->is_fn_info)))
{
+ VEC(ce_s, heap) *rhsc = NULL;
int flags = gimple_call_flags (t);
/* Const functions can return their arguments and addresses
of global memory but not of escaped memory. */
- if (flags & ECF_CONST)
+ if (flags & (ECF_CONST|ECF_NOVOPS))
{
if (gimple_call_lhs (t)
&& could_have_pointers (gimple_call_lhs (t)))
- handle_const_call (t);
+ handle_const_call (t, &rhsc);
}
/* Pure functions can return addresses in and of memory
reachable from their arguments, but they are not an escape
point for reachable memory of their arguments. */
- else if (flags & ECF_PURE)
- {
- handle_pure_call (t);
- if (gimple_call_lhs (t)
- && could_have_pointers (gimple_call_lhs (t)))
- handle_lhs_call (gimple_call_lhs (t), flags);
- }
+ else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
+ handle_pure_call (t, &rhsc);
else
- {
- handle_rhs_call (t);
- if (gimple_call_lhs (t)
- && could_have_pointers (gimple_call_lhs (t)))
- handle_lhs_call (gimple_call_lhs (t), flags);
- }
+ handle_rhs_call (t, &rhsc);
+ if (gimple_call_lhs (t)
+ && could_have_pointers (gimple_call_lhs (t)))
+ handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
+ VEC_free (ce_s, heap, rhsc);
}
else
{
tree lhsop;
- varinfo_t fi;
- int i = 1;
- size_t j;
- tree decl;
-
- lhsop = gimple_call_lhs (t);
- decl = gimple_call_fndecl (t);
+ unsigned j;
- /* If we can directly resolve the function being called, do so.
- Otherwise, it must be some sort of indirect expression that
- we should still be able to handle. */
- if (decl)
- fi = get_vi_for_tree (decl);
- else
- {
- decl = gimple_call_fn (t);
- fi = get_vi_for_tree (decl);
- }
+ fi = get_fi_for_callee (t);
/* Assign all the passed arguments to the appropriate incoming
parameters of the function. */
struct constraint_expr *rhsp;
tree arg = gimple_call_arg (t, j);
+ if (!could_have_pointers (arg))
+ continue;
+
get_constraint_for (arg, &rhsc);
- if (TREE_CODE (decl) != FUNCTION_DECL)
- {
- lhs.type = DEREF;
- lhs.var = fi->id;
- lhs.offset = i;
- }
- else
- {
- lhs.type = SCALAR;
- lhs.var = first_vi_for_offset (fi, i)->id;
- lhs.offset = 0;
- }
+ lhs = get_function_part_constraint (fi, fi_parm_base + j);
while (VEC_length (ce_s, rhsc) != 0)
{
rhsp = VEC_last (ce_s, rhsc);
process_constraint (new_constraint (lhs, *rhsp));
VEC_pop (ce_s, rhsc);
}
- i++;
}
/* If we are returning a value, assign it to the result. */
- if (lhsop)
+ lhsop = gimple_call_lhs (t);
+ if (lhsop
+ && type_could_have_pointers (TREE_TYPE (lhsop)))
{
struct constraint_expr rhs;
struct constraint_expr *lhsp;
- unsigned int j = 0;
get_constraint_for (lhsop, &lhsc);
- if (TREE_CODE (decl) != FUNCTION_DECL)
+ rhs = get_function_part_constraint (fi, fi_result);
+ if (fndecl
+ && DECL_RESULT (fndecl)
+ && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
{
- rhs.type = DEREF;
- rhs.var = fi->id;
- rhs.offset = i;
- }
- else
- {
- rhs.type = SCALAR;
- rhs.var = first_vi_for_offset (fi, i)->id;
- rhs.offset = 0;
+ VEC(ce_s, heap) *tem = NULL;
+ VEC_safe_push (ce_s, heap, tem, &rhs);
+ do_deref (&tem);
+ rhs = *VEC_index (ce_s, tem, 0);
+ VEC_free(ce_s, heap, tem);
}
for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
process_constraint (new_constraint (*lhsp, rhs));
}
+
+ /* If we pass the result decl by reference, honor that. */
+ if (lhsop
+ && fndecl
+ && DECL_RESULT (fndecl)
+ && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
+ {
+ struct constraint_expr lhs;
+ struct constraint_expr *rhsp;
+
+ get_constraint_for_address_of (lhsop, &rhsc);
+ lhs = get_function_part_constraint (fi, fi_result);
+ for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
+ process_constraint (new_constraint (lhs, *rhsp));
+ VEC_free (ce_s, heap, rhsc);
+ }
+
+ /* If we use a static chain, pass it along. */
+ if (gimple_call_chain (t))
+ {
+ struct constraint_expr lhs;
+ struct constraint_expr *rhsp;
+
+ get_constraint_for (gimple_call_chain (t), &rhsc);
+ lhs = get_function_part_constraint (fi, fi_static_chain);
+ for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
+ process_constraint (new_constraint (lhs, *rhsp));
+ }
}
}
/* Otherwise, just a regular assignment statement. Only care about
operations with pointer result, others are dealt with as escape
points if they have pointer operands. */
else if (is_gimple_assign (t)
- && could_have_pointers (gimple_assign_lhs (t)))
+ && type_could_have_pointers (TREE_TYPE (gimple_assign_lhs (t))))
{
/* Otherwise, just a regular assignment statement. */
tree lhsop = gimple_assign_lhs (t);
do_structure_copy (lhsop, rhsop);
else
{
- unsigned int j;
struct constraint_expr temp;
get_constraint_for (lhsop, &lhsc);
if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
gimple_assign_rhs2 (t), &rhsc);
+ else if (gimple_assign_rhs_code (t) == BIT_AND_EXPR
+ && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
+ {
+ /* Aligning a pointer via a BIT_AND_EXPR is offsetting
+ the pointer. Handle it by offsetting it by UNKNOWN. */
+ get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
+ NULL_TREE, &rhsc);
+ }
else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
&& !(POINTER_TYPE_P (gimple_expr_type (t))
&& !POINTER_TYPE_P (TREE_TYPE (rhsop))))
temp.offset = 0;
VEC_safe_push (ce_s, heap, rhsc, &temp);
}
- for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
- {
- struct constraint_expr *c2;
- unsigned int k;
-
- for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
- process_constraint (new_constraint (*c, *c2));
- }
+ process_all_all_constraints (lhsc, rhsc);
}
+ /* If there is a store to a global variable the rhs escapes. */
+ if ((lhsop = get_base_address (lhsop)) != NULL_TREE
+ && DECL_P (lhsop)
+ && is_global_var (lhsop)
+ && (!in_ipa_mode
+ || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
+ make_escape_constraint (rhsop);
+ /* If this is a conversion of a non-restrict pointer to a
+ restrict pointer track it with a new heapvar. */
+ else if (gimple_assign_cast_p (t)
+ && POINTER_TYPE_P (TREE_TYPE (rhsop))
+ && POINTER_TYPE_P (TREE_TYPE (lhsop))
+ && !TYPE_RESTRICT (TREE_TYPE (rhsop))
+ && TYPE_RESTRICT (TREE_TYPE (lhsop)))
+ make_constraint_from_restrict (get_vi_for_tree (lhsop),
+ "CAST_RESTRICT");
}
- else if (gimple_code (t) == GIMPLE_CHANGE_DYNAMIC_TYPE)
+ /* For conversions of pointers to non-pointers the pointer escapes. */
+ else if (gimple_assign_cast_p (t)
+ && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (t)))
+ && !POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (t))))
{
- unsigned int j;
-
- get_constraint_for (gimple_cdt_location (t), &lhsc);
- for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
- get_varinfo (c->var)->no_tbaa_pruning = true;
+ make_escape_constraint (gimple_assign_rhs1 (t));
}
-
- stmt_escape_type = is_escape_site (t);
- if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
+ /* Handle escapes through return. */
+ else if (gimple_code (t) == GIMPLE_RETURN
+ && gimple_return_retval (t) != NULL_TREE
+ && could_have_pointers (gimple_return_retval (t)))
{
- gcc_assert (is_gimple_assign (t));
- if (gimple_assign_rhs_code (t) == ADDR_EXPR)
- {
- tree rhs = gimple_assign_rhs1 (t);
- tree base = get_base_address (TREE_OPERAND (rhs, 0));
- if (base
- && (!DECL_P (base)
- || !is_global_var (base)))
- make_escape_constraint (rhs);
- }
- else if (get_gimple_rhs_class (gimple_assign_rhs_code (t))
- == GIMPLE_SINGLE_RHS)
+ fi = NULL;
+ if (!in_ipa_mode
+ || !(fi = get_vi_for_tree (cfun->decl)))
+ make_escape_constraint (gimple_return_retval (t));
+ else if (in_ipa_mode
+ && fi != NULL)
{
- if (could_have_pointers (gimple_assign_rhs1 (t)))
- make_escape_constraint (gimple_assign_rhs1 (t));
+ struct constraint_expr lhs ;
+ struct constraint_expr *rhsp;
+ unsigned i;
+
+ lhs = get_function_part_constraint (fi, fi_result);
+ get_constraint_for (gimple_return_retval (t), &rhsc);
+ for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ process_constraint (new_constraint (lhs, *rhsp));
}
- else
- gcc_unreachable ();
- }
- else if (stmt_escape_type == ESCAPE_BAD_CAST)
- {
- gcc_assert (is_gimple_assign (t));
- gcc_assert (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
- || gimple_assign_rhs_code (t) == VIEW_CONVERT_EXPR);
- make_escape_constraint (gimple_assign_rhs1 (t));
}
- else if (stmt_escape_type == ESCAPE_TO_ASM)
+ /* Handle asms conservatively by adding escape constraints to everything. */
+ else if (gimple_code (t) == GIMPLE_ASM)
{
- unsigned i;
- for (i = 0; i < gimple_asm_noutputs (t); ++i)
+ unsigned i, noutputs;
+ const char **oconstraints;
+ const char *constraint;
+ bool allows_mem, allows_reg, is_inout;
+
+ noutputs = gimple_asm_noutputs (t);
+ oconstraints = XALLOCAVEC (const char *, noutputs);
+
+ for (i = 0; i < noutputs; ++i)
{
- tree op = TREE_VALUE (gimple_asm_output_op (t, i));
+ tree link = gimple_asm_output_op (t, i);
+ tree op = TREE_VALUE (link);
+
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ oconstraints[i] = constraint;
+ parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
+ &allows_reg, &is_inout);
+
+ /* A memory constraint makes the address of the operand escape. */
+ if (!allows_reg && allows_mem)
+ make_escape_constraint (build_fold_addr_expr (op));
+
+ /* The asm may read global memory, so outputs may point to
+ any global memory. */
if (op && could_have_pointers (op))
- /* Strictly we'd only need the constraints from ESCAPED and
- NONLOCAL. */
- make_escape_constraint (op);
+ {
+ VEC(ce_s, heap) *lhsc = NULL;
+ struct constraint_expr rhsc, *lhsp;
+ unsigned j;
+ get_constraint_for (op, &lhsc);
+ rhsc.var = nonlocal_id;
+ rhsc.offset = 0;
+ rhsc.type = SCALAR;
+ for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
+ process_constraint (new_constraint (*lhsp, rhsc));
+ VEC_free (ce_s, heap, lhsc);
+ }
}
for (i = 0; i < gimple_asm_ninputs (t); ++i)
{
- tree op = TREE_VALUE (gimple_asm_input_op (t, i));
- if (op && could_have_pointers (op))
- /* Strictly we'd only need the constraint to ESCAPED. */
+ tree link = gimple_asm_input_op (t, i);
+ tree op = TREE_VALUE (link);
+
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+
+ parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
+ &allows_mem, &allows_reg);
+
+ /* A memory constraint makes the address of the operand escape. */
+ if (!allows_reg && allows_mem)
+ make_escape_constraint (build_fold_addr_expr (op));
+ /* Strictly we'd only need the constraint to ESCAPED if
+ the asm clobbers memory, otherwise using something
+ along the lines of per-call clobbers/uses would be enough. */
+ else if (op && could_have_pointers (op))
make_escape_constraint (op);
}
}
- /* After promoting variables and computing aliasing we will
- need to re-scan most statements. FIXME: Try to minimize the
- number of statements re-scanned. It's not really necessary to
- re-scan *all* statements. */
- if (!in_ipa_mode)
- gimple_set_modified (origt, true);
VEC_free (ce_s, heap, rhsc);
VEC_free (ce_s, heap, lhsc);
}
-/* Find the first varinfo in the same variable as START that overlaps with
- OFFSET.
- Effectively, walk the chain of fields for the variable START to find the
- first field that overlaps with OFFSET.
- Return NULL if we can't find one. */
+/* Create a constraint adding to the clobber set of FI the memory
+ pointed to by PTR. */
-static varinfo_t
-first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
+static void
+process_ipa_clobber (varinfo_t fi, tree ptr)
{
- varinfo_t curr = start;
- while (curr)
- {
- /* We may not find a variable in the field list with the actual
- offset when when we have glommed a structure to a variable.
- In that case, however, offset should still be within the size
- of the variable. */
- if (offset >= curr->offset && offset < (curr->offset + curr->size))
- return curr;
- curr = curr->next;
- }
- return NULL;
+ VEC(ce_s, heap) *ptrc = NULL;
+ struct constraint_expr *c, lhs;
+ unsigned i;
+ get_constraint_for (ptr, &ptrc);
+ lhs = get_function_part_constraint (fi, fi_clobbers);
+ for (i = 0; VEC_iterate (ce_s, ptrc, i, c); i++)
+ process_constraint (new_constraint (lhs, *c));
+ VEC_free (ce_s, heap, ptrc);
}
-
-/* Insert the varinfo FIELD into the field list for BASE, at the front
- of the list. */
+/* Walk statement T setting up clobber and use constraints according to the
+ references found in T. This function is a main part of the
+ IPA constraint builder. */
static void
-insert_into_field_list (varinfo_t base, varinfo_t field)
+find_func_clobbers (gimple origt)
{
- varinfo_t prev = base;
- varinfo_t curr = base->next;
+ gimple t = origt;
+ VEC(ce_s, heap) *lhsc = NULL;
+ VEC(ce_s, heap) *rhsc = NULL;
+ varinfo_t fi;
- field->next = curr;
- prev->next = field;
-}
+ /* Add constraints for clobbered/used in IPA mode.
+ We are not interested in what automatic variables are clobbered
+ or used as we only use the information in the caller to which
+ they do not escape. */
+ gcc_assert (in_ipa_mode);
-/* Insert the varinfo FIELD into the field list for BASE, ordered by
- offset. */
+ /* If the stmt refers to memory in any way it better had a VUSE. */
+ if (gimple_vuse (t) == NULL_TREE)
+ return;
-static void
-insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
-{
- varinfo_t prev = base;
- varinfo_t curr = base->next;
+ /* We'd better have function information for the current function. */
+ fi = lookup_vi_for_tree (cfun->decl);
+ gcc_assert (fi != NULL);
- if (curr == NULL)
+ /* Account for stores in assignments and calls. */
+ if (gimple_vdef (t) != NULL_TREE
+ && gimple_has_lhs (t))
{
- prev->next = field;
- field->next = NULL;
+ tree lhs = gimple_get_lhs (t);
+ tree tem = lhs;
+ while (handled_component_p (tem))
+ tem = TREE_OPERAND (tem, 0);
+ if ((DECL_P (tem)
+ && !auto_var_in_fn_p (tem, cfun->decl))
+ || INDIRECT_REF_P (tem)
+ || (TREE_CODE (tem) == MEM_REF
+ && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
+ && auto_var_in_fn_p
+ (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
+ {
+ struct constraint_expr lhsc, *rhsp;
+ unsigned i;
+ lhsc = get_function_part_constraint (fi, fi_clobbers);
+ get_constraint_for_address_of (lhs, &rhsc);
+ for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ process_constraint (new_constraint (lhsc, *rhsp));
+ VEC_free (ce_s, heap, rhsc);
+ }
}
- else
+
+ /* Account for uses in assignments and returns. */
+ if (gimple_assign_single_p (t)
+ || (gimple_code (t) == GIMPLE_RETURN
+ && gimple_return_retval (t) != NULL_TREE))
{
- while (curr)
+ tree rhs = (gimple_assign_single_p (t)
+ ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
+ tree tem = rhs;
+ while (handled_component_p (tem))
+ tem = TREE_OPERAND (tem, 0);
+ if ((DECL_P (tem)
+ && !auto_var_in_fn_p (tem, cfun->decl))
+ || INDIRECT_REF_P (tem)
+ || (TREE_CODE (tem) == MEM_REF
+ && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
+ && auto_var_in_fn_p
+ (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
{
- if (field->offset <= curr->offset)
- break;
- prev = curr;
- curr = curr->next;
+ struct constraint_expr lhs, *rhsp;
+ unsigned i;
+ lhs = get_function_part_constraint (fi, fi_uses);
+ get_constraint_for_address_of (rhs, &rhsc);
+ for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ process_constraint (new_constraint (lhs, *rhsp));
+ VEC_free (ce_s, heap, rhsc);
+ }
+ }
+
+ if (is_gimple_call (t))
+ {
+ varinfo_t cfi = NULL;
+ tree decl = gimple_call_fndecl (t);
+ struct constraint_expr lhs, rhs;
+ unsigned i, j;
+
+ /* For builtins we do not have separate function info. For those
+ we do not generate escapes for we have to generate clobbers/uses. */
+ if (decl
+ && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
+ switch (DECL_FUNCTION_CODE (decl))
+ {
+ /* The following functions use and clobber memory pointed to
+ by their arguments. */
+ case BUILT_IN_STRCPY:
+ case BUILT_IN_STRNCPY:
+ case BUILT_IN_BCOPY:
+ case BUILT_IN_MEMCPY:
+ case BUILT_IN_MEMMOVE:
+ case BUILT_IN_MEMPCPY:
+ case BUILT_IN_STPCPY:
+ case BUILT_IN_STPNCPY:
+ case BUILT_IN_STRCAT:
+ case BUILT_IN_STRNCAT:
+ {
+ tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
+ == BUILT_IN_BCOPY ? 1 : 0));
+ tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
+ == BUILT_IN_BCOPY ? 0 : 1));
+ unsigned i;
+ struct constraint_expr *rhsp, *lhsp;
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
+ lhs = get_function_part_constraint (fi, fi_clobbers);
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
+ process_constraint (new_constraint (lhs, *lhsp));
+ VEC_free (ce_s, heap, lhsc);
+ get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
+ lhs = get_function_part_constraint (fi, fi_uses);
+ for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ process_constraint (new_constraint (lhs, *rhsp));
+ VEC_free (ce_s, heap, rhsc);
+ return;
+ }
+ /* The following function clobbers memory pointed to by
+ its argument. */
+ case BUILT_IN_MEMSET:
+ {
+ tree dest = gimple_call_arg (t, 0);
+ unsigned i;
+ ce_s *lhsp;
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
+ lhs = get_function_part_constraint (fi, fi_clobbers);
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
+ process_constraint (new_constraint (lhs, *lhsp));
+ VEC_free (ce_s, heap, lhsc);
+ return;
+ }
+ /* The following functions clobber their second and third
+ arguments. */
+ case BUILT_IN_SINCOS:
+ case BUILT_IN_SINCOSF:
+ case BUILT_IN_SINCOSL:
+ {
+ process_ipa_clobber (fi, gimple_call_arg (t, 1));
+ process_ipa_clobber (fi, gimple_call_arg (t, 2));
+ return;
+ }
+ /* The following functions clobber their second argument. */
+ case BUILT_IN_FREXP:
+ case BUILT_IN_FREXPF:
+ case BUILT_IN_FREXPL:
+ case BUILT_IN_LGAMMA_R:
+ case BUILT_IN_LGAMMAF_R:
+ case BUILT_IN_LGAMMAL_R:
+ case BUILT_IN_GAMMA_R:
+ case BUILT_IN_GAMMAF_R:
+ case BUILT_IN_GAMMAL_R:
+ case BUILT_IN_MODF:
+ case BUILT_IN_MODFF:
+ case BUILT_IN_MODFL:
+ {
+ process_ipa_clobber (fi, gimple_call_arg (t, 1));
+ return;
+ }
+ /* The following functions clobber their third argument. */
+ case BUILT_IN_REMQUO:
+ case BUILT_IN_REMQUOF:
+ case BUILT_IN_REMQUOL:
+ {
+ process_ipa_clobber (fi, gimple_call_arg (t, 2));
+ return;
+ }
+ /* The following functions neither read nor clobber memory. */
+ case BUILT_IN_FREE:
+ return;
+ /* Trampolines are of no interest to us. */
+ case BUILT_IN_INIT_TRAMPOLINE:
+ case BUILT_IN_ADJUST_TRAMPOLINE:
+ return;
+ case BUILT_IN_VA_START:
+ case BUILT_IN_VA_END:
+ return;
+ /* printf-style functions may have hooks to set pointers to
+ point to somewhere into the generated string. Leave them
+ for a later exercise... */
+ default:
+ /* Fallthru to general call handling. */;
+ }
+
+ /* Parameters passed by value are used. */
+ lhs = get_function_part_constraint (fi, fi_uses);
+ for (i = 0; i < gimple_call_num_args (t); i++)
+ {
+ struct constraint_expr *rhsp;
+ tree arg = gimple_call_arg (t, i);
+
+ if (TREE_CODE (arg) == SSA_NAME
+ || is_gimple_min_invariant (arg))
+ continue;
+
+ get_constraint_for_address_of (arg, &rhsc);
+ for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
+ process_constraint (new_constraint (lhs, *rhsp));
+ VEC_free (ce_s, heap, rhsc);
+ }
+
+ /* Build constraints for propagating clobbers/uses along the
+ callgraph edges. */
+ cfi = get_fi_for_callee (t);
+ if (cfi->id == anything_id)
+ {
+ if (gimple_vdef (t))
+ make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
+ anything_id);
+ make_constraint_from (first_vi_for_offset (fi, fi_uses),
+ anything_id);
+ return;
+ }
+
+ /* For callees without function info (that's external functions),
+ ESCAPED is clobbered and used. */
+ if (gimple_call_fndecl (t)
+ && !cfi->is_fn_info)
+ {
+ varinfo_t vi;
+
+ if (gimple_vdef (t))
+ make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
+ escaped_id);
+ make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
+
+ /* Also honor the call statement use/clobber info. */
+ if ((vi = lookup_call_clobber_vi (t)) != NULL)
+ make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
+ vi->id);
+ if ((vi = lookup_call_use_vi (t)) != NULL)
+ make_copy_constraint (first_vi_for_offset (fi, fi_uses),
+ vi->id);
+ return;
+ }
+
+ /* Otherwise the caller clobbers and uses what the callee does.
+ ??? This should use a new complex constraint that filters
+ local variables of the callee. */
+ if (gimple_vdef (t))
+ {
+ lhs = get_function_part_constraint (fi, fi_clobbers);
+ rhs = get_function_part_constraint (cfi, fi_clobbers);
+ process_constraint (new_constraint (lhs, rhs));
}
- field->next = prev->next;
- prev->next = field;
+ lhs = get_function_part_constraint (fi, fi_uses);
+ rhs = get_function_part_constraint (cfi, fi_uses);
+ process_constraint (new_constraint (lhs, rhs));
+ }
+ else if (gimple_code (t) == GIMPLE_ASM)
+ {
+ /* ??? Ick. We can do better. */
+ if (gimple_vdef (t))
+ make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
+ anything_id);
+ make_constraint_from (first_vi_for_offset (fi, fi_uses),
+ anything_id);
+ }
+
+ VEC_free (ce_s, heap, rhsc);
+}
+
+
+/* Find the first varinfo in the same variable as START that overlaps with
+ OFFSET. Return NULL if we can't find one. */
+
+static varinfo_t
+first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
+{
+ /* If the offset is outside of the variable, bail out. */
+ if (offset >= start->fullsize)
+ return NULL;
+
+ /* If we cannot reach offset from start, lookup the first field
+ and start from there. */
+ if (start->offset > offset)
+ start = lookup_vi_for_tree (start->decl);
+
+ while (start)
+ {
+ /* We may not find a variable in the field list with the actual
+ offset when we have glommed a structure to a variable.
+ In that case, however, offset should still be within the size
+ of the variable. */
+ if (offset >= start->offset
+ && (offset - start->offset) < start->size)
+ return start;
+
+ start= start->next;
}
+
+ return NULL;
+}
+
+/* Find the first varinfo in the same variable as START that overlaps with
+ OFFSET. If there is no such varinfo the varinfo directly preceding
+ OFFSET is returned. */
+
+static varinfo_t
+first_or_preceding_vi_for_offset (varinfo_t start,
+ unsigned HOST_WIDE_INT offset)
+{
+ /* If we cannot reach offset from start, lookup the first field
+ and start from there. */
+ if (start->offset > offset)
+ start = lookup_vi_for_tree (start->decl);
+
+ /* We may not find a variable in the field list with the actual
+ offset when we have glommed a structure to a variable.
+ In that case, however, offset should still be within the size
+ of the variable.
+ If we got beyond the offset we look for, return the field
+ directly preceding offset which may be the last field. */
+ while (start->next
+ && offset >= start->offset
+ && !((offset - start->offset) < start->size))
+ start = start->next;
+
+ return start;
}
+
/* This structure is used during pushing fields onto the fieldstack
to track the offset of the field, since bitpos_of_field gives it
relative to its immediate containing type, and we want it relative
unsigned has_unknown_size : 1;
unsigned may_have_pointers : 1;
+
+ unsigned only_restrict_pointers : 1;
};
typedef struct fieldoff fieldoff_s;
return false;
/* Non decls or memory tags can never have subvars. */
- if (!DECL_P (v) || MTAG_P (v))
+ if (!DECL_P (v))
return false;
/* Aggregates without overlapping fields can have subvars. */
OFFSET is used to keep track of the offset in this entire
structure, rather than just the immediately containing structure.
- Returns the number of fields pushed. */
+ Returns false if the caller is supposed to handle the field we
+ recursed for. */
-static int
+static bool
push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
- HOST_WIDE_INT offset)
+ HOST_WIDE_INT offset, bool must_have_pointers_p)
{
tree field;
- int count = 0;
+ bool empty_p = true;
if (TREE_CODE (type) != RECORD_TYPE)
- return 0;
+ return false;
/* If the vector of fields is growing too big, bail out early.
Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
sure this fails. */
if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
- return 0;
+ return false;
for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
if (TREE_CODE (field) == FIELD_DECL)
{
bool push = false;
- int pushed = 0;
HOST_WIDE_INT foff = bitpos_of_field (field);
if (!var_can_have_subvars (field)
|| TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
|| TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
push = true;
- else if (!(pushed = push_fields_onto_fieldstack
- (TREE_TYPE (field), fieldstack, offset + foff))
+ else if (!push_fields_onto_fieldstack
+ (TREE_TYPE (field), fieldstack, offset + foff,
+ must_have_pointers_p)
&& (DECL_SIZE (field)
&& !integer_zerop (DECL_SIZE (field))))
/* Empty structures may have actual size, like in C++. So
/* If adjacent fields do not contain pointers merge them. */
if (pair
&& !pair->may_have_pointers
- && !could_have_pointers (field)
&& !pair->has_unknown_size
&& !has_unknown_size
- && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
+ && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff
+ && !must_have_pointers_p
+ && !could_have_pointers (field))
{
- pair = VEC_last (fieldoff_s, *fieldstack);
pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
}
else
pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
else
pair->size = -1;
- pair->may_have_pointers = could_have_pointers (field);
- count++;
+ pair->may_have_pointers
+ = must_have_pointers_p || could_have_pointers (field);
+ pair->only_restrict_pointers
+ = (!has_unknown_size
+ && POINTER_TYPE_P (TREE_TYPE (field))
+ && TYPE_RESTRICT (TREE_TYPE (field)));
}
}
- else
- count += pushed;
- }
-
- return count;
-}
-
-/* Create a constraint ID = &FROM. */
-static void
-make_constraint_from (varinfo_t vi, int from)
-{
- struct constraint_expr lhs, rhs;
-
- lhs.var = vi->id;
- lhs.offset = 0;
- lhs.type = SCALAR;
+ empty_p = false;
+ }
- rhs.var = from;
- rhs.offset = 0;
- rhs.type = ADDRESSOF;
- process_constraint (new_constraint (lhs, rhs));
+ return !empty_p;
}
/* Count the number of arguments DECL has, and set IS_VARARGS to true
static unsigned int
count_num_arguments (tree decl, bool *is_varargs)
{
- unsigned int i = 0;
+ unsigned int num = 0;
tree t;
- for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
- t;
- t = TREE_CHAIN (t))
- {
- if (TREE_VALUE (t) == void_type_node)
- break;
- i++;
- }
+ /* Capture named arguments for K&R functions. They do not
+ have a prototype and thus no TYPE_ARG_TYPES. */
+ for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
+ ++num;
+ /* Check if the function has variadic arguments. */
+ for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
+ if (TREE_VALUE (t) == void_type_node)
+ break;
if (!t)
*is_varargs = true;
- return i;
+
+ return num;
}
/* Creation function node for DECL, using NAME, and return the index
of the variable we've created for the function. */
-static unsigned int
+static varinfo_t
create_function_info_for (tree decl, const char *name)
{
- unsigned int index = VEC_length (varinfo_t, varmap);
- varinfo_t vi;
+ struct function *fn = DECL_STRUCT_FUNCTION (decl);
+ varinfo_t vi, prev_vi;
tree arg;
unsigned int i;
bool is_varargs = false;
+ unsigned int num_args = count_num_arguments (decl, &is_varargs);
/* Create the variable info. */
- vi = new_var_info (decl, index, name);
- vi->decl = decl;
+ vi = new_var_info (decl, name);
vi->offset = 0;
vi->size = 1;
- vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
+ vi->fullsize = fi_parm_base + num_args;
+ vi->is_fn_info = 1;
+ vi->may_have_pointers = false;
+ if (is_varargs)
+ vi->fullsize = ~0;
insert_vi_for_tree (vi->decl, vi);
- VEC_safe_push (varinfo_t, heap, varmap, vi);
- stats.total_vars++;
+ prev_vi = vi;
- /* If it's varargs, we don't know how many arguments it has, so we
- can't do much. */
- if (is_varargs)
+ /* Create a variable for things the function clobbers and one for
+ things the function uses. */
{
- vi->fullsize = ~0;
- vi->size = ~0;
- vi->is_unknown_size_var = true;
- return index;
+ varinfo_t clobbervi, usevi;
+ const char *newname;
+ char *tempname;
+
+ asprintf (&tempname, "%s.clobber", name);
+ newname = ggc_strdup (tempname);
+ free (tempname);
+
+ clobbervi = new_var_info (NULL, newname);
+ clobbervi->offset = fi_clobbers;
+ clobbervi->size = 1;
+ clobbervi->fullsize = vi->fullsize;
+ clobbervi->is_full_var = true;
+ clobbervi->is_global_var = false;
+ gcc_assert (prev_vi->offset < clobbervi->offset);
+ prev_vi->next = clobbervi;
+ prev_vi = clobbervi;
+
+ asprintf (&tempname, "%s.use", name);
+ newname = ggc_strdup (tempname);
+ free (tempname);
+
+ usevi = new_var_info (NULL, newname);
+ usevi->offset = fi_uses;
+ usevi->size = 1;
+ usevi->fullsize = vi->fullsize;
+ usevi->is_full_var = true;
+ usevi->is_global_var = false;
+ gcc_assert (prev_vi->offset < usevi->offset);
+ prev_vi->next = usevi;
+ prev_vi = usevi;
}
+ /* And one for the static chain. */
+ if (fn->static_chain_decl != NULL_TREE)
+ {
+ varinfo_t chainvi;
+ const char *newname;
+ char *tempname;
- arg = DECL_ARGUMENTS (decl);
+ asprintf (&tempname, "%s.chain", name);
+ newname = ggc_strdup (tempname);
+ free (tempname);
+
+ chainvi = new_var_info (fn->static_chain_decl, newname);
+ chainvi->offset = fi_static_chain;
+ chainvi->size = 1;
+ chainvi->fullsize = vi->fullsize;
+ chainvi->is_full_var = true;
+ chainvi->is_global_var = false;
+ gcc_assert (prev_vi->offset < chainvi->offset);
+ prev_vi->next = chainvi;
+ prev_vi = chainvi;
+ insert_vi_for_tree (fn->static_chain_decl, chainvi);
+ }
+
+ /* Create a variable for the return var. */
+ if (DECL_RESULT (decl) != NULL
+ || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
+ {
+ varinfo_t resultvi;
+ const char *newname;
+ char *tempname;
+ tree resultdecl = decl;
+
+ if (DECL_RESULT (decl))
+ resultdecl = DECL_RESULT (decl);
+
+ asprintf (&tempname, "%s.result", name);
+ newname = ggc_strdup (tempname);
+ free (tempname);
+
+ resultvi = new_var_info (resultdecl, newname);
+ resultvi->offset = fi_result;
+ resultvi->size = 1;
+ resultvi->fullsize = vi->fullsize;
+ resultvi->is_full_var = true;
+ if (DECL_RESULT (decl))
+ resultvi->may_have_pointers = could_have_pointers (DECL_RESULT (decl));
+ gcc_assert (prev_vi->offset < resultvi->offset);
+ prev_vi->next = resultvi;
+ prev_vi = resultvi;
+ if (DECL_RESULT (decl))
+ insert_vi_for_tree (DECL_RESULT (decl), resultvi);
+ }
/* Set up variables for each argument. */
- for (i = 1; i < vi->fullsize; i++)
+ arg = DECL_ARGUMENTS (decl);
+ for (i = 0; i < num_args; i++)
{
varinfo_t argvi;
const char *newname;
char *tempname;
- unsigned int newindex;
tree argdecl = decl;
if (arg)
argdecl = arg;
- newindex = VEC_length (varinfo_t, varmap);
- asprintf (&tempname, "%s.arg%d", name, i-1);
+ asprintf (&tempname, "%s.arg%d", name, i);
newname = ggc_strdup (tempname);
free (tempname);
- argvi = new_var_info (argdecl, newindex, newname);
- argvi->decl = argdecl;
- VEC_safe_push (varinfo_t, heap, varmap, argvi);
- argvi->offset = i;
+ argvi = new_var_info (argdecl, newname);
+ argvi->offset = fi_parm_base + i;
argvi->size = 1;
argvi->is_full_var = true;
argvi->fullsize = vi->fullsize;
- insert_into_field_list_sorted (vi, argvi);
- stats.total_vars ++;
+ if (arg)
+ argvi->may_have_pointers = could_have_pointers (arg);
+ gcc_assert (prev_vi->offset < argvi->offset);
+ prev_vi->next = argvi;
+ prev_vi = argvi;
if (arg)
{
insert_vi_for_tree (arg, argvi);
}
}
- /* Create a variable for the return var. */
- if (DECL_RESULT (decl) != NULL
- || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
+ /* Add one representative for all further args. */
+ if (is_varargs)
{
- varinfo_t resultvi;
+ varinfo_t argvi;
const char *newname;
char *tempname;
- unsigned int newindex;
- tree resultdecl = decl;
-
- vi->fullsize ++;
-
- if (DECL_RESULT (decl))
- resultdecl = DECL_RESULT (decl);
+ tree decl;
- newindex = VEC_length (varinfo_t, varmap);
- asprintf (&tempname, "%s.result", name);
+ asprintf (&tempname, "%s.varargs", name);
newname = ggc_strdup (tempname);
free (tempname);
- resultvi = new_var_info (resultdecl, newindex, newname);
- resultvi->decl = resultdecl;
- VEC_safe_push (varinfo_t, heap, varmap, resultvi);
- resultvi->offset = i;
- resultvi->size = 1;
- resultvi->fullsize = vi->fullsize;
- resultvi->is_full_var = true;
- insert_into_field_list_sorted (vi, resultvi);
- stats.total_vars ++;
- if (DECL_RESULT (decl))
- insert_vi_for_tree (DECL_RESULT (decl), resultvi);
+ /* We need something that can be pointed to for va_start. */
+ decl = create_tmp_var_raw (ptr_type_node, name);
+ get_var_ann (decl);
+
+ argvi = new_var_info (decl, newname);
+ argvi->offset = fi_parm_base + num_args;
+ argvi->size = ~0;
+ argvi->is_full_var = true;
+ argvi->is_heap_var = true;
+ argvi->fullsize = vi->fullsize;
+ gcc_assert (prev_vi->offset < argvi->offset);
+ prev_vi->next = argvi;
+ prev_vi = argvi;
}
- return index;
+
+ return vi;
}
This will also create any varinfo structures necessary for fields
of DECL. */
-static unsigned int
-create_variable_info_for (tree decl, const char *name)
+static varinfo_t
+create_variable_info_for_1 (tree decl, const char *name)
{
- unsigned int index = VEC_length (varinfo_t, varmap);
- varinfo_t vi;
+ varinfo_t vi, newvi;
tree decl_type = TREE_TYPE (decl);
tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
- bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
VEC (fieldoff_s,heap) *fieldstack = NULL;
+ fieldoff_s *fo;
+ unsigned int i;
- if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
- return create_function_info_for (decl, name);
-
- if (var_can_have_subvars (decl) && use_field_sensitive
- && (!var_ann (decl)
- || var_ann (decl)->noalias_state == 0)
- && (!var_ann (decl)
- || !var_ann (decl)->is_heapvar))
- push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
-
- /* If the variable doesn't have subvars, we may end up needing to
- sort the field list and create fake variables for all the
- fields. */
- vi = new_var_info (decl, index, name);
- vi->decl = decl;
- vi->offset = 0;
- vi->may_have_pointers = could_have_pointers (decl);
if (!declsize
|| !host_integerp (declsize, 1))
{
- vi->is_unknown_size_var = true;
- vi->fullsize = ~0;
+ vi = new_var_info (decl, name);
+ vi->offset = 0;
vi->size = ~0;
- }
- else
- {
- vi->fullsize = TREE_INT_CST_LOW (declsize);
- vi->size = vi->fullsize;
- }
-
- insert_vi_for_tree (vi->decl, vi);
- VEC_safe_push (varinfo_t, heap, varmap, vi);
- if (is_global && (!flag_whole_program || !in_ipa_mode)
- && vi->may_have_pointers)
- {
- if (var_ann (decl)
- && var_ann (decl)->noalias_state == NO_ALIAS_ANYTHING)
- make_constraint_from (vi, vi->id);
- else
- make_constraint_from (vi, escaped_id);
+ vi->fullsize = ~0;
+ vi->is_unknown_size_var = true;
+ vi->is_full_var = true;
+ vi->may_have_pointers = could_have_pointers (decl);
+ return vi;
}
- stats.total_vars++;
+ /* Collect field information. */
if (use_field_sensitive
- && !vi->is_unknown_size_var
&& var_can_have_subvars (decl)
- && VEC_length (fieldoff_s, fieldstack) > 1
- && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
+ /* ??? Force us to not use subfields for global initializers
+ in IPA mode. Else we'd have to parse arbitrary initializers. */
+ && !(in_ipa_mode
+ && is_global_var (decl)
+ && DECL_INITIAL (decl)))
{
- unsigned int newindex = VEC_length (varinfo_t, varmap);
fieldoff_s *fo = NULL;
bool notokay = false;
unsigned int i;
+ push_fields_onto_fieldstack (decl_type, &fieldstack, 0,
+ TREE_PUBLIC (decl)
+ || DECL_EXTERNAL (decl)
+ || TREE_ADDRESSABLE (decl));
+
for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
- {
- if (fo->has_unknown_size
- || fo->offset < 0)
- {
- notokay = true;
- break;
- }
- }
+ if (fo->has_unknown_size
+ || fo->offset < 0)
+ {
+ notokay = true;
+ break;
+ }
/* We can't sort them if we have a field with a variable sized type,
which will make notokay = true. In that case, we are going to return
notokay = check_for_overlaps (fieldstack);
}
+ if (notokay)
+ VEC_free (fieldoff_s, heap, fieldstack);
+ }
+
+ /* If we didn't end up collecting sub-variables create a full
+ variable for the decl. */
+ if (VEC_length (fieldoff_s, fieldstack) <= 1
+ || VEC_length (fieldoff_s, fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
+ {
+ vi = new_var_info (decl, name);
+ vi->offset = 0;
+ vi->may_have_pointers = could_have_pointers (decl);
+ vi->fullsize = TREE_INT_CST_LOW (declsize);
+ vi->size = vi->fullsize;
+ vi->is_full_var = true;
+ VEC_free (fieldoff_s, heap, fieldstack);
+ return vi;
+ }
- if (VEC_length (fieldoff_s, fieldstack) != 0)
- fo = VEC_index (fieldoff_s, fieldstack, 0);
+ vi = new_var_info (decl, name);
+ vi->fullsize = TREE_INT_CST_LOW (declsize);
+ for (i = 0, newvi = vi;
+ VEC_iterate (fieldoff_s, fieldstack, i, fo);
+ ++i, newvi = newvi->next)
+ {
+ const char *newname = "NULL";
+ char *tempname;
- if (fo == NULL || notokay)
+ if (dump_file)
{
- vi->is_unknown_size_var = 1;
- vi->fullsize = ~0;
- vi->size = ~0;
- vi->is_full_var = true;
- VEC_free (fieldoff_s, heap, fieldstack);
- return index;
+ asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
+ "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
+ newname = ggc_strdup (tempname);
+ free (tempname);
}
+ newvi->name = newname;
+ newvi->offset = fo->offset;
+ newvi->size = fo->size;
+ newvi->fullsize = vi->fullsize;
+ newvi->may_have_pointers = fo->may_have_pointers;
+ newvi->only_restrict_pointers = fo->only_restrict_pointers;
+ if (i + 1 < VEC_length (fieldoff_s, fieldstack))
+ newvi->next = new_var_info (decl, name);
+ }
- vi->size = fo->size;
- vi->offset = fo->offset;
- vi->may_have_pointers = fo->may_have_pointers;
- for (i = VEC_length (fieldoff_s, fieldstack) - 1;
- i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
- i--)
- {
- varinfo_t newvi;
- const char *newname = "NULL";
- char *tempname;
+ VEC_free (fieldoff_s, heap, fieldstack);
+
+ return vi;
+}
+
+static unsigned int
+create_variable_info_for (tree decl, const char *name)
+{
+ varinfo_t vi = create_variable_info_for_1 (decl, name);
+ unsigned int id = vi->id;
+
+ insert_vi_for_tree (decl, vi);
- newindex = VEC_length (varinfo_t, varmap);
- if (dump_file)
+ /* Create initial constraints for globals. */
+ for (; vi; vi = vi->next)
+ {
+ if (!vi->may_have_pointers
+ || !vi->is_global_var)
+ continue;
+
+ /* Mark global restrict qualified pointers. */
+ if ((POINTER_TYPE_P (TREE_TYPE (decl))
+ && TYPE_RESTRICT (TREE_TYPE (decl)))
+ || vi->only_restrict_pointers)
+ make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
+
+ /* For escaped variables initialize them from nonlocal. */
+ if (!in_ipa_mode
+ || DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
+ make_copy_constraint (vi, nonlocal_id);
+
+ /* If this is a global variable with an initializer and we are in
+ IPA mode generate constraints for it. In non-IPA mode
+ the initializer from nonlocal is all we need. */
+ if (in_ipa_mode
+ && DECL_INITIAL (decl))
+ {
+ VEC (ce_s, heap) *rhsc = NULL;
+ struct constraint_expr lhs, *rhsp;
+ unsigned i;
+ get_constraint_for (DECL_INITIAL (decl), &rhsc);
+ lhs.var = vi->id;
+ lhs.offset = 0;
+ lhs.type = SCALAR;
+ for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ process_constraint (new_constraint (lhs, *rhsp));
+ /* If this is a variable that escapes from the unit
+ the initializer escapes as well. */
+ if (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
{
- asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
- "+" HOST_WIDE_INT_PRINT_DEC,
- vi->name, fo->offset, fo->size);
- newname = ggc_strdup (tempname);
- free (tempname);
+ lhs.var = escaped_id;
+ lhs.offset = 0;
+ lhs.type = SCALAR;
+ for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ process_constraint (new_constraint (lhs, *rhsp));
}
- newvi = new_var_info (decl, newindex, newname);
- newvi->offset = fo->offset;
- newvi->size = fo->size;
- newvi->fullsize = vi->fullsize;
- newvi->may_have_pointers = fo->may_have_pointers;
- insert_into_field_list (vi, newvi);
- VEC_safe_push (varinfo_t, heap, varmap, newvi);
- if (is_global && (!flag_whole_program || !in_ipa_mode)
- && newvi->may_have_pointers)
- make_constraint_from (newvi, escaped_id);
-
- stats.total_vars++;
+ VEC_free (ce_s, heap, rhsc);
}
}
- else
- vi->is_full_var = true;
-
- VEC_free (fieldoff_s, heap, fieldstack);
- return index;
+ return id;
}
/* Print out the points-to solution for VAR to FILE. */
-void
+static void
dump_solution_for_var (FILE *file, unsigned int var)
{
varinfo_t vi = get_varinfo (var);
unsigned int i;
bitmap_iterator bi;
- if (find (var) != var)
- {
- varinfo_t vipt = get_varinfo (find (var));
- fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
- }
- else
- {
- fprintf (file, "%s = { ", vi->name);
- EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
- {
- fprintf (file, "%s ", get_varinfo (i)->name);
- }
- fprintf (file, "}");
- if (vi->no_tbaa_pruning)
- fprintf (file, " no-tbaa-pruning");
- fprintf (file, "\n");
- }
+ /* Dump the solution for unified vars anyway, this avoids difficulties
+ in scanning dumps in the testsuite. */
+ fprintf (file, "%s = { ", vi->name);
+ vi = get_varinfo (find (var));
+ EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
+ fprintf (file, "%s ", get_varinfo (i)->name);
+ fprintf (file, "}");
+
+ /* But note when the variable was unified. */
+ if (vi->id != var)
+ fprintf (file, " same as %s", vi->name);
+
+ fprintf (file, "\n");
}
/* Print the points-to solution for VAR to stdout. */
-void
+DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
dump_solution_for_var (stdout, var);
intra_create_variable_infos (void)
{
tree t;
- struct constraint_expr lhs, rhs;
/* For each incoming pointer argument arg, create the constraint ARG
- = NONLOCAL or a dummy variable if flag_argument_noalias is set. */
+ = NONLOCAL or a dummy variable if it is a restrict qualified
+ passed-by-reference argument. */
for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
{
varinfo_t p;
if (!could_have_pointers (t))
continue;
- /* If flag_argument_noalias is set, then function pointer
- arguments are guaranteed not to point to each other. In that
- case, create an artificial variable PARM_NOALIAS and the
- constraint ARG = &PARM_NOALIAS. */
- if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
+ /* For restrict qualified pointers to objects passed by
+ reference build a real representative for the pointed-to object. */
+ if (DECL_BY_REFERENCE (t)
+ && POINTER_TYPE_P (TREE_TYPE (t))
+ && TYPE_RESTRICT (TREE_TYPE (t)))
{
+ struct constraint_expr lhsc, rhsc;
varinfo_t vi;
- tree heapvar = heapvar_lookup (t);
-
- lhs.offset = 0;
- lhs.type = SCALAR;
- lhs.var = get_vi_for_tree (t)->id;
-
+ tree heapvar = heapvar_lookup (t, 0);
if (heapvar == NULL_TREE)
{
var_ann_t ann;
- heapvar = create_tmp_var_raw (ptr_type_node,
+ heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
"PARM_NOALIAS");
DECL_EXTERNAL (heapvar) = 1;
- if (gimple_referenced_vars (cfun))
- add_referenced_var (heapvar);
-
- heapvar_insert (t, heapvar);
-
+ heapvar_insert (t, 0, heapvar);
ann = get_var_ann (heapvar);
ann->is_heapvar = 1;
- if (flag_argument_noalias == 1)
- ann->noalias_state = NO_ALIAS;
- else if (flag_argument_noalias == 2)
- ann->noalias_state = NO_ALIAS_GLOBAL;
- else if (flag_argument_noalias == 3)
- ann->noalias_state = NO_ALIAS_ANYTHING;
- else
- gcc_unreachable ();
- }
-
- vi = get_vi_for_tree (heapvar);
- vi->is_artificial_var = 1;
- vi->is_heap_var = 1;
- vi->is_unknown_size_var = true;
- vi->fullsize = ~0;
- vi->size = ~0;
- rhs.var = vi->id;
- rhs.type = ADDRESSOF;
- rhs.offset = 0;
- for (p = get_varinfo (lhs.var); p; p = p->next)
- {
- struct constraint_expr temp = lhs;
- temp.var = p->id;
- process_constraint (new_constraint (temp, rhs));
}
+ if (gimple_referenced_vars (cfun))
+ add_referenced_var (heapvar);
+ lhsc.var = get_vi_for_tree (t)->id;
+ lhsc.type = SCALAR;
+ lhsc.offset = 0;
+ rhsc.var = (vi = get_vi_for_tree (heapvar))->id;
+ rhsc.type = ADDRESSOF;
+ rhsc.offset = 0;
+ process_constraint (new_constraint (lhsc, rhsc));
+ vi->is_restrict_var = 1;
+ continue;
}
- else
- {
- varinfo_t arg_vi = get_vi_for_tree (t);
- for (p = arg_vi; p; p = p->next)
+ for (p = get_vi_for_tree (t); p; p = p->next)
+ {
+ if (p->may_have_pointers)
make_constraint_from (p, nonlocal_id);
+ if (p->only_restrict_pointers)
+ make_constraint_from_restrict (p, "PARM_RESTRICT");
}
+ if (POINTER_TYPE_P (TREE_TYPE (t))
+ && TYPE_RESTRICT (TREE_TYPE (t)))
+ make_constraint_from_restrict (get_vi_for_tree (t), "PARM_RESTRICT");
}
/* Add a constraint for a result decl that is passed by reference. */
varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
for (p = result_vi; p; p = p->next)
- make_constraint_from (p, nonlocal_id);
+ make_constraint_from (p, nonlocal_id);
}
/* Add a constraint for the incoming static chain parameter. */
}
-/* Set bits in INTO corresponding to the variable uids in solution set
- FROM, which came from variable PTR.
- For variables that are actually dereferenced, we also use type
- based alias analysis to prune the points-to sets.
- IS_DEREFED is true if PTR was directly dereferenced, which we use to
- help determine whether we are we are allowed to prune using TBAA.
- If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
- the from set. Returns the number of pruned variables. */
+/* Set bits in INTO corresponding to the variable uids in solution set FROM. */
-static unsigned
-set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
- bool no_tbaa_pruning)
+static void
+set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
{
unsigned int i;
bitmap_iterator bi;
- unsigned pruned = 0;
-
- gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
{
|| TREE_CODE (vi->decl) == PARM_DECL
|| TREE_CODE (vi->decl) == RESULT_DECL)
{
- /* Just add VI->DECL to the alias set.
- Don't type prune artificial vars or points-to sets
- for pointers that have not been dereferenced or with
- type-based pruning disabled. */
- if (vi->is_artificial_var
- || !is_derefed
- || no_tbaa_pruning
- || vi->no_tbaa_pruning)
- bitmap_set_bit (into, DECL_UID (vi->decl));
- else
- {
- alias_set_type var_alias_set, mem_alias_set;
- var_alias_set = get_alias_set (vi->decl);
- mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
- if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set,
- vi->decl, var_alias_set, true))
- bitmap_set_bit (into, DECL_UID (vi->decl));
- else
- ++pruned;
- }
+ /* If we are in IPA mode we will not recompute points-to
+ sets after inlining so make sure they stay valid. */
+ if (in_ipa_mode
+ && !DECL_PT_UID_SET_P (vi->decl))
+ SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
+
+ /* Add the decl to the points-to set. Note that the points-to
+ set contains global variables. */
+ bitmap_set_bit (into, DECL_PT_UID (vi->decl));
+ if (vi->is_global_var)
+ pt->vars_contains_global = true;
}
}
-
- return pruned;
}
-static bool have_alias_info = false;
-
-/* Emit a note for the pointer initialization point DEF. */
+/* Compute the points-to solution *PT for the variable VI. */
static void
-emit_pointer_definition (tree ptr, bitmap visited)
+find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
{
- gimple def = SSA_NAME_DEF_STMT (ptr);
- if (gimple_code (def) == GIMPLE_PHI)
+ unsigned int i;
+ bitmap_iterator bi;
+ bitmap finished_solution;
+ bitmap result;
+ varinfo_t vi;
+
+ memset (pt, 0, sizeof (struct pt_solution));
+
+ /* This variable may have been collapsed, let's get the real
+ variable. */
+ vi = get_varinfo (find (orig_vi->id));
+
+ /* Translate artificial variables into SSA_NAME_PTR_INFO
+ attributes. */
+ EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
{
- use_operand_p argp;
- ssa_op_iter oi;
+ varinfo_t vi = get_varinfo (i);
- FOR_EACH_PHI_ARG (argp, def, oi, SSA_OP_USE)
+ if (vi->is_artificial_var)
{
- tree arg = USE_FROM_PTR (argp);
- if (TREE_CODE (arg) == SSA_NAME)
+ if (vi->id == nothing_id)
+ pt->null = 1;
+ else if (vi->id == escaped_id)
{
- if (bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
- emit_pointer_definition (arg, visited);
+ if (in_ipa_mode)
+ pt->ipa_escaped = 1;
+ else
+ pt->escaped = 1;
}
- else
- inform (0, "initialized from %qE", arg);
+ else if (vi->id == nonlocal_id)
+ pt->nonlocal = 1;
+ else if (vi->is_heap_var)
+ /* We represent heapvars in the points-to set properly. */
+ ;
+ else if (vi->id == readonly_id)
+ /* Nobody cares. */
+ ;
+ else if (vi->id == anything_id
+ || vi->id == integer_id)
+ pt->anything = 1;
}
+ if (vi->is_restrict_var)
+ pt->vars_contains_restrict = true;
}
- else if (!gimple_nop_p (def))
- inform (gimple_location (def), "initialized from here");
-}
-/* Emit a strict aliasing warning for dereferencing the pointer PTR. */
+ /* Instead of doing extra work, simply do not create
+ elaborate points-to information for pt_anything pointers. */
+ if (pt->anything
+ && (orig_vi->is_artificial_var
+ || !pt->vars_contains_restrict))
+ return;
-static void
-emit_alias_warning (tree ptr)
-{
- gimple use;
- imm_use_iterator ui;
- bool warned = false;
+ /* Share the final set of variables when possible. */
+ finished_solution = BITMAP_GGC_ALLOC ();
+ stats.points_to_sets_created++;
- FOR_EACH_IMM_USE_STMT (use, ui, ptr)
+ set_uids_in_ptset (finished_solution, vi->solution, pt);
+ result = shared_bitmap_lookup (finished_solution);
+ if (!result)
{
- tree deref = NULL_TREE;
-
- if (gimple_has_lhs (use))
- {
- tree lhs = get_base_address (gimple_get_lhs (use));
- if (lhs
- && INDIRECT_REF_P (lhs)
- && TREE_OPERAND (lhs, 0) == ptr)
- deref = lhs;
- }
- if (gimple_assign_single_p (use))
- {
- tree rhs = get_base_address (gimple_assign_rhs1 (use));
- if (rhs
- && INDIRECT_REF_P (rhs)
- && TREE_OPERAND (rhs, 0) == ptr)
- deref = rhs;
- }
- else if (is_gimple_call (use))
- {
- unsigned i;
- for (i = 0; i < gimple_call_num_args (use); ++i)
- {
- tree op = get_base_address (gimple_call_arg (use, i));
- if (op
- && INDIRECT_REF_P (op)
- && TREE_OPERAND (op, 0) == ptr)
- deref = op;
- }
- }
- if (deref
- && !TREE_NO_WARNING (deref))
- {
- TREE_NO_WARNING (deref) = 1;
- warned |= warning_at (gimple_location (use), OPT_Wstrict_aliasing,
- "dereferencing pointer %qD does break "
- "strict-aliasing rules", SSA_NAME_VAR (ptr));
- }
+ shared_bitmap_add (finished_solution);
+ pt->vars = finished_solution;
}
- if (warned)
+ else
{
- bitmap visited = BITMAP_ALLOC (NULL);
- emit_pointer_definition (ptr, visited);
- BITMAP_FREE (visited);
+ pt->vars = result;
+ bitmap_clear (finished_solution);
}
}
-/* Given a pointer variable P, fill in its points-to set, or return
- false if we can't.
- Rather than return false for variables that point-to anything, we
- instead find the corresponding SMT, and merge in its aliases. In
- addition to these aliases, we also set the bits for the SMT's
- themselves and their subsets, as SMT's are still in use by
- non-SSA_NAME's, and pruning may eliminate every one of their
- aliases. In such a case, if we did not include the right set of
- SMT's in the points-to set of the variable, we'd end up with
- statements that do not conflict but should. */
+/* Given a pointer variable P, fill in its points-to set. */
-bool
+static void
find_what_p_points_to (tree p)
{
+ struct ptr_info_def *pi;
tree lookup_p = p;
varinfo_t vi;
- if (!have_alias_info)
- return false;
-
/* For parameters, get at the points-to set for the actual parm
decl. */
if (TREE_CODE (p) == SSA_NAME
- && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
+ && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL)
&& SSA_NAME_IS_DEFAULT_DEF (p))
lookup_p = SSA_NAME_VAR (p);
vi = lookup_vi_for_tree (lookup_p);
- if (vi)
- {
- if (vi->is_artificial_var)
- return false;
+ if (!vi)
+ return;
- /* See if this is a field or a structure. */
- if (vi->size != vi->fullsize)
- {
- /* Nothing currently asks about structure fields directly,
- but when they do, we need code here to hand back the
- points-to set. */
- return false;
- }
- else
- {
- struct ptr_info_def *pi = get_ptr_info (p);
- unsigned int i, pruned;
- bitmap_iterator bi;
- bool was_pt_anything = false;
- bitmap finished_solution;
- bitmap result;
+ pi = get_ptr_info (p);
+ find_what_var_points_to (vi, &pi->pt);
+}
- if (!pi->memory_tag_needed)
- return false;
- /* This variable may have been collapsed, let's get the real
- variable. */
- vi = get_varinfo (find (vi->id));
+/* Query statistics for points-to solutions. */
- /* Translate artificial variables into SSA_NAME_PTR_INFO
- attributes. */
- EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
- {
- varinfo_t vi = get_varinfo (i);
+static struct {
+ unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
+ unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
+ unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
+ unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
+} pta_stats;
- if (vi->is_artificial_var)
- {
- /* FIXME. READONLY should be handled better so that
- flow insensitive aliasing can disregard writable
- aliases. */
- if (vi->id == nothing_id)
- pi->pt_null = 1;
- else if (vi->id == anything_id
- || vi->id == nonlocal_id
- || vi->id == escaped_id
- || vi->id == callused_id)
- was_pt_anything = 1;
- else if (vi->id == readonly_id)
- was_pt_anything = 1;
- else if (vi->id == integer_id)
- was_pt_anything = 1;
- else if (vi->is_heap_var)
- pi->pt_global_mem = 1;
- }
- }
+void
+dump_pta_stats (FILE *s)
+{
+ fprintf (s, "\nPTA query stats:\n");
+ fprintf (s, " pt_solution_includes: "
+ HOST_WIDE_INT_PRINT_DEC" disambiguations, "
+ HOST_WIDE_INT_PRINT_DEC" queries\n",
+ pta_stats.pt_solution_includes_no_alias,
+ pta_stats.pt_solution_includes_no_alias
+ + pta_stats.pt_solution_includes_may_alias);
+ fprintf (s, " pt_solutions_intersect: "
+ HOST_WIDE_INT_PRINT_DEC" disambiguations, "
+ HOST_WIDE_INT_PRINT_DEC" queries\n",
+ pta_stats.pt_solutions_intersect_no_alias,
+ pta_stats.pt_solutions_intersect_no_alias
+ + pta_stats.pt_solutions_intersect_may_alias);
+}
- /* Instead of doing extra work, simply do not create
- points-to information for pt_anything pointers. This
- will cause the operand scanner to fall back to the
- type-based SMT and its aliases. Which is the best
- we could do here for the points-to set as well. */
- if (was_pt_anything)
- return false;
- /* Share the final set of variables when possible. */
- finished_solution = BITMAP_GGC_ALLOC ();
- stats.points_to_sets_created++;
+/* Reset the points-to solution *PT to a conservative default
+ (point to anything). */
- pruned = set_uids_in_ptset (p, finished_solution, vi->solution,
- pi->is_dereferenced,
- vi->no_tbaa_pruning);
- result = shared_bitmap_lookup (finished_solution);
+void
+pt_solution_reset (struct pt_solution *pt)
+{
+ memset (pt, 0, sizeof (struct pt_solution));
+ pt->anything = true;
+}
- if (!result)
- {
- shared_bitmap_add (finished_solution);
- pi->pt_vars = finished_solution;
- }
- else
- {
- pi->pt_vars = result;
- bitmap_clear (finished_solution);
- }
+/* Set the points-to solution *PT to point only to the variables
+ in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
+ global variables and VARS_CONTAINS_RESTRICT specifies whether
+ it contains restrict tag variables. */
- if (bitmap_empty_p (pi->pt_vars))
- {
- pi->pt_vars = NULL;
- if (pruned > 0
- && !pi->pt_null
- && pi->is_dereferenced
- && warn_strict_aliasing > 0
- && !SSA_NAME_IS_DEFAULT_DEF (p))
- {
- if (dump_file && dump_flags & TDF_DETAILS)
- {
- fprintf (dump_file, "alias warning for ");
- print_generic_expr (dump_file, p, 0);
- fprintf (dump_file, "\n");
- }
- emit_alias_warning (p);
- }
- }
+void
+pt_solution_set (struct pt_solution *pt, bitmap vars,
+ bool vars_contains_global, bool vars_contains_restrict)
+{
+ memset (pt, 0, sizeof (struct pt_solution));
+ pt->vars = vars;
+ pt->vars_contains_global = vars_contains_global;
+ pt->vars_contains_restrict = vars_contains_restrict;
+}
- return true;
- }
- }
+/* Set the points-to solution *PT to point only to the variable VAR. */
- return false;
+void
+pt_solution_set_var (struct pt_solution *pt, tree var)
+{
+ memset (pt, 0, sizeof (struct pt_solution));
+ pt->vars = BITMAP_GGC_ALLOC ();
+ bitmap_set_bit (pt->vars, DECL_UID (var));
+ pt->vars_contains_global = is_global_var (var);
}
-/* Mark the ESCAPED solution as call clobbered. Returns false if
- pt_anything escaped which needs all locals that have their address
- taken marked call clobbered as well. */
+/* Computes the union of the points-to solutions *DEST and *SRC and
+ stores the result in *DEST. This changes the points-to bitmap
+ of *DEST and thus may not be used if that might be shared.
+ The points-to bitmap of *SRC and *DEST will not be shared after
+ this function if they were not before. */
-bool
-clobber_what_escaped (void)
+static void
+pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
- varinfo_t vi;
- unsigned int i;
- bitmap_iterator bi;
+ dest->anything |= src->anything;
+ if (dest->anything)
+ {
+ pt_solution_reset (dest);
+ return;
+ }
- if (!have_alias_info)
- return false;
+ dest->nonlocal |= src->nonlocal;
+ dest->escaped |= src->escaped;
+ dest->ipa_escaped |= src->ipa_escaped;
+ dest->null |= src->null;
+ dest->vars_contains_global |= src->vars_contains_global;
+ dest->vars_contains_restrict |= src->vars_contains_restrict;
+ if (!src->vars)
+ return;
- /* This variable may have been collapsed, let's get the real
- variable for escaped_id. */
- vi = get_varinfo (find (escaped_id));
+ if (!dest->vars)
+ dest->vars = BITMAP_GGC_ALLOC ();
+ bitmap_ior_into (dest->vars, src->vars);
+}
- /* If call-used memory escapes we need to include it in the
- set of escaped variables. This can happen if a pure
- function returns a pointer and this pointer escapes. */
- if (bitmap_bit_p (vi->solution, callused_id))
- {
- varinfo_t cu_vi = get_varinfo (find (callused_id));
- bitmap_ior_into (vi->solution, cu_vi->solution);
- }
+/* Return true if the points-to solution *PT is empty. */
- /* Mark variables in the solution call-clobbered. */
- EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
- {
- varinfo_t vi = get_varinfo (i);
+bool
+pt_solution_empty_p (struct pt_solution *pt)
+{
+ if (pt->anything
+ || pt->nonlocal)
+ return false;
- if (vi->is_artificial_var)
- {
- /* nothing_id and readonly_id do not cause any
- call clobber ops. For anything_id and integer_id
- we need to clobber all addressable vars. */
- if (vi->id == anything_id
- || vi->id == integer_id)
- return false;
- }
+ if (pt->vars
+ && !bitmap_empty_p (pt->vars))
+ return false;
- /* Only artificial heap-vars are further interesting. */
- if (vi->is_artificial_var && !vi->is_heap_var)
- continue;
+ /* If the solution includes ESCAPED, check if that is empty. */
+ if (pt->escaped
+ && !pt_solution_empty_p (&cfun->gimple_df->escaped))
+ return false;
- if ((TREE_CODE (vi->decl) == VAR_DECL
- || TREE_CODE (vi->decl) == PARM_DECL
- || TREE_CODE (vi->decl) == RESULT_DECL)
- && !unmodifiable_var_p (vi->decl))
- mark_call_clobbered (vi->decl, ESCAPE_TO_CALL);
- }
+ /* If the solution includes the IPA-escaped solution, check if that is empty. */
+ if (pt->ipa_escaped
+ && !pt_solution_empty_p (&ipa_escaped_pt))
+ return false;
return true;
}
-/* Compute the call-used variables. */
+/* Return true if the points-to solution *PT includes global memory. */
-void
-compute_call_used_vars (void)
+bool
+pt_solution_includes_global (struct pt_solution *pt)
{
- varinfo_t vi;
- unsigned int i;
- bitmap_iterator bi;
- bool has_anything_id = false;
+ if (pt->anything
+ || pt->nonlocal
+ || pt->vars_contains_global)
+ return true;
- if (!have_alias_info)
- return;
+ if (pt->escaped)
+ return pt_solution_includes_global (&cfun->gimple_df->escaped);
- /* This variable may have been collapsed, let's get the real
- variable for escaped_id. */
- vi = get_varinfo (find (callused_id));
+ if (pt->ipa_escaped)
+ return pt_solution_includes_global (&ipa_escaped_pt);
- /* Mark variables in the solution call-clobbered. */
- EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
+ /* ??? This predicate is not correct for the IPA-PTA solution
+ as we do not properly distinguish between unit escape points
+ and global variables. */
+ if (cfun->gimple_df->ipa_pta)
+ return true;
+
+ return false;
+}
+
+/* Return true if the points-to solution *PT includes the variable
+ declaration DECL. */
+
+static bool
+pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
+{
+ if (pt->anything)
+ return true;
+
+ if (pt->nonlocal
+ && is_global_var (decl))
+ return true;
+
+ if (pt->vars
+ && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
+ return true;
+
+ /* If the solution includes ESCAPED, check it. */
+ if (pt->escaped
+ && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
+ return true;
+
+ /* If the solution includes the IPA-escaped solution, check it. */
+ if (pt->ipa_escaped
+ && pt_solution_includes_1 (&ipa_escaped_pt, decl))
+ return true;
+
+ return false;
+}
+
+bool
+pt_solution_includes (struct pt_solution *pt, const_tree decl)
+{
+ bool res = pt_solution_includes_1 (pt, decl);
+ if (res)
+ ++pta_stats.pt_solution_includes_may_alias;
+ else
+ ++pta_stats.pt_solution_includes_no_alias;
+ return res;
+}
+
+/* Return true if both points-to solutions PT1 and PT2 have a non-empty
+ intersection. */
+
+static bool
+pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
+{
+ if (pt1->anything || pt2->anything)
+ return true;
+
+ /* If either points to unknown global memory and the other points to
+ any global memory they alias. */
+ if ((pt1->nonlocal
+ && (pt2->nonlocal
+ || pt2->vars_contains_global))
+ || (pt2->nonlocal
+ && pt1->vars_contains_global))
+ return true;
+
+ /* Check the escaped solution if required. */
+ if ((pt1->escaped || pt2->escaped)
+ && !pt_solution_empty_p (&cfun->gimple_df->escaped))
{
- varinfo_t vi = get_varinfo (i);
+ /* If both point to escaped memory and that solution
+ is not empty they alias. */
+ if (pt1->escaped && pt2->escaped)
+ return true;
- if (vi->is_artificial_var)
- {
- /* For anything_id and integer_id we need to make
- all local addressable vars call-used. */
- if (vi->id == anything_id
- || vi->id == integer_id)
- has_anything_id = true;
- }
+ /* If either points to escaped memory see if the escaped solution
+ intersects with the other. */
+ if ((pt1->escaped
+ && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
+ || (pt2->escaped
+ && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
+ return true;
+ }
- /* Only artificial heap-vars are further interesting. */
- if (vi->is_artificial_var && !vi->is_heap_var)
- continue;
+ /* Check the IPA-escaped solution if required.
+ ??? Do we need to check the local against the IPA escaped sets? */
+ if ((pt1->ipa_escaped || pt2->ipa_escaped)
+ && !pt_solution_empty_p (&ipa_escaped_pt))
+ {
+ /* If both point to escaped memory and that solution
+ is not empty they alias. */
+ if (pt1->ipa_escaped && pt2->ipa_escaped)
+ return true;
- if ((TREE_CODE (vi->decl) == VAR_DECL
- || TREE_CODE (vi->decl) == PARM_DECL
- || TREE_CODE (vi->decl) == RESULT_DECL)
- && !unmodifiable_var_p (vi->decl))
- bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl));
+ /* If either points to escaped memory see if the escaped solution
+ intersects with the other. */
+ if ((pt1->ipa_escaped
+ && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
+ || (pt2->ipa_escaped
+ && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
+ return true;
}
- /* If anything is call-used, add all addressable locals to the set. */
- if (has_anything_id)
- bitmap_ior_into (gimple_call_used_vars (cfun),
- gimple_addressable_vars (cfun));
+ /* Now both pointers alias if their points-to solution intersects. */
+ return (pt1->vars
+ && pt2->vars
+ && bitmap_intersect_p (pt1->vars, pt2->vars));
+}
+
+bool
+pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
+{
+ bool res = pt_solutions_intersect_1 (pt1, pt2);
+ if (res)
+ ++pta_stats.pt_solutions_intersect_may_alias;
+ else
+ ++pta_stats.pt_solutions_intersect_no_alias;
+ return res;
+}
+
+/* Return true if both points-to solutions PT1 and PT2 for two restrict
+ qualified pointers are possibly based on the same pointer. */
+
+bool
+pt_solutions_same_restrict_base (struct pt_solution *pt1,
+ struct pt_solution *pt2)
+{
+ /* If we deal with points-to solutions of two restrict qualified
+ pointers solely rely on the pointed-to variable bitmap intersection.
+ For two pointers that are based on each other the bitmaps will
+ intersect. */
+ if (pt1->vars_contains_restrict
+ && pt2->vars_contains_restrict)
+ {
+ gcc_assert (pt1->vars && pt2->vars);
+ return bitmap_intersect_p (pt1->vars, pt2->vars);
+ }
+
+ return true;
}
/* Dump points-to information to OUTFILE. */
-void
+static void
dump_sa_points_to_info (FILE *outfile)
{
unsigned int i;
}
for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
- dump_solution_for_var (outfile, i);
+ {
+ varinfo_t vi = get_varinfo (i);
+ if (!vi->may_have_pointers)
+ continue;
+ dump_solution_for_var (outfile, i);
+ }
}
/* Debug points-to information to stderr. */
-void
+DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
dump_sa_points_to_info (stderr);
init_base_vars (void)
{
struct constraint_expr lhs, rhs;
+ varinfo_t var_anything;
+ varinfo_t var_nothing;
+ varinfo_t var_readonly;
+ varinfo_t var_escaped;
+ varinfo_t var_nonlocal;
+ varinfo_t var_storedanything;
+ varinfo_t var_integer;
/* Create the NULL variable, used to represent that a variable points
to NULL. */
- nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
- var_nothing = new_var_info (nothing_tree, nothing_id, "NULL");
- insert_vi_for_tree (nothing_tree, var_nothing);
+ var_nothing = new_var_info (NULL_TREE, "NULL");
+ gcc_assert (var_nothing->id == nothing_id);
var_nothing->is_artificial_var = 1;
var_nothing->offset = 0;
var_nothing->size = ~0;
var_nothing->fullsize = ~0;
var_nothing->is_special_var = 1;
- VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
+ var_nothing->may_have_pointers = 0;
+ var_nothing->is_global_var = 0;
/* Create the ANYTHING variable, used to represent that a variable
points to some unknown piece of memory. */
- anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
- var_anything = new_var_info (anything_tree, anything_id, "ANYTHING");
- insert_vi_for_tree (anything_tree, var_anything);
+ var_anything = new_var_info (NULL_TREE, "ANYTHING");
+ gcc_assert (var_anything->id == anything_id);
var_anything->is_artificial_var = 1;
var_anything->size = ~0;
var_anything->offset = 0;
/* Anything points to anything. This makes deref constraints just
work in the presence of linked list and other p = *p type loops,
by saying that *ANYTHING = ANYTHING. */
- VEC_safe_push (varinfo_t, heap, varmap, var_anything);
lhs.type = SCALAR;
lhs.var = anything_id;
lhs.offset = 0;
/* Create the READONLY variable, used to represent that a variable
points to readonly memory. */
- readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
- var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY");
+ var_readonly = new_var_info (NULL_TREE, "READONLY");
+ gcc_assert (var_readonly->id == readonly_id);
var_readonly->is_artificial_var = 1;
var_readonly->offset = 0;
var_readonly->size = ~0;
var_readonly->fullsize = ~0;
var_readonly->next = NULL;
var_readonly->is_special_var = 1;
- insert_vi_for_tree (readonly_tree, var_readonly);
- VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
/* readonly memory points to anything, in order to make deref
easier. In reality, it points to anything the particular
/* Create the ESCAPED variable, used to represent the set of escaped
memory. */
- escaped_tree = create_tmp_var_raw (void_type_node, "ESCAPED");
- var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED");
- insert_vi_for_tree (escaped_tree, var_escaped);
+ var_escaped = new_var_info (NULL_TREE, "ESCAPED");
+ gcc_assert (var_escaped->id == escaped_id);
var_escaped->is_artificial_var = 1;
var_escaped->offset = 0;
var_escaped->size = ~0;
var_escaped->fullsize = ~0;
var_escaped->is_special_var = 0;
- VEC_safe_push (varinfo_t, heap, varmap, var_escaped);
- gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped);
+
+ /* Create the NONLOCAL variable, used to represent the set of nonlocal
+ memory. */
+ var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
+ gcc_assert (var_nonlocal->id == nonlocal_id);
+ var_nonlocal->is_artificial_var = 1;
+ var_nonlocal->offset = 0;
+ var_nonlocal->size = ~0;
+ var_nonlocal->fullsize = ~0;
+ var_nonlocal->is_special_var = 1;
/* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
lhs.type = SCALAR;
rhs.offset = 0;
process_constraint (new_constraint (lhs, rhs));
- /* Create the NONLOCAL variable, used to represent the set of nonlocal
- memory. */
- nonlocal_tree = create_tmp_var_raw (void_type_node, "NONLOCAL");
- var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL");
- insert_vi_for_tree (nonlocal_tree, var_nonlocal);
- var_nonlocal->is_artificial_var = 1;
- var_nonlocal->offset = 0;
- var_nonlocal->size = ~0;
- var_nonlocal->fullsize = ~0;
- var_nonlocal->is_special_var = 1;
- VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal);
-
- /* Nonlocal memory points to escaped (which includes nonlocal),
- in order to make deref easier. */
+ /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
+ whole variable escapes. */
lhs.type = SCALAR;
- lhs.var = nonlocal_id;
+ lhs.var = escaped_id;
lhs.offset = 0;
- rhs.type = ADDRESSOF;
+ rhs.type = SCALAR;
rhs.var = escaped_id;
+ rhs.offset = UNKNOWN_OFFSET;
+ process_constraint (new_constraint (lhs, rhs));
+
+ /* *ESCAPED = NONLOCAL. This is true because we have to assume
+ everything pointed to by escaped points to what global memory can
+ point to. */
+ lhs.type = DEREF;
+ lhs.var = escaped_id;
+ lhs.offset = 0;
+ rhs.type = SCALAR;
+ rhs.var = nonlocal_id;
rhs.offset = 0;
process_constraint (new_constraint (lhs, rhs));
- /* Create the CALLUSED variable, used to represent the set of call-used
- memory. */
- callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED");
- var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
- insert_vi_for_tree (callused_tree, var_callused);
- var_callused->is_artificial_var = 1;
- var_callused->offset = 0;
- var_callused->size = ~0;
- var_callused->fullsize = ~0;
- var_callused->is_special_var = 0;
- VEC_safe_push (varinfo_t, heap, varmap, var_callused);
-
- /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc. */
+ /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
+ global memory may point to global memory and escaped memory. */
lhs.type = SCALAR;
- lhs.var = callused_id;
+ lhs.var = nonlocal_id;
lhs.offset = 0;
- rhs.type = DEREF;
- rhs.var = callused_id;
+ rhs.type = ADDRESSOF;
+ rhs.var = nonlocal_id;
+ rhs.offset = 0;
+ process_constraint (new_constraint (lhs, rhs));
+ rhs.type = ADDRESSOF;
+ rhs.var = escaped_id;
rhs.offset = 0;
process_constraint (new_constraint (lhs, rhs));
/* Create the STOREDANYTHING variable, used to represent the set of
variables stored to *ANYTHING. */
- storedanything_tree = create_tmp_var_raw (ptr_type_node, "STOREDANYTHING");
- var_storedanything = new_var_info (storedanything_tree, storedanything_id,
- "STOREDANYTHING");
- insert_vi_for_tree (storedanything_tree, var_storedanything);
+ var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
+ gcc_assert (var_storedanything->id == storedanything_id);
var_storedanything->is_artificial_var = 1;
var_storedanything->offset = 0;
var_storedanything->size = ~0;
var_storedanything->fullsize = ~0;
var_storedanything->is_special_var = 0;
- VEC_safe_push (varinfo_t, heap, varmap, var_storedanything);
/* Create the INTEGER variable, used to represent that a variable points
- to an INTEGER. */
- integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
- var_integer = new_var_info (integer_tree, integer_id, "INTEGER");
- insert_vi_for_tree (integer_tree, var_integer);
+ to what an INTEGER "points to". */
+ var_integer = new_var_info (NULL_TREE, "INTEGER");
+ gcc_assert (var_integer->id == integer_id);
var_integer->is_artificial_var = 1;
var_integer->size = ~0;
var_integer->fullsize = ~0;
var_integer->offset = 0;
var_integer->next = NULL;
var_integer->is_special_var = 1;
- VEC_safe_push (varinfo_t, heap, varmap, var_integer);
/* INTEGER = ANYTHING, because we don't know where a dereference of
a random integer will point to. */
rhs.var = anything_id;
rhs.offset = 0;
process_constraint (new_constraint (lhs, rhs));
-
- /* *ESCAPED = &ESCAPED. This is true because we have to assume
- everything pointed to by escaped can also point to escaped. */
- lhs.type = DEREF;
- lhs.var = escaped_id;
- lhs.offset = 0;
- rhs.type = ADDRESSOF;
- rhs.var = escaped_id;
- rhs.offset = 0;
- process_constraint (new_constraint (lhs, rhs));
-
- /* *ESCAPED = &NONLOCAL. This is true because we have to assume
- everything pointed to by escaped can also point to nonlocal. */
- lhs.type = DEREF;
- lhs.var = escaped_id;
- lhs.offset = 0;
- rhs.type = ADDRESSOF;
- rhs.var = nonlocal_id;
- rhs.offset = 0;
- process_constraint (new_constraint (lhs, rhs));
}
/* Initialize things necessary to perform PTA */
constraints = VEC_alloc (constraint_t, heap, 8);
varmap = VEC_alloc (varinfo_t, heap, 8);
vi_for_tree = pointer_map_create ();
+ call_stmt_vars = pointer_map_create ();
memset (&stats, 0, sizeof (stats));
shared_bitmap_table = htab_create (511, shared_bitmap_hash,
bitmap_obstack_release (&predbitmap_obstack);
}
-/* Compute the set of variables we can't TBAA prune. */
+/* Initialize the heapvar for statement mapping. */
static void
-compute_tbaa_pruning (void)
+init_alias_heapvars (void)
{
- unsigned int size = VEC_length (varinfo_t, varmap);
- unsigned int i;
- bool any;
-
- changed_count = 0;
- changed = sbitmap_alloc (size);
- sbitmap_zero (changed);
-
- /* Mark all initial no_tbaa_pruning nodes as changed. */
- any = false;
- for (i = 0; i < size; ++i)
- {
- varinfo_t ivi = get_varinfo (i);
-
- if (find (i) == i && ivi->no_tbaa_pruning)
- {
- any = true;
- if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
- || VEC_length (constraint_t, graph->complex[i]) > 0)
- {
- SET_BIT (changed, i);
- ++changed_count;
- }
- }
- }
-
- while (changed_count > 0)
- {
- struct topo_info *ti = init_topo_info ();
- ++stats.iterations;
+ if (!heapvar_for_stmt)
+ heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, heapvar_map_eq,
+ NULL);
+}
- compute_topo_order (graph, ti);
+/* Delete the heapvar for statement mapping. */
- while (VEC_length (unsigned, ti->topo_order) != 0)
- {
- bitmap_iterator bi;
+void
+delete_alias_heapvars (void)
+{
+ if (heapvar_for_stmt)
+ htab_delete (heapvar_for_stmt);
+ heapvar_for_stmt = NULL;
+}
- i = VEC_pop (unsigned, ti->topo_order);
+/* Solve the constraint set. */
- /* If this variable is not a representative, skip it. */
- if (find (i) != i)
- continue;
+static void
+solve_constraints (void)
+{
+ struct scc_info *si;
- /* If the node has changed, we need to process the complex
- constraints and outgoing edges again. */
- if (TEST_BIT (changed, i))
- {
- unsigned int j;
- constraint_t c;
- VEC(constraint_t,heap) *complex = graph->complex[i];
+ if (dump_file)
+ fprintf (dump_file,
+ "\nCollapsing static cycles and doing variable "
+ "substitution\n");
- RESET_BIT (changed, i);
- --changed_count;
+ init_graph (VEC_length (varinfo_t, varmap) * 2);
- /* Process the complex copy constraints. */
- for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
- {
- if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
- {
- varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
+ if (dump_file)
+ fprintf (dump_file, "Building predecessor graph\n");
+ build_pred_graph ();
- if (!lhsvi->no_tbaa_pruning)
- {
- lhsvi->no_tbaa_pruning = true;
- if (!TEST_BIT (changed, lhsvi->id))
- {
- SET_BIT (changed, lhsvi->id);
- ++changed_count;
- }
- }
- }
- }
+ if (dump_file)
+ fprintf (dump_file, "Detecting pointer and location "
+ "equivalences\n");
+ si = perform_var_substitution (graph);
- /* Propagate to all successors. */
- EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
- {
- unsigned int to = find (j);
- varinfo_t tovi = get_varinfo (to);
+ if (dump_file)
+ fprintf (dump_file, "Rewriting constraints and unifying "
+ "variables\n");
+ rewrite_constraints (graph, si);
- /* Don't propagate to ourselves. */
- if (to == i)
- continue;
+ build_succ_graph ();
+ free_var_substitution_info (si);
- if (!tovi->no_tbaa_pruning)
- {
- tovi->no_tbaa_pruning = true;
- if (!TEST_BIT (changed, to))
- {
- SET_BIT (changed, to);
- ++changed_count;
- }
- }
- }
- }
- }
+ if (dump_file && (dump_flags & TDF_GRAPH))
+ dump_constraint_graph (dump_file);
- free_topo_info (ti);
- }
+ move_complex_constraints (graph);
- sbitmap_free (changed);
+ if (dump_file)
+ fprintf (dump_file, "Uniting pointer but not location equivalent "
+ "variables\n");
+ unite_pointer_equivalences (graph);
- if (any)
- {
- for (i = 0; i < size; ++i)
- {
- varinfo_t ivi = get_varinfo (i);
- varinfo_t ivip = get_varinfo (find (i));
+ if (dump_file)
+ fprintf (dump_file, "Finding indirect cycles\n");
+ find_indirect_cycles (graph);
- if (ivip->no_tbaa_pruning)
- {
- tree var = ivi->decl;
+ /* Implicit nodes and predecessors are no longer necessary at this
+ point. */
+ remove_preds_and_fake_succs (graph);
- if (TREE_CODE (var) == SSA_NAME)
- var = SSA_NAME_VAR (var);
+ if (dump_file)
+ fprintf (dump_file, "Solving graph\n");
- if (POINTER_TYPE_P (TREE_TYPE (var)))
- {
- DECL_NO_TBAA_P (var) = 1;
+ solve_graph (graph);
- /* Tell the RTL layer that this pointer can alias
- anything. */
- DECL_POINTER_ALIAS_SET (var) = 0;
- }
- }
- }
- }
+ if (dump_file)
+ dump_sa_points_to_info (dump_file);
}
/* Create points-to sets for the current function. See the comments
at the start of the file for an algorithmic overview. */
-void
+static void
compute_points_to_sets (void)
{
- struct scc_info *si;
basic_block bb;
+ unsigned i;
+ varinfo_t vi;
timevar_push (TV_TREE_PTA);
intra_create_variable_infos ();
- /* Now walk all statements and derive aliases. */
+ /* Now walk all statements and build the constraint set. */
FOR_EACH_BB (bb)
{
gimple_stmt_iterator gsi;
}
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- find_func_aliases (gsi_stmt (gsi));
- }
+ {
+ gimple stmt = gsi_stmt (gsi);
+ find_func_aliases (stmt);
+ }
+ }
if (dump_file)
{
fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
- dump_constraints (dump_file);
+ dump_constraints (dump_file, 0);
}
- if (dump_file)
- fprintf (dump_file,
- "\nCollapsing static cycles and doing variable "
- "substitution\n");
-
- init_graph (VEC_length (varinfo_t, varmap) * 2);
-
- if (dump_file)
- fprintf (dump_file, "Building predecessor graph\n");
- build_pred_graph ();
-
- if (dump_file)
- fprintf (dump_file, "Detecting pointer and location "
- "equivalences\n");
- si = perform_var_substitution (graph);
-
- if (dump_file)
- fprintf (dump_file, "Rewriting constraints and unifying "
- "variables\n");
- rewrite_constraints (graph, si);
-
- build_succ_graph ();
- free_var_substitution_info (si);
-
- if (dump_file && (dump_flags & TDF_GRAPH))
- dump_constraint_graph (dump_file);
+ /* From the constraints compute the points-to sets. */
+ solve_constraints ();
- move_complex_constraints (graph);
+ /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
+ find_what_var_points_to (get_varinfo (escaped_id),
+ &cfun->gimple_df->escaped);
- if (dump_file)
- fprintf (dump_file, "Uniting pointer but not location equivalent "
- "variables\n");
- unite_pointer_equivalences (graph);
+ /* Make sure the ESCAPED solution (which is used as placeholder in
+ other solutions) does not reference itself. This simplifies
+ points-to solution queries. */
+ cfun->gimple_df->escaped.escaped = 0;
- if (dump_file)
- fprintf (dump_file, "Finding indirect cycles\n");
- find_indirect_cycles (graph);
+ /* Mark escaped HEAP variables as global. */
+ for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); ++i)
+ if (vi->is_heap_var
+ && !vi->is_restrict_var
+ && !vi->is_global_var)
+ DECL_EXTERNAL (vi->decl) = vi->is_global_var
+ = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
- /* Implicit nodes and predecessors are no longer necessary at this
- point. */
- remove_preds_and_fake_succs (graph);
-
- if (dump_file)
- fprintf (dump_file, "Solving graph\n");
+ /* Compute the points-to sets for pointer SSA_NAMEs. */
+ for (i = 0; i < num_ssa_names; ++i)
+ {
+ tree ptr = ssa_name (i);
+ if (ptr
+ && POINTER_TYPE_P (TREE_TYPE (ptr)))
+ find_what_p_points_to (ptr);
+ }
- solve_graph (graph);
+ /* Compute the call-used/clobbered sets. */
+ FOR_EACH_BB (bb)
+ {
+ gimple_stmt_iterator gsi;
- compute_tbaa_pruning ();
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+ struct pt_solution *pt;
+ if (!is_gimple_call (stmt))
+ continue;
- if (dump_file)
- dump_sa_points_to_info (dump_file);
+ pt = gimple_call_use_set (stmt);
+ if (gimple_call_flags (stmt) & ECF_CONST)
+ memset (pt, 0, sizeof (struct pt_solution));
+ else if ((vi = lookup_call_use_vi (stmt)) != NULL)
+ {
+ find_what_var_points_to (vi, pt);
+ /* Escaped (and thus nonlocal) variables are always
+ implicitly used by calls. */
+ /* ??? ESCAPED can be empty even though NONLOCAL
+ always escaped. */
+ pt->nonlocal = 1;
+ pt->escaped = 1;
+ }
+ else
+ {
+ /* If there is nothing special about this call then
+ we have made everything that is used also escape. */
+ *pt = cfun->gimple_df->escaped;
+ pt->nonlocal = 1;
+ }
- have_alias_info = true;
+ pt = gimple_call_clobber_set (stmt);
+ if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
+ memset (pt, 0, sizeof (struct pt_solution));
+ else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
+ {
+ find_what_var_points_to (vi, pt);
+ /* Escaped (and thus nonlocal) variables are always
+ implicitly clobbered by calls. */
+ /* ??? ESCAPED can be empty even though NONLOCAL
+ always escaped. */
+ pt->nonlocal = 1;
+ pt->escaped = 1;
+ }
+ else
+ {
+ /* If there is nothing special about this call then
+ we have made everything that is used also escape. */
+ *pt = cfun->gimple_df->escaped;
+ pt->nonlocal = 1;
+ }
+ }
+ }
timevar_pop (TV_TREE_PTA);
}
/* Delete created points-to sets. */
-void
+static void
delete_points_to_sets (void)
{
unsigned int i;
stats.points_to_sets_created);
pointer_map_destroy (vi_for_tree);
+ pointer_map_destroy (call_stmt_vars);
bitmap_obstack_release (&pta_obstack);
VEC_free (constraint_t, heap, constraints);
VEC_free (varinfo_t, heap, varmap);
free_alloc_pool (variable_info_pool);
free_alloc_pool (constraint_pool);
- have_alias_info = false;
}
+
+/* Compute points-to information for every SSA_NAME pointer in the
+ current function and compute the transitive closure of escaped
+ variables to re-initialize the call-clobber states of local variables. */
+
+unsigned int
+compute_may_aliases (void)
+{
+ if (cfun->gimple_df->ipa_pta)
+ {
+ if (dump_file)
+ {
+ fprintf (dump_file, "\nNot re-computing points-to information "
+ "because IPA points-to information is available.\n\n");
+
+ /* But still dump what we have remaining. */
+ dump_alias_info (dump_file);
+
+ if (dump_flags & TDF_DETAILS)
+ dump_referenced_vars (dump_file);
+ }
+
+ return 0;
+ }
+
+ /* For each pointer P_i, determine the sets of variables that P_i may
+ point-to. Compute the reachability set of escaped and call-used
+ variables. */
+ compute_points_to_sets ();
+
+ /* Debugging dumps. */
+ if (dump_file)
+ {
+ dump_alias_info (dump_file);
+
+ if (dump_flags & TDF_DETAILS)
+ dump_referenced_vars (dump_file);
+ }
+
+ /* Deallocate memory used by aliasing data structures and the internal
+ points-to solution. */
+ delete_points_to_sets ();
+
+ gcc_assert (!need_ssa_update_p (cfun));
+
+ return 0;
+}
+
+static bool
+gate_tree_pta (void)
+{
+ return flag_tree_pta;
+}
+
+/* A dummy pass to cause points-to information to be computed via
+ TODO_rebuild_alias. */
+
+struct gimple_opt_pass pass_build_alias =
+{
+ {
+ GIMPLE_PASS,
+ "alias", /* name */
+ gate_tree_pta, /* gate */
+ NULL, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_NONE, /* tv_id */
+ PROP_cfg | PROP_ssa, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
+ }
+};
+
+/* A dummy pass to cause early points-to information to be computed via
+ TODO_rebuild_alias. */
+
+struct gimple_opt_pass pass_build_ealias =
+{
+ {
+ GIMPLE_PASS,
+ "ealias", /* name */
+ gate_tree_pta, /* gate */
+ NULL, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_NONE, /* tv_id */
+ PROP_cfg | PROP_ssa, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
+ }
+};
+
+
/* Return true if we should execute IPA PTA. */
static bool
gate_ipa_pta (void)
{
- return (flag_ipa_pta
+ return (optimize
+ && flag_ipa_pta
/* Don't bother doing anything if the program has errors. */
- && !(errorcount || sorrycount));
+ && !seen_error ());
}
+/* IPA PTA solutions for ESCAPED. */
+struct pt_solution ipa_escaped_pt
+ = { true, false, false, false, false, false, false, NULL };
+
/* Execute the driver for IPA PTA. */
static unsigned int
ipa_pta_execute (void)
{
struct cgraph_node *node;
- struct scc_info *si;
+ struct varpool_node *var;
+ int from;
in_ipa_mode = 1;
+
init_alias_heapvars ();
init_alias_vars ();
+ /* Build the constraints. */
for (node = cgraph_nodes; node; node = node->next)
{
- if (!node->analyzed || cgraph_is_master_clone (node))
- {
- unsigned int varid;
+ struct cgraph_node *alias;
+ varinfo_t vi;
- varid = create_function_info_for (node->decl,
- cgraph_node_name (node));
- if (node->local.externally_visible)
- {
- varinfo_t fi = get_varinfo (varid);
- for (; fi; fi = fi->next)
- make_constraint_from (fi, anything_id);
- }
- }
+ /* Nodes without a body are not interesting. Especially do not
+ visit clones at this point for now - we get duplicate decls
+ there for inline clones at least. */
+ if (!gimple_has_body_p (node->decl)
+ || node->clone_of)
+ continue;
+
+ vi = create_function_info_for (node->decl,
+ alias_get_name (node->decl));
+
+ /* Associate the varinfo node with all aliases. */
+ for (alias = node->same_body; alias; alias = alias->next)
+ insert_vi_for_tree (alias->decl, vi);
+ }
+
+ /* Create constraints for global variables and their initializers. */
+ for (var = varpool_nodes; var; var = var->next)
+ {
+ struct varpool_node *alias;
+ varinfo_t vi;
+
+ vi = get_vi_for_tree (var->decl);
+
+ /* Associate the varinfo node with all aliases. */
+ for (alias = var->extra_name; alias; alias = alias->next)
+ insert_vi_for_tree (alias->decl, vi);
+ }
+
+ if (dump_file)
+ {
+ fprintf (dump_file,
+ "Generating constraints for global initializers\n\n");
+ dump_constraints (dump_file, 0);
+ fprintf (dump_file, "\n");
}
+ from = VEC_length (constraint_t, constraints);
+
for (node = cgraph_nodes; node; node = node->next)
{
- if (node->analyzed && cgraph_is_master_clone (node))
+ struct function *func;
+ basic_block bb;
+ tree old_func_decl;
+
+ /* Nodes without a body are not interesting. */
+ if (!gimple_has_body_p (node->decl)
+ || node->clone_of)
+ continue;
+
+ if (dump_file)
{
- struct function *func = DECL_STRUCT_FUNCTION (node->decl);
- basic_block bb;
- tree old_func_decl = current_function_decl;
- if (dump_file)
- fprintf (dump_file,
- "Generating constraints for %s\n",
- cgraph_node_name (node));
- push_cfun (func);
- current_function_decl = node->decl;
+ fprintf (dump_file,
+ "Generating constraints for %s", cgraph_node_name (node));
+ if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
+ fprintf (dump_file, " (%s)",
+ IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
+ fprintf (dump_file, "\n");
+ }
+
+ func = DECL_STRUCT_FUNCTION (node->decl);
+ old_func_decl = current_function_decl;
+ push_cfun (func);
+ current_function_decl = node->decl;
- FOR_EACH_BB_FN (bb, func)
+ /* For externally visible functions use local constraints for
+ their arguments. For local functions we see all callers
+ and thus do not need initial constraints for parameters. */
+ if (node->local.externally_visible)
+ intra_create_variable_infos ();
+
+ /* Build constraints for the function body. */
+ FOR_EACH_BB_FN (bb, func)
+ {
+ gimple_stmt_iterator gsi;
+
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- gimple_stmt_iterator gsi;
+ gimple phi = gsi_stmt (gsi);
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
- gsi_next (&gsi))
- {
- gimple phi = gsi_stmt (gsi);
+ if (is_gimple_reg (gimple_phi_result (phi)))
+ find_func_aliases (phi);
+ }
- if (is_gimple_reg (gimple_phi_result (phi)))
- find_func_aliases (phi);
- }
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- find_func_aliases (gsi_stmt (gsi));
+ find_func_aliases (stmt);
+ find_func_clobbers (stmt);
}
- current_function_decl = old_func_decl;
- pop_cfun ();
}
- else
+
+ current_function_decl = old_func_decl;
+ pop_cfun ();
+
+ if (dump_file)
{
- /* Make point to anything. */
+ fprintf (dump_file, "\n");
+ dump_constraints (dump_file, from);
+ fprintf (dump_file, "\n");
}
+ from = VEC_length (constraint_t, constraints);
}
- if (dump_file)
+ /* From the constraints compute the points-to sets. */
+ solve_constraints ();
+
+ /* Compute the global points-to sets for ESCAPED.
+ ??? Note that the computed escape set is not correct
+ for the whole unit as we fail to consider graph edges to
+ externally visible functions. */
+ find_what_var_points_to (get_varinfo (escaped_id), &ipa_escaped_pt);
+
+ /* Make sure the ESCAPED solution (which is used as placeholder in
+ other solutions) does not reference itself. This simplifies
+ points-to solution queries. */
+ ipa_escaped_pt.ipa_escaped = 0;
+
+ /* Assign the points-to sets to the SSA names in the unit. */
+ for (node = cgraph_nodes; node; node = node->next)
{
- fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
- dump_constraints (dump_file);
- }
+ tree ptr;
+ struct function *fn;
+ unsigned i;
+ varinfo_t fi;
+ basic_block bb;
+ struct pt_solution uses, clobbers;
+ struct cgraph_edge *e;
+
+ /* Nodes without a body are not interesting. */
+ if (!gimple_has_body_p (node->decl)
+ || node->clone_of)
+ continue;
- if (dump_file)
- fprintf (dump_file,
- "\nCollapsing static cycles and doing variable "
- "substitution:\n");
+ fn = DECL_STRUCT_FUNCTION (node->decl);
- init_graph (VEC_length (varinfo_t, varmap) * 2);
- build_pred_graph ();
- si = perform_var_substitution (graph);
- rewrite_constraints (graph, si);
+ /* Compute the points-to sets for pointer SSA_NAMEs. */
+ for (i = 0; VEC_iterate (tree, fn->gimple_df->ssa_names, i, ptr); ++i)
+ {
+ if (ptr
+ && POINTER_TYPE_P (TREE_TYPE (ptr)))
+ find_what_p_points_to (ptr);
+ }
- build_succ_graph ();
- free_var_substitution_info (si);
- move_complex_constraints (graph);
- unite_pointer_equivalences (graph);
- find_indirect_cycles (graph);
+ /* Compute the call-use and call-clobber sets for all direct calls. */
+ fi = lookup_vi_for_tree (node->decl);
+ gcc_assert (fi->is_fn_info);
+ find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
+ &clobbers);
+ find_what_var_points_to (first_vi_for_offset (fi, fi_uses), &uses);
+ for (e = node->callers; e; e = e->next_caller)
+ {
+ if (!e->call_stmt)
+ continue;
- /* Implicit nodes and predecessors are no longer necessary at this
- point. */
- remove_preds_and_fake_succs (graph);
+ *gimple_call_clobber_set (e->call_stmt) = clobbers;
+ *gimple_call_use_set (e->call_stmt) = uses;
+ }
- if (dump_file)
- fprintf (dump_file, "\nSolving graph\n");
+ /* Compute the call-use and call-clobber sets for indirect calls
+ and calls to external functions. */
+ FOR_EACH_BB_FN (bb, fn)
+ {
+ gimple_stmt_iterator gsi;
- solve_graph (graph);
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+ struct pt_solution *pt;
+ varinfo_t vi;
+ tree decl;
- if (dump_file)
- dump_sa_points_to_info (dump_file);
+ if (!is_gimple_call (stmt))
+ continue;
+
+ /* Handle direct calls to external functions. */
+ decl = gimple_call_fndecl (stmt);
+ if (decl
+ && (!(fi = lookup_vi_for_tree (decl))
+ || !fi->is_fn_info))
+ {
+ pt = gimple_call_use_set (stmt);
+ if (gimple_call_flags (stmt) & ECF_CONST)
+ memset (pt, 0, sizeof (struct pt_solution));
+ else if ((vi = lookup_call_use_vi (stmt)) != NULL)
+ {
+ find_what_var_points_to (vi, pt);
+ /* Escaped (and thus nonlocal) variables are always
+ implicitly used by calls. */
+ /* ??? ESCAPED can be empty even though NONLOCAL
+ always escaped. */
+ pt->nonlocal = 1;
+ pt->ipa_escaped = 1;
+ }
+ else
+ {
+ /* If there is nothing special about this call then
+ we have made everything that is used also escape. */
+ *pt = ipa_escaped_pt;
+ pt->nonlocal = 1;
+ }
+
+ pt = gimple_call_clobber_set (stmt);
+ if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
+ memset (pt, 0, sizeof (struct pt_solution));
+ else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
+ {
+ find_what_var_points_to (vi, pt);
+ /* Escaped (and thus nonlocal) variables are always
+ implicitly clobbered by calls. */
+ /* ??? ESCAPED can be empty even though NONLOCAL
+ always escaped. */
+ pt->nonlocal = 1;
+ pt->ipa_escaped = 1;
+ }
+ else
+ {
+ /* If there is nothing special about this call then
+ we have made everything that is used also escape. */
+ *pt = ipa_escaped_pt;
+ pt->nonlocal = 1;
+ }
+ }
+
+ /* Handle indirect calls. */
+ if (!decl
+ && (fi = get_fi_for_callee (stmt)))
+ {
+ /* We need to accumulate all clobbers/uses of all possible
+ callees. */
+ fi = get_varinfo (find (fi->id));
+ /* If we cannot constrain the set of functions we'll end up
+ calling we end up using/clobbering everything. */
+ if (bitmap_bit_p (fi->solution, anything_id)
+ || bitmap_bit_p (fi->solution, nonlocal_id)
+ || bitmap_bit_p (fi->solution, escaped_id))
+ {
+ pt_solution_reset (gimple_call_clobber_set (stmt));
+ pt_solution_reset (gimple_call_use_set (stmt));
+ }
+ else
+ {
+ bitmap_iterator bi;
+ unsigned i;
+ struct pt_solution *uses, *clobbers;
+
+ uses = gimple_call_use_set (stmt);
+ clobbers = gimple_call_clobber_set (stmt);
+ memset (uses, 0, sizeof (struct pt_solution));
+ memset (clobbers, 0, sizeof (struct pt_solution));
+ EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
+ {
+ struct pt_solution sol;
+
+ vi = get_varinfo (i);
+ if (!vi->is_fn_info)
+ {
+ /* ??? We could be more precise here? */
+ uses->nonlocal = 1;
+ uses->ipa_escaped = 1;
+ clobbers->nonlocal = 1;
+ clobbers->ipa_escaped = 1;
+ continue;
+ }
+
+ if (!uses->anything)
+ {
+ find_what_var_points_to
+ (first_vi_for_offset (vi, fi_uses), &sol);
+ pt_solution_ior_into (uses, &sol);
+ }
+ if (!clobbers->anything)
+ {
+ find_what_var_points_to
+ (first_vi_for_offset (vi, fi_clobbers), &sol);
+ pt_solution_ior_into (clobbers, &sol);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ fn->gimple_df->ipa_pta = true;
+ }
- in_ipa_mode = 0;
- delete_alias_heapvars ();
delete_points_to_sets ();
+
+ in_ipa_mode = 0;
+
return 0;
}
}
};
-/* Initialize the heapvar for statement mapping. */
-void
-init_alias_heapvars (void)
-{
- if (!heapvar_for_stmt)
- heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
- NULL);
-}
-
-void
-delete_alias_heapvars (void)
-{
- htab_delete (heapvar_for_stmt);
- heapvar_for_stmt = NULL;
-}
#include "gt-tree-ssa-structalias.h"