/* Tree based points-to analysis
- Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
+ Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
Free Software Foundation, Inc.
Contributed by Daniel Berlin <dberlin@dberlin.org>
#include "obstack.h"
#include "bitmap.h"
#include "flags.h"
-#include "rtl.h"
-#include "tm_p.h"
-#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "tree.h"
#include "tree-flow.h"
#include "tree-inline.h"
-#include "diagnostic.h"
-#include "toplev.h"
+#include "diagnostic-core.h"
#include "gimple.h"
#include "hashtab.h"
#include "function.h"
And probably more. */
-static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
-htab_t heapvar_for_stmt;
-
static bool use_field_sensitive = true;
static int in_ipa_mode = 0;
escaped_id = 3, nonlocal_id = 4,
storedanything_id = 5, integer_id = 6 };
-struct GTY(()) heapvar_map {
- struct tree_map map;
- unsigned HOST_WIDE_INT offset;
-};
-
-static int
-heapvar_map_eq (const void *p1, const void *p2)
-{
- const struct heapvar_map *h1 = (const struct heapvar_map *)p1;
- const struct heapvar_map *h2 = (const struct heapvar_map *)p2;
- return (h1->map.base.from == h2->map.base.from
- && h1->offset == h2->offset);
-}
-
-static unsigned int
-heapvar_map_hash (struct heapvar_map *h)
-{
- return iterative_hash_host_wide_int (h->offset,
- htab_hash_pointer (h->map.base.from));
-}
-
-/* Lookup a heap var for FROM, and return it if we find one. */
-
-static tree
-heapvar_lookup (tree from, unsigned HOST_WIDE_INT offset)
-{
- struct heapvar_map *h, in;
- in.map.base.from = from;
- in.offset = offset;
- h = (struct heapvar_map *) htab_find_with_hash (heapvar_for_stmt, &in,
- heapvar_map_hash (&in));
- if (h)
- return h->map.to;
- return NULL_TREE;
-}
-
-/* Insert a mapping FROM->TO in the heap var for statement
- hashtable. */
-
-static void
-heapvar_insert (tree from, unsigned HOST_WIDE_INT offset, tree to)
-{
- struct heapvar_map *h;
- void **loc;
-
- h = GGC_NEW (struct heapvar_map);
- h->map.base.from = from;
- h->offset = offset;
- h->map.hash = heapvar_map_hash (h);
- h->map.to = to;
- loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->map.hash, INSERT);
- gcc_assert (*loc == NULL);
- *(struct heapvar_map **) loc = h;
-}
-
/* Return a new variable info structure consisting for a variable
named NAME, and using constraint graph node NODE. Append it
to the vector of variable info structures. */
ret->is_global_var = (t == NULL_TREE);
ret->is_fn_info = false;
if (t && DECL_P (t))
- ret->is_global_var = is_global_var (t);
+ ret->is_global_var = (is_global_var (t)
+ /* We have to treat even local register variables
+ as escape points. */
+ || (TREE_CODE (t) == VAR_DECL
+ && DECL_HARD_REGISTER (t)));
ret->solution = BITMAP_ALLOC (&pta_obstack);
- ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
+ ret->oldsolution = NULL;
ret->next = NULL;
stats.total_vars++;
typedef struct constraint_expr ce_s;
DEF_VEC_O(ce_s);
DEF_VEC_ALLOC_O(ce_s, heap);
-static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
+static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool, bool);
static void get_constraint_for (tree, VEC(ce_s, heap) **);
+static void get_constraint_for_rhs (tree, VEC(ce_s, heap) **);
static void do_deref (VEC (ce_s, heap) **);
/* Our set constraints are made up of two constraint expressions, one
fprintf (file, " + UNKNOWN");
else if (c->rhs.offset != 0)
fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
- fprintf (file, "\n");
}
/* Print out constraint C to stderr. */
-void
+DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
dump_constraint (stderr, c);
+ fprintf (stderr, "\n");
}
/* Print out all constraints to FILE */
int i;
constraint_t c;
for (i = from; VEC_iterate (constraint_t, constraints, i, c); i++)
- dump_constraint (file, c);
+ if (c)
+ {
+ dump_constraint (file, c);
+ fprintf (file, "\n");
+ }
}
/* Print out all constraints to stderr. */
-void
+DEBUG_FUNCTION void
debug_constraints (void)
{
dump_constraints (stderr, 0);
}
-/* Print out to FILE the edge in the constraint graph that is created by
- constraint c. The edge may have a label, depending on the type of
- constraint that it represents. If complex1, e.g: a = *b, then the label
- is "=*", if complex2, e.g: *a = b, then the label is "*=", if
- complex with an offset, e.g: a = b + 8, then the label is "+".
- Otherwise the edge has no label. */
-
-static void
-dump_constraint_edge (FILE *file, constraint_t c)
-{
- if (c->rhs.type != ADDRESSOF)
- {
- const char *src = get_varinfo (c->rhs.var)->name;
- const char *dst = get_varinfo (c->lhs.var)->name;
- fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
- /* Due to preprocessing of constraints, instructions like *a = *b are
- illegal; thus, we do not have to handle such cases. */
- if (c->lhs.type == DEREF)
- fprintf (file, " [ label=\"*=\" ] ;\n");
- else if (c->rhs.type == DEREF)
- fprintf (file, " [ label=\"=*\" ] ;\n");
- else
- {
- /* We must check the case where the constraint is an offset.
- In this case, it is treated as a complex constraint. */
- if (c->rhs.offset != c->lhs.offset)
- fprintf (file, " [ label=\"+\" ] ;\n");
- else
- fprintf (file, " ;\n");
- }
- }
-}
-
/* Print the constraint graph in dot format. */
static void
dump_constraint_graph (FILE *file)
{
- unsigned int i=0, size;
- constraint_t c;
+ unsigned int i;
/* Only print the graph if it has already been initialized: */
if (!graph)
return;
- /* Print the constraints used to produce the constraint graph. The
- constraints will be printed as comments in the dot file: */
- fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
- dump_constraints (file, 0);
- fprintf (file, "*/\n");
-
/* Prints the header of the dot file: */
- fprintf (file, "\n\n// The constraint graph in dot format:\n");
fprintf (file, "strict digraph {\n");
fprintf (file, " node [\n shape = box\n ]\n");
fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
- fprintf (file, "\n // List of nodes in the constraint graph:\n");
-
- /* The next lines print the nodes in the graph. In order to get the
- number of nodes in the graph, we must choose the minimum between the
- vector VEC (varinfo_t, varmap) and graph->size. If the graph has not
- yet been initialized, then graph->size == 0, otherwise we must only
- read nodes that have an entry in VEC (varinfo_t, varmap). */
- size = VEC_length (varinfo_t, varmap);
- size = size < graph->size ? size : graph->size;
- for (i = 0; i < size; i++)
+ fprintf (file, "\n // List of nodes and complex constraints in "
+ "the constraint graph:\n");
+
+ /* The next lines print the nodes in the graph together with the
+ complex constraints attached to them. */
+ for (i = 0; i < graph->size; i++)
{
- const char *name = get_varinfo (graph->rep[i])->name;
- fprintf (file, " \"%s\" ;\n", name);
+ if (find (i) != i)
+ continue;
+ if (i < FIRST_REF_NODE)
+ fprintf (file, "\"%s\"", get_varinfo (i)->name);
+ else
+ fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
+ if (graph->complex[i])
+ {
+ unsigned j;
+ constraint_t c;
+ fprintf (file, " [label=\"\\N\\n");
+ for (j = 0; VEC_iterate (constraint_t, graph->complex[i], j, c); ++j)
+ {
+ dump_constraint (file, c);
+ fprintf (file, "\\l");
+ }
+ fprintf (file, "\"]");
+ }
+ fprintf (file, ";\n");
}
- /* Go over the list of constraints printing the edges in the constraint
- graph. */
- fprintf (file, "\n // The constraint edges:\n");
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
- if (c)
- dump_constraint_edge (file, c);
+ /* Go over the edges. */
+ fprintf (file, "\n // Edges in the constraint graph:\n");
+ for (i = 0; i < graph->size; i++)
+ {
+ unsigned j;
+ bitmap_iterator bi;
+ if (find (i) != i)
+ continue;
+ EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
+ {
+ unsigned to = find (j);
+ if (i == to)
+ continue;
+ if (i < FIRST_REF_NODE)
+ fprintf (file, "\"%s\"", get_varinfo (i)->name);
+ else
+ fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
+ fprintf (file, " -> ");
+ if (to < FIRST_REF_NODE)
+ fprintf (file, "\"%s\"", get_varinfo (to)->name);
+ else
+ fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
+ fprintf (file, ";\n");
+ }
+ }
- /* Prints the tail of the dot file. By now, only the closing bracket. */
- fprintf (file, "}\n\n\n");
+ /* Prints the tail of the dot file. */
+ fprintf (file, "}\n");
}
/* Print out the constraint graph to stderr. */
-void
+DEBUG_FUNCTION void
debug_constraint_graph (void)
{
dump_constraint_graph (stderr);
int i;
constraint_t c;
- for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, *from, i, c)
{
if (constraint_vec_find (*to, *c) == NULL)
{
gcc_assert (find (from) == to);
/* Move all complex constraints from src node into to node */
- for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, graph->complex[from], i, c)
{
/* In complex constraints for node src, we may have either
a = *src, and *src = a, or an offseted constraint which are
for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
graph->indirect_cycles[j] = -1;
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
unsigned i, t;
constraint_t c;
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
{
struct constraint_expr lhs;
struct constraint_expr rhs;
/* Changed variables on the last iteration. */
-static unsigned int changed_count;
-static sbitmap changed;
+static bitmap changed;
/* Strongly Connected Component visitation info. */
/* Mark TO as changed if FROM was changed. If TO was already marked
as changed, decrease the changed count. */
- if (update_changed && TEST_BIT (changed, from))
+ if (update_changed
+ && bitmap_bit_p (changed, from))
{
- RESET_BIT (changed, from);
- if (!TEST_BIT (changed, to))
- SET_BIT (changed, to);
- else
- {
- gcc_assert (changed_count > 0);
- changed_count--;
- }
+ bitmap_clear_bit (changed, from);
+ bitmap_set_bit (changed, to);
}
if (get_varinfo (from)->solution)
{
if (bitmap_ior_into (get_varinfo (to)->solution,
get_varinfo (from)->solution))
{
- if (update_changed && !TEST_BIT (changed, to))
- {
- SET_BIT (changed, to);
- changed_count++;
- }
+ if (update_changed)
+ bitmap_set_bit (changed, to);
}
BITMAP_FREE (get_varinfo (from)->solution);
- BITMAP_FREE (get_varinfo (from)->oldsolution);
+ if (get_varinfo (from)->oldsolution)
+ BITMAP_FREE (get_varinfo (from)->oldsolution);
- if (stats.iterations > 0)
- {
- BITMAP_FREE (get_varinfo (to)->oldsolution);
- get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
- }
+ if (stats.iterations > 0
+ && get_varinfo (to)->oldsolution)
+ BITMAP_FREE (get_varinfo (to)->oldsolution);
}
if (valid_graph_edge (graph, to, to))
{
if (flag)
{
get_varinfo (lhs)->solution = sol;
- if (!TEST_BIT (changed, lhs))
- {
- SET_BIT (changed, lhs);
- changed_count++;
- }
+ bitmap_set_bit (changed, lhs);
}
}
if (add_graph_edge (graph, t, rhs))
{
if (bitmap_ior_into (get_varinfo (t)->solution, sol))
- {
- if (!TEST_BIT (changed, t))
- {
- SET_BIT (changed, t);
- changed_count++;
- }
- }
+ bitmap_set_bit (changed, t);
}
return;
}
{
t = find (escaped_id);
if (add_graph_edge (graph, t, rhs)
- && bitmap_ior_into (get_varinfo (t)->solution, sol)
- && !TEST_BIT (changed, t))
- {
- SET_BIT (changed, t);
- changed_count++;
- }
+ && bitmap_ior_into (get_varinfo (t)->solution, sol))
+ bitmap_set_bit (changed, t);
/* Enough to let rhs escape once. */
escaped_p = true;
}
t = find (v->id);
if (add_graph_edge (graph, t, rhs)
- && bitmap_ior_into (get_varinfo (t)->solution, sol)
- && !TEST_BIT (changed, t))
- {
- SET_BIT (changed, t);
- changed_count++;
- }
+ && bitmap_ior_into (get_varinfo (t)->solution, sol))
+ bitmap_set_bit (changed, t);
}
/* If the variable is not exactly at the requested offset
if (flag)
{
get_varinfo (c->lhs.var)->solution = tmp;
- if (!TEST_BIT (changed, c->lhs.var))
- {
- SET_BIT (changed, c->lhs.var);
- changed_count++;
- }
+ bitmap_set_bit (changed, c->lhs.var);
}
}
}
int i;
constraint_t c;
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
{
if (c)
{
for (j = 0; j < graph->size; j++)
gcc_assert (find (j) == j);
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
"ignoring constraint:",
get_varinfo (lhs.var)->name);
dump_constraint (dump_file, c);
+ fprintf (dump_file, "\n");
}
VEC_replace (constraint_t, constraints, i, NULL);
continue;
"ignoring constraint:",
get_varinfo (rhs.var)->name);
dump_constraint (dump_file, c);
+ fprintf (dump_file, "\n");
}
VEC_replace (constraint_t, constraints, i, NULL);
continue;
unsigned int i;
bitmap pts;
- changed_count = 0;
- changed = sbitmap_alloc (size);
- sbitmap_zero (changed);
+ changed = BITMAP_ALLOC (NULL);
/* Mark all initial non-collapsed nodes as changed. */
for (i = 0; i < size; i++)
if (find (i) == i && !bitmap_empty_p (ivi->solution)
&& ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
|| VEC_length (constraint_t, graph->complex[i]) > 0))
- {
- SET_BIT (changed, i);
- changed_count++;
- }
+ bitmap_set_bit (changed, i);
}
/* Allocate a bitmap to be used to store the changed bits. */
pts = BITMAP_ALLOC (&pta_obstack);
- while (changed_count > 0)
+ while (!bitmap_empty_p (changed))
{
unsigned int i;
struct topo_info *ti = init_topo_info ();
/* If the node has changed, we need to process the
complex constraints and outgoing edges again. */
- if (TEST_BIT (changed, i))
+ if (bitmap_clear_bit (changed, i))
{
unsigned int j;
constraint_t c;
bitmap solution;
VEC(constraint_t,heap) *complex = graph->complex[i];
+ varinfo_t vi = get_varinfo (i);
bool solution_empty;
- RESET_BIT (changed, i);
- changed_count--;
-
/* Compute the changed set of solution bits. */
- bitmap_and_compl (pts, get_varinfo (i)->solution,
- get_varinfo (i)->oldsolution);
+ if (vi->oldsolution)
+ bitmap_and_compl (pts, vi->solution, vi->oldsolution);
+ else
+ bitmap_copy (pts, vi->solution);
if (bitmap_empty_p (pts))
continue;
- bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
+ if (vi->oldsolution)
+ bitmap_ior_into (vi->oldsolution, pts);
+ else
+ {
+ vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
+ bitmap_copy (vi->oldsolution, pts);
+ }
- solution = get_varinfo (i)->solution;
+ solution = vi->solution;
solution_empty = bitmap_empty_p (solution);
/* Process the complex constraints */
- for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
+ FOR_EACH_VEC_ELT (constraint_t, complex, j, c)
{
/* XXX: This is going to unsort the constraints in
some cases, which will occasionally add duplicate
if (flag)
{
get_varinfo (to)->solution = tmp;
- if (!TEST_BIT (changed, to))
- {
- SET_BIT (changed, to);
- changed_count++;
- }
+ bitmap_set_bit (changed, to);
}
}
}
}
BITMAP_FREE (pts);
- sbitmap_free (changed);
+ BITMAP_FREE (changed);
bitmap_obstack_release (&oldpta_obstack);
}
static const char *
alias_get_name (tree decl)
{
- const char *res = get_name (decl);
+ const char *res;
char *temp;
int num_printed = 0;
+ if (DECL_ASSEMBLER_NAME_SET_P (decl))
+ res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
+ else
+ res = get_name (decl);
if (res != NULL)
return res;
/* For parameters, get at the points-to set for the actual parm
decl. */
if (TREE_CODE (t) == SSA_NAME
- && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
+ && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
&& SSA_NAME_IS_DEFAULT_DEF (t))
{
get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
return;
}
+ /* For global variables resort to the alias target. */
+ if (TREE_CODE (t) == VAR_DECL
+ && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
+ {
+ struct varpool_node *node = varpool_get_node (t);
+ if (node && node->alias)
+ {
+ node = varpool_variable_node (node, NULL);
+ t = node->decl;
+ }
+ }
+
vi = get_vi_for_tree (t);
cexpr.var = vi->id;
cexpr.type = SCALAR;
}
}
-/* Return true if T is a type that could contain pointers. */
-
-static bool
-type_could_have_pointers (tree type)
-{
- if (POINTER_TYPE_P (type))
- return true;
-
- if (TREE_CODE (type) == ARRAY_TYPE)
- return type_could_have_pointers (TREE_TYPE (type));
-
- /* A function or method can consume pointers.
- ??? We could be more precise here. */
- if (TREE_CODE (type) == FUNCTION_TYPE
- || TREE_CODE (type) == METHOD_TYPE)
- return true;
-
- return AGGREGATE_TYPE_P (type);
-}
-
-/* Return true if T is a variable of a type that could contain
- pointers. */
-
-static bool
-could_have_pointers (tree t)
-{
- return type_could_have_pointers (TREE_TYPE (t));
-}
/* Return the position, in bits, of FIELD_DECL from the beginning of its
structure. */
static HOST_WIDE_INT
bitpos_of_field (const tree fdecl)
{
-
if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
|| !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
return -1;
- return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
+ return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
+ TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
}
{
struct constraint_expr c;
unsigned int j, n;
- HOST_WIDE_INT rhsunitoffset, rhsoffset;
+ HOST_WIDE_INT rhsoffset;
/* If we do not do field-sensitive PTA adding offsets to pointers
does not change the points-to solution. */
if (!use_field_sensitive)
{
- get_constraint_for (ptr, results);
+ get_constraint_for_rhs (ptr, results);
return;
}
solution which includes all sub-fields of all pointed-to
variables of ptr. */
if (offset == NULL_TREE
- || !host_integerp (offset, 0))
+ || TREE_CODE (offset) != INTEGER_CST)
rhsoffset = UNKNOWN_OFFSET;
else
{
- /* Make sure the bit-offset also fits. */
- rhsunitoffset = TREE_INT_CST_LOW (offset);
- rhsoffset = rhsunitoffset * BITS_PER_UNIT;
- if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
+ /* Sign-extend the offset. */
+ double_int soffset
+ = double_int_sext (tree_to_double_int (offset),
+ TYPE_PRECISION (TREE_TYPE (offset)));
+ if (!double_int_fits_in_shwi_p (soffset))
rhsoffset = UNKNOWN_OFFSET;
+ else
+ {
+ /* Make sure the bit-offset also fits. */
+ HOST_WIDE_INT rhsunitoffset = soffset.low;
+ rhsoffset = rhsunitoffset * BITS_PER_UNIT;
+ if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
+ rhsoffset = UNKNOWN_OFFSET;
+ }
}
- get_constraint_for (ptr, results);
+ get_constraint_for_rhs (ptr, results);
if (rhsoffset == 0)
return;
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
- If address_p is true the result will be taken its address of. */
+ If address_p is true the result will have its address taken.
+ If lhs_p is true then the constraint expression is assumed to be used
+ as the lhs. */
static void
get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
- bool address_p)
+ bool address_p, bool lhs_p)
{
tree orig_t = t;
HOST_WIDE_INT bitsize = -1;
&0->a.b */
forzero = t;
while (handled_component_p (forzero)
- || INDIRECT_REF_P (forzero))
+ || INDIRECT_REF_P (forzero)
+ || TREE_CODE (forzero) == MEM_REF)
forzero = TREE_OPERAND (forzero, 0);
if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
return;
}
+ /* Handle type-punning through unions. If we are extracting a pointer
+ from a union via a possibly type-punning access that pointer
+ points to anything, similar to a conversion of an integer to
+ a pointer. */
+ if (!lhs_p)
+ {
+ tree u;
+ for (u = t;
+ TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
+ u = TREE_OPERAND (u, 0))
+ if (TREE_CODE (u) == COMPONENT_REF
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
+ {
+ struct constraint_expr temp;
+
+ temp.offset = 0;
+ temp.var = anything_id;
+ temp.type = ADDRESSOF;
+ VEC_safe_push (ce_s, heap, *results, &temp);
+ return;
+ }
+ }
+
t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
/* Pretend to take the address of the base, we'll take care of
adding the required subset of sub-fields below. */
- get_constraint_for_1 (t, results, true);
+ get_constraint_for_1 (t, results, true, lhs_p);
gcc_assert (VEC_length (ce_s, *results) == 1);
result = VEC_last (ce_s, *results);
cexpr.var = curr->id;
VEC_safe_push (ce_s, heap, *results, &cexpr);
}
- else
+ else if (VEC_length (ce_s, *results) == 0)
/* Assert that we found *some* field there. The user couldn't be
accessing *only* padding. */
/* Still the user could access one past the end of an array
embedded in a struct resulting in accessing *only* padding. */
- gcc_assert (VEC_length (ce_s, *results) >= 1
- || ref_contains_array_ref (orig_t));
+ /* Or accessing only padding via type-punning to a type
+ that has a field just in padding space. */
+ {
+ cexpr.type = SCALAR;
+ cexpr.var = anything_id;
+ cexpr.offset = 0;
+ VEC_safe_push (ce_s, heap, *results, &cexpr);
+ }
}
else if (bitmaxsize == 0)
{
at most one subfield of any variable. */
if (bitpos == -1
|| bitsize != bitmaxsize
- || AGGREGATE_TYPE_P (TREE_TYPE (orig_t)))
+ || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
+ || result->offset == UNKNOWN_OFFSET)
result->offset = UNKNOWN_OFFSET;
else
- result->offset = bitpos;
+ result->offset += bitpos;
}
else if (result->type == ADDRESSOF)
{
struct constraint_expr *c;
unsigned int i = 0;
- for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (ce_s, *constraints, i, c)
{
if (c->type == SCALAR)
c->type = DEREF;
}
}
-static void get_constraint_for_1 (tree, VEC (ce_s, heap) **, bool);
-
/* Given a tree T, return the constraint expression for taking the
address of it. */
struct constraint_expr *c;
unsigned int i;
- get_constraint_for_1 (t, results, true);
+ get_constraint_for_1 (t, results, true, true);
- for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
+ FOR_EACH_VEC_ELT (ce_s, *results, i, c)
{
if (c->type == DEREF)
c->type = SCALAR;
/* Given a tree T, return the constraint expression for it. */
static void
-get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
+get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
+ bool lhs_p)
{
struct constraint_expr temp;
if (flag_delete_null_pointer_checks)
temp.var = nothing_id;
else
- temp.var = anything_id;
+ temp.var = nonlocal_id;
temp.type = ADDRESSOF;
temp.offset = 0;
VEC_safe_push (ce_s, heap, *results, &temp);
{
switch (TREE_CODE (t))
{
- case INDIRECT_REF:
+ case MEM_REF:
{
- get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
+ struct constraint_expr cs;
+ varinfo_t vi, curr;
+ get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
+ TREE_OPERAND (t, 1), results);
do_deref (results);
+
+ /* If we are not taking the address then make sure to process
+ all subvariables we might access. */
+ if (address_p)
+ return;
+
+ cs = *VEC_last (ce_s, *results);
+ if (cs.type == DEREF)
+ {
+ /* For dereferences this means we have to defer it
+ to solving time. */
+ VEC_last (ce_s, *results)->offset = UNKNOWN_OFFSET;
+ return;
+ }
+ if (cs.type != SCALAR)
+ return;
+
+ vi = get_varinfo (cs.var);
+ curr = vi->next;
+ if (!vi->is_full_var
+ && curr)
+ {
+ unsigned HOST_WIDE_INT size;
+ if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
+ size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
+ else
+ size = -1;
+ for (; curr; curr = curr->next)
+ {
+ if (curr->offset - vi->offset < size)
+ {
+ cs.var = curr->id;
+ VEC_safe_push (ce_s, heap, *results, &cs);
+ }
+ else
+ break;
+ }
+ }
return;
}
case ARRAY_REF:
case ARRAY_RANGE_REF:
case COMPONENT_REF:
- get_constraint_for_component_ref (t, results, address_p);
+ get_constraint_for_component_ref (t, results, address_p, lhs_p);
return;
case VIEW_CONVERT_EXPR:
- get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
+ get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
+ lhs_p);
return;
/* We are missing handling for TARGET_MEM_REF here. */
default:;
{
struct constraint_expr *rhsp;
unsigned j;
- get_constraint_for_1 (val, &tmp, address_p);
- for (j = 0; VEC_iterate (ce_s, tmp, j, rhsp); ++j)
+ get_constraint_for_1 (val, &tmp, address_p, lhs_p);
+ FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
VEC_safe_push (ce_s, heap, *results, rhsp);
VEC_truncate (ce_s, tmp, 0);
}
get_constraint_for_ssa_var (t, results, address_p);
return;
}
+ case tcc_constant:
+ {
+ /* We cannot refer to automatic variables through constants. */
+ temp.type = ADDRESSOF;
+ temp.var = nonlocal_id;
+ temp.offset = 0;
+ VEC_safe_push (ce_s, heap, *results, &temp);
+ return;
+ }
default:;
}
{
gcc_assert (VEC_length (ce_s, *results) == 0);
- get_constraint_for_1 (t, results, false);
+ get_constraint_for_1 (t, results, false, true);
+}
+
+/* Given a gimple tree T, return the constraint expression vector for it
+ to be used as the rhs of a constraint. */
+
+static void
+get_constraint_for_rhs (tree t, VEC (ce_s, heap) **results)
+{
+ gcc_assert (VEC_length (ce_s, *results) == 0);
+
+ get_constraint_for_1 (t, results, false, false);
}
if (VEC_length (ce_s, lhsc) <= 1
|| VEC_length (ce_s, rhsc) <= 1)
{
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
- for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
process_constraint (new_constraint (*lhsp, *rhsp));
}
else
{
struct constraint_expr tmp;
tmp = new_scalar_tmp_constraint_exp ("allalltmp");
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (tmp, *rhsp));
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, tmp));
}
}
unsigned j;
get_constraint_for (lhsop, &lhsc);
- get_constraint_for (rhsop, &rhsc);
+ get_constraint_for_rhs (rhsop, &rhsc);
lhsp = VEC_index (ce_s, lhsc, 0);
rhsp = VEC_index (ce_s, rhsc, 0);
if (lhsp->type == DEREF
lhsv = get_varinfo (lhsp->var);
rhsv = get_varinfo (rhsp->var);
if (lhsv->may_have_pointers
- && ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
- rhsv->offset + lhsoffset, rhsv->size))
+ && (lhsv->is_full_var
+ || rhsv->is_full_var
+ || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
+ rhsv->offset + lhsoffset, rhsv->size)))
process_constraint (new_constraint (*lhsp, *rhsp));
- if (lhsv->offset + rhsoffset + lhsv->size
- > rhsv->offset + lhsoffset + rhsv->size)
+ if (!rhsv->is_full_var
+ && (lhsv->is_full_var
+ || (lhsv->offset + rhsoffset + lhsv->size
+ > rhsv->offset + lhsoffset + rhsv->size)))
{
++k;
if (k >= VEC_length (ce_s, rhsc))
VEC_free (ce_s, heap, rhsc);
}
-/* Create a constraint ID = OP. */
+/* Create constraints ID = { rhsc }. */
static void
-make_constraint_to (unsigned id, tree op)
+make_constraints_to (unsigned id, VEC(ce_s, heap) *rhsc)
{
- VEC(ce_s, heap) *rhsc = NULL;
struct constraint_expr *c;
struct constraint_expr includes;
unsigned int j;
includes.offset = 0;
includes.type = SCALAR;
- get_constraint_for (op, &rhsc);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, j, c)
process_constraint (new_constraint (includes, *c));
+}
+
+/* Create a constraint ID = OP. */
+
+static void
+make_constraint_to (unsigned id, tree op)
+{
+ VEC(ce_s, heap) *rhsc = NULL;
+ get_constraint_for_rhs (op, &rhsc);
+ make_constraints_to (id, rhsc);
VEC_free (ce_s, heap, rhsc);
}
process_constraint (new_constraint (lhs, rhs));
}
-/* Create a new artificial heap variable with NAME and make a
- constraint from it to LHS. Return the created variable. */
-
-static varinfo_t
-make_constraint_from_heapvar (varinfo_t lhs, const char *name)
-{
- varinfo_t vi;
- tree heapvar = heapvar_lookup (lhs->decl, lhs->offset);
+/* Temporary storage for fake var decls. */
+struct obstack fake_var_decl_obstack;
- if (heapvar == NULL_TREE)
- {
- var_ann_t ann;
- heapvar = create_tmp_var_raw (ptr_type_node, name);
- DECL_EXTERNAL (heapvar) = 1;
+/* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
- heapvar_insert (lhs->decl, lhs->offset, heapvar);
+static tree
+build_fake_var_decl (tree type)
+{
+ tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
+ memset (decl, 0, sizeof (struct tree_var_decl));
+ TREE_SET_CODE (decl, VAR_DECL);
+ TREE_TYPE (decl) = type;
+ DECL_UID (decl) = allocate_decl_uid ();
+ SET_DECL_PT_UID (decl, -1);
+ layout_decl (decl, 0);
+ return decl;
+}
- ann = get_var_ann (heapvar);
- ann->is_heapvar = 1;
- }
+/* Create a new artificial heap variable with NAME.
+ Return the created variable. */
- /* For global vars we need to add a heapvar to the list of referenced
- vars of a different function than it was created for originally. */
- if (cfun && gimple_referenced_vars (cfun))
- add_referenced_var (heapvar);
+static varinfo_t
+make_heapvar (const char *name)
+{
+ varinfo_t vi;
+ tree heapvar;
+
+ heapvar = build_fake_var_decl (ptr_type_node);
+ DECL_EXTERNAL (heapvar) = 1;
vi = new_var_info (heapvar, name);
vi->is_artificial_var = true;
vi->is_full_var = true;
insert_vi_for_tree (heapvar, vi);
+ return vi;
+}
+
+/* Create a new artificial heap variable with NAME and make a
+ constraint from it to LHS. Return the created variable. */
+
+static varinfo_t
+make_constraint_from_heapvar (varinfo_t lhs, const char *name)
+{
+ varinfo_t vi = make_heapvar (name);
make_constraint_from (lhs, vi->id);
return vi;
{
struct constraint_expr rhsc;
unsigned i;
+ bool returns_uses = false;
for (i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree arg = gimple_call_arg (stmt, i);
+ int flags = gimple_call_arg_flags (stmt, i);
+
+ /* If the argument is not used we can ignore it. */
+ if (flags & EAF_UNUSED)
+ continue;
- /* Find those pointers being passed, and make sure they end up
- pointing to anything. */
- if (could_have_pointers (arg))
+ /* As we compute ESCAPED context-insensitive we do not gain
+ any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
+ set. The argument would still get clobbered through the
+ escape solution.
+ ??? We might get away with less (and more precise) constraints
+ if using a temporary for transitively closing things. */
+ if ((flags & EAF_NOCLOBBER)
+ && (flags & EAF_NOESCAPE))
+ {
+ varinfo_t uses = get_call_use_vi (stmt);
+ if (!(flags & EAF_DIRECT))
+ make_transitive_closure_constraints (uses);
+ make_constraint_to (uses->id, arg);
+ returns_uses = true;
+ }
+ else if (flags & EAF_NOESCAPE)
+ {
+ varinfo_t uses = get_call_use_vi (stmt);
+ varinfo_t clobbers = get_call_clobber_vi (stmt);
+ if (!(flags & EAF_DIRECT))
+ {
+ make_transitive_closure_constraints (uses);
+ make_transitive_closure_constraints (clobbers);
+ }
+ make_constraint_to (uses->id, arg);
+ make_constraint_to (clobbers->id, arg);
+ returns_uses = true;
+ }
+ else
make_escape_constraint (arg);
}
+ /* If we added to the calls uses solution make sure we account for
+ pointers to it to be returned. */
+ if (returns_uses)
+ {
+ rhsc.var = get_call_use_vi (stmt)->id;
+ rhsc.offset = 0;
+ rhsc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
+ }
+
/* The static chain escapes as well. */
if (gimple_call_chain (stmt))
make_escape_constraint (gimple_call_chain (stmt));
lhsc.var = escaped_id;
lhsc.offset = 0;
lhsc.type = SCALAR;
- for (i = 0; VEC_iterate (ce_s, tmpc, i, c); ++i)
+ FOR_EACH_VEC_ELT (ce_s, tmpc, i, c)
process_constraint (new_constraint (lhsc, *c));
VEC_free(ce_s, heap, tmpc);
}
the LHS point to global and escaped variables. */
static void
-handle_lhs_call (tree lhs, int flags, VEC(ce_s, heap) *rhsc, tree fndecl)
+handle_lhs_call (gimple stmt, tree lhs, int flags, VEC(ce_s, heap) *rhsc,
+ tree fndecl)
{
VEC(ce_s, heap) *lhsc = NULL;
get_constraint_for (lhs, &lhsc);
-
- if (flags & ECF_MALLOC)
+ /* If the store is to a global decl make sure to
+ add proper escape constraints. */
+ lhs = get_base_address (lhs);
+ if (lhs
+ && DECL_P (lhs)
+ && is_global_var (lhs))
+ {
+ struct constraint_expr tmpc;
+ tmpc.var = escaped_id;
+ tmpc.offset = 0;
+ tmpc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, lhsc, &tmpc);
+ }
+
+ /* If the call returns an argument unmodified override the rhs
+ constraints. */
+ flags = gimple_call_return_flags (stmt);
+ if (flags & ERF_RETURNS_ARG
+ && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
+ {
+ tree arg;
+ rhsc = NULL;
+ arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
+ get_constraint_for (arg, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, rhsc);
+ }
+ else if (flags & ERF_NOALIAS)
{
varinfo_t vi;
- vi = make_constraint_from_heapvar (get_vi_for_tree (lhs), "HEAP");
+ struct constraint_expr tmpc;
+ rhsc = NULL;
+ vi = make_heapvar ("HEAP");
/* We delay marking allocated storage global until we know if
it escapes. */
DECL_EXTERNAL (vi->decl) = 0;
vi->is_global_var = 0;
/* If this is not a real malloc call assume the memory was
- initialized and thus may point to global memory. All
+ initialized and thus may point to global memory. All
builtin functions with the malloc attribute behave in a sane way. */
if (!fndecl
|| DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
make_constraint_from (vi, nonlocal_id);
+ tmpc.var = vi->id;
+ tmpc.offset = 0;
+ tmpc.type = ADDRESSOF;
+ VEC_safe_push (ce_s, heap, rhsc, &tmpc);
}
- else if (VEC_length (ce_s, rhsc) > 0)
- {
- /* If the store is to a global decl make sure to
- add proper escape constraints. */
- lhs = get_base_address (lhs);
- if (lhs
- && DECL_P (lhs)
- && is_global_var (lhs))
- {
- struct constraint_expr tmpc;
- tmpc.var = escaped_id;
- tmpc.offset = 0;
- tmpc.type = SCALAR;
- VEC_safe_push (ce_s, heap, lhsc, &tmpc);
- }
- process_all_all_constraints (lhsc, rhsc);
- }
+
+ process_all_all_constraints (lhsc, rhsc);
+
VEC_free (ce_s, heap, lhsc);
}
for (k = 0; k < gimple_call_num_args (stmt); ++k)
{
tree arg = gimple_call_arg (stmt, k);
-
- if (could_have_pointers (arg))
- {
- VEC(ce_s, heap) *argc = NULL;
- unsigned i;
- struct constraint_expr *argp;
- get_constraint_for (arg, &argc);
- for (i = 0; VEC_iterate (ce_s, argc, i, argp); ++i)
- VEC_safe_push (ce_s, heap, *results, argp);
- VEC_free(ce_s, heap, argc);
- }
+ VEC(ce_s, heap) *argc = NULL;
+ unsigned i;
+ struct constraint_expr *argp;
+ get_constraint_for_rhs (arg, &argc);
+ FOR_EACH_VEC_ELT (ce_s, argc, i, argp)
+ VEC_safe_push (ce_s, heap, *results, argp);
+ VEC_free(ce_s, heap, argc);
}
/* May return addresses of globals. */
for (i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree arg = gimple_call_arg (stmt, i);
-
- if (could_have_pointers (arg))
+ if (!uses)
{
- if (!uses)
- {
- uses = get_call_use_vi (stmt);
- make_transitive_closure_constraints (uses);
- }
- make_constraint_to (uses->id, arg);
+ uses = get_call_use_vi (stmt);
+ make_transitive_closure_constraints (uses);
}
+ make_constraint_to (uses->id, arg);
}
/* The static chain is used as well. */
static varinfo_t
get_fi_for_callee (gimple call)
{
- tree decl;
+ tree decl, fn = gimple_call_fn (call);
+
+ if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
+ fn = OBJ_TYPE_REF_EXPR (fn);
/* If we can directly resolve the function being called, do so.
Otherwise, it must be some sort of indirect expression that
we should still be able to handle. */
- decl = gimple_call_fndecl (call);
+ decl = gimple_call_addr_fndecl (fn);
if (decl)
return get_vi_for_tree (decl);
- decl = gimple_call_fn (call);
- /* The function can be either an SSA name pointer or,
- worse, an OBJ_TYPE_REF. In this case we have no
+ /* If the function is anything other than a SSA name pointer we have no
clue and should be getting ANYFN (well, ANYTHING for now). */
- if (TREE_CODE (decl) == SSA_NAME)
- {
- if (TREE_CODE (decl) == SSA_NAME
- && TREE_CODE (SSA_NAME_VAR (decl)) == PARM_DECL
- && SSA_NAME_IS_DEFAULT_DEF (decl))
- decl = SSA_NAME_VAR (decl);
- return get_vi_for_tree (decl);
- }
- else if (TREE_CODE (decl) == INTEGER_CST
- || TREE_CODE (decl) == OBJ_TYPE_REF)
+ if (!fn || TREE_CODE (fn) != SSA_NAME)
return get_varinfo (anything_id);
- else
- gcc_unreachable ();
+
+ if ((TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL)
+ && SSA_NAME_IS_DEFAULT_DEF (fn))
+ fn = SSA_NAME_VAR (fn);
+
+ return get_vi_for_tree (fn);
}
-/* Walk statement T setting up aliasing constraints according to the
- references found in T. This function is the main part of the
- constraint builder. AI points to auxiliary alias information used
- when building alias sets and computing alias grouping heuristics. */
+/* Create constraints for the builtin call T. Return true if the call
+ was handled, otherwise false. */
-static void
-find_func_aliases (gimple origt)
+static bool
+find_func_aliases_for_builtin_call (gimple t)
{
- gimple t = origt;
+ tree fndecl = gimple_call_fndecl (t);
VEC(ce_s, heap) *lhsc = NULL;
VEC(ce_s, heap) *rhsc = NULL;
- struct constraint_expr *c;
varinfo_t fi;
- /* Now build constraints expressions. */
- if (gimple_code (t) == GIMPLE_PHI)
- {
- gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
-
- /* Only care about pointers and structures containing
- pointers. */
- if (could_have_pointers (gimple_phi_result (t)))
+ if (fndecl != NULL_TREE
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+ /* ??? All builtins that are handled here need to be handled
+ in the alias-oracle query functions explicitly! */
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ /* All the following functions return a pointer to the same object
+ as their first argument points to. The functions do not add
+ to the ESCAPED solution. The functions make the first argument
+ pointed to memory point to what the second argument pointed to
+ memory points to. */
+ case BUILT_IN_STRCPY:
+ case BUILT_IN_STRNCPY:
+ case BUILT_IN_BCOPY:
+ case BUILT_IN_MEMCPY:
+ case BUILT_IN_MEMMOVE:
+ case BUILT_IN_MEMPCPY:
+ case BUILT_IN_STPCPY:
+ case BUILT_IN_STPNCPY:
+ case BUILT_IN_STRCAT:
+ case BUILT_IN_STRNCAT:
+ case BUILT_IN_STRCPY_CHK:
+ case BUILT_IN_STRNCPY_CHK:
+ case BUILT_IN_MEMCPY_CHK:
+ case BUILT_IN_MEMMOVE_CHK:
+ case BUILT_IN_MEMPCPY_CHK:
+ case BUILT_IN_STPCPY_CHK:
+ case BUILT_IN_STRCAT_CHK:
+ case BUILT_IN_STRNCAT_CHK:
{
- size_t i;
- unsigned int j;
-
- /* For a phi node, assign all the arguments to
- the result. */
- get_constraint_for (gimple_phi_result (t), &lhsc);
- for (i = 0; i < gimple_phi_num_args (t); i++)
+ tree res = gimple_call_lhs (t);
+ tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
+ == BUILT_IN_BCOPY ? 1 : 0));
+ tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
+ == BUILT_IN_BCOPY ? 0 : 1));
+ if (res != NULL_TREE)
{
- tree strippedrhs = PHI_ARG_DEF (t, i);
-
- STRIP_NOPS (strippedrhs);
- get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
-
- for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
- {
- struct constraint_expr *c2;
- while (VEC_length (ce_s, rhsc) > 0)
- {
- c2 = VEC_last (ce_s, rhsc);
- process_constraint (new_constraint (*c, *c2));
- VEC_pop (ce_s, rhsc);
- }
- }
+ get_constraint_for (res, &lhsc);
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK)
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
+ else
+ get_constraint_for (dest, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
}
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
+ get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
+ do_deref (&lhsc);
+ do_deref (&rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
+ return true;
}
- }
- /* In IPA mode, we need to generate constraints to pass call
- arguments through their calls. There are two cases,
- either a GIMPLE_CALL returning a value, or just a plain
- GIMPLE_CALL when we are not.
-
- In non-ipa mode, we need to generate constraints for each
- pointer passed by address. */
- else if (is_gimple_call (t))
- {
- tree fndecl = gimple_call_fndecl (t);
- if (fndecl != NULL_TREE
- && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
- /* ??? All builtins that are handled here need to be handled
- in the alias-oracle query functions explicitly! */
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- /* All the following functions return a pointer to the same object
- as their first argument points to. The functions do not add
- to the ESCAPED solution. The functions make the first argument
- pointed to memory point to what the second argument pointed to
- memory points to. */
- case BUILT_IN_STRCPY:
- case BUILT_IN_STRNCPY:
- case BUILT_IN_BCOPY:
- case BUILT_IN_MEMCPY:
- case BUILT_IN_MEMMOVE:
- case BUILT_IN_MEMPCPY:
- case BUILT_IN_STPCPY:
- case BUILT_IN_STPNCPY:
- case BUILT_IN_STRCAT:
- case BUILT_IN_STRNCAT:
+ case BUILT_IN_MEMSET:
+ case BUILT_IN_MEMSET_CHK:
+ {
+ tree res = gimple_call_lhs (t);
+ tree dest = gimple_call_arg (t, 0);
+ unsigned i;
+ ce_s *lhsp;
+ struct constraint_expr ac;
+ if (res != NULL_TREE)
{
- tree res = gimple_call_lhs (t);
- tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
- == BUILT_IN_BCOPY ? 1 : 0));
- tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
- == BUILT_IN_BCOPY ? 0 : 1));
- if (res != NULL_TREE)
- {
- get_constraint_for (res, &lhsc);
- if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
- || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
- || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
- get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
- else
- get_constraint_for (dest, &rhsc);
- process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
- }
- get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
- get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
- do_deref (&lhsc);
- do_deref (&rhsc);
+ get_constraint_for (res, &lhsc);
+ get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
VEC_free (ce_s, heap, lhsc);
VEC_free (ce_s, heap, rhsc);
- return;
}
- case BUILT_IN_MEMSET:
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
+ do_deref (&lhsc);
+ if (flag_delete_null_pointer_checks
+ && integer_zerop (gimple_call_arg (t, 1)))
{
- tree res = gimple_call_lhs (t);
- tree dest = gimple_call_arg (t, 0);
- unsigned i;
- ce_s *lhsp;
- struct constraint_expr ac;
- if (res != NULL_TREE)
- {
- get_constraint_for (res, &lhsc);
- get_constraint_for (dest, &rhsc);
- process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
- }
- get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
- do_deref (&lhsc);
- if (flag_delete_null_pointer_checks
- && integer_zerop (gimple_call_arg (t, 1)))
- {
- ac.type = ADDRESSOF;
- ac.var = nothing_id;
- }
- else
- {
- ac.type = SCALAR;
- ac.var = integer_id;
- }
- ac.offset = 0;
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
- process_constraint (new_constraint (*lhsp, ac));
- VEC_free (ce_s, heap, lhsc);
- return;
+ ac.type = ADDRESSOF;
+ ac.var = nothing_id;
}
- /* All the following functions do not return pointers, do not
- modify the points-to sets of memory reachable from their
- arguments and do not add to the ESCAPED solution. */
- case BUILT_IN_SINCOS:
- case BUILT_IN_SINCOSF:
- case BUILT_IN_SINCOSL:
- case BUILT_IN_FREXP:
- case BUILT_IN_FREXPF:
- case BUILT_IN_FREXPL:
- case BUILT_IN_GAMMA_R:
- case BUILT_IN_GAMMAF_R:
- case BUILT_IN_GAMMAL_R:
- case BUILT_IN_LGAMMA_R:
- case BUILT_IN_LGAMMAF_R:
- case BUILT_IN_LGAMMAL_R:
- case BUILT_IN_MODF:
- case BUILT_IN_MODFF:
- case BUILT_IN_MODFL:
- case BUILT_IN_REMQUO:
- case BUILT_IN_REMQUOF:
- case BUILT_IN_REMQUOL:
- case BUILT_IN_FREE:
- return;
- /* Trampolines are special - they set up passing the static
- frame. */
- case BUILT_IN_INIT_TRAMPOLINE:
+ else
{
- tree tramp = gimple_call_arg (t, 0);
- tree nfunc = gimple_call_arg (t, 1);
- tree frame = gimple_call_arg (t, 2);
- unsigned i;
- struct constraint_expr lhs, *rhsp;
- if (in_ipa_mode)
- {
- varinfo_t nfi = NULL;
- gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
- nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
- if (nfi)
- {
- lhs = get_function_part_constraint (nfi, fi_static_chain);
- get_constraint_for (frame, &rhsc);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
- process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
-
- /* Make the frame point to the function for
- the trampoline adjustment call. */
- get_constraint_for (tramp, &lhsc);
- do_deref (&lhsc);
- get_constraint_for (nfunc, &rhsc);
- process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, rhsc);
- VEC_free (ce_s, heap, lhsc);
-
- return;
- }
- }
- /* Else fallthru to generic handling which will let
- the frame escape. */
- break;
+ ac.type = SCALAR;
+ ac.var = integer_id;
}
- case BUILT_IN_ADJUST_TRAMPOLINE:
+ ac.offset = 0;
+ FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ process_constraint (new_constraint (*lhsp, ac));
+ VEC_free (ce_s, heap, lhsc);
+ return true;
+ }
+ case BUILT_IN_ASSUME_ALIGNED:
+ {
+ tree res = gimple_call_lhs (t);
+ tree dest = gimple_call_arg (t, 0);
+ if (res != NULL_TREE)
{
- tree tramp = gimple_call_arg (t, 0);
- tree res = gimple_call_lhs (t);
- if (in_ipa_mode && res)
+ get_constraint_for (res, &lhsc);
+ get_constraint_for (dest, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
+ }
+ return true;
+ }
+ /* All the following functions do not return pointers, do not
+ modify the points-to sets of memory reachable from their
+ arguments and do not add to the ESCAPED solution. */
+ case BUILT_IN_SINCOS:
+ case BUILT_IN_SINCOSF:
+ case BUILT_IN_SINCOSL:
+ case BUILT_IN_FREXP:
+ case BUILT_IN_FREXPF:
+ case BUILT_IN_FREXPL:
+ case BUILT_IN_GAMMA_R:
+ case BUILT_IN_GAMMAF_R:
+ case BUILT_IN_GAMMAL_R:
+ case BUILT_IN_LGAMMA_R:
+ case BUILT_IN_LGAMMAF_R:
+ case BUILT_IN_LGAMMAL_R:
+ case BUILT_IN_MODF:
+ case BUILT_IN_MODFF:
+ case BUILT_IN_MODFL:
+ case BUILT_IN_REMQUO:
+ case BUILT_IN_REMQUOF:
+ case BUILT_IN_REMQUOL:
+ case BUILT_IN_FREE:
+ return true;
+ /* Trampolines are special - they set up passing the static
+ frame. */
+ case BUILT_IN_INIT_TRAMPOLINE:
+ {
+ tree tramp = gimple_call_arg (t, 0);
+ tree nfunc = gimple_call_arg (t, 1);
+ tree frame = gimple_call_arg (t, 2);
+ unsigned i;
+ struct constraint_expr lhs, *rhsp;
+ if (in_ipa_mode)
+ {
+ varinfo_t nfi = NULL;
+ gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
+ nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
+ if (nfi)
{
- get_constraint_for (res, &lhsc);
- get_constraint_for (tramp, &rhsc);
- do_deref (&rhsc);
+ lhs = get_function_part_constraint (nfi, fi_static_chain);
+ get_constraint_for (frame, &rhsc);
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ process_constraint (new_constraint (lhs, *rhsp));
+ VEC_free (ce_s, heap, rhsc);
+
+ /* Make the frame point to the function for
+ the trampoline adjustment call. */
+ get_constraint_for (tramp, &lhsc);
+ do_deref (&lhsc);
+ get_constraint_for (nfunc, &rhsc);
process_all_all_constraints (lhsc, rhsc);
VEC_free (ce_s, heap, rhsc);
VEC_free (ce_s, heap, lhsc);
+
+ return true;
}
- return;
}
- /* Variadic argument handling needs to be handled in IPA
- mode as well. */
- case BUILT_IN_VA_START:
+ /* Else fallthru to generic handling which will let
+ the frame escape. */
+ break;
+ }
+ case BUILT_IN_ADJUST_TRAMPOLINE:
+ {
+ tree tramp = gimple_call_arg (t, 0);
+ tree res = gimple_call_lhs (t);
+ if (in_ipa_mode && res)
{
- if (in_ipa_mode)
- {
- tree valist = gimple_call_arg (t, 0);
- struct constraint_expr rhs, *lhsp;
- unsigned i;
- /* The va_list gets access to pointers in variadic
- arguments. */
- fi = lookup_vi_for_tree (cfun->decl);
- gcc_assert (fi != NULL);
- get_constraint_for (valist, &lhsc);
- do_deref (&lhsc);
- rhs = get_function_part_constraint (fi, ~0);
- rhs.type = ADDRESSOF;
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
- process_constraint (new_constraint (*lhsp, rhs));
- VEC_free (ce_s, heap, lhsc);
- /* va_list is clobbered. */
- make_constraint_to (get_call_clobber_vi (t)->id, valist);
- return;
- }
- break;
+ get_constraint_for (res, &lhsc);
+ get_constraint_for (tramp, &rhsc);
+ do_deref (&rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, rhsc);
+ VEC_free (ce_s, heap, lhsc);
}
- /* va_end doesn't have any effect that matters. */
- case BUILT_IN_VA_END:
- return;
- /* printf-style functions may have hooks to set pointers to
- point to somewhere into the generated string. Leave them
- for a later excercise... */
- default:
- /* Fallthru to general call handling. */;
- }
- if (!in_ipa_mode
- || (fndecl
- && (!(fi = lookup_vi_for_tree (fndecl))
- || !fi->is_fn_info)))
+ return true;
+ }
+ /* Variadic argument handling needs to be handled in IPA
+ mode as well. */
+ case BUILT_IN_VA_START:
{
- VEC(ce_s, heap) *rhsc = NULL;
- int flags = gimple_call_flags (t);
-
- /* Const functions can return their arguments and addresses
- of global memory but not of escaped memory. */
- if (flags & (ECF_CONST|ECF_NOVOPS))
+ tree valist = gimple_call_arg (t, 0);
+ struct constraint_expr rhs, *lhsp;
+ unsigned i;
+ get_constraint_for (valist, &lhsc);
+ do_deref (&lhsc);
+ /* The va_list gets access to pointers in variadic
+ arguments. Which we know in the case of IPA analysis
+ and otherwise are just all nonlocal variables. */
+ if (in_ipa_mode)
{
- if (gimple_call_lhs (t)
- && could_have_pointers (gimple_call_lhs (t)))
- handle_const_call (t, &rhsc);
+ fi = lookup_vi_for_tree (cfun->decl);
+ rhs = get_function_part_constraint (fi, ~0);
+ rhs.type = ADDRESSOF;
}
- /* Pure functions can return addresses in and of memory
- reachable from their arguments, but they are not an escape
- point for reachable memory of their arguments. */
- else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
- handle_pure_call (t, &rhsc);
else
- handle_rhs_call (t, &rhsc);
- if (gimple_call_lhs (t)
- && could_have_pointers (gimple_call_lhs (t)))
- handle_lhs_call (gimple_call_lhs (t), flags, rhsc, fndecl);
- VEC_free (ce_s, heap, rhsc);
+ {
+ rhs.var = nonlocal_id;
+ rhs.type = ADDRESSOF;
+ rhs.offset = 0;
+ }
+ FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ process_constraint (new_constraint (*lhsp, rhs));
+ VEC_free (ce_s, heap, lhsc);
+ /* va_list is clobbered. */
+ make_constraint_to (get_call_clobber_vi (t)->id, valist);
+ return true;
}
- else
+ /* va_end doesn't have any effect that matters. */
+ case BUILT_IN_VA_END:
+ return true;
+ /* Alternate return. Simply give up for now. */
+ case BUILT_IN_RETURN:
{
- tree lhsop;
- unsigned j;
+ fi = NULL;
+ if (!in_ipa_mode
+ || !(fi = get_vi_for_tree (cfun->decl)))
+ make_constraint_from (get_varinfo (escaped_id), anything_id);
+ else if (in_ipa_mode
+ && fi != NULL)
+ {
+ struct constraint_expr lhs, rhs;
+ lhs = get_function_part_constraint (fi, fi_result);
+ rhs.var = anything_id;
+ rhs.offset = 0;
+ rhs.type = SCALAR;
+ process_constraint (new_constraint (lhs, rhs));
+ }
+ return true;
+ }
+ /* printf-style functions may have hooks to set pointers to
+ point to somewhere into the generated string. Leave them
+ for a later exercise... */
+ default:
+ /* Fallthru to general call handling. */;
+ }
- fi = get_fi_for_callee (t);
+ return false;
+}
- /* Assign all the passed arguments to the appropriate incoming
- parameters of the function. */
- for (j = 0; j < gimple_call_num_args (t); j++)
- {
- struct constraint_expr lhs ;
- struct constraint_expr *rhsp;
- tree arg = gimple_call_arg (t, j);
+/* Create constraints for the call T. */
- if (!could_have_pointers (arg))
- continue;
+static void
+find_func_aliases_for_call (gimple t)
+{
+ tree fndecl = gimple_call_fndecl (t);
+ VEC(ce_s, heap) *lhsc = NULL;
+ VEC(ce_s, heap) *rhsc = NULL;
+ varinfo_t fi;
- get_constraint_for (arg, &rhsc);
- lhs = get_function_part_constraint (fi, fi_parm_base + j);
- while (VEC_length (ce_s, rhsc) != 0)
- {
- rhsp = VEC_last (ce_s, rhsc);
- process_constraint (new_constraint (lhs, *rhsp));
- VEC_pop (ce_s, rhsc);
- }
- }
+ if (fndecl != NULL_TREE
+ && DECL_BUILT_IN (fndecl)
+ && find_func_aliases_for_builtin_call (t))
+ return;
+
+ fi = get_fi_for_callee (t);
+ if (!in_ipa_mode
+ || (fndecl && !fi->is_fn_info))
+ {
+ VEC(ce_s, heap) *rhsc = NULL;
+ int flags = gimple_call_flags (t);
+
+ /* Const functions can return their arguments and addresses
+ of global memory but not of escaped memory. */
+ if (flags & (ECF_CONST|ECF_NOVOPS))
+ {
+ if (gimple_call_lhs (t))
+ handle_const_call (t, &rhsc);
+ }
+ /* Pure functions can return addresses in and of memory
+ reachable from their arguments, but they are not an escape
+ point for reachable memory of their arguments. */
+ else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
+ handle_pure_call (t, &rhsc);
+ else
+ handle_rhs_call (t, &rhsc);
+ if (gimple_call_lhs (t))
+ handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
+ VEC_free (ce_s, heap, rhsc);
+ }
+ else
+ {
+ tree lhsop;
+ unsigned j;
- /* If we are returning a value, assign it to the result. */
- lhsop = gimple_call_lhs (t);
- if (lhsop
- && could_have_pointers (lhsop))
+ /* Assign all the passed arguments to the appropriate incoming
+ parameters of the function. */
+ for (j = 0; j < gimple_call_num_args (t); j++)
+ {
+ struct constraint_expr lhs ;
+ struct constraint_expr *rhsp;
+ tree arg = gimple_call_arg (t, j);
+
+ get_constraint_for_rhs (arg, &rhsc);
+ lhs = get_function_part_constraint (fi, fi_parm_base + j);
+ while (VEC_length (ce_s, rhsc) != 0)
{
- struct constraint_expr rhs;
- struct constraint_expr *lhsp;
-
- get_constraint_for (lhsop, &lhsc);
- rhs = get_function_part_constraint (fi, fi_result);
- if (fndecl
- && DECL_RESULT (fndecl)
- && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
- {
- VEC(ce_s, heap) *tem = NULL;
- VEC_safe_push (ce_s, heap, tem, &rhs);
- do_deref (&tem);
- rhs = *VEC_index (ce_s, tem, 0);
- VEC_free(ce_s, heap, tem);
- }
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
- process_constraint (new_constraint (*lhsp, rhs));
+ rhsp = VEC_last (ce_s, rhsc);
+ process_constraint (new_constraint (lhs, *rhsp));
+ VEC_pop (ce_s, rhsc);
}
+ }
+
+ /* If we are returning a value, assign it to the result. */
+ lhsop = gimple_call_lhs (t);
+ if (lhsop)
+ {
+ struct constraint_expr rhs;
+ struct constraint_expr *lhsp;
- /* If we pass the result decl by reference, honor that. */
- if (lhsop
- && fndecl
+ get_constraint_for (lhsop, &lhsc);
+ rhs = get_function_part_constraint (fi, fi_result);
+ if (fndecl
&& DECL_RESULT (fndecl)
&& DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
{
- struct constraint_expr lhs;
- struct constraint_expr *rhsp;
-
- get_constraint_for_address_of (lhsop, &rhsc);
- lhs = get_function_part_constraint (fi, fi_result);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
- process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ VEC(ce_s, heap) *tem = NULL;
+ VEC_safe_push (ce_s, heap, tem, &rhs);
+ do_deref (&tem);
+ rhs = *VEC_index (ce_s, tem, 0);
+ VEC_free(ce_s, heap, tem);
}
+ FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
+ process_constraint (new_constraint (*lhsp, rhs));
+ }
- /* If we use a static chain, pass it along. */
- if (gimple_call_chain (t))
- {
- struct constraint_expr lhs;
- struct constraint_expr *rhsp;
+ /* If we pass the result decl by reference, honor that. */
+ if (lhsop
+ && fndecl
+ && DECL_RESULT (fndecl)
+ && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
+ {
+ struct constraint_expr lhs;
+ struct constraint_expr *rhsp;
- get_constraint_for (gimple_call_chain (t), &rhsc);
- lhs = get_function_part_constraint (fi, fi_static_chain);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
- process_constraint (new_constraint (lhs, *rhsp));
+ get_constraint_for_address_of (lhsop, &rhsc);
+ lhs = get_function_part_constraint (fi, fi_result);
+ FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
+ process_constraint (new_constraint (lhs, *rhsp));
+ VEC_free (ce_s, heap, rhsc);
+ }
+
+ /* If we use a static chain, pass it along. */
+ if (gimple_call_chain (t))
+ {
+ struct constraint_expr lhs;
+ struct constraint_expr *rhsp;
+
+ get_constraint_for (gimple_call_chain (t), &rhsc);
+ lhs = get_function_part_constraint (fi, fi_static_chain);
+ FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
+ process_constraint (new_constraint (lhs, *rhsp));
+ }
+ }
+}
+
+/* Walk statement T setting up aliasing constraints according to the
+ references found in T. This function is the main part of the
+ constraint builder. AI points to auxiliary alias information used
+ when building alias sets and computing alias grouping heuristics. */
+
+static void
+find_func_aliases (gimple origt)
+{
+ gimple t = origt;
+ VEC(ce_s, heap) *lhsc = NULL;
+ VEC(ce_s, heap) *rhsc = NULL;
+ struct constraint_expr *c;
+ varinfo_t fi;
+
+ /* Now build constraints expressions. */
+ if (gimple_code (t) == GIMPLE_PHI)
+ {
+ size_t i;
+ unsigned int j;
+
+ /* For a phi node, assign all the arguments to
+ the result. */
+ get_constraint_for (gimple_phi_result (t), &lhsc);
+ for (i = 0; i < gimple_phi_num_args (t); i++)
+ {
+ tree strippedrhs = PHI_ARG_DEF (t, i);
+
+ STRIP_NOPS (strippedrhs);
+ get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
+
+ FOR_EACH_VEC_ELT (ce_s, lhsc, j, c)
+ {
+ struct constraint_expr *c2;
+ while (VEC_length (ce_s, rhsc) > 0)
+ {
+ c2 = VEC_last (ce_s, rhsc);
+ process_constraint (new_constraint (*c, *c2));
+ VEC_pop (ce_s, rhsc);
+ }
}
}
}
+ /* In IPA mode, we need to generate constraints to pass call
+ arguments through their calls. There are two cases,
+ either a GIMPLE_CALL returning a value, or just a plain
+ GIMPLE_CALL when we are not.
+
+ In non-ipa mode, we need to generate constraints for each
+ pointer passed by address. */
+ else if (is_gimple_call (t))
+ find_func_aliases_for_call (t);
+
/* Otherwise, just a regular assignment statement. Only care about
operations with pointer result, others are dealt with as escape
points if they have pointer operands. */
- else if (is_gimple_assign (t)
- && could_have_pointers (gimple_assign_lhs (t)))
+ else if (is_gimple_assign (t))
{
/* Otherwise, just a regular assignment statement. */
tree lhsop = gimple_assign_lhs (t);
do_structure_copy (lhsop, rhsop);
else
{
- struct constraint_expr temp;
+ enum tree_code code = gimple_assign_rhs_code (t);
+
get_constraint_for (lhsop, &lhsc);
- if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
+ if (code == POINTER_PLUS_EXPR)
get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
gimple_assign_rhs2 (t), &rhsc);
- else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
+ else if (code == BIT_AND_EXPR
+ && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
+ {
+ /* Aligning a pointer via a BIT_AND_EXPR is offsetting
+ the pointer. Handle it by offsetting it by UNKNOWN. */
+ get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
+ NULL_TREE, &rhsc);
+ }
+ else if ((CONVERT_EXPR_CODE_P (code)
&& !(POINTER_TYPE_P (gimple_expr_type (t))
&& !POINTER_TYPE_P (TREE_TYPE (rhsop))))
|| gimple_assign_single_p (t))
- get_constraint_for (rhsop, &rhsc);
+ get_constraint_for_rhs (rhsop, &rhsc);
+ else if (truth_value_p (code))
+ /* Truth value results are not pointer (parts). Or at least
+ very very unreasonable obfuscation of a part. */
+ ;
else
{
- temp.type = ADDRESSOF;
- temp.var = anything_id;
- temp.offset = 0;
- VEC_safe_push (ce_s, heap, rhsc, &temp);
+ /* All other operations are merges. */
+ VEC (ce_s, heap) *tmp = NULL;
+ struct constraint_expr *rhsp;
+ unsigned i, j;
+ get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
+ for (i = 2; i < gimple_num_ops (t); ++i)
+ {
+ get_constraint_for_rhs (gimple_op (t, i), &tmp);
+ FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
+ VEC_safe_push (ce_s, heap, rhsc, rhsp);
+ VEC_truncate (ce_s, tmp, 0);
+ }
+ VEC_free (ce_s, heap, tmp);
}
process_all_all_constraints (lhsc, rhsc);
}
make_constraint_from_restrict (get_vi_for_tree (lhsop),
"CAST_RESTRICT");
}
- /* For conversions of pointers to non-pointers the pointer escapes. */
- else if (gimple_assign_cast_p (t)
- && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (t)))
- && !POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (t))))
- {
- make_escape_constraint (gimple_assign_rhs1 (t));
- }
/* Handle escapes through return. */
else if (gimple_code (t) == GIMPLE_RETURN
- && gimple_return_retval (t) != NULL_TREE
- && could_have_pointers (gimple_return_retval (t)))
+ && gimple_return_retval (t) != NULL_TREE)
{
fi = NULL;
if (!in_ipa_mode
unsigned i;
lhs = get_function_part_constraint (fi, fi_result);
- get_constraint_for (gimple_return_retval (t), &rhsc);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
}
/* The asm may read global memory, so outputs may point to
any global memory. */
- if (op && could_have_pointers (op))
+ if (op)
{
VEC(ce_s, heap) *lhsc = NULL;
struct constraint_expr rhsc, *lhsp;
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
process_constraint (new_constraint (*lhsp, rhsc));
VEC_free (ce_s, heap, lhsc);
}
/* Strictly we'd only need the constraint to ESCAPED if
the asm clobbers memory, otherwise using something
along the lines of per-call clobbers/uses would be enough. */
- else if (op && could_have_pointers (op))
+ else if (op)
make_escape_constraint (op);
}
}
VEC(ce_s, heap) *ptrc = NULL;
struct constraint_expr *c, lhs;
unsigned i;
- get_constraint_for (ptr, &ptrc);
+ get_constraint_for_rhs (ptr, &ptrc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- for (i = 0; VEC_iterate (ce_s, ptrc, i, c); i++)
+ FOR_EACH_VEC_ELT (ce_s, ptrc, i, c)
process_constraint (new_constraint (lhs, *c));
VEC_free (ce_s, heap, ptrc);
}
tem = TREE_OPERAND (tem, 0);
if ((DECL_P (tem)
&& !auto_var_in_fn_p (tem, cfun->decl))
- || INDIRECT_REF_P (tem))
+ || INDIRECT_REF_P (tem)
+ || (TREE_CODE (tem) == MEM_REF
+ && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
+ && auto_var_in_fn_p
+ (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
{
struct constraint_expr lhsc, *rhsp;
unsigned i;
lhsc = get_function_part_constraint (fi, fi_clobbers);
get_constraint_for_address_of (lhs, &rhsc);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhsc, *rhsp));
VEC_free (ce_s, heap, rhsc);
}
tem = TREE_OPERAND (tem, 0);
if ((DECL_P (tem)
&& !auto_var_in_fn_p (tem, cfun->decl))
- || INDIRECT_REF_P (tem))
+ || INDIRECT_REF_P (tem)
+ || (TREE_CODE (tem) == MEM_REF
+ && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
+ && auto_var_in_fn_p
+ (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
{
struct constraint_expr lhs, *rhsp;
unsigned i;
lhs = get_function_part_constraint (fi, fi_uses);
get_constraint_for_address_of (rhs, &rhsc);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
VEC_free (ce_s, heap, rhsc);
}
case BUILT_IN_STPNCPY:
case BUILT_IN_STRCAT:
case BUILT_IN_STRNCAT:
+ case BUILT_IN_STRCPY_CHK:
+ case BUILT_IN_STRNCPY_CHK:
+ case BUILT_IN_MEMCPY_CHK:
+ case BUILT_IN_MEMMOVE_CHK:
+ case BUILT_IN_MEMPCPY_CHK:
+ case BUILT_IN_STPCPY_CHK:
+ case BUILT_IN_STRCAT_CHK:
+ case BUILT_IN_STRNCAT_CHK:
{
tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
== BUILT_IN_BCOPY ? 1 : 0));
struct constraint_expr *rhsp, *lhsp;
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
process_constraint (new_constraint (lhs, *lhsp));
VEC_free (ce_s, heap, lhsc);
get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
lhs = get_function_part_constraint (fi, fi_uses);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
VEC_free (ce_s, heap, rhsc);
return;
/* The following function clobbers memory pointed to by
its argument. */
case BUILT_IN_MEMSET:
+ case BUILT_IN_MEMSET_CHK:
{
tree dest = gimple_call_arg (t, 0);
unsigned i;
ce_s *lhsp;
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
process_constraint (new_constraint (lhs, *lhsp));
VEC_free (ce_s, heap, lhsc);
return;
return;
}
/* The following functions neither read nor clobber memory. */
+ case BUILT_IN_ASSUME_ALIGNED:
case BUILT_IN_FREE:
return;
/* Trampolines are of no interest to us. */
continue;
get_constraint_for_address_of (arg, &rhsc);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
VEC_free (ce_s, heap, rhsc);
}
unsigned has_unknown_size : 1;
+ unsigned must_have_pointers : 1;
+
unsigned may_have_pointers : 1;
unsigned only_restrict_pointers : 1;
static void
sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
{
- qsort (VEC_address (fieldoff_s, fieldstack),
- VEC_length (fieldoff_s, fieldstack),
- sizeof (fieldoff_s),
- fieldoff_compare);
+ /* VEC_qsort sorts the vector's elements in place using
+ fieldoff_compare as the comparison function. */
+ VEC_qsort (fieldoff_s, fieldstack, fieldoff_compare);
}
/* Return true if V is a tree that we can have subvars for.
return false;
}
+/* Return true if TYPE is a type that must contain pointers:
+ a pointer (or reference) type, an array whose element type must
+ contain pointers, or a function or method type. */
+
+static bool
+type_must_have_pointers (tree type)
+{
+ if (POINTER_TYPE_P (type))
+ return true;
+
+ /* An array must contain pointers iff its element type must. */
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ return type_must_have_pointers (TREE_TYPE (type));
+
+ /* A function or method can have pointers as arguments, so track
+ those separately. */
+ if (TREE_CODE (type) == FUNCTION_TYPE
+ || TREE_CODE (type) == METHOD_TYPE)
+ return true;
+
+ return false;
+}
+
+/* Return true if the type of the decl T must contain pointers.
+ Convenience wrapper around type_must_have_pointers for
+ FIELD_DECLs and similar decls. */
+
+static bool
+field_must_have_pointers (tree t)
+{
+ return type_must_have_pointers (TREE_TYPE (t));
+}
+
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
the fields of TYPE onto fieldstack, recording their offsets along
the way.
if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
return false;
- for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
if (TREE_CODE (field) == FIELD_DECL)
{
bool push = false;
{
fieldoff_s *pair = NULL;
bool has_unknown_size = false;
+ bool must_have_pointers_p;
if (!VEC_empty (fieldoff_s, *fieldstack))
pair = VEC_last (fieldoff_s, *fieldstack);
+ /* If there isn't anything at offset zero, create something. */
+ if (!pair
+ && offset + foff != 0)
+ {
+ pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
+ pair->offset = 0;
+ pair->size = offset + foff;
+ pair->has_unknown_size = false;
+ pair->must_have_pointers = false;
+ pair->may_have_pointers = false;
+ pair->only_restrict_pointers = false;
+ }
+
if (!DECL_SIZE (field)
|| !host_integerp (DECL_SIZE (field), 1))
has_unknown_size = true;
/* If adjacent fields do not contain pointers merge them. */
+ must_have_pointers_p = field_must_have_pointers (field);
if (pair
- && !pair->may_have_pointers
- && !pair->has_unknown_size
&& !has_unknown_size
- && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff
- && !could_have_pointers (field))
+ && !must_have_pointers_p
+ && !pair->must_have_pointers
+ && !pair->has_unknown_size
+ && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
{
pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
}
pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
else
pair->size = -1;
- pair->may_have_pointers = could_have_pointers (field);
+ pair->must_have_pointers = must_have_pointers_p;
+ pair->may_have_pointers = true;
pair->only_restrict_pointers
= (!has_unknown_size
&& POINTER_TYPE_P (TREE_TYPE (field))
/* Capture named arguments for K&R functions. They do not
have a prototype and thus no TYPE_ARG_TYPES. */
- for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
+ for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
++num;
/* Check if the function has variadic arguments. */
/* Creation function node for DECL, using NAME, and return the index
of the variable we've created for the function. */
-static unsigned int
+static varinfo_t
create_function_info_for (tree decl, const char *name)
{
struct function *fn = DECL_STRUCT_FUNCTION (decl);
resultvi->fullsize = vi->fullsize;
resultvi->is_full_var = true;
if (DECL_RESULT (decl))
- resultvi->may_have_pointers = could_have_pointers (DECL_RESULT (decl));
+ resultvi->may_have_pointers = true;
gcc_assert (prev_vi->offset < resultvi->offset);
prev_vi->next = resultvi;
prev_vi = resultvi;
argvi->is_full_var = true;
argvi->fullsize = vi->fullsize;
if (arg)
- argvi->may_have_pointers = could_have_pointers (arg);
+ argvi->may_have_pointers = true;
gcc_assert (prev_vi->offset < argvi->offset);
prev_vi->next = argvi;
prev_vi = argvi;
if (arg)
{
insert_vi_for_tree (arg, argvi);
- arg = TREE_CHAIN (arg);
+ arg = DECL_CHAIN (arg);
}
}
free (tempname);
/* We need something that can be pointed to for va_start. */
- decl = create_tmp_var_raw (ptr_type_node, name);
- get_var_ann (decl);
+ decl = build_fake_var_decl (ptr_type_node);
argvi = new_var_info (decl, newname);
argvi->offset = fi_parm_base + num_args;
prev_vi = argvi;
}
- return vi->id;
+ return vi;
}
unsigned int i;
HOST_WIDE_INT lastoffset = -1;
- for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
+ FOR_EACH_VEC_ELT (fieldoff_s, fieldstack, i, fo)
{
if (fo->offset == lastoffset)
return true;
vi->fullsize = ~0;
vi->is_unknown_size_var = true;
vi->is_full_var = true;
- vi->may_have_pointers = could_have_pointers (decl);
+ vi->may_have_pointers = true;
return vi;
}
{
vi = new_var_info (decl, name);
vi->offset = 0;
- vi->may_have_pointers = could_have_pointers (decl);
+ vi->may_have_pointers = true;
vi->fullsize = TREE_INT_CST_LOW (declsize);
vi->size = vi->fullsize;
vi->is_full_var = true;
insert_vi_for_tree (decl, vi);
+ if (TREE_CODE (decl) != VAR_DECL)
+ return id;
+
/* Create initial constraints for globals. */
for (; vi; vi = vi->next)
{
|| vi->only_restrict_pointers)
make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
- /* For escaped variables initialize them from nonlocal. */
+ /* In non-IPA mode the initializer from nonlocal is all we need. */
if (!in_ipa_mode
- || DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
+ || DECL_HARD_REGISTER (decl))
make_copy_constraint (vi, nonlocal_id);
- /* If this is a global variable with an initializer and we are in
- IPA mode generate constraints for it. In non-IPA mode
- the initializer from nonlocal is all we need. */
- if (in_ipa_mode
- && DECL_INITIAL (decl))
+ else
{
- VEC (ce_s, heap) *rhsc = NULL;
- struct constraint_expr lhs, *rhsp;
- unsigned i;
- get_constraint_for (DECL_INITIAL (decl), &rhsc);
- lhs.var = vi->id;
- lhs.offset = 0;
- lhs.type = SCALAR;
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
- process_constraint (new_constraint (lhs, *rhsp));
- /* If this is a variable that escapes from the unit
- the initializer escapes as well. */
- if (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
+ struct varpool_node *vnode = varpool_get_node (decl);
+
+ /* For escaped variables initialize them from nonlocal. */
+ if (!varpool_all_refs_explicit_p (vnode))
+ make_copy_constraint (vi, nonlocal_id);
+
+ /* If this is a global variable with an initializer and we are in
+ IPA mode generate constraints for it. */
+ if (DECL_INITIAL (decl))
{
- lhs.var = escaped_id;
+ VEC (ce_s, heap) *rhsc = NULL;
+ struct constraint_expr lhs, *rhsp;
+ unsigned i;
+ get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
+ lhs.var = vi->id;
lhs.offset = 0;
lhs.type = SCALAR;
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
+ /* If this is a variable that escapes from the unit
+ the initializer escapes as well. */
+ if (!varpool_all_refs_explicit_p (vnode))
+ {
+ lhs.var = escaped_id;
+ lhs.offset = 0;
+ lhs.type = SCALAR;
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ process_constraint (new_constraint (lhs, *rhsp));
+ }
+ VEC_free (ce_s, heap, rhsc);
}
- VEC_free (ce_s, heap, rhsc);
}
}
/* Print the points-to solution for VAR to stdout. */
-void
+DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
dump_solution_for_var (stdout, var);
/* For each incoming pointer argument arg, create the constraint ARG
= NONLOCAL or a dummy variable if it is a restrict qualified
passed-by-reference argument. */
- for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
+ for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
{
varinfo_t p;
- if (!could_have_pointers (t))
- continue;
-
/* For restrict qualified pointers to objects passed by
reference build a real representative for the pointed-to object. */
if (DECL_BY_REFERENCE (t)
{
struct constraint_expr lhsc, rhsc;
varinfo_t vi;
- tree heapvar = heapvar_lookup (t, 0);
- if (heapvar == NULL_TREE)
- {
- var_ann_t ann;
- heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
- "PARM_NOALIAS");
- DECL_EXTERNAL (heapvar) = 1;
- heapvar_insert (t, 0, heapvar);
- ann = get_var_ann (heapvar);
- ann->is_heapvar = 1;
- }
- if (gimple_referenced_vars (cfun))
- add_referenced_var (heapvar);
+ tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
+ DECL_EXTERNAL (heapvar) = 1;
+ vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
+ insert_vi_for_tree (heapvar, vi);
lhsc.var = get_vi_for_tree (t)->id;
lhsc.type = SCALAR;
lhsc.offset = 0;
- rhsc.var = (vi = get_vi_for_tree (heapvar))->id;
+ rhsc.var = vi->id;
rhsc.type = ADDRESSOF;
rhsc.offset = 0;
process_constraint (new_constraint (lhsc, rhsc));
vi->is_restrict_var = 1;
+ for (; vi; vi = vi->next)
+ if (vi->may_have_pointers)
+ {
+ if (vi->only_restrict_pointers)
+ make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
+ make_copy_constraint (vi, nonlocal_id);
+ }
continue;
}
/* For parameters, get at the points-to set for the actual parm
decl. */
if (TREE_CODE (p) == SSA_NAME
- && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
+ && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL)
&& SSA_NAME_IS_DEFAULT_DEF (p))
lookup_p = SSA_NAME_VAR (p);
pt->vars_contains_restrict = vars_contains_restrict;
}
+/* Set the points-to solution *PT to point only to the variable VAR. */
+
+void
+pt_solution_set_var (struct pt_solution *pt, tree var)
+{
+ /* Clear all flags and drop any previous vars bitmap first. */
+ memset (pt, 0, sizeof (struct pt_solution));
+ /* The solution bitmap is GC-allocated and indexed by DECL_PT_UID,
+ not DECL_UID. */
+ pt->vars = BITMAP_GGC_ALLOC ();
+ bitmap_set_bit (pt->vars, DECL_PT_UID (var));
+ pt->vars_contains_global = is_global_var (var);
+}
+
/* Computes the union of the points-to solutions *DEST and *SRC and
stores the result in *DEST. This changes the points-to bitmap
of *DEST and thus may not be used if that might be shared.
/* Debug points-to information to stderr. */
-void
+DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
dump_sa_points_to_info (stderr);
shared_bitmap_table = htab_create (511, shared_bitmap_hash,
shared_bitmap_eq, free);
init_base_vars ();
+
+ gcc_obstack_init (&fake_var_decl_obstack);
}
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
bitmap_obstack_release (&predbitmap_obstack);
}
-/* Initialize the heapvar for statement mapping. */
-
-static void
-init_alias_heapvars (void)
-{
- if (!heapvar_for_stmt)
- heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, heapvar_map_eq,
- NULL);
-}
-
-/* Delete the heapvar for statement mapping. */
-
-void
-delete_alias_heapvars (void)
-{
- if (heapvar_for_stmt)
- htab_delete (heapvar_for_stmt);
- heapvar_for_stmt = NULL;
-}
-
/* Solve the constraint set. */
static void
rewrite_constraints (graph, si);
build_succ_graph ();
- free_var_substitution_info (si);
- if (dump_file && (dump_flags & TDF_GRAPH))
- dump_constraint_graph (dump_file);
+ free_var_substitution_info (si);
+ /* Attach complex constraints to graph nodes. */
move_complex_constraints (graph);
if (dump_file)
point. */
remove_preds_and_fake_succs (graph);
+ if (dump_file && (dump_flags & TDF_GRAPH))
+ {
+ fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
+ "in dot format:\n");
+ dump_constraint_graph (dump_file);
+ fprintf (dump_file, "\n\n");
+ }
+
if (dump_file)
fprintf (dump_file, "Solving graph\n");
solve_graph (graph);
+ if (dump_file && (dump_flags & TDF_GRAPH))
+ {
+ fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
+ "in dot format:\n");
+ dump_constraint_graph (dump_file);
+ fprintf (dump_file, "\n\n");
+ }
+
if (dump_file)
dump_sa_points_to_info (dump_file);
}
timevar_push (TV_TREE_PTA);
init_alias_vars ();
- init_alias_heapvars ();
intra_create_variable_infos ();
cfun->gimple_df->escaped.escaped = 0;
/* Mark escaped HEAP variables as global. */
- for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); ++i)
+ FOR_EACH_VEC_ELT (varinfo_t, varmap, i, vi)
if (vi->is_heap_var
&& !vi->is_restrict_var
&& !vi->is_global_var)
VEC_free (varinfo_t, heap, varmap);
free_alloc_pool (variable_info_pool);
free_alloc_pool (constraint_pool);
+
+ obstack_free (&fake_var_decl_obstack, NULL);
}
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
+ TODO_rebuild_alias /* todo_flags_finish */
}
};
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
+ TODO_rebuild_alias /* todo_flags_finish */
}
};
return (optimize
&& flag_ipa_pta
/* Don't bother doing anything if the program has errors. */
- && !(errorcount || sorrycount));
+ && !seen_error ());
}
/* IPA PTA solutions for ESCAPED. */
struct pt_solution ipa_escaped_pt
= { true, false, false, false, false, false, false, NULL };
+/* Associate NODE with the varinfo DATA if NODE is an alias or a
+ thunk, so lookups through the alias decl find the same varinfo.
+ Worker for cgraph_for_node_and_aliases; always returns false so
+ that the walk continues over all aliases of the node. */
+static bool
+associate_varinfo_to_alias (struct cgraph_node *node, void *data)
+{
+ if (node->alias || node->thunk.thunk_p)
+ insert_vi_for_tree (node->decl, (varinfo_t)data);
+ return false;
+}
+
/* Execute the driver for IPA PTA. */
static unsigned int
ipa_pta_execute (void)
in_ipa_mode = 1;
- init_alias_heapvars ();
init_alias_vars ();
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ dump_cgraph (dump_file);
+ fprintf (dump_file, "\n");
+ }
+
/* Build the constraints. */
for (node = cgraph_nodes; node; node = node->next)
{
+ varinfo_t vi;
/* Nodes without a body are not interesting. Especially do not
visit clones at this point for now - we get duplicate decls
there for inline clones at least. */
- if (!gimple_has_body_p (node->decl)
- || node->clone_of)
+ if (!cgraph_function_with_gimple_body_p (node))
continue;
- create_function_info_for (node->decl,
- cgraph_node_name (node));
+ gcc_assert (!node->clone_of);
+
+ vi = create_function_info_for (node->decl,
+ alias_get_name (node->decl));
+ cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
}
/* Create constraints for global variables and their initializers. */
for (var = varpool_nodes; var; var = var->next)
- get_vi_for_tree (var->decl);
+ {
+ if (var->alias)
+ continue;
+
+ get_vi_for_tree (var->decl);
+ }
if (dump_file)
{
tree old_func_decl;
/* Nodes without a body are not interesting. */
- if (!gimple_has_body_p (node->decl)
- || node->clone_of)
+ if (!cgraph_function_with_gimple_body_p (node))
continue;
if (dump_file)
- fprintf (dump_file,
- "Generating constraints for %s\n",
- cgraph_node_name (node));
+ {
+ fprintf (dump_file,
+ "Generating constraints for %s", cgraph_node_name (node));
+ if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
+ fprintf (dump_file, " (%s)",
+ IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
+ fprintf (dump_file, "\n");
+ }
func = DECL_STRUCT_FUNCTION (node->decl);
old_func_decl = current_function_decl;
push_cfun (func);
current_function_decl = node->decl;
- /* For externally visible functions use local constraints for
- their arguments. For local functions we see all callers
- and thus do not need initial constraints for parameters. */
- if (node->local.externally_visible)
- intra_create_variable_infos ();
+ /* For externally visible or attribute used annotated functions use
+ local constraints for their arguments.
+ For local functions we see all callers and thus do not need initial
+ constraints for parameters. */
+ if (node->reachable_from_other_partition
+ || node->local.externally_visible
+ || node->needed)
+ {
+ intra_create_variable_infos ();
+
+ /* We also need to make function return values escape. Nothing
+ escapes by returning from main though. */
+ if (!MAIN_NAME_P (DECL_NAME (node->decl)))
+ {
+ varinfo_t fi, rvi;
+ fi = lookup_vi_for_tree (node->decl);
+ rvi = first_vi_for_offset (fi, fi_result);
+ if (rvi && rvi->offset == fi_result)
+ {
+ struct constraint_expr includes;
+ struct constraint_expr var;
+ includes.var = escaped_id;
+ includes.offset = 0;
+ includes.type = SCALAR;
+ var.var = rvi->id;
+ var.offset = 0;
+ var.type = SCALAR;
+ process_constraint (new_constraint (includes, var));
+ }
+ }
+ }
/* Build constraints for the function body. */
FOR_EACH_BB_FN (bb, func)
struct cgraph_edge *e;
/* Nodes without a body are not interesting. */
- if (!gimple_has_body_p (node->decl)
- || node->clone_of)
+ if (!cgraph_function_with_gimple_body_p (node))
continue;
fn = DECL_STRUCT_FUNCTION (node->decl);
/* Compute the points-to sets for pointer SSA_NAMEs. */
- for (i = 0; VEC_iterate (tree, fn->gimple_df->ssa_names, i, ptr); ++i)
+ FOR_EACH_VEC_ELT (tree, fn->gimple_df->ssa_names, i, ptr)
{
if (ptr
&& POINTER_TYPE_P (TREE_TYPE (ptr)))
TODO_update_ssa /* todo_flags_finish */
}
};
-
-
-#include "gt-tree-ssa-structalias.h"