/* Tree based points-to analysis
- Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
- Free Software Foundation, Inc.
+ Copyright (C) 2005-2013 Free Software Foundation, Inc.
Contributed by Daniel Berlin <dberlin@dberlin.org>
This file is part of GCC.
#include "obstack.h"
#include "bitmap.h"
#include "flags.h"
-#include "rtl.h"
-#include "tm_p.h"
-#include "hard-reg-set.h"
#include "basic-block.h"
-#include "output.h"
#include "tree.h"
#include "tree-flow.h"
#include "tree-inline.h"
-#include "diagnostic.h"
-#include "toplev.h"
+#include "diagnostic-core.h"
#include "gimple.h"
#include "hashtab.h"
#include "function.h"
#include "cgraph.h"
#include "tree-pass.h"
-#include "timevar.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "params.h"
And probably more. */
-static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
-htab_t heapvar_for_stmt;
-
static bool use_field_sensitive = true;
static int in_ipa_mode = 0;
struct constraint;
typedef struct constraint *constraint_t;
-DEF_VEC_P(constraint_t);
-DEF_VEC_ALLOC_P(constraint_t,heap);
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
if (a) \
/* True if this is a heap variable. */
unsigned int is_heap_var : 1;
- /* True if this is a variable tracking a restrict pointer source. */
- unsigned int is_restrict_var : 1;
-
/* True if this field may contain pointers. */
unsigned int may_have_pointers : 1;
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
+static inline bool type_can_have_subvars (const_tree);
/* Pool of variable info structures. */
static alloc_pool variable_info_pool;
-DEF_VEC_P(varinfo_t);
-
-DEF_VEC_ALLOC_P(varinfo_t, heap);
+/* Map varinfo to final pt_solution. */
+static pointer_map_t *final_solutions;
+struct obstack final_solutions_obstack;
/* Table of variable info structures for constraint variables.
Indexed directly by variable info id. */
-static VEC(varinfo_t,heap) *varmap;
+static vec<varinfo_t> varmap;
/* Return the varmap element N */
static inline varinfo_t
get_varinfo (unsigned int n)
{
- return VEC_index (varinfo_t, varmap, n);
+ return varmap[n];
}
/* Static IDs for the special variables. */
escaped_id = 3, nonlocal_id = 4,
storedanything_id = 5, integer_id = 6 };
-struct GTY(()) heapvar_map {
- struct tree_map map;
- unsigned HOST_WIDE_INT offset;
-};
-
-static int
-heapvar_map_eq (const void *p1, const void *p2)
-{
- const struct heapvar_map *h1 = (const struct heapvar_map *)p1;
- const struct heapvar_map *h2 = (const struct heapvar_map *)p2;
- return (h1->map.base.from == h2->map.base.from
- && h1->offset == h2->offset);
-}
-
-static unsigned int
-heapvar_map_hash (struct heapvar_map *h)
-{
- return iterative_hash_host_wide_int (h->offset,
- htab_hash_pointer (h->map.base.from));
-}
-
-/* Lookup a heap var for FROM, and return it if we find one. */
-
-static tree
-heapvar_lookup (tree from, unsigned HOST_WIDE_INT offset)
-{
- struct heapvar_map *h, in;
- in.map.base.from = from;
- in.offset = offset;
- h = (struct heapvar_map *) htab_find_with_hash (heapvar_for_stmt, &in,
- heapvar_map_hash (&in));
- if (h)
- return h->map.to;
- return NULL_TREE;
-}
-
-/* Insert a mapping FROM->TO in the heap var for statement
- hashtable. */
-
-static void
-heapvar_insert (tree from, unsigned HOST_WIDE_INT offset, tree to)
-{
- struct heapvar_map *h;
- void **loc;
-
- h = GGC_NEW (struct heapvar_map);
- h->map.base.from = from;
- h->offset = offset;
- h->map.hash = heapvar_map_hash (h);
- h->map.to = to;
- loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->map.hash, INSERT);
- gcc_assert (*loc == NULL);
- *(struct heapvar_map **) loc = h;
-}
-
/* Return a new variable info structure consisting for a variable
named NAME, and using constraint graph node NODE. Append it
to the vector of variable info structures. */
static varinfo_t
new_var_info (tree t, const char *name)
{
- unsigned index = VEC_length (varinfo_t, varmap);
+ unsigned index = varmap.length ();
varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
ret->id = index;
ret->is_unknown_size_var = false;
ret->is_full_var = (t == NULL_TREE);
ret->is_heap_var = false;
- ret->is_restrict_var = false;
ret->may_have_pointers = true;
ret->only_restrict_pointers = false;
ret->is_global_var = (t == NULL_TREE);
ret->is_fn_info = false;
if (t && DECL_P (t))
- ret->is_global_var = is_global_var (t);
+ ret->is_global_var = (is_global_var (t)
+ /* We have to treat even local register variables
+ as escape points. */
+ || (TREE_CODE (t) == VAR_DECL
+ && DECL_HARD_REGISTER (t)));
ret->solution = BITMAP_ALLOC (&pta_obstack);
- ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
+ ret->oldsolution = NULL;
ret->next = NULL;
stats.total_vars++;
- VEC_safe_push (varinfo_t, heap, varmap, ret);
+ varmap.safe_push (ret);
return ret;
}
#define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
typedef struct constraint_expr ce_s;
-DEF_VEC_O(ce_s);
-DEF_VEC_ALLOC_O(ce_s, heap);
-static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
-static void get_constraint_for (tree, VEC(ce_s, heap) **);
-static void do_deref (VEC (ce_s, heap) **);
+static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
+static void get_constraint_for (tree, vec<ce_s> *);
+static void get_constraint_for_rhs (tree, vec<ce_s> *);
+static void do_deref (vec<ce_s> *);
/* Our set constraints are made up of two constraint expressions, one
LHS, and one RHS.
/* List of constraints that we use to build the constraint graph from. */
-static VEC(constraint_t,heap) *constraints;
+static vec<constraint_t> constraints;
static alloc_pool constraint_pool;
/* The constraint graph is represented as an array of bitmaps
/* Vector of complex constraints for each graph node. Complex
constraints are those involving dereferences or offsets that are
not 0. */
- VEC(constraint_t,heap) **complex;
+ vec<constraint_t> *complex;
};
static constraint_graph_t graph;
cycle finding, we create nodes to represent dereferences and
address taken constraints. These represent where these start and
end. */
-#define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
+#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
/* Return the representative node for NODE, if NODE has been unioned
fprintf (file, " + UNKNOWN");
else if (c->rhs.offset != 0)
fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
- fprintf (file, "\n");
}
/* Print out constraint C to stderr. */
-void
+DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
dump_constraint (stderr, c);
+ fprintf (stderr, "\n");
}
/* Print out all constraints to FILE */
{
int i;
constraint_t c;
- for (i = from; VEC_iterate (constraint_t, constraints, i, c); i++)
- dump_constraint (file, c);
+ for (i = from; constraints.iterate (i, &c); i++)
+ if (c)
+ {
+ dump_constraint (file, c);
+ fprintf (file, "\n");
+ }
}
/* Print out all constraints to stderr. */
-void
+DEBUG_FUNCTION void
debug_constraints (void)
{
dump_constraints (stderr, 0);
}
-/* Print out to FILE the edge in the constraint graph that is created by
- constraint c. The edge may have a label, depending on the type of
- constraint that it represents. If complex1, e.g: a = *b, then the label
- is "=*", if complex2, e.g: *a = b, then the label is "*=", if
- complex with an offset, e.g: a = b + 8, then the label is "+".
- Otherwise the edge has no label. */
-
-static void
-dump_constraint_edge (FILE *file, constraint_t c)
-{
- if (c->rhs.type != ADDRESSOF)
- {
- const char *src = get_varinfo (c->rhs.var)->name;
- const char *dst = get_varinfo (c->lhs.var)->name;
- fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
- /* Due to preprocessing of constraints, instructions like *a = *b are
- illegal; thus, we do not have to handle such cases. */
- if (c->lhs.type == DEREF)
- fprintf (file, " [ label=\"*=\" ] ;\n");
- else if (c->rhs.type == DEREF)
- fprintf (file, " [ label=\"=*\" ] ;\n");
- else
- {
- /* We must check the case where the constraint is an offset.
- In this case, it is treated as a complex constraint. */
- if (c->rhs.offset != c->lhs.offset)
- fprintf (file, " [ label=\"+\" ] ;\n");
- else
- fprintf (file, " ;\n");
- }
- }
-}
-
/* Print the constraint graph in dot format. */
static void
dump_constraint_graph (FILE *file)
{
- unsigned int i=0, size;
- constraint_t c;
+ unsigned int i;
/* Only print the graph if it has already been initialized: */
if (!graph)
return;
- /* Print the constraints used to produce the constraint graph. The
- constraints will be printed as comments in the dot file: */
- fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
- dump_constraints (file, 0);
- fprintf (file, "*/\n");
-
/* Prints the header of the dot file: */
- fprintf (file, "\n\n// The constraint graph in dot format:\n");
fprintf (file, "strict digraph {\n");
fprintf (file, " node [\n shape = box\n ]\n");
fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
- fprintf (file, "\n // List of nodes in the constraint graph:\n");
-
- /* The next lines print the nodes in the graph. In order to get the
- number of nodes in the graph, we must choose the minimum between the
- vector VEC (varinfo_t, varmap) and graph->size. If the graph has not
- yet been initialized, then graph->size == 0, otherwise we must only
- read nodes that have an entry in VEC (varinfo_t, varmap). */
- size = VEC_length (varinfo_t, varmap);
- size = size < graph->size ? size : graph->size;
- for (i = 0; i < size; i++)
+ fprintf (file, "\n // List of nodes and complex constraints in "
+ "the constraint graph:\n");
+
+ /* The next lines print the nodes in the graph together with the
+ complex constraints attached to them. */
+ for (i = 0; i < graph->size; i++)
{
- const char *name = get_varinfo (graph->rep[i])->name;
- fprintf (file, " \"%s\" ;\n", name);
+ if (find (i) != i)
+ continue;
+ if (i < FIRST_REF_NODE)
+ fprintf (file, "\"%s\"", get_varinfo (i)->name);
+ else
+ fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
+ if (graph->complex[i].exists ())
+ {
+ unsigned j;
+ constraint_t c;
+ fprintf (file, " [label=\"\\N\\n");
+ for (j = 0; graph->complex[i].iterate (j, &c); ++j)
+ {
+ dump_constraint (file, c);
+ fprintf (file, "\\l");
+ }
+ fprintf (file, "\"]");
+ }
+ fprintf (file, ";\n");
}
- /* Go over the list of constraints printing the edges in the constraint
- graph. */
- fprintf (file, "\n // The constraint edges:\n");
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
- if (c)
- dump_constraint_edge (file, c);
+ /* Go over the edges. */
+ fprintf (file, "\n // Edges in the constraint graph:\n");
+ for (i = 0; i < graph->size; i++)
+ {
+ unsigned j;
+ bitmap_iterator bi;
+ if (find (i) != i)
+ continue;
+ EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
+ {
+ unsigned to = find (j);
+ if (i == to)
+ continue;
+ if (i < FIRST_REF_NODE)
+ fprintf (file, "\"%s\"", get_varinfo (i)->name);
+ else
+ fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
+ fprintf (file, " -> ");
+ if (to < FIRST_REF_NODE)
+ fprintf (file, "\"%s\"", get_varinfo (to)->name);
+ else
+ fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
+ fprintf (file, ";\n");
+ }
+ }
- /* Prints the tail of the dot file. By now, only the closing bracket. */
- fprintf (file, "}\n\n\n");
+ /* Prints the tail of the dot file. */
+ fprintf (file, "}\n");
}
/* Print out the constraint graph to stderr. */
-void
+DEBUG_FUNCTION void
debug_constraint_graph (void)
{
dump_constraint_graph (stderr);
arbitrary, but consistent, in order to give them an ordering. */
static bool
-constraint_less (const constraint_t a, const constraint_t b)
+constraint_less (const constraint_t &a, const constraint_t &b)
{
if (constraint_expr_less (a->lhs, b->lhs))
return true;
/* Find a constraint LOOKFOR in the sorted constraint vector VEC */
static constraint_t
-constraint_vec_find (VEC(constraint_t,heap) *vec,
+constraint_vec_find (vec<constraint_t> vec,
struct constraint lookfor)
{
unsigned int place;
constraint_t found;
- if (vec == NULL)
+ if (!vec.exists ())
return NULL;
- place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
- if (place >= VEC_length (constraint_t, vec))
+ place = vec.lower_bound (&lookfor, constraint_less);
+ if (place >= vec.length ())
return NULL;
- found = VEC_index (constraint_t, vec, place);
+ found = vec[place];
if (!constraint_equal (*found, lookfor))
return NULL;
return found;
/* Union two constraint vectors, TO and FROM. Put the result in TO. */
static void
-constraint_set_union (VEC(constraint_t,heap) **to,
- VEC(constraint_t,heap) **from)
+constraint_set_union (vec<constraint_t> *to,
+ vec<constraint_t> *from)
{
int i;
constraint_t c;
- for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
+ FOR_EACH_VEC_ELT (*from, i, c)
{
if (constraint_vec_find (*to, *c) == NULL)
{
- unsigned int place = VEC_lower_bound (constraint_t, *to, c,
- constraint_less);
- VEC_safe_insert (constraint_t, heap, *to, place, c);
+ unsigned int place = to->lower_bound (c, constraint_less);
+ to->safe_insert (place, c);
}
}
}
insert_into_complex (constraint_graph_t graph,
unsigned int var, constraint_t c)
{
- VEC (constraint_t, heap) *complex = graph->complex[var];
- unsigned int place = VEC_lower_bound (constraint_t, complex, c,
- constraint_less);
+ vec<constraint_t> complex = graph->complex[var];
+ unsigned int place = complex.lower_bound (c, constraint_less);
/* Only insert constraints that do not already exist. */
- if (place >= VEC_length (constraint_t, complex)
- || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
- VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
+ if (place >= complex.length ()
+ || !constraint_equal (*c, *complex[place]))
+ graph->complex[var].safe_insert (place, c);
}
gcc_assert (find (from) == to);
/* Move all complex constraints from src node into to node */
- for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
+ FOR_EACH_VEC_ELT (graph->complex[from], i, c)
{
/* In complex constraints for node src, we may have either
a = *src, and *src = a, or an offseted constraint which are
c->rhs.var = to;
}
constraint_set_union (&graph->complex[to], &graph->complex[from]);
- VEC_free (constraint_t, heap, graph->complex[from]);
- graph->complex[from] = NULL;
+ graph->complex[from].release ();
}
graph->succs = XCNEWVEC (bitmap, graph->size);
graph->indirect_cycles = XNEWVEC (int, graph->size);
graph->rep = XNEWVEC (unsigned int, graph->size);
- graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
+ /* ??? Macros do not support template types with multiple arguments,
+ so we use a typedef to work around it. */
+ typedef vec<constraint_t> vec_constraint_t_heap;
+ graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
graph->pe = XCNEWVEC (unsigned int, graph->size);
graph->pe_rep = XNEWVEC (int, graph->size);
graph->eq_rep = XNEWVEC (int, graph->size);
graph->direct_nodes = sbitmap_alloc (graph->size);
graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
- sbitmap_zero (graph->direct_nodes);
+ bitmap_clear (graph->direct_nodes);
for (j = 0; j < FIRST_REF_NODE; j++)
{
if (!get_varinfo (j)->is_special_var)
- SET_BIT (graph->direct_nodes, j);
+ bitmap_set_bit (graph->direct_nodes, j);
}
for (j = 0; j < graph->size; j++)
graph->eq_rep[j] = -1;
- for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
+ for (j = 0; j < varmap.length (); j++)
graph->indirect_cycles[j] = -1;
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraints, i, c)
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
else
- RESET_BIT (graph->direct_nodes, lhsvar);
+ bitmap_clear_bit (graph->direct_nodes, lhsvar);
}
else if (rhs.type == ADDRESSOF)
{
add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
/* All related variables are no longer direct nodes. */
- RESET_BIT (graph->direct_nodes, rhsvar);
+ bitmap_clear_bit (graph->direct_nodes, rhsvar);
v = get_varinfo (rhsvar);
if (!v->is_full_var)
{
v = lookup_vi_for_tree (v->decl);
do
{
- RESET_BIT (graph->direct_nodes, v->id);
+ bitmap_clear_bit (graph->direct_nodes, v->id);
v = v->next;
}
while (v != NULL);
else if (lhs.offset != 0 || rhs.offset != 0)
{
if (rhs.offset != 0)
- RESET_BIT (graph->direct_nodes, lhs.var);
+ bitmap_clear_bit (graph->direct_nodes, lhs.var);
else if (lhs.offset != 0)
- RESET_BIT (graph->direct_nodes, rhs.var);
+ bitmap_clear_bit (graph->direct_nodes, rhs.var);
}
}
}
unsigned i, t;
constraint_t c;
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraints, i, c)
{
struct constraint_expr lhs;
struct constraint_expr rhs;
t = find (storedanything_id);
for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
{
- if (!TEST_BIT (graph->direct_nodes, i)
+ if (!bitmap_bit_p (graph->direct_nodes, i)
&& get_varinfo (i)->may_have_pointers)
add_graph_edge (graph, find (i), t);
}
/* Changed variables on the last iteration. */
-static unsigned int changed_count;
-static sbitmap changed;
+static bitmap changed;
/* Strongly Connected Component visitation info. */
unsigned int *dfs;
unsigned int *node_mapping;
int current_index;
- VEC(unsigned,heap) *scc_stack;
+ vec<unsigned> scc_stack;
};
bitmap_iterator bi;
unsigned int my_dfs;
- SET_BIT (si->visited, n);
+ bitmap_set_bit (si->visited, n);
si->dfs[n] = si->current_index ++;
my_dfs = si->dfs[n];
break;
w = find (i);
- if (TEST_BIT (si->deleted, w))
+ if (bitmap_bit_p (si->deleted, w))
continue;
- if (!TEST_BIT (si->visited, w))
+ if (!bitmap_bit_p (si->visited, w))
scc_visit (graph, si, w);
{
unsigned int t = find (w);
/* See if any components have been identified. */
if (si->dfs[n] == my_dfs)
{
- if (VEC_length (unsigned, si->scc_stack) > 0
- && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
+ if (si->scc_stack.length () > 0
+ && si->dfs[si->scc_stack.last ()] >= my_dfs)
{
bitmap scc = BITMAP_ALLOC (NULL);
unsigned int lowest_node;
bitmap_set_bit (scc, n);
- while (VEC_length (unsigned, si->scc_stack) != 0
- && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
+ while (si->scc_stack.length () != 0
+ && si->dfs[si->scc_stack.last ()] >= my_dfs)
{
- unsigned int w = VEC_pop (unsigned, si->scc_stack);
+ unsigned int w = si->scc_stack.pop ();
bitmap_set_bit (scc, w);
}
}
}
}
- SET_BIT (si->deleted, n);
+ bitmap_set_bit (si->deleted, n);
}
else
- VEC_safe_push (unsigned, heap, si->scc_stack, n);
+ si->scc_stack.safe_push (n);
}
/* Unify node FROM into node TO, updating the changed count if
/* Mark TO as changed if FROM was changed. If TO was already marked
as changed, decrease the changed count. */
- if (update_changed && TEST_BIT (changed, from))
+ if (update_changed
+ && bitmap_bit_p (changed, from))
{
- RESET_BIT (changed, from);
- if (!TEST_BIT (changed, to))
- SET_BIT (changed, to);
- else
- {
- gcc_assert (changed_count > 0);
- changed_count--;
- }
+ bitmap_clear_bit (changed, from);
+ bitmap_set_bit (changed, to);
}
if (get_varinfo (from)->solution)
{
if (bitmap_ior_into (get_varinfo (to)->solution,
get_varinfo (from)->solution))
{
- if (update_changed && !TEST_BIT (changed, to))
- {
- SET_BIT (changed, to);
- changed_count++;
- }
+ if (update_changed)
+ bitmap_set_bit (changed, to);
}
BITMAP_FREE (get_varinfo (from)->solution);
- BITMAP_FREE (get_varinfo (from)->oldsolution);
+ if (get_varinfo (from)->oldsolution)
+ BITMAP_FREE (get_varinfo (from)->oldsolution);
- if (stats.iterations > 0)
- {
- BITMAP_FREE (get_varinfo (to)->oldsolution);
- get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
- }
+ if (stats.iterations > 0
+ && get_varinfo (to)->oldsolution)
+ BITMAP_FREE (get_varinfo (to)->oldsolution);
}
if (valid_graph_edge (graph, to, to))
{
sbitmap visited;
/* Array that stores the topological order of the graph, *in
reverse*. */
- VEC(unsigned,heap) *topo_order;
+ vec<unsigned> topo_order;
};
size_t size = graph->size;
struct topo_info *ti = XNEW (struct topo_info);
ti->visited = sbitmap_alloc (size);
- sbitmap_zero (ti->visited);
- ti->topo_order = VEC_alloc (unsigned, heap, 1);
+ bitmap_clear (ti->visited);
+ ti->topo_order.create (1);
return ti;
}
free_topo_info (struct topo_info *ti)
{
sbitmap_free (ti->visited);
- VEC_free (unsigned, heap, ti->topo_order);
+ ti->topo_order.release ();
free (ti);
}
bitmap_iterator bi;
unsigned int j;
- SET_BIT (ti->visited, n);
+ bitmap_set_bit (ti->visited, n);
if (graph->succs[n])
EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
{
- if (!TEST_BIT (ti->visited, j))
+ if (!bitmap_bit_p (ti->visited, j))
topo_visit (graph, ti, j);
}
- VEC_safe_push (unsigned, heap, ti->topo_order, n);
+ ti->topo_order.safe_push (n);
}
/* Process a constraint C that represents x = *(y + off), using DELTA as the
if (flag)
{
get_varinfo (lhs)->solution = sol;
- if (!TEST_BIT (changed, lhs))
- {
- SET_BIT (changed, lhs);
- changed_count++;
- }
+ bitmap_set_bit (changed, lhs);
}
}
if (add_graph_edge (graph, t, rhs))
{
if (bitmap_ior_into (get_varinfo (t)->solution, sol))
- {
- if (!TEST_BIT (changed, t))
- {
- SET_BIT (changed, t);
- changed_count++;
- }
- }
+ bitmap_set_bit (changed, t);
}
return;
}
{
t = find (escaped_id);
if (add_graph_edge (graph, t, rhs)
- && bitmap_ior_into (get_varinfo (t)->solution, sol)
- && !TEST_BIT (changed, t))
- {
- SET_BIT (changed, t);
- changed_count++;
- }
+ && bitmap_ior_into (get_varinfo (t)->solution, sol))
+ bitmap_set_bit (changed, t);
/* Enough to let rhs escape once. */
escaped_p = true;
}
t = find (v->id);
if (add_graph_edge (graph, t, rhs)
- && bitmap_ior_into (get_varinfo (t)->solution, sol)
- && !TEST_BIT (changed, t))
- {
- SET_BIT (changed, t);
- changed_count++;
- }
+ && bitmap_ior_into (get_varinfo (t)->solution, sol))
+ bitmap_set_bit (changed, t);
}
/* If the variable is not exactly at the requested offset
if (flag)
{
get_varinfo (c->lhs.var)->solution = tmp;
- if (!TEST_BIT (changed, c->lhs.var))
- {
- SET_BIT (changed, c->lhs.var);
- changed_count++;
- }
+ bitmap_set_bit (changed, c->lhs.var);
}
}
}
si->current_index = 0;
si->visited = sbitmap_alloc (size);
- sbitmap_zero (si->visited);
+ bitmap_clear (si->visited);
si->deleted = sbitmap_alloc (size);
- sbitmap_zero (si->deleted);
+ bitmap_clear (si->deleted);
si->node_mapping = XNEWVEC (unsigned int, size);
si->dfs = XCNEWVEC (unsigned int, size);
for (i = 0; i < size; i++)
si->node_mapping[i] = i;
- si->scc_stack = VEC_alloc (unsigned, heap, 1);
+ si->scc_stack.create (1);
return si;
}
sbitmap_free (si->deleted);
free (si->node_mapping);
free (si->dfs);
- VEC_free (unsigned, heap, si->scc_stack);
+ si->scc_stack.release ();
free (si);
}
struct scc_info *si = init_scc_info (size);
for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
- if (!TEST_BIT (si->visited, i) && find (i) == i)
+ if (!bitmap_bit_p (si->visited, i) && find (i) == i)
scc_visit (graph, si, i);
free_scc_info (si);
unsigned int size = graph->size;
for (i = 0; i != size; ++i)
- if (!TEST_BIT (ti->visited, i) && find (i) == i)
+ if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
topo_visit (graph, ti, i);
}
&& bitmap_equal_p (eql1->labels, eql2->labels));
}
-/* Lookup a equivalence class in TABLE by the bitmap of LABELS it
- contains. */
+/* Lookup an equivalence class in TABLE by the bitmap of LABELS it
+   contains.  If no class exists yet, insert and return a new entry
+   with equivalence class zero for the caller to fill in.  */
-static unsigned int
-equiv_class_lookup (htab_t table, bitmap labels)
+static equiv_class_label *
+equiv_class_lookup_or_add (htab_t table, bitmap labels)
{
- void **slot;
- struct equiv_class_label ecl;
+ equiv_class_label **slot;
+ equiv_class_label ecl;
ecl.labels = labels;
ecl.hashcode = bitmap_hash (labels);
+ slot = (equiv_class_label **) htab_find_slot_with_hash (table, &ecl,
+ ecl.hashcode, INSERT);
+ if (!*slot)
+ {
+ *slot = XNEW (struct equiv_class_label);
+ (*slot)->labels = labels;
+ (*slot)->hashcode = ecl.hashcode;
+ (*slot)->equivalence_class = 0;
+ }
- slot = htab_find_slot_with_hash (table, &ecl,
- ecl.hashcode, NO_INSERT);
- if (!slot)
- return 0;
- else
- return ((equiv_class_label_t) *slot)->equivalence_class;
-}
-
-
-/* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
- to TABLE. */
-
-static void
-equiv_class_add (htab_t table, unsigned int equivalence_class,
- bitmap labels)
-{
- void **slot;
- equiv_class_label_t ecl = XNEW (struct equiv_class_label);
-
- ecl->labels = labels;
- ecl->equivalence_class = equivalence_class;
- ecl->hashcode = bitmap_hash (labels);
-
- slot = htab_find_slot_with_hash (table, ecl,
- ecl->hashcode, INSERT);
- gcc_assert (!*slot);
- *slot = (void *) ecl;
+ return *slot;
}
/* Perform offline variable substitution.
unsigned int my_dfs;
gcc_assert (si->node_mapping[n] == n);
- SET_BIT (si->visited, n);
+ bitmap_set_bit (si->visited, n);
si->dfs[n] = si->current_index ++;
my_dfs = si->dfs[n];
{
unsigned int w = si->node_mapping[i];
- if (TEST_BIT (si->deleted, w))
+ if (bitmap_bit_p (si->deleted, w))
continue;
- if (!TEST_BIT (si->visited, w))
+ if (!bitmap_bit_p (si->visited, w))
condense_visit (graph, si, w);
{
unsigned int t = si->node_mapping[w];
{
unsigned int w = si->node_mapping[i];
- if (TEST_BIT (si->deleted, w))
+ if (bitmap_bit_p (si->deleted, w))
continue;
- if (!TEST_BIT (si->visited, w))
+ if (!bitmap_bit_p (si->visited, w))
condense_visit (graph, si, w);
{
unsigned int t = si->node_mapping[w];
/* See if any components have been identified. */
if (si->dfs[n] == my_dfs)
{
- while (VEC_length (unsigned, si->scc_stack) != 0
- && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
+ while (si->scc_stack.length () != 0
+ && si->dfs[si->scc_stack.last ()] >= my_dfs)
{
- unsigned int w = VEC_pop (unsigned, si->scc_stack);
+ unsigned int w = si->scc_stack.pop ();
si->node_mapping[w] = n;
- if (!TEST_BIT (graph->direct_nodes, w))
- RESET_BIT (graph->direct_nodes, n);
+ if (!bitmap_bit_p (graph->direct_nodes, w))
+ bitmap_clear_bit (graph->direct_nodes, n);
/* Unify our nodes. */
if (graph->preds[w])
graph->points_to[w]);
}
}
- SET_BIT (si->deleted, n);
+ bitmap_set_bit (si->deleted, n);
}
else
- VEC_safe_push (unsigned, heap, si->scc_stack, n);
+ si->scc_stack.safe_push (n);
}
/* Label pointer equivalences. */
static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
- unsigned int i;
+ unsigned int i, first_pred;
bitmap_iterator bi;
- SET_BIT (si->visited, n);
- if (!graph->points_to[n])
- graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
+ bitmap_set_bit (si->visited, n);
/* Label and union our incoming edges's points to sets. */
+ first_pred = -1U;
EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
{
unsigned int w = si->node_mapping[i];
- if (!TEST_BIT (si->visited, w))
+ if (!bitmap_bit_p (si->visited, w))
label_visit (graph, si, w);
/* Skip unused edges */
continue;
if (graph->points_to[w])
- bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
+ {
+ if (!graph->points_to[n])
+ {
+ if (first_pred == -1U)
+ first_pred = w;
+ else
+ {
+ graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
+ bitmap_ior (graph->points_to[n],
+ graph->points_to[first_pred],
+ graph->points_to[w]);
+ }
+ }
+ else
+ bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
+ }
+ }
+
+ /* Indirect nodes get fresh variables and a new pointer equiv class. */
+ if (!bitmap_bit_p (graph->direct_nodes, n))
+ {
+ if (!graph->points_to[n])
+ {
+ graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
+ if (first_pred != -1U)
+ bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
+ }
+ bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
+ graph->pointer_label[n] = pointer_equiv_class++;
+ equiv_class_label_t ecl;
+ ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
+ graph->points_to[n]);
+ ecl->equivalence_class = graph->pointer_label[n];
+ return;
+ }
+
+ /* If there was only a single non-empty predecessor the pointer equiv
+ class is the same. */
+ if (!graph->points_to[n])
+ {
+ if (first_pred != -1U)
+ {
+ graph->pointer_label[n] = graph->pointer_label[first_pred];
+ graph->points_to[n] = graph->points_to[first_pred];
+ }
+ return;
}
- /* Indirect nodes get fresh variables. */
- if (!TEST_BIT (graph->direct_nodes, n))
- bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
if (!bitmap_empty_p (graph->points_to[n]))
{
- unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
- graph->points_to[n]);
- if (!label)
+ equiv_class_label_t ecl;
+ ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
+ graph->points_to[n]);
+ if (ecl->equivalence_class == 0)
+ ecl->equivalence_class = pointer_equiv_class++;
+ else
{
- label = pointer_equiv_class++;
- equiv_class_add (pointer_equiv_class_table,
- label, graph->points_to[n]);
+ BITMAP_FREE (graph->points_to[n]);
+ graph->points_to[n] = ecl->labels;
}
- graph->pointer_label[n] = label;
+ graph->pointer_label[n] = ecl->equivalence_class;
}
}
/* Condense the nodes, which means to find SCC's, count incoming
predecessors, and unite nodes in SCC's. */
for (i = 0; i < FIRST_REF_NODE; i++)
- if (!TEST_BIT (si->visited, si->node_mapping[i]))
+ if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
condense_visit (graph, si, si->node_mapping[i]);
- sbitmap_zero (si->visited);
+ bitmap_clear (si->visited);
/* Actually the label the nodes for pointer equivalences */
for (i = 0; i < FIRST_REF_NODE; i++)
- if (!TEST_BIT (si->visited, si->node_mapping[i]))
+ if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
label_visit (graph, si, si->node_mapping[i]);
/* Calculate location equivalence labels. */
bitmap pointed_by;
bitmap_iterator bi;
unsigned int j;
- unsigned int label;
if (!graph->pointed_by[i])
continue;
/* Look up the location equivalence label if one exists, or make
one otherwise. */
- label = equiv_class_lookup (location_equiv_class_table,
- pointed_by);
- if (label == 0)
- {
- label = location_equiv_class++;
- equiv_class_add (location_equiv_class_table,
- label, pointed_by);
- }
+ equiv_class_label_t ecl;
+ ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
+ if (ecl->equivalence_class == 0)
+ ecl->equivalence_class = location_equiv_class++;
else
{
if (dump_file && (dump_flags & TDF_DETAILS))
get_varinfo (i)->name);
BITMAP_FREE (pointed_by);
}
- graph->loc_label[i] = label;
+ graph->loc_label[i] = ecl->equivalence_class;
}
if (dump_file && (dump_flags & TDF_DETAILS))
for (i = 0; i < FIRST_REF_NODE; i++)
{
- bool direct_node = TEST_BIT (graph->direct_nodes, i);
- fprintf (dump_file,
- "Equivalence classes for %s node id %d:%s are pointer: %d"
- ", location:%d\n",
- direct_node ? "Direct node" : "Indirect node", i,
- get_varinfo (i)->name,
- graph->pointer_label[si->node_mapping[i]],
- graph->loc_label[si->node_mapping[i]]);
+ unsigned j = si->node_mapping[i];
+ if (j != i)
+ fprintf (dump_file, "%s node id %d (%s) mapped to SCC leader "
+ "node id %d (%s)\n",
+ bitmap_bit_p (graph->direct_nodes, i)
+ ? "Direct" : "Indirect", i, get_varinfo (i)->name,
+ j, get_varinfo (j)->name);
+ else
+ fprintf (dump_file,
+ "Equivalence classes for %s node id %d (%s): pointer %d"
+ ", location %d\n",
+ bitmap_bit_p (graph->direct_nodes, i)
+ ? "direct" : "indirect", i, get_varinfo (i)->name,
+ graph->pointer_label[i], graph->loc_label[i]);
}
/* Quickly eliminate our non-pointer variables. */
int i;
constraint_t c;
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraints, i, c)
{
if (c)
{
for (j = 0; j < graph->size; j++)
gcc_assert (find (j) == j);
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraints, i, c)
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
"ignoring constraint:",
get_varinfo (lhs.var)->name);
dump_constraint (dump_file, c);
+ fprintf (dump_file, "\n");
}
- VEC_replace (constraint_t, constraints, i, NULL);
+ constraints[i] = NULL;
continue;
}
"ignoring constraint:",
get_varinfo (rhs.var)->name);
dump_constraint (dump_file, c);
+ fprintf (dump_file, "\n");
}
- VEC_replace (constraint_t, constraints, i, NULL);
+ constraints[i] = NULL;
continue;
}
&& !bitmap_empty_p (get_varinfo (node)->solution))
{
unsigned int i;
- VEC(unsigned,heap) *queue = NULL;
+ vec<unsigned> queue = vNULL;
int queuepos;
unsigned int to = find (graph->indirect_cycles[node]);
bitmap_iterator bi;
if (find (i) == i && i != to)
{
if (unite (to, i))
- VEC_safe_push (unsigned, heap, queue, i);
+ queue.safe_push (i);
}
}
for (queuepos = 0;
- VEC_iterate (unsigned, queue, queuepos, i);
+ queue.iterate (queuepos, &i);
queuepos++)
{
unify_nodes (graph, to, i, true);
}
- VEC_free (unsigned, heap, queue);
+ queue.release ();
return true;
}
return false;
unsigned int i;
bitmap pts;
- changed_count = 0;
- changed = sbitmap_alloc (size);
- sbitmap_zero (changed);
+ changed = BITMAP_ALLOC (NULL);
/* Mark all initial non-collapsed nodes as changed. */
for (i = 0; i < size; i++)
varinfo_t ivi = get_varinfo (i);
if (find (i) == i && !bitmap_empty_p (ivi->solution)
&& ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
- || VEC_length (constraint_t, graph->complex[i]) > 0))
- {
- SET_BIT (changed, i);
- changed_count++;
- }
+ || graph->complex[i].length () > 0))
+ bitmap_set_bit (changed, i);
}
/* Allocate a bitmap to be used to store the changed bits. */
pts = BITMAP_ALLOC (&pta_obstack);
- while (changed_count > 0)
+ while (!bitmap_empty_p (changed))
{
unsigned int i;
struct topo_info *ti = init_topo_info ();
compute_topo_order (graph, ti);
- while (VEC_length (unsigned, ti->topo_order) != 0)
+ while (ti->topo_order.length () != 0)
{
- i = VEC_pop (unsigned, ti->topo_order);
+ i = ti->topo_order.pop ();
/* If this variable is not a representative, skip it. */
if (find (i) != i)
/* If the node has changed, we need to process the
complex constraints and outgoing edges again. */
- if (TEST_BIT (changed, i))
+ if (bitmap_clear_bit (changed, i))
{
unsigned int j;
constraint_t c;
bitmap solution;
- VEC(constraint_t,heap) *complex = graph->complex[i];
+ vec<constraint_t> complex = graph->complex[i];
+ varinfo_t vi = get_varinfo (i);
bool solution_empty;
- RESET_BIT (changed, i);
- changed_count--;
-
/* Compute the changed set of solution bits. */
- bitmap_and_compl (pts, get_varinfo (i)->solution,
- get_varinfo (i)->oldsolution);
+ if (vi->oldsolution)
+ bitmap_and_compl (pts, vi->solution, vi->oldsolution);
+ else
+ bitmap_copy (pts, vi->solution);
if (bitmap_empty_p (pts))
continue;
- bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
+ if (vi->oldsolution)
+ bitmap_ior_into (vi->oldsolution, pts);
+ else
+ {
+ vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
+ bitmap_copy (vi->oldsolution, pts);
+ }
- solution = get_varinfo (i)->solution;
+ solution = vi->solution;
solution_empty = bitmap_empty_p (solution);
/* Process the complex constraints */
- for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
+ FOR_EACH_VEC_ELT (complex, j, c)
{
/* XXX: This is going to unsort the constraints in
some cases, which will occasionally add duplicate
if (flag)
{
get_varinfo (to)->solution = tmp;
- if (!TEST_BIT (changed, to))
- {
- SET_BIT (changed, to);
- changed_count++;
- }
+ bitmap_set_bit (changed, to);
}
}
}
}
BITMAP_FREE (pts);
- sbitmap_free (changed);
+ BITMAP_FREE (changed);
bitmap_obstack_release (&oldpta_obstack);
}
static const char *
alias_get_name (tree decl)
{
- const char *res = get_name (decl);
+ const char *res = NULL;
char *temp;
int num_printed = 0;
- if (res != NULL)
- return res;
-
- res = "NULL";
if (!dump_file)
- return res;
+ return "NULL";
if (TREE_CODE (decl) == SSA_NAME)
{
- num_printed = asprintf (&temp, "%s_%u",
- alias_get_name (SSA_NAME_VAR (decl)),
- SSA_NAME_VERSION (decl));
+ res = get_name (decl);
+ if (res)
+ num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
+ else
+ num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
+ if (num_printed > 0)
+ {
+ res = ggc_strdup (temp);
+ free (temp);
+ }
}
else if (DECL_P (decl))
{
- num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
- }
- if (num_printed > 0)
- {
- res = ggc_strdup (temp);
- free (temp);
+ if (DECL_ASSEMBLER_NAME_SET_P (decl))
+ res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
+ else
+ {
+ res = get_name (decl);
+ if (!res)
+ {
+ num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
+ if (num_printed > 0)
+ {
+ res = ggc_strdup (temp);
+ free (temp);
+ }
+ }
+ }
}
- return res;
+ if (res != NULL)
+ return res;
+
+ return "NULL";
}
/* Find the variable id for tree T in the map.
If address_p is true, the result will be taken its address of. */
static void
-get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
+get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
struct constraint_expr cexpr;
varinfo_t vi;
/* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
- gcc_assert (SSA_VAR_P (t) || DECL_P (t));
+ gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
/* For parameters, get at the points-to set for the actual parm
decl. */
if (TREE_CODE (t) == SSA_NAME
- && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
- && SSA_NAME_IS_DEFAULT_DEF (t))
+ && SSA_NAME_IS_DEFAULT_DEF (t)
+ && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
{
get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
return;
}
+ /* For global variables resort to the alias target. */
+ if (TREE_CODE (t) == VAR_DECL
+ && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
+ {
+ struct varpool_node *node = varpool_get_node (t);
+ if (node && node->alias)
+ {
+ node = varpool_variable_node (node, NULL);
+ t = node->symbol.decl;
+ }
+ }
+
vi = get_vi_for_tree (t);
cexpr.var = vi->id;
cexpr.type = SCALAR;
for (; vi; vi = vi->next)
{
cexpr.var = vi->id;
- VEC_safe_push (ce_s, heap, *results, &cexpr);
+ results->safe_push (cexpr);
}
return;
}
- VEC_safe_push (ce_s, heap, *results, &cexpr);
+ results->safe_push (cexpr);
}
/* Process constraint T, performing various simplifications and then
struct constraint_expr rhs = t->rhs;
struct constraint_expr lhs = t->lhs;
- gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
- gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
+ gcc_assert (rhs.var < varmap.length ());
+ gcc_assert (lhs.var < varmap.length ());
/* If we didn't get any useful constraint from the lhs we get
&ANYTHING as fallback from get_constraint_for. Deal with
else
{
gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
- VEC_safe_push (constraint_t, heap, constraints, t);
+ constraints.safe_push (t);
}
}
-/* Return true if T is a type that could contain pointers. */
-
-static bool
-type_could_have_pointers (tree type)
-{
- if (POINTER_TYPE_P (type))
- return true;
-
- if (TREE_CODE (type) == ARRAY_TYPE)
- return type_could_have_pointers (TREE_TYPE (type));
-
- /* A function or method can consume pointers.
- ??? We could be more precise here. */
- if (TREE_CODE (type) == FUNCTION_TYPE
- || TREE_CODE (type) == METHOD_TYPE)
- return true;
-
- return AGGREGATE_TYPE_P (type);
-}
-
-/* Return true if T is a variable of a type that could contain
- pointers. */
-
-static bool
-could_have_pointers (tree t)
-{
- return type_could_have_pointers (TREE_TYPE (t));
-}
/* Return the position, in bits, of FIELD_DECL from the beginning of its
structure. */
static HOST_WIDE_INT
bitpos_of_field (const tree fdecl)
{
-
if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
|| !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
return -1;
- return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
+ return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
+ TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
}
static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
- VEC (ce_s, heap) **results)
+ vec<ce_s> *results)
{
struct constraint_expr c;
unsigned int j, n;
- HOST_WIDE_INT rhsunitoffset, rhsoffset;
+ HOST_WIDE_INT rhsoffset;
/* If we do not do field-sensitive PTA adding offsets to pointers
does not change the points-to solution. */
if (!use_field_sensitive)
{
- get_constraint_for (ptr, results);
+ get_constraint_for_rhs (ptr, results);
return;
}
solution which includes all sub-fields of all pointed-to
variables of ptr. */
if (offset == NULL_TREE
- || !host_integerp (offset, 0))
+ || TREE_CODE (offset) != INTEGER_CST)
rhsoffset = UNKNOWN_OFFSET;
else
{
- /* Make sure the bit-offset also fits. */
- rhsunitoffset = TREE_INT_CST_LOW (offset);
- rhsoffset = rhsunitoffset * BITS_PER_UNIT;
- if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
+ /* Sign-extend the offset. */
+ double_int soffset = tree_to_double_int (offset)
+ .sext (TYPE_PRECISION (TREE_TYPE (offset)));
+ if (!soffset.fits_shwi ())
rhsoffset = UNKNOWN_OFFSET;
+ else
+ {
+ /* Make sure the bit-offset also fits. */
+ HOST_WIDE_INT rhsunitoffset = soffset.low;
+ rhsoffset = rhsunitoffset * BITS_PER_UNIT;
+ if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
+ rhsoffset = UNKNOWN_OFFSET;
+ }
}
- get_constraint_for (ptr, results);
+ get_constraint_for_rhs (ptr, results);
if (rhsoffset == 0)
return;
/* As we are eventually appending to the solution do not use
- VEC_iterate here. */
- n = VEC_length (ce_s, *results);
+ vec::iterate here. */
+ n = results->length ();
for (j = 0; j < n; j++)
{
varinfo_t curr;
- c = *VEC_index (ce_s, *results, j);
+ c = (*results)[j];
curr = get_varinfo (c.var);
if (c.type == ADDRESSOF
c2.type = ADDRESSOF;
c2.offset = 0;
if (c2.var != c.var)
- VEC_safe_push (ce_s, heap, *results, &c2);
+ results->safe_push (c2);
temp = temp->next;
}
while (temp);
c2.var = temp->next->id;
c2.type = ADDRESSOF;
c2.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &c2);
+ results->safe_push (c2);
}
c.var = temp->id;
c.offset = 0;
else
c.offset = rhsoffset;
- VEC_replace (ce_s, *results, j, &c);
+ (*results)[j] = c;
}
}
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
- If address_p is true the result will be taken its address of. */
+ If address_p is true the result will have its address taken.
+ If lhs_p is true then the constraint expression is assumed to be used
+ as the lhs. */
static void
-get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
- bool address_p)
+get_constraint_for_component_ref (tree t, vec<ce_s> *results,
+ bool address_p, bool lhs_p)
{
tree orig_t = t;
HOST_WIDE_INT bitsize = -1;
HOST_WIDE_INT bitmaxsize = -1;
HOST_WIDE_INT bitpos;
tree forzero;
- struct constraint_expr *result;
/* Some people like to do cute things like take the address of
&0->a.b */
forzero = t;
while (handled_component_p (forzero)
- || INDIRECT_REF_P (forzero))
+ || INDIRECT_REF_P (forzero)
+ || TREE_CODE (forzero) == MEM_REF)
forzero = TREE_OPERAND (forzero, 0);
if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
temp.offset = 0;
temp.var = integer_id;
temp.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, &temp);
+ results->safe_push (temp);
return;
}
+ /* Handle type-punning through unions. If we are extracting a pointer
+ from a union via a possibly type-punning access that pointer
+ points to anything, similar to a conversion of an integer to
+ a pointer. */
+ if (!lhs_p)
+ {
+ tree u;
+ for (u = t;
+ TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
+ u = TREE_OPERAND (u, 0))
+ if (TREE_CODE (u) == COMPONENT_REF
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
+ {
+ struct constraint_expr temp;
+
+ temp.offset = 0;
+ temp.var = anything_id;
+ temp.type = ADDRESSOF;
+ results->safe_push (temp);
+ return;
+ }
+ }
+
t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
/* Pretend to take the address of the base, we'll take care of
adding the required subset of sub-fields below. */
- get_constraint_for_1 (t, results, true);
- gcc_assert (VEC_length (ce_s, *results) == 1);
- result = VEC_last (ce_s, *results);
+ get_constraint_for_1 (t, results, true, lhs_p);
+ gcc_assert (results->length () == 1);
+ struct constraint_expr &result = results->last ();
- if (result->type == SCALAR
- && get_varinfo (result->var)->is_full_var)
+ if (result.type == SCALAR
+ && get_varinfo (result.var)->is_full_var)
/* For single-field vars do not bother about the offset. */
- result->offset = 0;
- else if (result->type == SCALAR)
+ result.offset = 0;
+ else if (result.type == SCALAR)
{
/* In languages like C, you can access one past the end of an
array. You aren't allowed to dereference it, so we can
ignore this constraint. When we handle pointer subtraction,
we may have to do something cute here. */
- if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
+ if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
&& bitmaxsize != 0)
{
/* It's also not true that the constraint will actually start at the
right offset, it may start in some padding. We only care about
setting the constraint to the first actual field it touches, so
walk to find it. */
- struct constraint_expr cexpr = *result;
+ struct constraint_expr cexpr = result;
varinfo_t curr;
- VEC_pop (ce_s, *results);
+ results->pop ();
cexpr.offset = 0;
for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
{
bitpos, bitmaxsize))
{
cexpr.var = curr->id;
- VEC_safe_push (ce_s, heap, *results, &cexpr);
+ results->safe_push (cexpr);
if (address_p)
break;
}
/* If we are going to take the address of this field then
to be able to compute reachability correctly add at least
the last field of the variable. */
- if (address_p
- && VEC_length (ce_s, *results) == 0)
+ if (address_p && results->length () == 0)
{
curr = get_varinfo (cexpr.var);
while (curr->next != NULL)
curr = curr->next;
cexpr.var = curr->id;
- VEC_safe_push (ce_s, heap, *results, &cexpr);
+ results->safe_push (cexpr);
}
- else
+ else if (results->length () == 0)
/* Assert that we found *some* field there. The user couldn't be
accessing *only* padding. */
/* Still the user could access one past the end of an array
embedded in a struct resulting in accessing *only* padding. */
- gcc_assert (VEC_length (ce_s, *results) >= 1
- || ref_contains_array_ref (orig_t));
+ /* Or accessing only padding via type-punning to a type
+ that has a field just in padding space. */
+ {
+ cexpr.type = SCALAR;
+ cexpr.var = anything_id;
+ cexpr.offset = 0;
+ results->safe_push (cexpr);
+ }
}
else if (bitmaxsize == 0)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Access to past the end of variable, ignoring\n");
}
- else if (result->type == DEREF)
+ else if (result.type == DEREF)
{
/* If we do not know exactly where the access goes say so. Note
that only for non-structure accesses we know that we access
at most one subfiled of any variable. */
if (bitpos == -1
|| bitsize != bitmaxsize
- || AGGREGATE_TYPE_P (TREE_TYPE (orig_t)))
- result->offset = UNKNOWN_OFFSET;
+ || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
+ || result.offset == UNKNOWN_OFFSET)
+ result.offset = UNKNOWN_OFFSET;
else
- result->offset = bitpos;
+ result.offset += bitpos;
}
- else if (result->type == ADDRESSOF)
+ else if (result.type == ADDRESSOF)
{
/* We can end up here for component references on a
VIEW_CONVERT_EXPR <>(&foobar). */
- result->type = SCALAR;
- result->var = anything_id;
- result->offset = 0;
+ result.type = SCALAR;
+ result.var = anything_id;
+ result.offset = 0;
}
else
gcc_unreachable ();
This is needed so that we can handle dereferencing DEREF constraints. */
static void
-do_deref (VEC (ce_s, heap) **constraints)
+do_deref (vec<ce_s> *constraints)
{
struct constraint_expr *c;
unsigned int i = 0;
- for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (*constraints, i, c)
{
if (c->type == SCALAR)
c->type = DEREF;
}
}
-static void get_constraint_for_1 (tree, VEC (ce_s, heap) **, bool);
-
/* Given a tree T, return the constraint expression for taking the
address of it. */
static void
-get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
+get_constraint_for_address_of (tree t, vec<ce_s> *results)
{
struct constraint_expr *c;
unsigned int i;
- get_constraint_for_1 (t, results, true);
+ get_constraint_for_1 (t, results, true, true);
- for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
+ FOR_EACH_VEC_ELT (*results, i, c)
{
if (c->type == DEREF)
c->type = SCALAR;
/* Given a tree T, return the constraint expression for it. */
static void
-get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
+get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
+ bool lhs_p)
{
struct constraint_expr temp;
in that case *NULL does not fail, so it _should_ alias *anything.
It is not worth adding a new option or renaming the existing one,
since this case is relatively obscure. */
- if (flag_delete_null_pointer_checks
- && ((TREE_CODE (t) == INTEGER_CST
- && integer_zerop (t))
- /* The only valid CONSTRUCTORs in gimple with pointer typed
- elements are zero-initializer. But in IPA mode we also
- process global initializers, so verify at least. */
- || (TREE_CODE (t) == CONSTRUCTOR
- && CONSTRUCTOR_NELTS (t) == 0)))
- {
- temp.var = nothing_id;
+ if ((TREE_CODE (t) == INTEGER_CST
+ && integer_zerop (t))
+ /* The only valid CONSTRUCTORs in gimple with pointer typed
+ elements are zero-initializer. But in IPA mode we also
+ process global initializers, so verify at least. */
+ || (TREE_CODE (t) == CONSTRUCTOR
+ && CONSTRUCTOR_NELTS (t) == 0))
+ {
+ if (flag_delete_null_pointer_checks)
+ temp.var = nothing_id;
+ else
+ temp.var = nonlocal_id;
temp.type = ADDRESSOF;
temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &temp);
+ results->safe_push (temp);
return;
}
temp.var = readonly_id;
temp.type = SCALAR;
temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &temp);
+ results->safe_push (temp);
return;
}
{
switch (TREE_CODE (t))
{
- case INDIRECT_REF:
+ case MEM_REF:
{
- get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
+ struct constraint_expr cs;
+ varinfo_t vi, curr;
+ get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
+ TREE_OPERAND (t, 1), results);
do_deref (results);
+
+ /* If we are not taking the address then make sure to process
+ all subvariables we might access. */
+ if (address_p)
+ return;
+
+ cs = results->last ();
+ if (cs.type == DEREF
+ && type_can_have_subvars (TREE_TYPE (t)))
+ {
+ /* For dereferences this means we have to defer it
+ to solving time. */
+ results->last ().offset = UNKNOWN_OFFSET;
+ return;
+ }
+ if (cs.type != SCALAR)
+ return;
+
+ vi = get_varinfo (cs.var);
+ curr = vi->next;
+ if (!vi->is_full_var
+ && curr)
+ {
+ unsigned HOST_WIDE_INT size;
+ if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
+ size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
+ else
+ size = -1;
+ for (; curr; curr = curr->next)
+ {
+ if (curr->offset - vi->offset < size)
+ {
+ cs.var = curr->id;
+ results->safe_push (cs);
+ }
+ else
+ break;
+ }
+ }
return;
}
case ARRAY_REF:
case ARRAY_RANGE_REF:
case COMPONENT_REF:
- get_constraint_for_component_ref (t, results, address_p);
+ get_constraint_for_component_ref (t, results, address_p, lhs_p);
return;
case VIEW_CONVERT_EXPR:
- get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
+ get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
+ lhs_p);
return;
/* We are missing handling for TARGET_MEM_REF here. */
default:;
{
unsigned int i;
tree val;
- VEC (ce_s, heap) *tmp = NULL;
+ vec<ce_s> tmp = vNULL;
FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
{
struct constraint_expr *rhsp;
unsigned j;
- get_constraint_for_1 (val, &tmp, address_p);
- for (j = 0; VEC_iterate (ce_s, tmp, j, rhsp); ++j)
- VEC_safe_push (ce_s, heap, *results, rhsp);
- VEC_truncate (ce_s, tmp, 0);
+ get_constraint_for_1 (val, &tmp, address_p, lhs_p);
+ FOR_EACH_VEC_ELT (tmp, j, rhsp)
+ results->safe_push (*rhsp);
+ tmp.truncate (0);
}
- VEC_free (ce_s, heap, tmp);
+ tmp.release ();
/* We do not know whether the constructor was complete,
so technically we have to add &NOTHING or &ANYTHING
like we do for an empty constructor as well. */
get_constraint_for_ssa_var (t, results, address_p);
return;
}
+ case tcc_constant:
+ {
+ /* We cannot refer to automatic variables through constants. */
+ temp.type = ADDRESSOF;
+ temp.var = nonlocal_id;
+ temp.offset = 0;
+ results->safe_push (temp);
+ return;
+ }
default:;
}
temp.type = ADDRESSOF;
temp.var = anything_id;
temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &temp);
+ results->safe_push (temp);
}
/* Given a gimple tree T, return the constraint expression vector for it. */
static void
-get_constraint_for (tree t, VEC (ce_s, heap) **results)
+get_constraint_for (tree t, vec<ce_s> *results)
{
- gcc_assert (VEC_length (ce_s, *results) == 0);
+ gcc_assert (results->length () == 0);
- get_constraint_for_1 (t, results, false);
+ get_constraint_for_1 (t, results, false, true);
+}
+
+/* Given a gimple tree T, return the constraint expression vector for it
+ to be used as the rhs of a constraint. */
+
+static void
+get_constraint_for_rhs (tree t, vec<ce_s> *results)
+{
+ gcc_assert (results->length () == 0);
+
+ get_constraint_for_1 (t, results, false, false);
}
entries in *LHSC. */
static void
-process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc)
+process_all_all_constraints (vec<ce_s> lhsc,
+ vec<ce_s> rhsc)
{
struct constraint_expr *lhsp, *rhsp;
unsigned i, j;
- if (VEC_length (ce_s, lhsc) <= 1
- || VEC_length (ce_s, rhsc) <= 1)
+ if (lhsc.length () <= 1 || rhsc.length () <= 1)
{
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
- for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (rhsc, j, rhsp)
process_constraint (new_constraint (*lhsp, *rhsp));
}
else
{
struct constraint_expr tmp;
tmp = new_scalar_tmp_constraint_exp ("allalltmp");
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (tmp, *rhsp));
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, tmp));
}
}
do_structure_copy (tree lhsop, tree rhsop)
{
struct constraint_expr *lhsp, *rhsp;
- VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
+ vec<ce_s> lhsc = vNULL;
+ vec<ce_s> rhsc = vNULL;
unsigned j;
get_constraint_for (lhsop, &lhsc);
- get_constraint_for (rhsop, &rhsc);
- lhsp = VEC_index (ce_s, lhsc, 0);
- rhsp = VEC_index (ce_s, rhsc, 0);
+ get_constraint_for_rhs (rhsop, &rhsc);
+ lhsp = &lhsc[0];
+ rhsp = &rhsc[0];
if (lhsp->type == DEREF
|| (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
|| rhsp->type == DEREF)
{
if (lhsp->type == DEREF)
{
- gcc_assert (VEC_length (ce_s, lhsc) == 1);
+ gcc_assert (lhsc.length () == 1);
lhsp->offset = UNKNOWN_OFFSET;
}
if (rhsp->type == DEREF)
{
- gcc_assert (VEC_length (ce_s, rhsc) == 1);
+ gcc_assert (rhsc.length () == 1);
rhsp->offset = UNKNOWN_OFFSET;
}
process_all_all_constraints (lhsc, rhsc);
unsigned k = 0;
get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
+ for (j = 0; lhsc.iterate (j, &lhsp);)
{
varinfo_t lhsv, rhsv;
- rhsp = VEC_index (ce_s, rhsc, k);
+ rhsp = &rhsc[k];
lhsv = get_varinfo (lhsp->var);
rhsv = get_varinfo (rhsp->var);
if (lhsv->may_have_pointers
- && ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
- rhsv->offset + lhsoffset, rhsv->size))
+ && (lhsv->is_full_var
+ || rhsv->is_full_var
+ || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
+ rhsv->offset + lhsoffset, rhsv->size)))
process_constraint (new_constraint (*lhsp, *rhsp));
- if (lhsv->offset + rhsoffset + lhsv->size
- > rhsv->offset + lhsoffset + rhsv->size)
+ if (!rhsv->is_full_var
+ && (lhsv->is_full_var
+ || (lhsv->offset + rhsoffset + lhsv->size
+ > rhsv->offset + lhsoffset + rhsv->size)))
{
++k;
- if (k >= VEC_length (ce_s, rhsc))
+ if (k >= rhsc.length ())
break;
}
else
else
gcc_unreachable ();
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
}
-/* Create a constraint ID = OP. */
+/* Create constraints ID = { rhsc }. */
static void
-make_constraint_to (unsigned id, tree op)
+make_constraints_to (unsigned id, vec<ce_s> rhsc)
{
- VEC(ce_s, heap) *rhsc = NULL;
struct constraint_expr *c;
struct constraint_expr includes;
unsigned int j;
includes.offset = 0;
includes.type = SCALAR;
- get_constraint_for (op, &rhsc);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
+ FOR_EACH_VEC_ELT (rhsc, j, c)
process_constraint (new_constraint (includes, *c));
- VEC_free (ce_s, heap, rhsc);
+}
+
+/* Create a constraint ID = OP. */
+
+static void
+make_constraint_to (unsigned id, tree op)
+{
+ vec<ce_s> rhsc = vNULL;
+ get_constraint_for_rhs (op, &rhsc);
+ make_constraints_to (id, rhsc);
+ rhsc.release ();
}
/* Create a constraint ID = &FROM. */
process_constraint (new_constraint (lhs, rhs));
}
-/* Create a new artificial heap variable with NAME and make a
- constraint from it to LHS. Return the created variable. */
-
-static varinfo_t
-make_constraint_from_heapvar (varinfo_t lhs, const char *name)
-{
- varinfo_t vi;
- tree heapvar = heapvar_lookup (lhs->decl, lhs->offset);
+/* Temporary storage for fake var decls. */
+struct obstack fake_var_decl_obstack;
- if (heapvar == NULL_TREE)
- {
- var_ann_t ann;
- heapvar = create_tmp_var_raw (ptr_type_node, name);
- DECL_EXTERNAL (heapvar) = 1;
+/* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
- heapvar_insert (lhs->decl, lhs->offset, heapvar);
+static tree
+build_fake_var_decl (tree type)
+{
+ tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
+ memset (decl, 0, sizeof (struct tree_var_decl));
+ TREE_SET_CODE (decl, VAR_DECL);
+ TREE_TYPE (decl) = type;
+ DECL_UID (decl) = allocate_decl_uid ();
+ SET_DECL_PT_UID (decl, -1);
+ layout_decl (decl, 0);
+ return decl;
+}
- ann = get_var_ann (heapvar);
- ann->is_heapvar = 1;
- }
+/* Create a new artificial heap variable with NAME.
+ Return the created variable. */
- /* For global vars we need to add a heapvar to the list of referenced
- vars of a different function than it was created for originally. */
- if (cfun && gimple_referenced_vars (cfun))
- add_referenced_var (heapvar);
+static varinfo_t
+make_heapvar (const char *name)
+{
+ varinfo_t vi;
+ tree heapvar;
+
+ heapvar = build_fake_var_decl (ptr_type_node);
+ DECL_EXTERNAL (heapvar) = 1;
vi = new_var_info (heapvar, name);
vi->is_artificial_var = true;
vi->is_full_var = true;
insert_vi_for_tree (heapvar, vi);
- make_constraint_from (lhs, vi->id);
-
return vi;
}
constraint from it to LHS. Set flags according to a tag used
for tracking restrict pointers. */
-static void
+static varinfo_t
make_constraint_from_restrict (varinfo_t lhs, const char *name)
{
- varinfo_t vi;
- vi = make_constraint_from_heapvar (lhs, name);
- vi->is_restrict_var = 1;
- vi->is_global_var = 0;
- vi->is_special_var = 1;
- vi->may_have_pointers = 0;
+ varinfo_t vi = make_heapvar (name);
+ vi->is_global_var = 1;
+ vi->may_have_pointers = 1;
+ make_constraint_from (lhs, vi->id);
+ return vi;
+}
+
+/* Create a new artificial heap variable with NAME and make a
+ constraint from it to LHS. Set flags according to a tag used
+ for tracking restrict pointers and make the artificial heap
+ point to global memory. */
+
+static varinfo_t
+make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
+{
+ varinfo_t vi = make_constraint_from_restrict (lhs, name);
+ make_copy_constraint (vi, nonlocal_id);
+ return vi;
}
/* In IPA mode there are varinfos for different aspects of reach
RHS. */
static void
-handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
+handle_rhs_call (gimple stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned i;
+ bool returns_uses = false;
for (i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree arg = gimple_call_arg (stmt, i);
+ int flags = gimple_call_arg_flags (stmt, i);
- /* Find those pointers being passed, and make sure they end up
- pointing to anything. */
- if (could_have_pointers (arg))
- make_escape_constraint (arg);
- }
+ /* If the argument is not used we can ignore it. */
+ if (flags & EAF_UNUSED)
+ continue;
- /* The static chain escapes as well. */
- if (gimple_call_chain (stmt))
+ /* As we compute ESCAPED context-insensitive we do not gain
+ any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
+ set. The argument would still get clobbered through the
+ escape solution. */
+ if ((flags & EAF_NOCLOBBER)
+ && (flags & EAF_NOESCAPE))
+ {
+ varinfo_t uses = get_call_use_vi (stmt);
+ if (!(flags & EAF_DIRECT))
+ {
+ varinfo_t tem = new_var_info (NULL_TREE, "callarg");
+ make_constraint_to (tem->id, arg);
+ make_transitive_closure_constraints (tem);
+ make_copy_constraint (uses, tem->id);
+ }
+ else
+ make_constraint_to (uses->id, arg);
+ returns_uses = true;
+ }
+ else if (flags & EAF_NOESCAPE)
+ {
+ struct constraint_expr lhs, rhs;
+ varinfo_t uses = get_call_use_vi (stmt);
+ varinfo_t clobbers = get_call_clobber_vi (stmt);
+ varinfo_t tem = new_var_info (NULL_TREE, "callarg");
+ make_constraint_to (tem->id, arg);
+ if (!(flags & EAF_DIRECT))
+ make_transitive_closure_constraints (tem);
+ make_copy_constraint (uses, tem->id);
+ make_copy_constraint (clobbers, tem->id);
+ /* Add *tem = nonlocal, do not add *tem = callused as
+ EAF_NOESCAPE parameters do not escape to other parameters
+ and all other uses appear in NONLOCAL as well. */
+ lhs.type = DEREF;
+ lhs.var = tem->id;
+ lhs.offset = 0;
+ rhs.type = SCALAR;
+ rhs.var = nonlocal_id;
+ rhs.offset = 0;
+ process_constraint (new_constraint (lhs, rhs));
+ returns_uses = true;
+ }
+ else
+ make_escape_constraint (arg);
+ }
+
+ /* If we added to the calls uses solution make sure we account for
+ pointers to it to be returned. */
+ if (returns_uses)
+ {
+ rhsc.var = get_call_use_vi (stmt)->id;
+ rhsc.offset = 0;
+ rhsc.type = SCALAR;
+ results->safe_push (rhsc);
+ }
+
+ /* The static chain escapes as well. */
+ if (gimple_call_chain (stmt))
make_escape_constraint (gimple_call_chain (stmt));
/* And if we applied NRV the address of the return slot escapes as well. */
&& gimple_call_lhs (stmt) != NULL_TREE
&& TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
{
- VEC(ce_s, heap) *tmpc = NULL;
+ vec<ce_s> tmpc = vNULL;
struct constraint_expr lhsc, *c;
get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
lhsc.var = escaped_id;
lhsc.offset = 0;
lhsc.type = SCALAR;
- for (i = 0; VEC_iterate (ce_s, tmpc, i, c); ++i)
+ FOR_EACH_VEC_ELT (tmpc, i, c)
process_constraint (new_constraint (lhsc, *c));
- VEC_free(ce_s, heap, tmpc);
+ tmpc.release ();
}
/* Regular functions return nonlocal memory. */
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, &rhsc);
+ results->safe_push (rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call
the LHS point to global and escaped variables. */
static void
-handle_lhs_call (tree lhs, int flags, VEC(ce_s, heap) *rhsc, tree fndecl)
+handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
+ tree fndecl)
{
- VEC(ce_s, heap) *lhsc = NULL;
+ vec<ce_s> lhsc = vNULL;
get_constraint_for (lhs, &lhsc);
-
- if (flags & ECF_MALLOC)
+ /* If the store is to a global decl make sure to
+ add proper escape constraints. */
+ lhs = get_base_address (lhs);
+ if (lhs
+ && DECL_P (lhs)
+ && is_global_var (lhs))
+ {
+ struct constraint_expr tmpc;
+ tmpc.var = escaped_id;
+ tmpc.offset = 0;
+ tmpc.type = SCALAR;
+ lhsc.safe_push (tmpc);
+ }
+
+ /* If the call returns an argument unmodified override the rhs
+ constraints. */
+ flags = gimple_call_return_flags (stmt);
+ if (flags & ERF_RETURNS_ARG
+ && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
+ {
+ tree arg;
+ rhsc.create (0);
+ arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
+ get_constraint_for (arg, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ rhsc.release ();
+ }
+ else if (flags & ERF_NOALIAS)
{
varinfo_t vi;
- vi = make_constraint_from_heapvar (get_vi_for_tree (lhs), "HEAP");
+ struct constraint_expr tmpc;
+ rhsc.create (0);
+ vi = make_heapvar ("HEAP");
/* We delay marking allocated storage global until we know if
it escapes. */
DECL_EXTERNAL (vi->decl) = 0;
vi->is_global_var = 0;
/* If this is not a real malloc call assume the memory was
- initialized and thus may point to global memory. All
+ initialized and thus may point to global memory. All
builtin functions with the malloc attribute behave in a sane way. */
if (!fndecl
|| DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
make_constraint_from (vi, nonlocal_id);
- }
- else if (VEC_length (ce_s, rhsc) > 0)
- {
- /* If the store is to a global decl make sure to
- add proper escape constraints. */
- lhs = get_base_address (lhs);
- if (lhs
- && DECL_P (lhs)
- && is_global_var (lhs))
- {
- struct constraint_expr tmpc;
- tmpc.var = escaped_id;
- tmpc.offset = 0;
- tmpc.type = SCALAR;
- VEC_safe_push (ce_s, heap, lhsc, &tmpc);
- }
+ tmpc.var = vi->id;
+ tmpc.offset = 0;
+ tmpc.type = ADDRESSOF;
+ rhsc.safe_push (tmpc);
process_all_all_constraints (lhsc, rhsc);
+ rhsc.release ();
}
- VEC_free (ce_s, heap, lhsc);
+ else
+ process_all_all_constraints (lhsc, rhsc);
+
+ lhsc.release ();
}
/* For non-IPA mode, generate constraints necessary for a call of a
const function that returns a pointer in the statement STMT. */
static void
-handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
+handle_const_call (gimple stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned int k;
rhsc.var = uses->id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, &rhsc);
+ results->safe_push (rhsc);
}
/* May return arguments. */
for (k = 0; k < gimple_call_num_args (stmt); ++k)
{
tree arg = gimple_call_arg (stmt, k);
-
- if (could_have_pointers (arg))
- {
- VEC(ce_s, heap) *argc = NULL;
- unsigned i;
- struct constraint_expr *argp;
- get_constraint_for (arg, &argc);
- for (i = 0; VEC_iterate (ce_s, argc, i, argp); ++i)
- VEC_safe_push (ce_s, heap, *results, argp);
- VEC_free(ce_s, heap, argc);
- }
+ vec<ce_s> argc = vNULL;
+ unsigned i;
+ struct constraint_expr *argp;
+ get_constraint_for_rhs (arg, &argc);
+ FOR_EACH_VEC_ELT (argc, i, argp)
+ results->safe_push (*argp);
+ argc.release ();
}
/* May return addresses of globals. */
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = ADDRESSOF;
- VEC_safe_push (ce_s, heap, *results, &rhsc);
+ results->safe_push (rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call to a
pure function in statement STMT. */
static void
-handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
+handle_pure_call (gimple stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned i;
for (i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree arg = gimple_call_arg (stmt, i);
-
- if (could_have_pointers (arg))
+ if (!uses)
{
- if (!uses)
- {
- uses = get_call_use_vi (stmt);
- make_transitive_closure_constraints (uses);
- }
- make_constraint_to (uses->id, arg);
+ uses = get_call_use_vi (stmt);
+ make_transitive_closure_constraints (uses);
}
+ make_constraint_to (uses->id, arg);
}
/* The static chain is used as well. */
rhsc.var = uses->id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, &rhsc);
+ results->safe_push (rhsc);
}
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, &rhsc);
+ results->safe_push (rhsc);
}
static varinfo_t
get_fi_for_callee (gimple call)
{
- tree decl;
+ tree decl, fn = gimple_call_fn (call);
+
+ if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
+ fn = OBJ_TYPE_REF_EXPR (fn);
/* If we can directly resolve the function being called, do so.
Otherwise, it must be some sort of indirect expression that
we should still be able to handle. */
- decl = gimple_call_fndecl (call);
+ decl = gimple_call_addr_fndecl (fn);
if (decl)
return get_vi_for_tree (decl);
- decl = gimple_call_fn (call);
- /* The function can be either an SSA name pointer or,
- worse, an OBJ_TYPE_REF. In this case we have no
+ /* If the function is anything other than a SSA name pointer we have no
clue and should be getting ANYFN (well, ANYTHING for now). */
- if (TREE_CODE (decl) == SSA_NAME)
- {
- if (TREE_CODE (decl) == SSA_NAME
- && TREE_CODE (SSA_NAME_VAR (decl)) == PARM_DECL
- && SSA_NAME_IS_DEFAULT_DEF (decl))
- decl = SSA_NAME_VAR (decl);
- return get_vi_for_tree (decl);
- }
- else if (TREE_CODE (decl) == INTEGER_CST
- || TREE_CODE (decl) == OBJ_TYPE_REF)
+ if (!fn || TREE_CODE (fn) != SSA_NAME)
return get_varinfo (anything_id);
- else
- gcc_unreachable ();
+
+ if (SSA_NAME_IS_DEFAULT_DEF (fn)
+ && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
+ fn = SSA_NAME_VAR (fn);
+
+ return get_vi_for_tree (fn);
}
-/* Walk statement T setting up aliasing constraints according to the
- references found in T. This function is the main part of the
- constraint builder. AI points to auxiliary alias information used
- when building alias sets and computing alias grouping heuristics. */
+/* Create constraints for the builtin call T. Return true if the call
+ was handled, otherwise false. */
-static void
-find_func_aliases (gimple origt)
+static bool
+find_func_aliases_for_builtin_call (gimple t)
{
- gimple t = origt;
- VEC(ce_s, heap) *lhsc = NULL;
- VEC(ce_s, heap) *rhsc = NULL;
- struct constraint_expr *c;
+ tree fndecl = gimple_call_fndecl (t);
+ vec<ce_s> lhsc = vNULL;
+ vec<ce_s> rhsc = vNULL;
varinfo_t fi;
- /* Now build constraints expressions. */
- if (gimple_code (t) == GIMPLE_PHI)
- {
- gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
-
- /* Only care about pointers and structures containing
- pointers. */
- if (could_have_pointers (gimple_phi_result (t)))
+ if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
+ /* ??? All builtins that are handled here need to be handled
+ in the alias-oracle query functions explicitly! */
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ /* All the following functions return a pointer to the same object
+ as their first argument points to. The functions do not add
+ to the ESCAPED solution. The functions make the first argument
+ pointed to memory point to what the second argument pointed to
+ memory points to. */
+ case BUILT_IN_STRCPY:
+ case BUILT_IN_STRNCPY:
+ case BUILT_IN_BCOPY:
+ case BUILT_IN_MEMCPY:
+ case BUILT_IN_MEMMOVE:
+ case BUILT_IN_MEMPCPY:
+ case BUILT_IN_STPCPY:
+ case BUILT_IN_STPNCPY:
+ case BUILT_IN_STRCAT:
+ case BUILT_IN_STRNCAT:
+ case BUILT_IN_STRCPY_CHK:
+ case BUILT_IN_STRNCPY_CHK:
+ case BUILT_IN_MEMCPY_CHK:
+ case BUILT_IN_MEMMOVE_CHK:
+ case BUILT_IN_MEMPCPY_CHK:
+ case BUILT_IN_STPCPY_CHK:
+ case BUILT_IN_STPNCPY_CHK:
+ case BUILT_IN_STRCAT_CHK:
+ case BUILT_IN_STRNCAT_CHK:
+ case BUILT_IN_TM_MEMCPY:
+ case BUILT_IN_TM_MEMMOVE:
{
- size_t i;
- unsigned int j;
-
- /* For a phi node, assign all the arguments to
- the result. */
- get_constraint_for (gimple_phi_result (t), &lhsc);
- for (i = 0; i < gimple_phi_num_args (t); i++)
+ tree res = gimple_call_lhs (t);
+ tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
+ == BUILT_IN_BCOPY ? 1 : 0));
+ tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
+ == BUILT_IN_BCOPY ? 0 : 1));
+ if (res != NULL_TREE)
{
- tree strippedrhs = PHI_ARG_DEF (t, i);
-
- STRIP_NOPS (strippedrhs);
- get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
-
- for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
- {
- struct constraint_expr *c2;
- while (VEC_length (ce_s, rhsc) > 0)
- {
- c2 = VEC_last (ce_s, rhsc);
- process_constraint (new_constraint (*c, *c2));
- VEC_pop (ce_s, rhsc);
- }
- }
+ get_constraint_for (res, &lhsc);
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
+ else
+ get_constraint_for (dest, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ lhsc.release ();
+ rhsc.release ();
}
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
+ get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
+ do_deref (&lhsc);
+ do_deref (&rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ lhsc.release ();
+ rhsc.release ();
+ return true;
}
- }
- /* In IPA mode, we need to generate constraints to pass call
- arguments through their calls. There are two cases,
- either a GIMPLE_CALL returning a value, or just a plain
- GIMPLE_CALL when we are not.
-
- In non-ipa mode, we need to generate constraints for each
- pointer passed by address. */
- else if (is_gimple_call (t))
- {
- tree fndecl = gimple_call_fndecl (t);
- if (fndecl != NULL_TREE
- && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
- /* ??? All builtins that are handled here need to be handled
- in the alias-oracle query functions explicitly! */
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- /* All the following functions return a pointer to the same object
- as their first argument points to. The functions do not add
- to the ESCAPED solution. The functions make the first argument
- pointed to memory point to what the second argument pointed to
- memory points to. */
- case BUILT_IN_STRCPY:
- case BUILT_IN_STRNCPY:
- case BUILT_IN_BCOPY:
- case BUILT_IN_MEMCPY:
- case BUILT_IN_MEMMOVE:
- case BUILT_IN_MEMPCPY:
- case BUILT_IN_STPCPY:
- case BUILT_IN_STPNCPY:
- case BUILT_IN_STRCAT:
- case BUILT_IN_STRNCAT:
+ case BUILT_IN_MEMSET:
+ case BUILT_IN_MEMSET_CHK:
+ case BUILT_IN_TM_MEMSET:
+ {
+ tree res = gimple_call_lhs (t);
+ tree dest = gimple_call_arg (t, 0);
+ unsigned i;
+ ce_s *lhsp;
+ struct constraint_expr ac;
+ if (res != NULL_TREE)
{
- tree res = gimple_call_lhs (t);
- tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
- == BUILT_IN_BCOPY ? 1 : 0));
- tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
- == BUILT_IN_BCOPY ? 0 : 1));
- if (res != NULL_TREE)
- {
- get_constraint_for (res, &lhsc);
- if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
- || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
- || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
- get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
- else
- get_constraint_for (dest, &rhsc);
- process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
- }
- get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
- get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
- do_deref (&lhsc);
- do_deref (&rhsc);
+ get_constraint_for (res, &lhsc);
+ get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
- return;
+ lhsc.release ();
+ rhsc.release ();
}
- case BUILT_IN_MEMSET:
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
+ do_deref (&lhsc);
+ if (flag_delete_null_pointer_checks
+ && integer_zerop (gimple_call_arg (t, 1)))
{
- tree res = gimple_call_lhs (t);
- tree dest = gimple_call_arg (t, 0);
- unsigned i;
- ce_s *lhsp;
- struct constraint_expr ac;
- if (res != NULL_TREE)
- {
- get_constraint_for (res, &lhsc);
- get_constraint_for (dest, &rhsc);
- process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
- }
- get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
- do_deref (&lhsc);
- if (flag_delete_null_pointer_checks
- && integer_zerop (gimple_call_arg (t, 1)))
- {
- ac.type = ADDRESSOF;
- ac.var = nothing_id;
- }
- else
- {
- ac.type = SCALAR;
- ac.var = integer_id;
- }
- ac.offset = 0;
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
- process_constraint (new_constraint (*lhsp, ac));
- VEC_free (ce_s, heap, lhsc);
- return;
+ ac.type = ADDRESSOF;
+ ac.var = nothing_id;
}
- /* All the following functions do not return pointers, do not
- modify the points-to sets of memory reachable from their
- arguments and do not add to the ESCAPED solution. */
- case BUILT_IN_SINCOS:
- case BUILT_IN_SINCOSF:
- case BUILT_IN_SINCOSL:
- case BUILT_IN_FREXP:
- case BUILT_IN_FREXPF:
- case BUILT_IN_FREXPL:
- case BUILT_IN_GAMMA_R:
- case BUILT_IN_GAMMAF_R:
- case BUILT_IN_GAMMAL_R:
- case BUILT_IN_LGAMMA_R:
- case BUILT_IN_LGAMMAF_R:
- case BUILT_IN_LGAMMAL_R:
- case BUILT_IN_MODF:
- case BUILT_IN_MODFF:
- case BUILT_IN_MODFL:
- case BUILT_IN_REMQUO:
- case BUILT_IN_REMQUOF:
- case BUILT_IN_REMQUOL:
- case BUILT_IN_FREE:
- return;
- /* Trampolines are special - they set up passing the static
- frame. */
- case BUILT_IN_INIT_TRAMPOLINE:
+ else
{
- tree tramp = gimple_call_arg (t, 0);
- tree nfunc = gimple_call_arg (t, 1);
- tree frame = gimple_call_arg (t, 2);
- unsigned i;
- struct constraint_expr lhs, *rhsp;
- if (in_ipa_mode)
- {
- varinfo_t nfi = NULL;
- gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
- nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
- if (nfi)
- {
- lhs = get_function_part_constraint (nfi, fi_static_chain);
- get_constraint_for (frame, &rhsc);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
- process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
-
- /* Make the frame point to the function for
- the trampoline adjustment call. */
- get_constraint_for (tramp, &lhsc);
- do_deref (&lhsc);
- get_constraint_for (nfunc, &rhsc);
- process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, rhsc);
- VEC_free (ce_s, heap, lhsc);
-
- return;
- }
- }
- /* Else fallthru to generic handling which will let
- the frame escape. */
- break;
+ ac.type = SCALAR;
+ ac.var = integer_id;
}
- case BUILT_IN_ADJUST_TRAMPOLINE:
+ ac.offset = 0;
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
+ process_constraint (new_constraint (*lhsp, ac));
+ lhsc.release ();
+ return true;
+ }
+ case BUILT_IN_ASSUME_ALIGNED:
+ {
+ tree res = gimple_call_lhs (t);
+ tree dest = gimple_call_arg (t, 0);
+ if (res != NULL_TREE)
{
- tree tramp = gimple_call_arg (t, 0);
- tree res = gimple_call_lhs (t);
- if (in_ipa_mode && res)
- {
- get_constraint_for (res, &lhsc);
- get_constraint_for (tramp, &rhsc);
- do_deref (&rhsc);
- process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, rhsc);
- VEC_free (ce_s, heap, lhsc);
- }
- return;
+ get_constraint_for (res, &lhsc);
+ get_constraint_for (dest, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ lhsc.release ();
+ rhsc.release ();
}
- /* Variadic argument handling needs to be handled in IPA
- mode as well. */
- case BUILT_IN_VA_START:
+ return true;
+ }
+ /* All the following functions do not return pointers, do not
+ modify the points-to sets of memory reachable from their
+ arguments and do not add to the ESCAPED solution. */
+ case BUILT_IN_SINCOS:
+ case BUILT_IN_SINCOSF:
+ case BUILT_IN_SINCOSL:
+ case BUILT_IN_FREXP:
+ case BUILT_IN_FREXPF:
+ case BUILT_IN_FREXPL:
+ case BUILT_IN_GAMMA_R:
+ case BUILT_IN_GAMMAF_R:
+ case BUILT_IN_GAMMAL_R:
+ case BUILT_IN_LGAMMA_R:
+ case BUILT_IN_LGAMMAF_R:
+ case BUILT_IN_LGAMMAL_R:
+ case BUILT_IN_MODF:
+ case BUILT_IN_MODFF:
+ case BUILT_IN_MODFL:
+ case BUILT_IN_REMQUO:
+ case BUILT_IN_REMQUOF:
+ case BUILT_IN_REMQUOL:
+ case BUILT_IN_FREE:
+ return true;
+ case BUILT_IN_STRDUP:
+ case BUILT_IN_STRNDUP:
+ if (gimple_call_lhs (t))
+ {
+ handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
+ vNULL, fndecl);
+ get_constraint_for_ptr_offset (gimple_call_lhs (t),
+ NULL_TREE, &lhsc);
+ get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
+ NULL_TREE, &rhsc);
+ do_deref (&lhsc);
+ do_deref (&rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ lhsc.release ();
+ rhsc.release ();
+ return true;
+ }
+ break;
+ /* Trampolines are special - they set up passing the static
+ frame. */
+ case BUILT_IN_INIT_TRAMPOLINE:
+ {
+ tree tramp = gimple_call_arg (t, 0);
+ tree nfunc = gimple_call_arg (t, 1);
+ tree frame = gimple_call_arg (t, 2);
+ unsigned i;
+ struct constraint_expr lhs, *rhsp;
+ if (in_ipa_mode)
{
- if (in_ipa_mode)
+ varinfo_t nfi = NULL;
+ gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
+ nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
+ if (nfi)
{
- tree valist = gimple_call_arg (t, 0);
- struct constraint_expr rhs, *lhsp;
- unsigned i;
- /* The va_list gets access to pointers in variadic
- arguments. */
- fi = lookup_vi_for_tree (cfun->decl);
- gcc_assert (fi != NULL);
- get_constraint_for (valist, &lhsc);
+ lhs = get_function_part_constraint (nfi, fi_static_chain);
+ get_constraint_for (frame, &rhsc);
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
+ process_constraint (new_constraint (lhs, *rhsp));
+ rhsc.release ();
+
+ /* Make the frame point to the function for
+ the trampoline adjustment call. */
+ get_constraint_for (tramp, &lhsc);
do_deref (&lhsc);
- rhs = get_function_part_constraint (fi, ~0);
- rhs.type = ADDRESSOF;
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
- process_constraint (new_constraint (*lhsp, rhs));
- VEC_free (ce_s, heap, lhsc);
- /* va_list is clobbered. */
- make_constraint_to (get_call_clobber_vi (t)->id, valist);
- return;
+ get_constraint_for (nfunc, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ rhsc.release ();
+ lhsc.release ();
+
+ return true;
}
- break;
}
- /* va_end doesn't have any effect that matters. */
- case BUILT_IN_VA_END:
- return;
- /* printf-style functions may have hooks to set pointers to
- point to somewhere into the generated string. Leave them
- for a later excercise... */
- default:
- /* Fallthru to general call handling. */;
- }
- if (!in_ipa_mode
- || (fndecl
- && (!(fi = lookup_vi_for_tree (fndecl))
- || !fi->is_fn_info)))
+ /* Else fallthru to generic handling which will let
+ the frame escape. */
+ break;
+ }
+ case BUILT_IN_ADJUST_TRAMPOLINE:
+ {
+ tree tramp = gimple_call_arg (t, 0);
+ tree res = gimple_call_lhs (t);
+ if (in_ipa_mode && res)
+ {
+ get_constraint_for (res, &lhsc);
+ get_constraint_for (tramp, &rhsc);
+ do_deref (&rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ rhsc.release ();
+ lhsc.release ();
+ }
+ return true;
+ }
+ CASE_BUILT_IN_TM_STORE (1):
+ CASE_BUILT_IN_TM_STORE (2):
+ CASE_BUILT_IN_TM_STORE (4):
+ CASE_BUILT_IN_TM_STORE (8):
+ CASE_BUILT_IN_TM_STORE (FLOAT):
+ CASE_BUILT_IN_TM_STORE (DOUBLE):
+ CASE_BUILT_IN_TM_STORE (LDOUBLE):
+ CASE_BUILT_IN_TM_STORE (M64):
+ CASE_BUILT_IN_TM_STORE (M128):
+ CASE_BUILT_IN_TM_STORE (M256):
{
- VEC(ce_s, heap) *rhsc = NULL;
- int flags = gimple_call_flags (t);
+ tree addr = gimple_call_arg (t, 0);
+ tree src = gimple_call_arg (t, 1);
- /* Const functions can return their arguments and addresses
- of global memory but not of escaped memory. */
- if (flags & (ECF_CONST|ECF_NOVOPS))
+ get_constraint_for (addr, &lhsc);
+ do_deref (&lhsc);
+ get_constraint_for (src, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ lhsc.release ();
+ rhsc.release ();
+ return true;
+ }
+ CASE_BUILT_IN_TM_LOAD (1):
+ CASE_BUILT_IN_TM_LOAD (2):
+ CASE_BUILT_IN_TM_LOAD (4):
+ CASE_BUILT_IN_TM_LOAD (8):
+ CASE_BUILT_IN_TM_LOAD (FLOAT):
+ CASE_BUILT_IN_TM_LOAD (DOUBLE):
+ CASE_BUILT_IN_TM_LOAD (LDOUBLE):
+ CASE_BUILT_IN_TM_LOAD (M64):
+ CASE_BUILT_IN_TM_LOAD (M128):
+ CASE_BUILT_IN_TM_LOAD (M256):
+ {
+ tree dest = gimple_call_lhs (t);
+ tree addr = gimple_call_arg (t, 0);
+
+ get_constraint_for (dest, &lhsc);
+ get_constraint_for (addr, &rhsc);
+ do_deref (&rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ lhsc.release ();
+ rhsc.release ();
+ return true;
+ }
+ /* Variadic argument handling needs to be handled in IPA
+ mode as well. */
+ case BUILT_IN_VA_START:
+ {
+ tree valist = gimple_call_arg (t, 0);
+ struct constraint_expr rhs, *lhsp;
+ unsigned i;
+ get_constraint_for (valist, &lhsc);
+ do_deref (&lhsc);
+ /* The va_list gets access to pointers in variadic
+	 arguments, which we know in the case of IPA analysis;
+	 otherwise they are just all nonlocal variables.  */
+ if (in_ipa_mode)
{
- if (gimple_call_lhs (t)
- && could_have_pointers (gimple_call_lhs (t)))
- handle_const_call (t, &rhsc);
+ fi = lookup_vi_for_tree (cfun->decl);
+ rhs = get_function_part_constraint (fi, ~0);
+ rhs.type = ADDRESSOF;
}
- /* Pure functions can return addresses in and of memory
- reachable from their arguments, but they are not an escape
- point for reachable memory of their arguments. */
- else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
- handle_pure_call (t, &rhsc);
else
- handle_rhs_call (t, &rhsc);
- if (gimple_call_lhs (t)
- && could_have_pointers (gimple_call_lhs (t)))
- handle_lhs_call (gimple_call_lhs (t), flags, rhsc, fndecl);
- VEC_free (ce_s, heap, rhsc);
+ {
+ rhs.var = nonlocal_id;
+ rhs.type = ADDRESSOF;
+ rhs.offset = 0;
+ }
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
+ process_constraint (new_constraint (*lhsp, rhs));
+ lhsc.release ();
+ /* va_list is clobbered. */
+ make_constraint_to (get_call_clobber_vi (t)->id, valist);
+ return true;
}
- else
+ /* va_end doesn't have any effect that matters. */
+ case BUILT_IN_VA_END:
+ return true;
+ /* Alternate return. Simply give up for now. */
+ case BUILT_IN_RETURN:
{
- tree lhsop;
- unsigned j;
+ fi = NULL;
+ if (!in_ipa_mode
+ || !(fi = get_vi_for_tree (cfun->decl)))
+ make_constraint_from (get_varinfo (escaped_id), anything_id);
+ else if (in_ipa_mode
+ && fi != NULL)
+ {
+ struct constraint_expr lhs, rhs;
+ lhs = get_function_part_constraint (fi, fi_result);
+ rhs.var = anything_id;
+ rhs.offset = 0;
+ rhs.type = SCALAR;
+ process_constraint (new_constraint (lhs, rhs));
+ }
+ return true;
+ }
+ /* printf-style functions may have hooks to set pointers to
+ point to somewhere into the generated string. Leave them
+	 for a later exercise... */
+ default:
+ /* Fallthru to general call handling. */;
+ }
- fi = get_fi_for_callee (t);
+ return false;
+}
- /* Assign all the passed arguments to the appropriate incoming
- parameters of the function. */
- for (j = 0; j < gimple_call_num_args (t); j++)
- {
- struct constraint_expr lhs ;
- struct constraint_expr *rhsp;
- tree arg = gimple_call_arg (t, j);
+/* Create constraints for the call T. */
- if (!could_have_pointers (arg))
- continue;
+static void
+find_func_aliases_for_call (gimple t)
+{
+ tree fndecl = gimple_call_fndecl (t);
+ vec<ce_s> lhsc = vNULL;
+ vec<ce_s> rhsc = vNULL;
+ varinfo_t fi;
- get_constraint_for (arg, &rhsc);
- lhs = get_function_part_constraint (fi, fi_parm_base + j);
- while (VEC_length (ce_s, rhsc) != 0)
- {
- rhsp = VEC_last (ce_s, rhsc);
- process_constraint (new_constraint (lhs, *rhsp));
- VEC_pop (ce_s, rhsc);
- }
- }
+ if (fndecl != NULL_TREE
+ && DECL_BUILT_IN (fndecl)
+ && find_func_aliases_for_builtin_call (t))
+ return;
+
+ fi = get_fi_for_callee (t);
+ if (!in_ipa_mode
+ || (fndecl && !fi->is_fn_info))
+ {
+ vec<ce_s> rhsc = vNULL;
+ int flags = gimple_call_flags (t);
+
+ /* Const functions can return their arguments and addresses
+ of global memory but not of escaped memory. */
+ if (flags & (ECF_CONST|ECF_NOVOPS))
+ {
+ if (gimple_call_lhs (t))
+ handle_const_call (t, &rhsc);
+ }
+ /* Pure functions can return addresses in and of memory
+ reachable from their arguments, but they are not an escape
+ point for reachable memory of their arguments. */
+ else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
+ handle_pure_call (t, &rhsc);
+ else
+ handle_rhs_call (t, &rhsc);
+ if (gimple_call_lhs (t))
+ handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
+ rhsc.release ();
+ }
+ else
+ {
+ tree lhsop;
+ unsigned j;
+
+ /* Assign all the passed arguments to the appropriate incoming
+ parameters of the function. */
+ for (j = 0; j < gimple_call_num_args (t); j++)
+ {
+ struct constraint_expr lhs ;
+ struct constraint_expr *rhsp;
+ tree arg = gimple_call_arg (t, j);
- /* If we are returning a value, assign it to the result. */
- lhsop = gimple_call_lhs (t);
- if (lhsop
- && could_have_pointers (lhsop))
+ get_constraint_for_rhs (arg, &rhsc);
+ lhs = get_function_part_constraint (fi, fi_parm_base + j);
+ while (rhsc.length () != 0)
{
- struct constraint_expr rhs;
- struct constraint_expr *lhsp;
-
- get_constraint_for (lhsop, &lhsc);
- rhs = get_function_part_constraint (fi, fi_result);
- if (fndecl
- && DECL_RESULT (fndecl)
- && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
- {
- VEC(ce_s, heap) *tem = NULL;
- VEC_safe_push (ce_s, heap, tem, &rhs);
- do_deref (&tem);
- rhs = *VEC_index (ce_s, tem, 0);
- VEC_free(ce_s, heap, tem);
- }
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
- process_constraint (new_constraint (*lhsp, rhs));
+ rhsp = &rhsc.last ();
+ process_constraint (new_constraint (lhs, *rhsp));
+ rhsc.pop ();
}
+ }
+
+ /* If we are returning a value, assign it to the result. */
+ lhsop = gimple_call_lhs (t);
+ if (lhsop)
+ {
+ struct constraint_expr rhs;
+ struct constraint_expr *lhsp;
- /* If we pass the result decl by reference, honor that. */
- if (lhsop
- && fndecl
+ get_constraint_for (lhsop, &lhsc);
+ rhs = get_function_part_constraint (fi, fi_result);
+ if (fndecl
&& DECL_RESULT (fndecl)
&& DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
{
- struct constraint_expr lhs;
- struct constraint_expr *rhsp;
-
- get_constraint_for_address_of (lhsop, &rhsc);
- lhs = get_function_part_constraint (fi, fi_result);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
- process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ vec<ce_s> tem = vNULL;
+ tem.safe_push (rhs);
+ do_deref (&tem);
+ rhs = tem[0];
+ tem.release ();
}
+ FOR_EACH_VEC_ELT (lhsc, j, lhsp)
+ process_constraint (new_constraint (*lhsp, rhs));
+ }
- /* If we use a static chain, pass it along. */
- if (gimple_call_chain (t))
- {
- struct constraint_expr lhs;
- struct constraint_expr *rhsp;
+ /* If we pass the result decl by reference, honor that. */
+ if (lhsop
+ && fndecl
+ && DECL_RESULT (fndecl)
+ && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
+ {
+ struct constraint_expr lhs;
+ struct constraint_expr *rhsp;
- get_constraint_for (gimple_call_chain (t), &rhsc);
- lhs = get_function_part_constraint (fi, fi_static_chain);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
- process_constraint (new_constraint (lhs, *rhsp));
+ get_constraint_for_address_of (lhsop, &rhsc);
+ lhs = get_function_part_constraint (fi, fi_result);
+ FOR_EACH_VEC_ELT (rhsc, j, rhsp)
+ process_constraint (new_constraint (lhs, *rhsp));
+ rhsc.release ();
+ }
+
+ /* If we use a static chain, pass it along. */
+ if (gimple_call_chain (t))
+ {
+ struct constraint_expr lhs;
+ struct constraint_expr *rhsp;
+
+ get_constraint_for (gimple_call_chain (t), &rhsc);
+ lhs = get_function_part_constraint (fi, fi_static_chain);
+ FOR_EACH_VEC_ELT (rhsc, j, rhsp)
+ process_constraint (new_constraint (lhs, *rhsp));
+ }
+ }
+}
+
+/* Walk statement T setting up aliasing constraints according to the
+ references found in T. This function is the main part of the
+ constraint builder. AI points to auxiliary alias information used
+ when building alias sets and computing alias grouping heuristics. */
+
+static void
+find_func_aliases (gimple origt)
+{
+ gimple t = origt;
+ vec<ce_s> lhsc = vNULL;
+ vec<ce_s> rhsc = vNULL;
+ struct constraint_expr *c;
+ varinfo_t fi;
+
+ /* Now build constraints expressions. */
+ if (gimple_code (t) == GIMPLE_PHI)
+ {
+ size_t i;
+ unsigned int j;
+
+ /* For a phi node, assign all the arguments to
+ the result. */
+ get_constraint_for (gimple_phi_result (t), &lhsc);
+ for (i = 0; i < gimple_phi_num_args (t); i++)
+ {
+ tree strippedrhs = PHI_ARG_DEF (t, i);
+
+ STRIP_NOPS (strippedrhs);
+ get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
+
+ FOR_EACH_VEC_ELT (lhsc, j, c)
+ {
+ struct constraint_expr *c2;
+ while (rhsc.length () > 0)
+ {
+ c2 = &rhsc.last ();
+ process_constraint (new_constraint (*c, *c2));
+ rhsc.pop ();
+ }
}
}
}
+ /* In IPA mode, we need to generate constraints to pass call
+ arguments through their calls. There are two cases,
+ either a GIMPLE_CALL returning a value, or just a plain
+ GIMPLE_CALL when we are not.
+
+ In non-ipa mode, we need to generate constraints for each
+ pointer passed by address. */
+ else if (is_gimple_call (t))
+ find_func_aliases_for_call (t);
+
/* Otherwise, just a regular assignment statement. Only care about
operations with pointer result, others are dealt with as escape
points if they have pointer operands. */
- else if (is_gimple_assign (t)
- && could_have_pointers (gimple_assign_lhs (t)))
+ else if (is_gimple_assign (t))
{
/* Otherwise, just a regular assignment statement. */
tree lhsop = gimple_assign_lhs (t);
tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
- if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
+ if (rhsop && TREE_CLOBBER_P (rhsop))
+ /* Ignore clobbers, they don't actually store anything into
+ the LHS. */
+ ;
+ else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
do_structure_copy (lhsop, rhsop);
else
{
- struct constraint_expr temp;
+ enum tree_code code = gimple_assign_rhs_code (t);
+
get_constraint_for (lhsop, &lhsc);
- if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
+ if (code == POINTER_PLUS_EXPR)
get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
gimple_assign_rhs2 (t), &rhsc);
- else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
+ else if (code == BIT_AND_EXPR
+ && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
+ {
+ /* Aligning a pointer via a BIT_AND_EXPR is offsetting
+ the pointer. Handle it by offsetting it by UNKNOWN. */
+ get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
+ NULL_TREE, &rhsc);
+ }
+ else if ((CONVERT_EXPR_CODE_P (code)
&& !(POINTER_TYPE_P (gimple_expr_type (t))
&& !POINTER_TYPE_P (TREE_TYPE (rhsop))))
|| gimple_assign_single_p (t))
- get_constraint_for (rhsop, &rhsc);
+ get_constraint_for_rhs (rhsop, &rhsc);
+ else if (code == COND_EXPR)
+ {
+ /* The result is a merge of both COND_EXPR arms. */
+ vec<ce_s> tmp = vNULL;
+ struct constraint_expr *rhsp;
+ unsigned i;
+ get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
+ get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
+ FOR_EACH_VEC_ELT (tmp, i, rhsp)
+ rhsc.safe_push (*rhsp);
+ tmp.release ();
+ }
+ else if (truth_value_p (code))
+ /* Truth value results are not pointer (parts). Or at least
+ very very unreasonable obfuscation of a part. */
+ ;
else
{
- temp.type = ADDRESSOF;
- temp.var = anything_id;
- temp.offset = 0;
- VEC_safe_push (ce_s, heap, rhsc, &temp);
+ /* All other operations are merges. */
+ vec<ce_s> tmp = vNULL;
+ struct constraint_expr *rhsp;
+ unsigned i, j;
+ get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
+ for (i = 2; i < gimple_num_ops (t); ++i)
+ {
+ get_constraint_for_rhs (gimple_op (t, i), &tmp);
+ FOR_EACH_VEC_ELT (tmp, j, rhsp)
+ rhsc.safe_push (*rhsp);
+ tmp.truncate (0);
+ }
+ tmp.release ();
}
process_all_all_constraints (lhsc, rhsc);
}
&& (!in_ipa_mode
|| DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
make_escape_constraint (rhsop);
- /* If this is a conversion of a non-restrict pointer to a
- restrict pointer track it with a new heapvar. */
- else if (gimple_assign_cast_p (t)
- && POINTER_TYPE_P (TREE_TYPE (rhsop))
- && POINTER_TYPE_P (TREE_TYPE (lhsop))
- && !TYPE_RESTRICT (TREE_TYPE (rhsop))
- && TYPE_RESTRICT (TREE_TYPE (lhsop)))
- make_constraint_from_restrict (get_vi_for_tree (lhsop),
- "CAST_RESTRICT");
- }
- /* For conversions of pointers to non-pointers the pointer escapes. */
- else if (gimple_assign_cast_p (t)
- && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (t)))
- && !POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (t))))
- {
- make_escape_constraint (gimple_assign_rhs1 (t));
}
/* Handle escapes through return. */
else if (gimple_code (t) == GIMPLE_RETURN
- && gimple_return_retval (t) != NULL_TREE
- && could_have_pointers (gimple_return_retval (t)))
+ && gimple_return_retval (t) != NULL_TREE)
{
fi = NULL;
if (!in_ipa_mode
unsigned i;
lhs = get_function_part_constraint (fi, fi_result);
- get_constraint_for (gimple_return_retval (t), &rhsc);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
}
/* The asm may read global memory, so outputs may point to
any global memory. */
- if (op && could_have_pointers (op))
+ if (op)
{
- VEC(ce_s, heap) *lhsc = NULL;
+ vec<ce_s> lhsc = vNULL;
struct constraint_expr rhsc, *lhsp;
unsigned j;
get_constraint_for (op, &lhsc);
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
+ FOR_EACH_VEC_ELT (lhsc, j, lhsp)
process_constraint (new_constraint (*lhsp, rhsc));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
}
}
for (i = 0; i < gimple_asm_ninputs (t); ++i)
/* Strictly we'd only need the constraint to ESCAPED if
the asm clobbers memory, otherwise using something
along the lines of per-call clobbers/uses would be enough. */
- else if (op && could_have_pointers (op))
+ else if (op)
make_escape_constraint (op);
}
}
- VEC_free (ce_s, heap, rhsc);
- VEC_free (ce_s, heap, lhsc);
+ rhsc.release ();
+ lhsc.release ();
}
+/* Generate constraints recording that everything PTR may point to is
+   clobbered by function FI.  */
+
 static void
 process_ipa_clobber (varinfo_t fi, tree ptr)
 {
-  VEC(ce_s, heap) *ptrc = NULL;
+  vec<ce_s> ptrc = vNULL;
   struct constraint_expr *c, lhs;
   unsigned i;
-  get_constraint_for (ptr, &ptrc);
+  get_constraint_for_rhs (ptr, &ptrc);
   lhs = get_function_part_constraint (fi, fi_clobbers);
-  for (i = 0; VEC_iterate (ce_s, ptrc, i, c); i++)
+  /* Add each piece of PTR's constraint set to FI's clobber set.  */
+  FOR_EACH_VEC_ELT (ptrc, i, c)
     process_constraint (new_constraint (lhs, *c));
-  VEC_free (ce_s, heap, ptrc);
+  ptrc.release ();
 }
/* Walk statement T setting up clobber and use constraints according to the
find_func_clobbers (gimple origt)
{
gimple t = origt;
- VEC(ce_s, heap) *lhsc = NULL;
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> lhsc = vNULL;
+ vec<ce_s> rhsc = vNULL;
varinfo_t fi;
/* Add constraints for clobbered/used in IPA mode.
tem = TREE_OPERAND (tem, 0);
if ((DECL_P (tem)
&& !auto_var_in_fn_p (tem, cfun->decl))
- || INDIRECT_REF_P (tem))
+ || INDIRECT_REF_P (tem)
+ || (TREE_CODE (tem) == MEM_REF
+ && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
+ && auto_var_in_fn_p
+ (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
{
struct constraint_expr lhsc, *rhsp;
unsigned i;
lhsc = get_function_part_constraint (fi, fi_clobbers);
get_constraint_for_address_of (lhs, &rhsc);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhsc, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
}
tem = TREE_OPERAND (tem, 0);
if ((DECL_P (tem)
&& !auto_var_in_fn_p (tem, cfun->decl))
- || INDIRECT_REF_P (tem))
+ || INDIRECT_REF_P (tem)
+ || (TREE_CODE (tem) == MEM_REF
+ && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
+ && auto_var_in_fn_p
+ (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
{
struct constraint_expr lhs, *rhsp;
unsigned i;
lhs = get_function_part_constraint (fi, fi_uses);
get_constraint_for_address_of (rhs, &rhsc);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
}
/* For builtins we do not have separate function info. For those
we do not generate escapes for we have to generate clobbers/uses. */
- if (decl
- && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
+ if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
switch (DECL_FUNCTION_CODE (decl))
{
/* The following functions use and clobber memory pointed to
case BUILT_IN_STPNCPY:
case BUILT_IN_STRCAT:
case BUILT_IN_STRNCAT:
+ case BUILT_IN_STRCPY_CHK:
+ case BUILT_IN_STRNCPY_CHK:
+ case BUILT_IN_MEMCPY_CHK:
+ case BUILT_IN_MEMMOVE_CHK:
+ case BUILT_IN_MEMPCPY_CHK:
+ case BUILT_IN_STPCPY_CHK:
+ case BUILT_IN_STPNCPY_CHK:
+ case BUILT_IN_STRCAT_CHK:
+ case BUILT_IN_STRNCAT_CHK:
{
tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
== BUILT_IN_BCOPY ? 1 : 0));
struct constraint_expr *rhsp, *lhsp;
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (lhs, *lhsp));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
lhs = get_function_part_constraint (fi, fi_uses);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
return;
}
/* The following function clobbers memory pointed to by
its argument. */
case BUILT_IN_MEMSET:
+ case BUILT_IN_MEMSET_CHK:
{
tree dest = gimple_call_arg (t, 0);
unsigned i;
ce_s *lhsp;
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (lhs, *lhsp));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
return;
}
/* The following functions clobber their second and third
return;
}
/* The following functions neither read nor clobber memory. */
+ case BUILT_IN_ASSUME_ALIGNED:
case BUILT_IN_FREE:
return;
/* Trampolines are of no interest to us. */
continue;
get_constraint_for_address_of (arg, &rhsc);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
+ FOR_EACH_VEC_ELT (rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
/* Build constraints for propagating clobbers/uses along the
anything_id);
}
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
unsigned has_unknown_size : 1;
+ unsigned must_have_pointers : 1;
+
unsigned may_have_pointers : 1;
unsigned only_restrict_pointers : 1;
};
typedef struct fieldoff fieldoff_s;
-DEF_VEC_O(fieldoff_s);
-DEF_VEC_ALLOC_O(fieldoff_s,heap);
/* qsort comparison function for two fieldoff's PA and PB */
/* Sort a fieldstack according to the field offset and sizes. */
static void
-sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
+sort_fieldstack (vec<fieldoff_s> fieldstack)
{
- qsort (VEC_address (fieldoff_s, fieldstack),
- VEC_length (fieldoff_s, fieldstack),
- sizeof (fieldoff_s),
- fieldoff_compare);
+ fieldstack.qsort (fieldoff_compare);
+}
+
+/* Return true if T is a type that can have subvars, i.e. that can be
+   decomposed into per-field variables for field-sensitive analysis.  */
+
+static inline bool
+type_can_have_subvars (const_tree t)
+{
+  /* Aggregates without overlapping fields can have subvars; unions
+     (whose members overlap) and all other types are treated as a
+     single blob.  */
+  return TREE_CODE (t) == RECORD_TYPE;
 }
/* Return true if V is a tree that we can have subvars for.
if (!DECL_P (v))
return false;
- /* Aggregates without overlapping fields can have subvars. */
- if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
+ return type_can_have_subvars (TREE_TYPE (v));
+}
+
+/* Return true if T is a type that must contain pointers.  */
+
+static bool
+type_must_have_pointers (tree type)
+{
+  if (POINTER_TYPE_P (type))
+    return true;
+
+  /* An array contains pointers iff its element type does.  */
+  if (TREE_CODE (type) == ARRAY_TYPE)
+    return type_must_have_pointers (TREE_TYPE (type));
+
+  /* A function or method can have pointers as arguments, so track
+     those separately.  */
+  if (TREE_CODE (type) == FUNCTION_TYPE
+      || TREE_CODE (type) == METHOD_TYPE)
     return true;
   return false;
 }
+/* Return true if the FIELD_DECL T must contain pointers, judged by
+   its type.  */
+
+static bool
+field_must_have_pointers (tree t)
+{
+  return type_must_have_pointers (TREE_TYPE (t));
+}
+
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
the fields of TYPE onto fieldstack, recording their offsets along
the way.
recursed for. */
static bool
-push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
+push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
HOST_WIDE_INT offset)
{
tree field;
return false;
/* If the vector of fields is growing too big, bail out early.
- Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
+ Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
sure this fails. */
- if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
+ if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
return false;
- for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
if (TREE_CODE (field) == FIELD_DECL)
{
bool push = false;
{
fieldoff_s *pair = NULL;
bool has_unknown_size = false;
+ bool must_have_pointers_p;
- if (!VEC_empty (fieldoff_s, *fieldstack))
- pair = VEC_last (fieldoff_s, *fieldstack);
+ if (!fieldstack->is_empty ())
+ pair = &fieldstack->last ();
+
+ /* If there isn't anything at offset zero, create sth. */
+ if (!pair
+ && offset + foff != 0)
+ {
+ fieldoff_s e = {0, offset + foff, false, false, false, false};
+ pair = fieldstack->safe_push (e);
+ }
if (!DECL_SIZE (field)
|| !host_integerp (DECL_SIZE (field), 1))
has_unknown_size = true;
/* If adjacent fields do not contain pointers merge them. */
+ must_have_pointers_p = field_must_have_pointers (field);
if (pair
- && !pair->may_have_pointers
- && !pair->has_unknown_size
&& !has_unknown_size
- && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff
- && !could_have_pointers (field))
+ && !must_have_pointers_p
+ && !pair->must_have_pointers
+ && !pair->has_unknown_size
+ && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
{
pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
}
else
{
- pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
- pair->offset = offset + foff;
- pair->has_unknown_size = has_unknown_size;
+ fieldoff_s e;
+ e.offset = offset + foff;
+ e.has_unknown_size = has_unknown_size;
if (!has_unknown_size)
- pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
+ e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
else
- pair->size = -1;
- pair->may_have_pointers = could_have_pointers (field);
- pair->only_restrict_pointers
+ e.size = -1;
+ e.must_have_pointers = must_have_pointers_p;
+ e.may_have_pointers = true;
+ e.only_restrict_pointers
= (!has_unknown_size
&& POINTER_TYPE_P (TREE_TYPE (field))
&& TYPE_RESTRICT (TREE_TYPE (field)));
+ fieldstack->safe_push (e);
}
}
/* Capture named arguments for K&R functions. They do not
have a prototype and thus no TYPE_ARG_TYPES. */
- for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
+ for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
++num;
/* Check if the function has variadic arguments. */
/* Creation function node for DECL, using NAME, and return the index
of the variable we've created for the function. */
-static unsigned int
+static varinfo_t
create_function_info_for (tree decl, const char *name)
{
struct function *fn = DECL_STRUCT_FUNCTION (decl);
resultvi->fullsize = vi->fullsize;
resultvi->is_full_var = true;
if (DECL_RESULT (decl))
- resultvi->may_have_pointers = could_have_pointers (DECL_RESULT (decl));
+ resultvi->may_have_pointers = true;
gcc_assert (prev_vi->offset < resultvi->offset);
prev_vi->next = resultvi;
prev_vi = resultvi;
argvi->is_full_var = true;
argvi->fullsize = vi->fullsize;
if (arg)
- argvi->may_have_pointers = could_have_pointers (arg);
+ argvi->may_have_pointers = true;
gcc_assert (prev_vi->offset < argvi->offset);
prev_vi->next = argvi;
prev_vi = argvi;
if (arg)
{
insert_vi_for_tree (arg, argvi);
- arg = TREE_CHAIN (arg);
+ arg = DECL_CHAIN (arg);
}
}
free (tempname);
/* We need sth that can be pointed to for va_start. */
- decl = create_tmp_var_raw (ptr_type_node, name);
- get_var_ann (decl);
+ decl = build_fake_var_decl (ptr_type_node);
argvi = new_var_info (decl, newname);
argvi->offset = fi_parm_base + num_args;
prev_vi = argvi;
}
- return vi->id;
+ return vi;
}
FIELDSTACK is assumed to be sorted by offset. */
static bool
-check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
+check_for_overlaps (vec<fieldoff_s> fieldstack)
{
fieldoff_s *fo = NULL;
unsigned int i;
HOST_WIDE_INT lastoffset = -1;
- for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
+ FOR_EACH_VEC_ELT (fieldstack, i, fo)
{
if (fo->offset == lastoffset)
return true;
varinfo_t vi, newvi;
tree decl_type = TREE_TYPE (decl);
tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
- VEC (fieldoff_s,heap) *fieldstack = NULL;
+ vec<fieldoff_s> fieldstack = vNULL;
fieldoff_s *fo;
unsigned int i;
vi->fullsize = ~0;
vi->is_unknown_size_var = true;
vi->is_full_var = true;
- vi->may_have_pointers = could_have_pointers (decl);
+ vi->may_have_pointers = true;
return vi;
}
push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
- for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
+ for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
if (fo->has_unknown_size
|| fo->offset < 0)
{
}
if (notokay)
- VEC_free (fieldoff_s, heap, fieldstack);
+ fieldstack.release ();
}
/* If we didn't end up collecting sub-variables create a full
variable for the decl. */
- if (VEC_length (fieldoff_s, fieldstack) <= 1
- || VEC_length (fieldoff_s, fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
+ if (fieldstack.length () <= 1
+ || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
{
vi = new_var_info (decl, name);
vi->offset = 0;
- vi->may_have_pointers = could_have_pointers (decl);
+ vi->may_have_pointers = true;
vi->fullsize = TREE_INT_CST_LOW (declsize);
vi->size = vi->fullsize;
vi->is_full_var = true;
- VEC_free (fieldoff_s, heap, fieldstack);
+ fieldstack.release ();
return vi;
}
vi = new_var_info (decl, name);
vi->fullsize = TREE_INT_CST_LOW (declsize);
for (i = 0, newvi = vi;
- VEC_iterate (fieldoff_s, fieldstack, i, fo);
+ fieldstack.iterate (i, &fo);
++i, newvi = newvi->next)
{
const char *newname = "NULL";
newvi->fullsize = vi->fullsize;
newvi->may_have_pointers = fo->may_have_pointers;
newvi->only_restrict_pointers = fo->only_restrict_pointers;
- if (i + 1 < VEC_length (fieldoff_s, fieldstack))
+ if (i + 1 < fieldstack.length ())
newvi->next = new_var_info (decl, name);
}
- VEC_free (fieldoff_s, heap, fieldstack);
+ fieldstack.release ();
return vi;
}
insert_vi_for_tree (decl, vi);
+ if (TREE_CODE (decl) != VAR_DECL)
+ return id;
+
/* Create initial constraints for globals. */
for (; vi; vi = vi->next)
{
if ((POINTER_TYPE_P (TREE_TYPE (decl))
&& TYPE_RESTRICT (TREE_TYPE (decl)))
|| vi->only_restrict_pointers)
- make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
+ {
+ make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
+ continue;
+ }
- /* For escaped variables initialize them from nonlocal. */
+ /* In non-IPA mode the initializer from nonlocal is all we need. */
if (!in_ipa_mode
- || DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
+ || DECL_HARD_REGISTER (decl))
make_copy_constraint (vi, nonlocal_id);
- /* If this is a global variable with an initializer and we are in
- IPA mode generate constraints for it. In non-IPA mode
- the initializer from nonlocal is all we need. */
- if (in_ipa_mode
- && DECL_INITIAL (decl))
+ /* In IPA mode parse the initializer and generate proper constraints
+ for it. */
+ else
{
- VEC (ce_s, heap) *rhsc = NULL;
- struct constraint_expr lhs, *rhsp;
- unsigned i;
- get_constraint_for (DECL_INITIAL (decl), &rhsc);
- lhs.var = vi->id;
- lhs.offset = 0;
- lhs.type = SCALAR;
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
- process_constraint (new_constraint (lhs, *rhsp));
- /* If this is a variable that escapes from the unit
- the initializer escapes as well. */
- if (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
+ struct varpool_node *vnode = varpool_get_node (decl);
+
+ /* For escaped variables initialize them from nonlocal. */
+ if (!varpool_all_refs_explicit_p (vnode))
+ make_copy_constraint (vi, nonlocal_id);
+
+ /* If this is a global variable with an initializer and we are in
+ IPA mode generate constraints for it. */
+ if (DECL_INITIAL (decl)
+ && vnode->analyzed)
{
- lhs.var = escaped_id;
+ vec<ce_s> rhsc = vNULL;
+ struct constraint_expr lhs, *rhsp;
+ unsigned i;
+ get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
+ lhs.var = vi->id;
lhs.offset = 0;
lhs.type = SCALAR;
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
+ /* If this is a variable that escapes from the unit
+ the initializer escapes as well. */
+ if (!varpool_all_refs_explicit_p (vnode))
+ {
+ lhs.var = escaped_id;
+ lhs.offset = 0;
+ lhs.type = SCALAR;
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
+ process_constraint (new_constraint (lhs, *rhsp));
+ }
+ rhsc.release ();
}
- VEC_free (ce_s, heap, rhsc);
}
}
/* Print the points-to solution for VAR to stdout. */
-void
+DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
dump_solution_for_var (stdout, var);
/* For each incoming pointer argument arg, create the constraint ARG
= NONLOCAL or a dummy variable if it is a restrict qualified
passed-by-reference argument. */
- for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
+ for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
{
- varinfo_t p;
-
- if (!could_have_pointers (t))
- continue;
+ varinfo_t p = get_vi_for_tree (t);
/* For restrict qualified pointers to objects passed by
- reference build a real representative for the pointed-to object. */
- if (DECL_BY_REFERENCE (t)
- && POINTER_TYPE_P (TREE_TYPE (t))
- && TYPE_RESTRICT (TREE_TYPE (t)))
+ reference build a real representative for the pointed-to object.
+ Treat restrict qualified references the same. */
+ if (TYPE_RESTRICT (TREE_TYPE (t))
+ && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
+ || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
+ && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
{
struct constraint_expr lhsc, rhsc;
varinfo_t vi;
- tree heapvar = heapvar_lookup (t, 0);
- if (heapvar == NULL_TREE)
- {
- var_ann_t ann;
- heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
- "PARM_NOALIAS");
- DECL_EXTERNAL (heapvar) = 1;
- heapvar_insert (t, 0, heapvar);
- ann = get_var_ann (heapvar);
- ann->is_heapvar = 1;
- }
- if (gimple_referenced_vars (cfun))
- add_referenced_var (heapvar);
- lhsc.var = get_vi_for_tree (t)->id;
+ tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
+ DECL_EXTERNAL (heapvar) = 1;
+ vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
+ insert_vi_for_tree (heapvar, vi);
+ lhsc.var = p->id;
lhsc.type = SCALAR;
lhsc.offset = 0;
- rhsc.var = (vi = get_vi_for_tree (heapvar))->id;
+ rhsc.var = vi->id;
rhsc.type = ADDRESSOF;
rhsc.offset = 0;
process_constraint (new_constraint (lhsc, rhsc));
- vi->is_restrict_var = 1;
+ for (; vi; vi = vi->next)
+ if (vi->may_have_pointers)
+ {
+ if (vi->only_restrict_pointers)
+ make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
+ else
+ make_copy_constraint (vi, nonlocal_id);
+ }
continue;
}
- for (p = get_vi_for_tree (t); p; p = p->next)
- {
- if (p->may_have_pointers)
- make_constraint_from (p, nonlocal_id);
- if (p->only_restrict_pointers)
- make_constraint_from_restrict (p, "PARM_RESTRICT");
- }
if (POINTER_TYPE_P (TREE_TYPE (t))
&& TYPE_RESTRICT (TREE_TYPE (t)))
- make_constraint_from_restrict (get_vi_for_tree (t), "PARM_RESTRICT");
+ make_constraint_from_global_restrict (p, "PARM_RESTRICT");
+ else
+ {
+ for (; p; p = p->next)
+ {
+ if (p->only_restrict_pointers)
+ make_constraint_from_global_restrict (p, "PARM_RESTRICT");
+ else if (p->may_have_pointers)
+ make_constraint_from (p, nonlocal_id);
+ }
+ }
}
/* Add a constraint for a result decl that is passed by reference. */
/* Compute the points-to solution *PT for the variable VI. */
-static void
-find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
+static struct pt_solution
+find_what_var_points_to (varinfo_t orig_vi)
{
unsigned int i;
bitmap_iterator bi;
bitmap finished_solution;
bitmap result;
varinfo_t vi;
-
- memset (pt, 0, sizeof (struct pt_solution));
+ void **slot;
+ struct pt_solution *pt;
/* This variable may have been collapsed, let's get the real
variable. */
vi = get_varinfo (find (orig_vi->id));
+ /* See if we have already computed the solution and return it. */
+ slot = pointer_map_insert (final_solutions, vi);
+ if (*slot != NULL)
+ return *(struct pt_solution *)*slot;
+
+ *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
+ memset (pt, 0, sizeof (struct pt_solution));
+
/* Translate artificial variables into SSA_NAME_PTR_INFO
attributes. */
EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
|| vi->id == integer_id)
pt->anything = 1;
}
- if (vi->is_restrict_var)
- pt->vars_contains_restrict = true;
}
/* Instead of doing extra work, simply do not create
elaborate points-to information for pt_anything pointers. */
- if (pt->anything
- && (orig_vi->is_artificial_var
- || !pt->vars_contains_restrict))
- return;
+ if (pt->anything)
+ return *pt;
/* Share the final set of variables when possible. */
finished_solution = BITMAP_GGC_ALLOC ();
pt->vars = result;
bitmap_clear (finished_solution);
}
+
+ return *pt;
}
/* Given a pointer variable P, fill in its points-to set. */
/* For parameters, get at the points-to set for the actual parm
decl. */
if (TREE_CODE (p) == SSA_NAME
- && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
- && SSA_NAME_IS_DEFAULT_DEF (p))
+ && SSA_NAME_IS_DEFAULT_DEF (p)
+ && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
lookup_p = SSA_NAME_VAR (p);
vi = lookup_vi_for_tree (lookup_p);
return;
pi = get_ptr_info (p);
- find_what_var_points_to (vi, &pi->pt);
+ pi->pt = find_what_var_points_to (vi);
}
it contains restrict tag variables. */
void
-pt_solution_set (struct pt_solution *pt, bitmap vars,
- bool vars_contains_global, bool vars_contains_restrict)
+pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
{
memset (pt, 0, sizeof (struct pt_solution));
pt->vars = vars;
pt->vars_contains_global = vars_contains_global;
- pt->vars_contains_restrict = vars_contains_restrict;
+}
+
+/* Set the points-to solution *PT to point only to the variable VAR.  */
+
+void
+pt_solution_set_var (struct pt_solution *pt, tree var)
+{
+  memset (pt, 0, sizeof (struct pt_solution));
+  pt->vars = BITMAP_GGC_ALLOC ();
+  /* Points-to bitmaps are indexed by DECL_PT_UID, not DECL_UID.  */
+  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
+  /* Record whether the solution may reference global memory.  */
+  pt->vars_contains_global = is_global_var (var);
+}
/* Computes the union of the points-to solutions *DEST and *SRC and
dest->ipa_escaped |= src->ipa_escaped;
dest->null |= src->null;
dest->vars_contains_global |= src->vars_contains_global;
- dest->vars_contains_restrict |= src->vars_contains_restrict;
if (!src->vars)
return;
return true;
}
+/* Return true if the points-to solution *PT points to exactly one
+   variable, storing that variable's uid in *UID.  */
+
+bool
+pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
+{
+  /* Any artificial solution component (anything, nonlocal, escaped,
+     null) makes the set non-singleton, as does a vars bitmap with
+     zero or more than one bit set.  */
+  if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
+      || pt->null || pt->vars == NULL
+      || !bitmap_single_bit_set_p (pt->vars))
+    return false;
+
+  *uid = bitmap_first_set_bit (pt->vars);
+  return true;
+}
+
/* Return true if the points-to solution *PT includes global memory. */
bool
return res;
}
-/* Return true if both points-to solutions PT1 and PT2 for two restrict
- qualified pointers are possibly based on the same pointer. */
-
-bool
-pt_solutions_same_restrict_base (struct pt_solution *pt1,
- struct pt_solution *pt2)
-{
- /* If we deal with points-to solutions of two restrict qualified
- pointers solely rely on the pointed-to variable bitmap intersection.
- For two pointers that are based on each other the bitmaps will
- intersect. */
- if (pt1->vars_contains_restrict
- && pt2->vars_contains_restrict)
- {
- gcc_assert (pt1->vars && pt2->vars);
- return bitmap_intersect_p (pt1->vars, pt2->vars);
- }
-
- return true;
-}
-
/* Dump points-to information to OUTFILE. */
stats.num_implicit_edges);
}
- for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
+ for (i = 0; i < varmap.length (); i++)
{
varinfo_t vi = get_varinfo (i);
if (!vi->may_have_pointers)
/* Debug points-to information to stderr. */
-void
+DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
dump_sa_points_to_info (stderr);
/* This specifically does not use process_constraint because
process_constraint ignores all anything = anything constraints, since all
but this one are redundant. */
- VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
+ constraints.safe_push (new_constraint (lhs, rhs));
/* Create the READONLY variable, used to represent that a variable
points to readonly memory. */
sizeof (struct constraint), 30);
variable_info_pool = create_alloc_pool ("Variable info pool",
sizeof (struct variable_info), 30);
- constraints = VEC_alloc (constraint_t, heap, 8);
- varmap = VEC_alloc (varinfo_t, heap, 8);
+ constraints.create (8);
+ varmap.create (8);
vi_for_tree = pointer_map_create ();
call_stmt_vars = pointer_map_create ();
shared_bitmap_table = htab_create (511, shared_bitmap_hash,
shared_bitmap_eq, free);
init_base_vars ();
+
+ gcc_obstack_init (&fake_var_decl_obstack);
+
+ final_solutions = pointer_map_create ();
+ gcc_obstack_init (&final_solutions_obstack);
}
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
/* Now reallocate the size of the successor list as, and blow away
the predecessor bitmaps. */
- graph->size = VEC_length (varinfo_t, varmap);
+ graph->size = varmap.length ();
graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
free (graph->implicit_preds);
bitmap_obstack_release (&predbitmap_obstack);
}
-/* Initialize the heapvar for statement mapping. */
-
-static void
-init_alias_heapvars (void)
-{
- if (!heapvar_for_stmt)
- heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, heapvar_map_eq,
- NULL);
-}
-
-/* Delete the heapvar for statement mapping. */
-
-void
-delete_alias_heapvars (void)
-{
- if (heapvar_for_stmt)
- htab_delete (heapvar_for_stmt);
- heapvar_for_stmt = NULL;
-}
-
/* Solve the constraint set. */
static void
"\nCollapsing static cycles and doing variable "
"substitution\n");
- init_graph (VEC_length (varinfo_t, varmap) * 2);
+ init_graph (varmap.length () * 2);
if (dump_file)
fprintf (dump_file, "Building predecessor graph\n");
rewrite_constraints (graph, si);
build_succ_graph ();
- free_var_substitution_info (si);
- if (dump_file && (dump_flags & TDF_GRAPH))
- dump_constraint_graph (dump_file);
+ free_var_substitution_info (si);
+ /* Attach complex constraints to graph nodes. */
move_complex_constraints (graph);
if (dump_file)
point. */
remove_preds_and_fake_succs (graph);
+ if (dump_file && (dump_flags & TDF_GRAPH))
+ {
+ fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
+ "in dot format:\n");
+ dump_constraint_graph (dump_file);
+ fprintf (dump_file, "\n\n");
+ }
+
if (dump_file)
fprintf (dump_file, "Solving graph\n");
solve_graph (graph);
+ if (dump_file && (dump_flags & TDF_GRAPH))
+ {
+ fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
+ "in dot format:\n");
+ dump_constraint_graph (dump_file);
+ fprintf (dump_file, "\n\n");
+ }
+
if (dump_file)
dump_sa_points_to_info (dump_file);
}
timevar_push (TV_TREE_PTA);
init_alias_vars ();
- init_alias_heapvars ();
intra_create_variable_infos ();
{
gimple phi = gsi_stmt (gsi);
- if (is_gimple_reg (gimple_phi_result (phi)))
+ if (! virtual_operand_p (gimple_phi_result (phi)))
find_func_aliases (phi);
}
solve_constraints ();
/* Compute the points-to set for ESCAPED used for call-clobber analysis. */
- find_what_var_points_to (get_varinfo (escaped_id),
- &cfun->gimple_df->escaped);
+ cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));
/* Make sure the ESCAPED solution (which is used as placeholder in
other solutions) does not reference itself. This simplifies
cfun->gimple_df->escaped.escaped = 0;
/* Mark escaped HEAP variables as global. */
- for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); ++i)
+ FOR_EACH_VEC_ELT (varmap, i, vi)
if (vi->is_heap_var
- && !vi->is_restrict_var
&& !vi->is_global_var)
DECL_EXTERNAL (vi->decl) = vi->is_global_var
= pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
memset (pt, 0, sizeof (struct pt_solution));
else if ((vi = lookup_call_use_vi (stmt)) != NULL)
{
- find_what_var_points_to (vi, pt);
+ *pt = find_what_var_points_to (vi);
/* Escaped (and thus nonlocal) variables are always
implicitly used by calls. */
/* ??? ESCAPED can be empty even though NONLOCAL
memset (pt, 0, sizeof (struct pt_solution));
else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
{
- find_what_var_points_to (vi, pt);
+ *pt = find_what_var_points_to (vi);
/* Escaped (and thus nonlocal) variables are always
implicitly clobbered by calls. */
/* ??? ESCAPED can be empty even though NONLOCAL
pointer_map_destroy (vi_for_tree);
pointer_map_destroy (call_stmt_vars);
bitmap_obstack_release (&pta_obstack);
- VEC_free (constraint_t, heap, constraints);
+ constraints.release ();
for (i = 0; i < graph->size; i++)
- VEC_free (constraint_t, heap, graph->complex[i]);
+ graph->complex[i].release ();
free (graph->complex);
free (graph->rep);
free (graph->indirect_cycles);
free (graph);
- VEC_free (varinfo_t, heap, varmap);
+ varmap.release ();
free_alloc_pool (variable_info_pool);
free_alloc_pool (constraint_pool);
+
+ obstack_free (&fake_var_decl_obstack, NULL);
+
+ pointer_map_destroy (final_solutions);
+ obstack_free (&final_solutions_obstack, NULL);
}
/* But still dump what we have remaining it. */
dump_alias_info (dump_file);
-
- if (dump_flags & TDF_DETAILS)
- dump_referenced_vars (dump_file);
}
return 0;
/* Debugging dumps. */
if (dump_file)
- {
- dump_alias_info (dump_file);
-
- if (dump_flags & TDF_DETAILS)
- dump_referenced_vars (dump_file);
- }
+ dump_alias_info (dump_file);
/* Deallocate memory used by aliasing data structures and the internal
points-to solution. */
{
GIMPLE_PASS,
"alias", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
gate_tree_pta, /* gate */
NULL, /* execute */
NULL, /* sub */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
+ TODO_rebuild_alias /* todo_flags_finish */
}
};
{
GIMPLE_PASS,
"ealias", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
gate_tree_pta, /* gate */
NULL, /* execute */
NULL, /* sub */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
+ TODO_rebuild_alias /* todo_flags_finish */
}
};
return (optimize
&& flag_ipa_pta
/* Don't bother doing anything if the program has errors. */
- && !(errorcount || sorrycount));
+ && !seen_error ());
}
/* IPA PTA solutions for ESCAPED. */
struct pt_solution ipa_escaped_pt
- = { true, false, false, false, false, false, false, NULL };
+ = { true, false, false, false, false, false, NULL };
+
+/* Associate NODE with the function varinfo DATA.  Worker for
+   cgraph_for_node_and_aliases.  */
+
+static bool
+associate_varinfo_to_alias (struct cgraph_node *node, void *data)
+{
+  /* Only aliases and thunks need the extra tree -> varinfo mapping;
+     the function node itself already has its own varinfo.  */
+  if (node->alias || node->thunk.thunk_p)
+    insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
+  /* NOTE(review): returning false presumably continues the walk over
+     remaining aliases -- confirm against cgraph_for_node_and_aliases.  */
+  return false;
+}
/* Execute the driver for IPA PTA. */
static unsigned int
in_ipa_mode = 1;
- init_alias_heapvars ();
init_alias_vars ();
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ dump_symtab (dump_file);
+ fprintf (dump_file, "\n");
+ }
+
/* Build the constraints. */
- for (node = cgraph_nodes; node; node = node->next)
+ FOR_EACH_DEFINED_FUNCTION (node)
{
+ varinfo_t vi;
/* Nodes without a body are not interesting. Especially do not
visit clones at this point for now - we get duplicate decls
there for inline clones at least. */
- if (!gimple_has_body_p (node->decl)
- || node->clone_of)
+ if (!cgraph_function_with_gimple_body_p (node))
continue;
- create_function_info_for (node->decl,
- cgraph_node_name (node));
+ gcc_assert (!node->clone_of);
+
+ vi = create_function_info_for (node->symbol.decl,
+ alias_get_name (node->symbol.decl));
+ cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
}
/* Create constraints for global variables and their initializers. */
- for (var = varpool_nodes; var; var = var->next)
- get_vi_for_tree (var->decl);
+ FOR_EACH_VARIABLE (var)
+ {
+ if (var->alias)
+ continue;
+
+ get_vi_for_tree (var->symbol.decl);
+ }
if (dump_file)
{
dump_constraints (dump_file, 0);
fprintf (dump_file, "\n");
}
- from = VEC_length (constraint_t, constraints);
+ from = constraints.length ();
- for (node = cgraph_nodes; node; node = node->next)
+ FOR_EACH_DEFINED_FUNCTION (node)
{
struct function *func;
basic_block bb;
- tree old_func_decl;
/* Nodes without a body are not interesting. */
- if (!gimple_has_body_p (node->decl)
- || node->clone_of)
+ if (!cgraph_function_with_gimple_body_p (node))
continue;
if (dump_file)
- fprintf (dump_file,
- "Generating constraints for %s\n",
- cgraph_node_name (node));
+ {
+ fprintf (dump_file,
+ "Generating constraints for %s", cgraph_node_name (node));
+ if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
+ fprintf (dump_file, " (%s)",
+ IDENTIFIER_POINTER
+ (DECL_ASSEMBLER_NAME (node->symbol.decl)));
+ fprintf (dump_file, "\n");
+ }
- func = DECL_STRUCT_FUNCTION (node->decl);
- old_func_decl = current_function_decl;
+ func = DECL_STRUCT_FUNCTION (node->symbol.decl);
push_cfun (func);
- current_function_decl = node->decl;
- /* For externally visible functions use local constraints for
- their arguments. For local functions we see all callers
- and thus do not need initial constraints for parameters. */
- if (node->local.externally_visible)
- intra_create_variable_infos ();
+ /* For externally visible or attribute used annotated functions use
+ local constraints for their arguments.
+ For local functions we see all callers and thus do not need initial
+ constraints for parameters. */
+ if (node->symbol.used_from_other_partition
+ || node->symbol.externally_visible
+ || node->symbol.force_output)
+ {
+ intra_create_variable_infos ();
+
+ /* We also need to make function return values escape. Nothing
+ escapes by returning from main though. */
+ if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
+ {
+ varinfo_t fi, rvi;
+ fi = lookup_vi_for_tree (node->symbol.decl);
+ rvi = first_vi_for_offset (fi, fi_result);
+ if (rvi && rvi->offset == fi_result)
+ {
+ struct constraint_expr includes;
+ struct constraint_expr var;
+ includes.var = escaped_id;
+ includes.offset = 0;
+ includes.type = SCALAR;
+ var.var = rvi->id;
+ var.offset = 0;
+ var.type = SCALAR;
+ process_constraint (new_constraint (includes, var));
+ }
+ }
+ }
/* Build constriants for the function body. */
FOR_EACH_BB_FN (bb, func)
{
gimple phi = gsi_stmt (gsi);
- if (is_gimple_reg (gimple_phi_result (phi)))
+ if (! virtual_operand_p (gimple_phi_result (phi)))
find_func_aliases (phi);
}
}
}
- current_function_decl = old_func_decl;
pop_cfun ();
if (dump_file)
dump_constraints (dump_file, from);
fprintf (dump_file, "\n");
}
- from = VEC_length (constraint_t, constraints);
+ from = constraints.length ();
}
/* From the constraints compute the points-to sets. */
??? Note that the computed escape set is not correct
for the whole unit as we fail to consider graph edges to
externally visible functions. */
- find_what_var_points_to (get_varinfo (escaped_id), &ipa_escaped_pt);
+ ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));
/* Make sure the ESCAPED solution (which is used as placeholder in
other solutions) does not reference itself. This simplifies
ipa_escaped_pt.ipa_escaped = 0;
/* Assign the points-to sets to the SSA names in the unit. */
- for (node = cgraph_nodes; node; node = node->next)
+ FOR_EACH_DEFINED_FUNCTION (node)
{
tree ptr;
struct function *fn;
struct cgraph_edge *e;
/* Nodes without a body are not interesting. */
- if (!gimple_has_body_p (node->decl)
- || node->clone_of)
+ if (!cgraph_function_with_gimple_body_p (node))
continue;
- fn = DECL_STRUCT_FUNCTION (node->decl);
+ fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
/* Compute the points-to sets for pointer SSA_NAMEs. */
- for (i = 0; VEC_iterate (tree, fn->gimple_df->ssa_names, i, ptr); ++i)
+ FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
{
if (ptr
&& POINTER_TYPE_P (TREE_TYPE (ptr)))
}
/* Compute the call-use and call-clobber sets for all direct calls. */
- fi = lookup_vi_for_tree (node->decl);
+ fi = lookup_vi_for_tree (node->symbol.decl);
gcc_assert (fi->is_fn_info);
- find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
- &clobbers);
- find_what_var_points_to (first_vi_for_offset (fi, fi_uses), &uses);
+ clobbers
+ = find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
+ uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
for (e = node->callers; e; e = e->next_caller)
{
if (!e->call_stmt)
memset (pt, 0, sizeof (struct pt_solution));
else if ((vi = lookup_call_use_vi (stmt)) != NULL)
{
- find_what_var_points_to (vi, pt);
+ *pt = find_what_var_points_to (vi);
/* Escaped (and thus nonlocal) variables are always
implicitly used by calls. */
/* ??? ESCAPED can be empty even though NONLOCAL
memset (pt, 0, sizeof (struct pt_solution));
else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
{
- find_what_var_points_to (vi, pt);
+ *pt = find_what_var_points_to (vi);
/* Escaped (and thus nonlocal) variables are always
implicitly clobbered by calls. */
/* ??? ESCAPED can be empty even though NONLOCAL
if (!uses->anything)
{
- find_what_var_points_to
- (first_vi_for_offset (vi, fi_uses), &sol);
+ sol = find_what_var_points_to
+ (first_vi_for_offset (vi, fi_uses));
pt_solution_ior_into (uses, &sol);
}
if (!clobbers->anything)
{
- find_what_var_points_to
- (first_vi_for_offset (vi, fi_clobbers), &sol);
+ sol = find_what_var_points_to
+ (first_vi_for_offset (vi, fi_clobbers));
pt_solution_ior_into (clobbers, &sol);
}
}
{
SIMPLE_IPA_PASS,
"pta", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
gate_ipa_pta, /* gate */
ipa_pta_execute, /* execute */
NULL, /* sub */
TODO_update_ssa /* todo_flags_finish */
}
};
-
-
-#include "gt-tree-ssa-structalias.h"