/* Tree based points-to analysis
- Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
- Free Software Foundation, Inc.
+ Copyright (C) 2005-2013 Free Software Foundation, Inc.
Contributed by Daniel Berlin <dberlin@dberlin.org>
This file is part of GCC.
#include "bitmap.h"
#include "flags.h"
#include "basic-block.h"
-#include "output.h"
#include "tree.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "function.h"
#include "cgraph.h"
#include "tree-pass.h"
-#include "timevar.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "params.h"
struct constraint;
typedef struct constraint *constraint_t;
-DEF_VEC_P(constraint_t);
-DEF_VEC_ALLOC_P(constraint_t,heap);
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
if (a) \
/* True if this is a heap variable. */
unsigned int is_heap_var : 1;
- /* True if this is a variable tracking a restrict pointer source. */
- unsigned int is_restrict_var : 1;
-
/* True if this field may contain pointers. */
unsigned int may_have_pointers : 1;
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
+static inline bool type_can_have_subvars (const_tree);
/* Pool of variable info structures. */
static alloc_pool variable_info_pool;
-DEF_VEC_P(varinfo_t);
-
-DEF_VEC_ALLOC_P(varinfo_t, heap);
+/* Map varinfo to final pt_solution. */
+static pointer_map_t *final_solutions;
+struct obstack final_solutions_obstack;
/* Table of variable info structures for constraint variables.
Indexed directly by variable info id. */
-static VEC(varinfo_t,heap) *varmap;
+static vec<varinfo_t> varmap;
/* Return the varmap element N.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  /* varmap is indexed directly by variable info id (varinfo->id == n).  */
  return varmap[n];
}
/* Static IDs for the special variables. */
static varinfo_t
new_var_info (tree t, const char *name)
{
- unsigned index = VEC_length (varinfo_t, varmap);
+ unsigned index = varmap.length ();
varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
ret->id = index;
ret->is_unknown_size_var = false;
ret->is_full_var = (t == NULL_TREE);
ret->is_heap_var = false;
- ret->is_restrict_var = false;
ret->may_have_pointers = true;
ret->only_restrict_pointers = false;
ret->is_global_var = (t == NULL_TREE);
stats.total_vars++;
- VEC_safe_push (varinfo_t, heap, varmap, ret);
+ varmap.safe_push (ret);
return ret;
}
#define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
typedef struct constraint_expr ce_s;
-DEF_VEC_O(ce_s);
-DEF_VEC_ALLOC_O(ce_s, heap);
-static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool, bool);
-static void get_constraint_for (tree, VEC(ce_s, heap) **);
-static void get_constraint_for_rhs (tree, VEC(ce_s, heap) **);
-static void do_deref (VEC (ce_s, heap) **);
+static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
+static void get_constraint_for (tree, vec<ce_s> *);
+static void get_constraint_for_rhs (tree, vec<ce_s> *);
+static void do_deref (vec<ce_s> *);
/* Our set constraints are made up of two constraint expressions, one
LHS, and one RHS.
/* List of constraints that we use to build the constraint graph from. */
-static VEC(constraint_t,heap) *constraints;
+static vec<constraint_t> constraints;
static alloc_pool constraint_pool;
/* The constraint graph is represented as an array of bitmaps
/* Vector of complex constraints for each graph node. Complex
constraints are those involving dereferences or offsets that are
not 0. */
- VEC(constraint_t,heap) **complex;
+ vec<constraint_t> *complex;
};
static constraint_graph_t graph;
cycle finding, we create nodes to represent dereferences and
address taken constraints. These represent where these start and
end. */
-#define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
+#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
/* Return the representative node for NODE, if NODE has been unioned
{
int i;
constraint_t c;
- for (i = from; VEC_iterate (constraint_t, constraints, i, c); i++)
+ for (i = from; constraints.iterate (i, &c); i++)
if (c)
{
dump_constraint (file, c);
fprintf (file, "\"%s\"", get_varinfo (i)->name);
else
fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
- if (graph->complex[i])
+ if (graph->complex[i].exists ())
{
unsigned j;
constraint_t c;
fprintf (file, " [label=\"\\N\\n");
- for (j = 0; VEC_iterate (constraint_t, graph->complex[i], j, c); ++j)
+ for (j = 0; graph->complex[i].iterate (j, &c); ++j)
{
dump_constraint (file, c);
fprintf (file, "\\l");
arbitrary, but consistent, in order to give them an ordering. */
static bool
-constraint_less (const constraint_t a, const constraint_t b)
+constraint_less (const constraint_t &a, const constraint_t &b)
{
if (constraint_expr_less (a->lhs, b->lhs))
return true;
/* Find a constraint LOOKFOR in the sorted constraint vector VEC */
static constraint_t
-constraint_vec_find (VEC(constraint_t,heap) *vec,
+constraint_vec_find (vec<constraint_t> vec,
struct constraint lookfor)
{
unsigned int place;
constraint_t found;
- if (vec == NULL)
+ if (!vec.exists ())
return NULL;
- place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
- if (place >= VEC_length (constraint_t, vec))
+ place = vec.lower_bound (&lookfor, constraint_less);
+ if (place >= vec.length ())
return NULL;
- found = VEC_index (constraint_t, vec, place);
+ found = vec[place];
if (!constraint_equal (*found, lookfor))
return NULL;
return found;
/* Union two constraint vectors, TO and FROM. Put the result in TO. */
static void
-constraint_set_union (VEC(constraint_t,heap) **to,
- VEC(constraint_t,heap) **from)
+constraint_set_union (vec<constraint_t> *to,
+ vec<constraint_t> *from)
{
int i;
constraint_t c;
- FOR_EACH_VEC_ELT (constraint_t, *from, i, c)
+ FOR_EACH_VEC_ELT (*from, i, c)
{
if (constraint_vec_find (*to, *c) == NULL)
{
- unsigned int place = VEC_lower_bound (constraint_t, *to, c,
- constraint_less);
- VEC_safe_insert (constraint_t, heap, *to, place, c);
+ unsigned int place = to->lower_bound (c, constraint_less);
+ to->safe_insert (place, c);
}
}
}
insert_into_complex (constraint_graph_t graph,
unsigned int var, constraint_t c)
{
- VEC (constraint_t, heap) *complex = graph->complex[var];
- unsigned int place = VEC_lower_bound (constraint_t, complex, c,
- constraint_less);
+ vec<constraint_t> complex = graph->complex[var];
+ unsigned int place = complex.lower_bound (c, constraint_less);
/* Only insert constraints that do not already exist. */
- if (place >= VEC_length (constraint_t, complex)
- || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
- VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
+ if (place >= complex.length ()
+ || !constraint_equal (*c, *complex[place]))
+ graph->complex[var].safe_insert (place, c);
}
gcc_assert (find (from) == to);
/* Move all complex constraints from src node into to node */
- FOR_EACH_VEC_ELT (constraint_t, graph->complex[from], i, c)
+ FOR_EACH_VEC_ELT (graph->complex[from], i, c)
{
/* In complex constraints for node src, we may have either
a = *src, and *src = a, or an offseted constraint which are
c->rhs.var = to;
}
constraint_set_union (&graph->complex[to], &graph->complex[from]);
- VEC_free (constraint_t, heap, graph->complex[from]);
- graph->complex[from] = NULL;
+ graph->complex[from].release ();
}
graph->succs = XCNEWVEC (bitmap, graph->size);
graph->indirect_cycles = XNEWVEC (int, graph->size);
graph->rep = XNEWVEC (unsigned int, graph->size);
- graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
+ /* ??? Macros do not support template types with multiple arguments,
+ so we use a typedef to work around it. */
+ typedef vec<constraint_t> vec_constraint_t_heap;
+ graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
graph->pe = XCNEWVEC (unsigned int, graph->size);
graph->pe_rep = XNEWVEC (int, graph->size);
graph->eq_rep = XNEWVEC (int, graph->size);
graph->direct_nodes = sbitmap_alloc (graph->size);
graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
- sbitmap_zero (graph->direct_nodes);
+ bitmap_clear (graph->direct_nodes);
for (j = 0; j < FIRST_REF_NODE; j++)
{
if (!get_varinfo (j)->is_special_var)
- SET_BIT (graph->direct_nodes, j);
+ bitmap_set_bit (graph->direct_nodes, j);
}
for (j = 0; j < graph->size; j++)
graph->eq_rep[j] = -1;
- for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
+ for (j = 0; j < varmap.length (); j++)
graph->indirect_cycles[j] = -1;
- FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
+ FOR_EACH_VEC_ELT (constraints, i, c)
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
else
- RESET_BIT (graph->direct_nodes, lhsvar);
+ bitmap_clear_bit (graph->direct_nodes, lhsvar);
}
else if (rhs.type == ADDRESSOF)
{
add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
/* All related variables are no longer direct nodes. */
- RESET_BIT (graph->direct_nodes, rhsvar);
+ bitmap_clear_bit (graph->direct_nodes, rhsvar);
v = get_varinfo (rhsvar);
if (!v->is_full_var)
{
v = lookup_vi_for_tree (v->decl);
do
{
- RESET_BIT (graph->direct_nodes, v->id);
+ bitmap_clear_bit (graph->direct_nodes, v->id);
v = v->next;
}
while (v != NULL);
else if (lhs.offset != 0 || rhs.offset != 0)
{
if (rhs.offset != 0)
- RESET_BIT (graph->direct_nodes, lhs.var);
+ bitmap_clear_bit (graph->direct_nodes, lhs.var);
else if (lhs.offset != 0)
- RESET_BIT (graph->direct_nodes, rhs.var);
+ bitmap_clear_bit (graph->direct_nodes, rhs.var);
}
}
}
unsigned i, t;
constraint_t c;
- FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
+ FOR_EACH_VEC_ELT (constraints, i, c)
{
struct constraint_expr lhs;
struct constraint_expr rhs;
t = find (storedanything_id);
for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
{
- if (!TEST_BIT (graph->direct_nodes, i)
+ if (!bitmap_bit_p (graph->direct_nodes, i)
&& get_varinfo (i)->may_have_pointers)
add_graph_edge (graph, find (i), t);
}
unsigned int *dfs;
unsigned int *node_mapping;
int current_index;
- VEC(unsigned,heap) *scc_stack;
+ vec<unsigned> scc_stack;
};
bitmap_iterator bi;
unsigned int my_dfs;
- SET_BIT (si->visited, n);
+ bitmap_set_bit (si->visited, n);
si->dfs[n] = si->current_index ++;
my_dfs = si->dfs[n];
break;
w = find (i);
- if (TEST_BIT (si->deleted, w))
+ if (bitmap_bit_p (si->deleted, w))
continue;
- if (!TEST_BIT (si->visited, w))
+ if (!bitmap_bit_p (si->visited, w))
scc_visit (graph, si, w);
{
unsigned int t = find (w);
/* See if any components have been identified. */
if (si->dfs[n] == my_dfs)
{
- if (VEC_length (unsigned, si->scc_stack) > 0
- && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
+ if (si->scc_stack.length () > 0
+ && si->dfs[si->scc_stack.last ()] >= my_dfs)
{
bitmap scc = BITMAP_ALLOC (NULL);
unsigned int lowest_node;
bitmap_set_bit (scc, n);
- while (VEC_length (unsigned, si->scc_stack) != 0
- && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
+ while (si->scc_stack.length () != 0
+ && si->dfs[si->scc_stack.last ()] >= my_dfs)
{
- unsigned int w = VEC_pop (unsigned, si->scc_stack);
+ unsigned int w = si->scc_stack.pop ();
bitmap_set_bit (scc, w);
}
}
}
}
- SET_BIT (si->deleted, n);
+ bitmap_set_bit (si->deleted, n);
}
else
- VEC_safe_push (unsigned, heap, si->scc_stack, n);
+ si->scc_stack.safe_push (n);
}
/* Unify node FROM into node TO, updating the changed count if
sbitmap visited;
/* Array that stores the topological order of the graph, *in
reverse*. */
- VEC(unsigned,heap) *topo_order;
+ vec<unsigned> topo_order;
};
size_t size = graph->size;
struct topo_info *ti = XNEW (struct topo_info);
ti->visited = sbitmap_alloc (size);
- sbitmap_zero (ti->visited);
- ti->topo_order = VEC_alloc (unsigned, heap, 1);
+ bitmap_clear (ti->visited);
+ ti->topo_order.create (1);
return ti;
}
free_topo_info (struct topo_info *ti)
{
sbitmap_free (ti->visited);
- VEC_free (unsigned, heap, ti->topo_order);
+ ti->topo_order.release ();
free (ti);
}
bitmap_iterator bi;
unsigned int j;
- SET_BIT (ti->visited, n);
+ bitmap_set_bit (ti->visited, n);
if (graph->succs[n])
EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
{
- if (!TEST_BIT (ti->visited, j))
+ if (!bitmap_bit_p (ti->visited, j))
topo_visit (graph, ti, j);
}
- VEC_safe_push (unsigned, heap, ti->topo_order, n);
+ ti->topo_order.safe_push (n);
}
/* Process a constraint C that represents x = *(y + off), using DELTA as the
si->current_index = 0;
si->visited = sbitmap_alloc (size);
- sbitmap_zero (si->visited);
+ bitmap_clear (si->visited);
si->deleted = sbitmap_alloc (size);
- sbitmap_zero (si->deleted);
+ bitmap_clear (si->deleted);
si->node_mapping = XNEWVEC (unsigned int, size);
si->dfs = XCNEWVEC (unsigned int, size);
for (i = 0; i < size; i++)
si->node_mapping[i] = i;
- si->scc_stack = VEC_alloc (unsigned, heap, 1);
+ si->scc_stack.create (1);
return si;
}
sbitmap_free (si->deleted);
free (si->node_mapping);
free (si->dfs);
- VEC_free (unsigned, heap, si->scc_stack);
+ si->scc_stack.release ();
free (si);
}
struct scc_info *si = init_scc_info (size);
for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
- if (!TEST_BIT (si->visited, i) && find (i) == i)
+ if (!bitmap_bit_p (si->visited, i) && find (i) == i)
scc_visit (graph, si, i);
free_scc_info (si);
unsigned int size = graph->size;
for (i = 0; i != size; ++i)
- if (!TEST_BIT (ti->visited, i) && find (i) == i)
+ if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
topo_visit (graph, ti, i);
}
&& bitmap_equal_p (eql1->labels, eql2->labels));
}
-/* Lookup a equivalence class in TABLE by the bitmap of LABELS it
- contains. */
+/* Lookup a equivalence class in TABLE by the bitmap of LABELS with
+ hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
+ is equivalent to. */
-static unsigned int
-equiv_class_lookup (htab_t table, bitmap labels)
+static equiv_class_label *
+equiv_class_lookup_or_add (htab_t table, bitmap labels)
{
- void **slot;
- struct equiv_class_label ecl;
+ equiv_class_label **slot;
+ equiv_class_label ecl;
ecl.labels = labels;
ecl.hashcode = bitmap_hash (labels);
+ slot = (equiv_class_label **) htab_find_slot_with_hash (table, &ecl,
+ ecl.hashcode, INSERT);
+ if (!*slot)
+ {
+ *slot = XNEW (struct equiv_class_label);
+ (*slot)->labels = labels;
+ (*slot)->hashcode = ecl.hashcode;
+ (*slot)->equivalence_class = 0;
+ }
- slot = htab_find_slot_with_hash (table, &ecl,
- ecl.hashcode, NO_INSERT);
- if (!slot)
- return 0;
- else
- return ((equiv_class_label_t) *slot)->equivalence_class;
-}
-
-
-/* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
- to TABLE. */
-
-static void
-equiv_class_add (htab_t table, unsigned int equivalence_class,
- bitmap labels)
-{
- void **slot;
- equiv_class_label_t ecl = XNEW (struct equiv_class_label);
-
- ecl->labels = labels;
- ecl->equivalence_class = equivalence_class;
- ecl->hashcode = bitmap_hash (labels);
-
- slot = htab_find_slot_with_hash (table, ecl,
- ecl->hashcode, INSERT);
- gcc_assert (!*slot);
- *slot = (void *) ecl;
+ return *slot;
}
/* Perform offline variable substitution.
unsigned int my_dfs;
gcc_assert (si->node_mapping[n] == n);
- SET_BIT (si->visited, n);
+ bitmap_set_bit (si->visited, n);
si->dfs[n] = si->current_index ++;
my_dfs = si->dfs[n];
{
unsigned int w = si->node_mapping[i];
- if (TEST_BIT (si->deleted, w))
+ if (bitmap_bit_p (si->deleted, w))
continue;
- if (!TEST_BIT (si->visited, w))
+ if (!bitmap_bit_p (si->visited, w))
condense_visit (graph, si, w);
{
unsigned int t = si->node_mapping[w];
{
unsigned int w = si->node_mapping[i];
- if (TEST_BIT (si->deleted, w))
+ if (bitmap_bit_p (si->deleted, w))
continue;
- if (!TEST_BIT (si->visited, w))
+ if (!bitmap_bit_p (si->visited, w))
condense_visit (graph, si, w);
{
unsigned int t = si->node_mapping[w];
/* See if any components have been identified. */
if (si->dfs[n] == my_dfs)
{
- while (VEC_length (unsigned, si->scc_stack) != 0
- && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
+ while (si->scc_stack.length () != 0
+ && si->dfs[si->scc_stack.last ()] >= my_dfs)
{
- unsigned int w = VEC_pop (unsigned, si->scc_stack);
+ unsigned int w = si->scc_stack.pop ();
si->node_mapping[w] = n;
- if (!TEST_BIT (graph->direct_nodes, w))
- RESET_BIT (graph->direct_nodes, n);
+ if (!bitmap_bit_p (graph->direct_nodes, w))
+ bitmap_clear_bit (graph->direct_nodes, n);
/* Unify our nodes. */
if (graph->preds[w])
graph->points_to[w]);
}
}
- SET_BIT (si->deleted, n);
+ bitmap_set_bit (si->deleted, n);
}
else
- VEC_safe_push (unsigned, heap, si->scc_stack, n);
+ si->scc_stack.safe_push (n);
}
/* Label pointer equivalences. */
static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
- unsigned int i;
+ unsigned int i, first_pred;
bitmap_iterator bi;
- SET_BIT (si->visited, n);
- if (!graph->points_to[n])
- graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
+ bitmap_set_bit (si->visited, n);
/* Label and union our incoming edges's points to sets. */
+ first_pred = -1U;
EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
{
unsigned int w = si->node_mapping[i];
- if (!TEST_BIT (si->visited, w))
+ if (!bitmap_bit_p (si->visited, w))
label_visit (graph, si, w);
/* Skip unused edges */
continue;
if (graph->points_to[w])
- bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
+ {
+ if (!graph->points_to[n])
+ {
+ if (first_pred == -1U)
+ first_pred = w;
+ else
+ {
+ graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
+ bitmap_ior (graph->points_to[n],
+ graph->points_to[first_pred],
+ graph->points_to[w]);
+ }
+ }
+ else
+ bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
+ }
+ }
+
+ /* Indirect nodes get fresh variables and a new pointer equiv class. */
+ if (!bitmap_bit_p (graph->direct_nodes, n))
+ {
+ if (!graph->points_to[n])
+ {
+ graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
+ if (first_pred != -1U)
+ bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
+ }
+ bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
+ graph->pointer_label[n] = pointer_equiv_class++;
+ equiv_class_label_t ecl;
+ ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
+ graph->points_to[n]);
+ ecl->equivalence_class = graph->pointer_label[n];
+ return;
+ }
+
+ /* If there was only a single non-empty predecessor the pointer equiv
+ class is the same. */
+ if (!graph->points_to[n])
+ {
+ if (first_pred != -1U)
+ {
+ graph->pointer_label[n] = graph->pointer_label[first_pred];
+ graph->points_to[n] = graph->points_to[first_pred];
+ }
+ return;
}
- /* Indirect nodes get fresh variables. */
- if (!TEST_BIT (graph->direct_nodes, n))
- bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
if (!bitmap_empty_p (graph->points_to[n]))
{
- unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
- graph->points_to[n]);
- if (!label)
+ equiv_class_label_t ecl;
+ ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
+ graph->points_to[n]);
+ if (ecl->equivalence_class == 0)
+ ecl->equivalence_class = pointer_equiv_class++;
+ else
{
- label = pointer_equiv_class++;
- equiv_class_add (pointer_equiv_class_table,
- label, graph->points_to[n]);
+ BITMAP_FREE (graph->points_to[n]);
+ graph->points_to[n] = ecl->labels;
}
- graph->pointer_label[n] = label;
+ graph->pointer_label[n] = ecl->equivalence_class;
}
}
/* Condense the nodes, which means to find SCC's, count incoming
predecessors, and unite nodes in SCC's. */
for (i = 0; i < FIRST_REF_NODE; i++)
- if (!TEST_BIT (si->visited, si->node_mapping[i]))
+ if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
condense_visit (graph, si, si->node_mapping[i]);
- sbitmap_zero (si->visited);
+ bitmap_clear (si->visited);
/* Actually the label the nodes for pointer equivalences */
for (i = 0; i < FIRST_REF_NODE; i++)
- if (!TEST_BIT (si->visited, si->node_mapping[i]))
+ if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
label_visit (graph, si, si->node_mapping[i]);
/* Calculate location equivalence labels. */
bitmap pointed_by;
bitmap_iterator bi;
unsigned int j;
- unsigned int label;
if (!graph->pointed_by[i])
continue;
/* Look up the location equivalence label if one exists, or make
one otherwise. */
- label = equiv_class_lookup (location_equiv_class_table,
- pointed_by);
- if (label == 0)
- {
- label = location_equiv_class++;
- equiv_class_add (location_equiv_class_table,
- label, pointed_by);
- }
+ equiv_class_label_t ecl;
+ ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
+ if (ecl->equivalence_class == 0)
+ ecl->equivalence_class = location_equiv_class++;
else
{
if (dump_file && (dump_flags & TDF_DETAILS))
get_varinfo (i)->name);
BITMAP_FREE (pointed_by);
}
- graph->loc_label[i] = label;
+ graph->loc_label[i] = ecl->equivalence_class;
}
if (dump_file && (dump_flags & TDF_DETAILS))
for (i = 0; i < FIRST_REF_NODE; i++)
{
- bool direct_node = TEST_BIT (graph->direct_nodes, i);
- fprintf (dump_file,
- "Equivalence classes for %s node id %d:%s are pointer: %d"
- ", location:%d\n",
- direct_node ? "Direct node" : "Indirect node", i,
- get_varinfo (i)->name,
- graph->pointer_label[si->node_mapping[i]],
- graph->loc_label[si->node_mapping[i]]);
+ unsigned j = si->node_mapping[i];
+ if (j != i)
+ fprintf (dump_file, "%s node id %d (%s) mapped to SCC leader "
+ "node id %d (%s)\n",
+ bitmap_bit_p (graph->direct_nodes, i)
+ ? "Direct" : "Indirect", i, get_varinfo (i)->name,
+ j, get_varinfo (j)->name);
+ else
+ fprintf (dump_file,
+ "Equivalence classes for %s node id %d (%s): pointer %d"
+ ", location %d\n",
+ bitmap_bit_p (graph->direct_nodes, i)
+ ? "direct" : "indirect", i, get_varinfo (i)->name,
+ graph->pointer_label[i], graph->loc_label[i]);
}
/* Quickly eliminate our non-pointer variables. */
int i;
constraint_t c;
- FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
+ FOR_EACH_VEC_ELT (constraints, i, c)
{
if (c)
{
for (j = 0; j < graph->size; j++)
gcc_assert (find (j) == j);
- FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
+ FOR_EACH_VEC_ELT (constraints, i, c)
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
dump_constraint (dump_file, c);
fprintf (dump_file, "\n");
}
- VEC_replace (constraint_t, constraints, i, NULL);
+ constraints[i] = NULL;
continue;
}
dump_constraint (dump_file, c);
fprintf (dump_file, "\n");
}
- VEC_replace (constraint_t, constraints, i, NULL);
+ constraints[i] = NULL;
continue;
}
&& !bitmap_empty_p (get_varinfo (node)->solution))
{
unsigned int i;
- VEC(unsigned,heap) *queue = NULL;
+ vec<unsigned> queue = vNULL;
int queuepos;
unsigned int to = find (graph->indirect_cycles[node]);
bitmap_iterator bi;
if (find (i) == i && i != to)
{
if (unite (to, i))
- VEC_safe_push (unsigned, heap, queue, i);
+ queue.safe_push (i);
}
}
for (queuepos = 0;
- VEC_iterate (unsigned, queue, queuepos, i);
+ queue.iterate (queuepos, &i);
queuepos++)
{
unify_nodes (graph, to, i, true);
}
- VEC_free (unsigned, heap, queue);
+ queue.release ();
return true;
}
return false;
varinfo_t ivi = get_varinfo (i);
if (find (i) == i && !bitmap_empty_p (ivi->solution)
&& ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
- || VEC_length (constraint_t, graph->complex[i]) > 0))
+ || graph->complex[i].length () > 0))
bitmap_set_bit (changed, i);
}
compute_topo_order (graph, ti);
- while (VEC_length (unsigned, ti->topo_order) != 0)
+ while (ti->topo_order.length () != 0)
{
- i = VEC_pop (unsigned, ti->topo_order);
+ i = ti->topo_order.pop ();
/* If this variable is not a representative, skip it. */
if (find (i) != i)
unsigned int j;
constraint_t c;
bitmap solution;
- VEC(constraint_t,heap) *complex = graph->complex[i];
+ vec<constraint_t> complex = graph->complex[i];
varinfo_t vi = get_varinfo (i);
bool solution_empty;
solution_empty = bitmap_empty_p (solution);
/* Process the complex constraints */
- FOR_EACH_VEC_ELT (constraint_t, complex, j, c)
+ FOR_EACH_VEC_ELT (complex, j, c)
{
/* XXX: This is going to unsort the constraints in
some cases, which will occasionally add duplicate
/* Return a printable name for DECL, used only for dump files.  For
   SSA names the result is "<base>_<version>" (or "_<version>" when the
   base has no name); for other decls the assembler name, source name,
   or "D.<uid>" is used.  Returns the literal "NULL" when no name can
   be computed or when dumping is disabled.  */

static const char *
alias_get_name (tree decl)
{
  const char *res = NULL;
  char *temp;
  int num_printed = 0;

  /* Name computation allocates (asprintf/ggc_strdup); skip all of it
     when there is no dump file to print to.  */
  if (!dump_file)
    return "NULL";

  if (TREE_CODE (decl) == SSA_NAME)
    {
      res = get_name (decl);
      if (res)
	num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
      else
	num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
      if (num_printed > 0)
	{
	  /* Copy into GC memory; the asprintf buffer is malloc'd.  */
	  res = ggc_strdup (temp);
	  free (temp);
	}
    }
  else if (DECL_P (decl))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (decl))
	res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      else
	{
	  res = get_name (decl);
	  if (!res)
	    {
	      /* Anonymous decl: fall back to its unique DECL_UID.  */
	      num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
	      if (num_printed > 0)
		{
		  res = ggc_strdup (temp);
		  free (temp);
		}
	    }
	}
    }
  if (res != NULL)
    return res;

  return "NULL";
}
/* Find the variable id for tree T in the map.
If address_p is true, the result will be taken its address of. */
static void
-get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
+get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
struct constraint_expr cexpr;
varinfo_t vi;
/* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
- gcc_assert (SSA_VAR_P (t) || DECL_P (t));
+ gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
/* For parameters, get at the points-to set for the actual parm
decl. */
if (TREE_CODE (t) == SSA_NAME
+ && SSA_NAME_IS_DEFAULT_DEF (t)
&& (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
- || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
- && SSA_NAME_IS_DEFAULT_DEF (t))
+ || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
{
get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
return;
if (node && node->alias)
{
node = varpool_variable_node (node, NULL);
- t = node->decl;
+ t = node->symbol.decl;
}
}
for (; vi; vi = vi->next)
{
cexpr.var = vi->id;
- VEC_safe_push (ce_s, heap, *results, &cexpr);
+ results->safe_push (cexpr);
}
return;
}
- VEC_safe_push (ce_s, heap, *results, &cexpr);
+ results->safe_push (cexpr);
}
/* Process constraint T, performing various simplifications and then
struct constraint_expr rhs = t->rhs;
struct constraint_expr lhs = t->lhs;
- gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
- gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
+ gcc_assert (rhs.var < varmap.length ());
+ gcc_assert (lhs.var < varmap.length ());
/* If we didn't get any useful constraint from the lhs we get
&ANYTHING as fallback from get_constraint_for. Deal with
else
{
gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
- VEC_safe_push (constraint_t, heap, constraints, t);
+ constraints.safe_push (t);
}
}
static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
- VEC (ce_s, heap) **results)
+ vec<ce_s> *results)
{
struct constraint_expr c;
unsigned int j, n;
else
{
/* Sign-extend the offset. */
- double_int soffset
- = double_int_sext (tree_to_double_int (offset),
- TYPE_PRECISION (TREE_TYPE (offset)));
- if (!double_int_fits_in_shwi_p (soffset))
+ double_int soffset = tree_to_double_int (offset)
+ .sext (TYPE_PRECISION (TREE_TYPE (offset)));
+ if (!soffset.fits_shwi ())
rhsoffset = UNKNOWN_OFFSET;
else
{
return;
/* As we are eventually appending to the solution do not use
- VEC_iterate here. */
- n = VEC_length (ce_s, *results);
+ vec::iterate here. */
+ n = results->length ();
for (j = 0; j < n; j++)
{
varinfo_t curr;
- c = *VEC_index (ce_s, *results, j);
+ c = (*results)[j];
curr = get_varinfo (c.var);
if (c.type == ADDRESSOF
c2.type = ADDRESSOF;
c2.offset = 0;
if (c2.var != c.var)
- VEC_safe_push (ce_s, heap, *results, &c2);
+ results->safe_push (c2);
temp = temp->next;
}
while (temp);
c2.var = temp->next->id;
c2.type = ADDRESSOF;
c2.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &c2);
+ results->safe_push (c2);
}
c.var = temp->id;
c.offset = 0;
else
c.offset = rhsoffset;
- VEC_replace (ce_s, *results, j, &c);
+ (*results)[j] = c;
}
}
as the lhs. */
static void
-get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
+get_constraint_for_component_ref (tree t, vec<ce_s> *results,
bool address_p, bool lhs_p)
{
tree orig_t = t;
HOST_WIDE_INT bitmaxsize = -1;
HOST_WIDE_INT bitpos;
tree forzero;
- struct constraint_expr *result;
/* Some people like to do cute things like take the address of
&0->a.b */
temp.offset = 0;
temp.var = integer_id;
temp.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, &temp);
+ results->safe_push (temp);
return;
}
temp.offset = 0;
temp.var = anything_id;
temp.type = ADDRESSOF;
- VEC_safe_push (ce_s, heap, *results, &temp);
+ results->safe_push (temp);
return;
}
}
/* Pretend to take the address of the base, we'll take care of
adding the required subset of sub-fields below. */
get_constraint_for_1 (t, results, true, lhs_p);
- gcc_assert (VEC_length (ce_s, *results) == 1);
- result = VEC_last (ce_s, *results);
+ gcc_assert (results->length () == 1);
+ struct constraint_expr &result = results->last ();
- if (result->type == SCALAR
- && get_varinfo (result->var)->is_full_var)
+ if (result.type == SCALAR
+ && get_varinfo (result.var)->is_full_var)
/* For single-field vars do not bother about the offset. */
- result->offset = 0;
- else if (result->type == SCALAR)
+ result.offset = 0;
+ else if (result.type == SCALAR)
{
/* In languages like C, you can access one past the end of an
array. You aren't allowed to dereference it, so we can
ignore this constraint. When we handle pointer subtraction,
we may have to do something cute here. */
- if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
+ if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
&& bitmaxsize != 0)
{
/* It's also not true that the constraint will actually start at the
right offset, it may start in some padding. We only care about
setting the constraint to the first actual field it touches, so
walk to find it. */
- struct constraint_expr cexpr = *result;
+ struct constraint_expr cexpr = result;
varinfo_t curr;
- VEC_pop (ce_s, *results);
+ results->pop ();
cexpr.offset = 0;
for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
{
bitpos, bitmaxsize))
{
cexpr.var = curr->id;
- VEC_safe_push (ce_s, heap, *results, &cexpr);
+ results->safe_push (cexpr);
if (address_p)
break;
}
/* If we are going to take the address of this field then
to be able to compute reachability correctly add at least
the last field of the variable. */
- if (address_p
- && VEC_length (ce_s, *results) == 0)
+ if (address_p && results->length () == 0)
{
curr = get_varinfo (cexpr.var);
while (curr->next != NULL)
curr = curr->next;
cexpr.var = curr->id;
- VEC_safe_push (ce_s, heap, *results, &cexpr);
+ results->safe_push (cexpr);
}
- else if (VEC_length (ce_s, *results) == 0)
+ else if (results->length () == 0)
/* Assert that we found *some* field there. The user couldn't be
accessing *only* padding. */
/* Still the user could access one past the end of an array
cexpr.type = SCALAR;
cexpr.var = anything_id;
cexpr.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &cexpr);
+ results->safe_push (cexpr);
}
}
else if (bitmaxsize == 0)
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Access to past the end of variable, ignoring\n");
}
- else if (result->type == DEREF)
+ else if (result.type == DEREF)
{
/* If we do not know exactly where the access goes say so. Note
that only for non-structure accesses we know that we access
if (bitpos == -1
|| bitsize != bitmaxsize
|| AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
- || result->offset == UNKNOWN_OFFSET)
- result->offset = UNKNOWN_OFFSET;
+ || result.offset == UNKNOWN_OFFSET)
+ result.offset = UNKNOWN_OFFSET;
else
- result->offset += bitpos;
+ result.offset += bitpos;
}
- else if (result->type == ADDRESSOF)
+ else if (result.type == ADDRESSOF)
{
/* We can end up here for component references on a
VIEW_CONVERT_EXPR <>(&foobar). */
- result->type = SCALAR;
- result->var = anything_id;
- result->offset = 0;
+ result.type = SCALAR;
+ result.var = anything_id;
+ result.offset = 0;
}
else
gcc_unreachable ();
This is needed so that we can handle dereferencing DEREF constraints. */
static void
-do_deref (VEC (ce_s, heap) **constraints)
+do_deref (vec<ce_s> *constraints)
{
struct constraint_expr *c;
unsigned int i = 0;
- FOR_EACH_VEC_ELT (ce_s, *constraints, i, c)
+ FOR_EACH_VEC_ELT (*constraints, i, c)
{
if (c->type == SCALAR)
c->type = DEREF;
address of it. */
static void
-get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
+get_constraint_for_address_of (tree t, vec<ce_s> *results)
{
struct constraint_expr *c;
unsigned int i;
get_constraint_for_1 (t, results, true, true);
- FOR_EACH_VEC_ELT (ce_s, *results, i, c)
+ FOR_EACH_VEC_ELT (*results, i, c)
{
if (c->type == DEREF)
c->type = SCALAR;
/* Given a tree T, return the constraint expression for it. */
static void
-get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
+get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
bool lhs_p)
{
struct constraint_expr temp;
temp.var = nonlocal_id;
temp.type = ADDRESSOF;
temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &temp);
+ results->safe_push (temp);
return;
}
temp.var = readonly_id;
temp.type = SCALAR;
temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &temp);
+ results->safe_push (temp);
return;
}
if (address_p)
return;
- cs = *VEC_last (ce_s, *results);
- if (cs.type == DEREF)
+ cs = results->last ();
+ if (cs.type == DEREF
+ && type_can_have_subvars (TREE_TYPE (t)))
{
/* For dereferences this means we have to defer it
to solving time. */
- VEC_last (ce_s, *results)->offset = UNKNOWN_OFFSET;
+ results->last ().offset = UNKNOWN_OFFSET;
return;
}
if (cs.type != SCALAR)
if (curr->offset - vi->offset < size)
{
cs.var = curr->id;
- VEC_safe_push (ce_s, heap, *results, &cs);
+ results->safe_push (cs);
}
else
break;
{
unsigned int i;
tree val;
- VEC (ce_s, heap) *tmp = NULL;
+ vec<ce_s> tmp = vNULL;
FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
{
struct constraint_expr *rhsp;
unsigned j;
get_constraint_for_1 (val, &tmp, address_p, lhs_p);
- FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
- VEC_safe_push (ce_s, heap, *results, rhsp);
- VEC_truncate (ce_s, tmp, 0);
+ FOR_EACH_VEC_ELT (tmp, j, rhsp)
+ results->safe_push (*rhsp);
+ tmp.truncate (0);
}
- VEC_free (ce_s, heap, tmp);
+ tmp.release ();
/* We do not know whether the constructor was complete,
so technically we have to add &NOTHING or &ANYTHING
like we do for an empty constructor as well. */
temp.type = ADDRESSOF;
temp.var = nonlocal_id;
temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &temp);
+ results->safe_push (temp);
return;
}
default:;
temp.type = ADDRESSOF;
temp.var = anything_id;
temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, &temp);
+ results->safe_push (temp);
}
/* Given a gimple tree T, return the constraint expression vector for it. */
static void
-get_constraint_for (tree t, VEC (ce_s, heap) **results)
+get_constraint_for (tree t, vec<ce_s> *results)
{
- gcc_assert (VEC_length (ce_s, *results) == 0);
+ gcc_assert (results->length () == 0);
get_constraint_for_1 (t, results, false, true);
}
to be used as the rhs of a constraint. */
static void
-get_constraint_for_rhs (tree t, VEC (ce_s, heap) **results)
+get_constraint_for_rhs (tree t, vec<ce_s> *results)
{
- gcc_assert (VEC_length (ce_s, *results) == 0);
+ gcc_assert (results->length () == 0);
get_constraint_for_1 (t, results, false, false);
}
entries in *LHSC. */
static void
-process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc)
+process_all_all_constraints (vec<ce_s> lhsc,
+ vec<ce_s> rhsc)
{
struct constraint_expr *lhsp, *rhsp;
unsigned i, j;
- if (VEC_length (ce_s, lhsc) <= 1
- || VEC_length (ce_s, rhsc) <= 1)
+ if (lhsc.length () <= 1 || rhsc.length () <= 1)
{
- FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
- FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (rhsc, j, rhsp)
process_constraint (new_constraint (*lhsp, *rhsp));
}
else
{
struct constraint_expr tmp;
tmp = new_scalar_tmp_constraint_exp ("allalltmp");
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (tmp, *rhsp));
- FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, tmp));
}
}
do_structure_copy (tree lhsop, tree rhsop)
{
struct constraint_expr *lhsp, *rhsp;
- VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
+ vec<ce_s> lhsc = vNULL;
+ vec<ce_s> rhsc = vNULL;
unsigned j;
get_constraint_for (lhsop, &lhsc);
get_constraint_for_rhs (rhsop, &rhsc);
- lhsp = VEC_index (ce_s, lhsc, 0);
- rhsp = VEC_index (ce_s, rhsc, 0);
+ lhsp = &lhsc[0];
+ rhsp = &rhsc[0];
if (lhsp->type == DEREF
|| (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
|| rhsp->type == DEREF)
{
if (lhsp->type == DEREF)
{
- gcc_assert (VEC_length (ce_s, lhsc) == 1);
+ gcc_assert (lhsc.length () == 1);
lhsp->offset = UNKNOWN_OFFSET;
}
if (rhsp->type == DEREF)
{
- gcc_assert (VEC_length (ce_s, rhsc) == 1);
+ gcc_assert (rhsc.length () == 1);
rhsp->offset = UNKNOWN_OFFSET;
}
process_all_all_constraints (lhsc, rhsc);
unsigned k = 0;
get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
+ for (j = 0; lhsc.iterate (j, &lhsp);)
{
varinfo_t lhsv, rhsv;
- rhsp = VEC_index (ce_s, rhsc, k);
+ rhsp = &rhsc[k];
lhsv = get_varinfo (lhsp->var);
rhsv = get_varinfo (rhsp->var);
if (lhsv->may_have_pointers
> rhsv->offset + lhsoffset + rhsv->size)))
{
++k;
- if (k >= VEC_length (ce_s, rhsc))
+ if (k >= rhsc.length ())
break;
}
else
else
gcc_unreachable ();
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
}
/* Create constraints ID = { rhsc }. */
static void
-make_constraints_to (unsigned id, VEC(ce_s, heap) *rhsc)
+make_constraints_to (unsigned id, vec<ce_s> rhsc)
{
struct constraint_expr *c;
struct constraint_expr includes;
includes.offset = 0;
includes.type = SCALAR;
- FOR_EACH_VEC_ELT (ce_s, rhsc, j, c)
+ FOR_EACH_VEC_ELT (rhsc, j, c)
process_constraint (new_constraint (includes, *c));
}
static void
make_constraint_to (unsigned id, tree op)
{
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> rhsc = vNULL;
get_constraint_for_rhs (op, &rhsc);
make_constraints_to (id, rhsc);
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
/* Create a constraint ID = &FROM. */
}
/* Create a new artificial heap variable with NAME and make a
- constraint from it to LHS. Return the created variable. */
+ constraint from it to LHS. Set flags according to a tag used
+ for tracking restrict pointers. */
static varinfo_t
-make_constraint_from_heapvar (varinfo_t lhs, const char *name)
+make_constraint_from_restrict (varinfo_t lhs, const char *name)
{
varinfo_t vi = make_heapvar (name);
+ vi->is_global_var = 1;
+ vi->may_have_pointers = 1;
make_constraint_from (lhs, vi->id);
-
return vi;
}
/* Create a new artificial heap variable with NAME and make a
constraint from it to LHS. Set flags according to a tag used
- for tracking restrict pointers. */
+ for tracking restrict pointers and make the artificial heap
+ point to global memory. */
-static void
-make_constraint_from_restrict (varinfo_t lhs, const char *name)
+static varinfo_t
+make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
{
- varinfo_t vi;
- vi = make_constraint_from_heapvar (lhs, name);
- vi->is_restrict_var = 1;
- vi->is_global_var = 0;
- vi->is_special_var = 1;
- vi->may_have_pointers = 0;
+ varinfo_t vi = make_constraint_from_restrict (lhs, name);
+ make_copy_constraint (vi, nonlocal_id);
+ return vi;
}
/* In IPA mode there are varinfos for different aspects of reach
RHS. */
static void
-handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
+handle_rhs_call (gimple stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned i;
/* As we compute ESCAPED context-insensitive we do not gain
any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
set. The argument would still get clobbered through the
- escape solution.
- ??? We might get away with less (and more precise) constraints
- if using a temporary for transitively closing things. */
+ escape solution. */
if ((flags & EAF_NOCLOBBER)
&& (flags & EAF_NOESCAPE))
{
varinfo_t uses = get_call_use_vi (stmt);
if (!(flags & EAF_DIRECT))
- make_transitive_closure_constraints (uses);
- make_constraint_to (uses->id, arg);
+ {
+ varinfo_t tem = new_var_info (NULL_TREE, "callarg");
+ make_constraint_to (tem->id, arg);
+ make_transitive_closure_constraints (tem);
+ make_copy_constraint (uses, tem->id);
+ }
+ else
+ make_constraint_to (uses->id, arg);
returns_uses = true;
}
else if (flags & EAF_NOESCAPE)
{
+ struct constraint_expr lhs, rhs;
varinfo_t uses = get_call_use_vi (stmt);
varinfo_t clobbers = get_call_clobber_vi (stmt);
+ varinfo_t tem = new_var_info (NULL_TREE, "callarg");
+ make_constraint_to (tem->id, arg);
if (!(flags & EAF_DIRECT))
- {
- make_transitive_closure_constraints (uses);
- make_transitive_closure_constraints (clobbers);
- }
- make_constraint_to (uses->id, arg);
- make_constraint_to (clobbers->id, arg);
+ make_transitive_closure_constraints (tem);
+ make_copy_constraint (uses, tem->id);
+ make_copy_constraint (clobbers, tem->id);
+ /* Add *tem = nonlocal, do not add *tem = callused as
+ EAF_NOESCAPE parameters do not escape to other parameters
+ and all other uses appear in NONLOCAL as well. */
+ lhs.type = DEREF;
+ lhs.var = tem->id;
+ lhs.offset = 0;
+ rhs.type = SCALAR;
+ rhs.var = nonlocal_id;
+ rhs.offset = 0;
+ process_constraint (new_constraint (lhs, rhs));
returns_uses = true;
}
else
rhsc.var = get_call_use_vi (stmt)->id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, &rhsc);
+ results->safe_push (rhsc);
}
/* The static chain escapes as well. */
&& gimple_call_lhs (stmt) != NULL_TREE
&& TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
{
- VEC(ce_s, heap) *tmpc = NULL;
+ vec<ce_s> tmpc = vNULL;
struct constraint_expr lhsc, *c;
get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
lhsc.var = escaped_id;
lhsc.offset = 0;
lhsc.type = SCALAR;
- FOR_EACH_VEC_ELT (ce_s, tmpc, i, c)
+ FOR_EACH_VEC_ELT (tmpc, i, c)
process_constraint (new_constraint (lhsc, *c));
- VEC_free(ce_s, heap, tmpc);
+ tmpc.release ();
}
/* Regular functions return nonlocal memory. */
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, &rhsc);
+ results->safe_push (rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call
the LHS point to global and escaped variables. */
static void
-handle_lhs_call (gimple stmt, tree lhs, int flags, VEC(ce_s, heap) *rhsc,
+handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
tree fndecl)
{
- VEC(ce_s, heap) *lhsc = NULL;
+ vec<ce_s> lhsc = vNULL;
get_constraint_for (lhs, &lhsc);
/* If the store is to a global decl make sure to
tmpc.var = escaped_id;
tmpc.offset = 0;
tmpc.type = SCALAR;
- VEC_safe_push (ce_s, heap, lhsc, &tmpc);
+ lhsc.safe_push (tmpc);
}
/* If the call returns an argument unmodified override the rhs
&& (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
{
tree arg;
- rhsc = NULL;
+ rhsc.create (0);
arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
get_constraint_for (arg, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
else if (flags & ERF_NOALIAS)
{
varinfo_t vi;
struct constraint_expr tmpc;
- rhsc = NULL;
+ rhsc.create (0);
vi = make_heapvar ("HEAP");
/* We delay marking allocated storage global until we know if
it escapes. */
tmpc.var = vi->id;
tmpc.offset = 0;
tmpc.type = ADDRESSOF;
- VEC_safe_push (ce_s, heap, rhsc, &tmpc);
+ rhsc.safe_push (tmpc);
+ process_all_all_constraints (lhsc, rhsc);
+ rhsc.release ();
}
+ else
+ process_all_all_constraints (lhsc, rhsc);
- process_all_all_constraints (lhsc, rhsc);
-
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
}
/* For non-IPA mode, generate constraints necessary for a call of a
const function that returns a pointer in the statement STMT. */
static void
-handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
+handle_const_call (gimple stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned int k;
rhsc.var = uses->id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, &rhsc);
+ results->safe_push (rhsc);
}
/* May return arguments. */
for (k = 0; k < gimple_call_num_args (stmt); ++k)
{
tree arg = gimple_call_arg (stmt, k);
- VEC(ce_s, heap) *argc = NULL;
+ vec<ce_s> argc = vNULL;
unsigned i;
struct constraint_expr *argp;
get_constraint_for_rhs (arg, &argc);
- FOR_EACH_VEC_ELT (ce_s, argc, i, argp)
- VEC_safe_push (ce_s, heap, *results, argp);
- VEC_free(ce_s, heap, argc);
+ FOR_EACH_VEC_ELT (argc, i, argp)
+ results->safe_push (*argp);
+ argc.release ();
}
/* May return addresses of globals. */
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = ADDRESSOF;
- VEC_safe_push (ce_s, heap, *results, &rhsc);
+ results->safe_push (rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call to a
pure function in statement STMT. */
static void
-handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
+handle_pure_call (gimple stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned i;
rhsc.var = uses->id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, &rhsc);
+ results->safe_push (rhsc);
}
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, &rhsc);
+ results->safe_push (rhsc);
}
if (!fn || TREE_CODE (fn) != SSA_NAME)
return get_varinfo (anything_id);
- if ((TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
- || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL)
- && SSA_NAME_IS_DEFAULT_DEF (fn))
+ if (SSA_NAME_IS_DEFAULT_DEF (fn)
+ && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
fn = SSA_NAME_VAR (fn);
return get_vi_for_tree (fn);
find_func_aliases_for_builtin_call (gimple t)
{
tree fndecl = gimple_call_fndecl (t);
- VEC(ce_s, heap) *lhsc = NULL;
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> lhsc = vNULL;
+ vec<ce_s> rhsc = vNULL;
varinfo_t fi;
- if (fndecl != NULL_TREE
- && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+ if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
/* ??? All builtins that are handled here need to be handled
in the alias-oracle query functions explicitly! */
switch (DECL_FUNCTION_CODE (fndecl))
case BUILT_IN_MEMMOVE_CHK:
case BUILT_IN_MEMPCPY_CHK:
case BUILT_IN_STPCPY_CHK:
+ case BUILT_IN_STPNCPY_CHK:
case BUILT_IN_STRCAT_CHK:
case BUILT_IN_STRNCAT_CHK:
+ case BUILT_IN_TM_MEMCPY:
+ case BUILT_IN_TM_MEMMOVE:
{
tree res = gimple_call_lhs (t);
tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
|| DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
|| DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
|| DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
- || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK)
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
else
get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
}
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
do_deref (&lhsc);
do_deref (&rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
return true;
}
case BUILT_IN_MEMSET:
case BUILT_IN_MEMSET_CHK:
+ case BUILT_IN_TM_MEMSET:
{
tree res = gimple_call_lhs (t);
tree dest = gimple_call_arg (t, 0);
get_constraint_for (res, &lhsc);
get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
}
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
do_deref (&lhsc);
ac.var = integer_id;
}
ac.offset = 0;
- FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, ac));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
return true;
}
case BUILT_IN_ASSUME_ALIGNED:
get_constraint_for (res, &lhsc);
get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
}
return true;
}
case BUILT_IN_REMQUOL:
case BUILT_IN_FREE:
return true;
+ case BUILT_IN_STRDUP:
+ case BUILT_IN_STRNDUP:
+ if (gimple_call_lhs (t))
+ {
+ handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
+ vNULL, fndecl);
+ get_constraint_for_ptr_offset (gimple_call_lhs (t),
+ NULL_TREE, &lhsc);
+ get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
+ NULL_TREE, &rhsc);
+ do_deref (&lhsc);
+ do_deref (&rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ lhsc.release ();
+ rhsc.release ();
+ return true;
+ }
+ break;
/* Trampolines are special - they set up passing the static
frame. */
case BUILT_IN_INIT_TRAMPOLINE:
{
lhs = get_function_part_constraint (nfi, fi_static_chain);
get_constraint_for (frame, &rhsc);
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
/* Make the frame point to the function for
the trampoline adjustment call. */
do_deref (&lhsc);
get_constraint_for (nfunc, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, rhsc);
- VEC_free (ce_s, heap, lhsc);
+ rhsc.release ();
+ lhsc.release ();
return true;
}
get_constraint_for (tramp, &rhsc);
do_deref (&rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, rhsc);
- VEC_free (ce_s, heap, lhsc);
+ rhsc.release ();
+ lhsc.release ();
}
return true;
}
+ CASE_BUILT_IN_TM_STORE (1):
+ CASE_BUILT_IN_TM_STORE (2):
+ CASE_BUILT_IN_TM_STORE (4):
+ CASE_BUILT_IN_TM_STORE (8):
+ CASE_BUILT_IN_TM_STORE (FLOAT):
+ CASE_BUILT_IN_TM_STORE (DOUBLE):
+ CASE_BUILT_IN_TM_STORE (LDOUBLE):
+ CASE_BUILT_IN_TM_STORE (M64):
+ CASE_BUILT_IN_TM_STORE (M128):
+ CASE_BUILT_IN_TM_STORE (M256):
+ {
+ tree addr = gimple_call_arg (t, 0);
+ tree src = gimple_call_arg (t, 1);
+
+ get_constraint_for (addr, &lhsc);
+ do_deref (&lhsc);
+ get_constraint_for (src, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ lhsc.release ();
+ rhsc.release ();
+ return true;
+ }
+ CASE_BUILT_IN_TM_LOAD (1):
+ CASE_BUILT_IN_TM_LOAD (2):
+ CASE_BUILT_IN_TM_LOAD (4):
+ CASE_BUILT_IN_TM_LOAD (8):
+ CASE_BUILT_IN_TM_LOAD (FLOAT):
+ CASE_BUILT_IN_TM_LOAD (DOUBLE):
+ CASE_BUILT_IN_TM_LOAD (LDOUBLE):
+ CASE_BUILT_IN_TM_LOAD (M64):
+ CASE_BUILT_IN_TM_LOAD (M128):
+ CASE_BUILT_IN_TM_LOAD (M256):
+ {
+ tree dest = gimple_call_lhs (t);
+ tree addr = gimple_call_arg (t, 0);
+
+ get_constraint_for (dest, &lhsc);
+ get_constraint_for (addr, &rhsc);
+ do_deref (&rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ lhsc.release ();
+ rhsc.release ();
+ return true;
+ }
/* Variadic argument handling needs to be handled in IPA
mode as well. */
case BUILT_IN_VA_START:
rhs.type = ADDRESSOF;
rhs.offset = 0;
}
- FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, rhs));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
/* va_list is clobbered. */
make_constraint_to (get_call_clobber_vi (t)->id, valist);
return true;
find_func_aliases_for_call (gimple t)
{
tree fndecl = gimple_call_fndecl (t);
- VEC(ce_s, heap) *lhsc = NULL;
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> lhsc = vNULL;
+ vec<ce_s> rhsc = vNULL;
varinfo_t fi;
if (fndecl != NULL_TREE
if (!in_ipa_mode
|| (fndecl && !fi->is_fn_info))
{
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> rhsc = vNULL;
int flags = gimple_call_flags (t);
/* Const functions can return their arguments and addresses
handle_rhs_call (t, &rhsc);
if (gimple_call_lhs (t))
handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
else
{
get_constraint_for_rhs (arg, &rhsc);
lhs = get_function_part_constraint (fi, fi_parm_base + j);
- while (VEC_length (ce_s, rhsc) != 0)
+ while (rhsc.length () != 0)
{
- rhsp = VEC_last (ce_s, rhsc);
+ rhsp = &rhsc.last ();
process_constraint (new_constraint (lhs, *rhsp));
- VEC_pop (ce_s, rhsc);
+ rhsc.pop ();
}
}
&& DECL_RESULT (fndecl)
&& DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
{
- VEC(ce_s, heap) *tem = NULL;
- VEC_safe_push (ce_s, heap, tem, &rhs);
+ vec<ce_s> tem = vNULL;
+ tem.safe_push (rhs);
do_deref (&tem);
- rhs = *VEC_index (ce_s, tem, 0);
- VEC_free(ce_s, heap, tem);
+ rhs = tem[0];
+ tem.release ();
}
- FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, j, lhsp)
process_constraint (new_constraint (*lhsp, rhs));
}
get_constraint_for_address_of (lhsop, &rhsc);
lhs = get_function_part_constraint (fi, fi_result);
- FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
/* If we use a static chain, pass it along. */
get_constraint_for (gimple_call_chain (t), &rhsc);
lhs = get_function_part_constraint (fi, fi_static_chain);
- FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
}
find_func_aliases (gimple origt)
{
gimple t = origt;
- VEC(ce_s, heap) *lhsc = NULL;
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> lhsc = vNULL;
+ vec<ce_s> rhsc = vNULL;
struct constraint_expr *c;
varinfo_t fi;
STRIP_NOPS (strippedrhs);
get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
- FOR_EACH_VEC_ELT (ce_s, lhsc, j, c)
+ FOR_EACH_VEC_ELT (lhsc, j, c)
{
struct constraint_expr *c2;
- while (VEC_length (ce_s, rhsc) > 0)
+ while (rhsc.length () > 0)
{
- c2 = VEC_last (ce_s, rhsc);
+ c2 = &rhsc.last ();
process_constraint (new_constraint (*c, *c2));
- VEC_pop (ce_s, rhsc);
+ rhsc.pop ();
}
}
}
tree lhsop = gimple_assign_lhs (t);
tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
- if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
+ if (rhsop && TREE_CLOBBER_P (rhsop))
+ /* Ignore clobbers, they don't actually store anything into
+ the LHS. */
+ ;
+ else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
do_structure_copy (lhsop, rhsop);
else
{
&& !POINTER_TYPE_P (TREE_TYPE (rhsop))))
|| gimple_assign_single_p (t))
get_constraint_for_rhs (rhsop, &rhsc);
+ else if (code == COND_EXPR)
+ {
+ /* The result is a merge of both COND_EXPR arms. */
+ vec<ce_s> tmp = vNULL;
+ struct constraint_expr *rhsp;
+ unsigned i;
+ get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
+ get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
+ FOR_EACH_VEC_ELT (tmp, i, rhsp)
+ rhsc.safe_push (*rhsp);
+ tmp.release ();
+ }
else if (truth_value_p (code))
/* Truth value results are not pointer (parts). Or at least
very very unreasonable obfuscation of a part. */
else
{
/* All other operations are merges. */
- VEC (ce_s, heap) *tmp = NULL;
+ vec<ce_s> tmp = vNULL;
struct constraint_expr *rhsp;
unsigned i, j;
get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
for (i = 2; i < gimple_num_ops (t); ++i)
{
get_constraint_for_rhs (gimple_op (t, i), &tmp);
- FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
- VEC_safe_push (ce_s, heap, rhsc, rhsp);
- VEC_truncate (ce_s, tmp, 0);
+ FOR_EACH_VEC_ELT (tmp, j, rhsp)
+ rhsc.safe_push (*rhsp);
+ tmp.truncate (0);
}
- VEC_free (ce_s, heap, tmp);
+ tmp.release ();
}
process_all_all_constraints (lhsc, rhsc);
}
&& (!in_ipa_mode
|| DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
make_escape_constraint (rhsop);
- /* If this is a conversion of a non-restrict pointer to a
- restrict pointer track it with a new heapvar. */
- else if (gimple_assign_cast_p (t)
- && POINTER_TYPE_P (TREE_TYPE (rhsop))
- && POINTER_TYPE_P (TREE_TYPE (lhsop))
- && !TYPE_RESTRICT (TREE_TYPE (rhsop))
- && TYPE_RESTRICT (TREE_TYPE (lhsop)))
- make_constraint_from_restrict (get_vi_for_tree (lhsop),
- "CAST_RESTRICT");
}
/* Handle escapes through return. */
else if (gimple_code (t) == GIMPLE_RETURN
lhs = get_function_part_constraint (fi, fi_result);
get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
}
any global memory. */
if (op)
{
- VEC(ce_s, heap) *lhsc = NULL;
+ vec<ce_s> lhsc = vNULL;
struct constraint_expr rhsc, *lhsp;
unsigned j;
get_constraint_for (op, &lhsc);
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, j, lhsp)
process_constraint (new_constraint (*lhsp, rhsc));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
}
}
for (i = 0; i < gimple_asm_ninputs (t); ++i)
}
}
- VEC_free (ce_s, heap, rhsc);
- VEC_free (ce_s, heap, lhsc);
+ rhsc.release ();
+ lhsc.release ();
}
static void
process_ipa_clobber (varinfo_t fi, tree ptr)
{
- VEC(ce_s, heap) *ptrc = NULL;
+ vec<ce_s> ptrc = vNULL;
struct constraint_expr *c, lhs;
unsigned i;
get_constraint_for_rhs (ptr, &ptrc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- FOR_EACH_VEC_ELT (ce_s, ptrc, i, c)
+ FOR_EACH_VEC_ELT (ptrc, i, c)
process_constraint (new_constraint (lhs, *c));
- VEC_free (ce_s, heap, ptrc);
+ ptrc.release ();
}
/* Walk statement T setting up clobber and use constraints according to the
find_func_clobbers (gimple origt)
{
gimple t = origt;
- VEC(ce_s, heap) *lhsc = NULL;
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> lhsc = vNULL;
+ vec<ce_s> rhsc = vNULL;
varinfo_t fi;
/* Add constraints for clobbered/used in IPA mode.
unsigned i;
lhsc = get_function_part_constraint (fi, fi_clobbers);
get_constraint_for_address_of (lhs, &rhsc);
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhsc, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
}
unsigned i;
lhs = get_function_part_constraint (fi, fi_uses);
get_constraint_for_address_of (rhs, &rhsc);
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
}
/* For builtins we do not have separate function info. For those
we do not generate escapes for we have to generate clobbers/uses. */
- if (decl
- && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
+ if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
switch (DECL_FUNCTION_CODE (decl))
{
/* The following functions use and clobber memory pointed to
case BUILT_IN_MEMMOVE_CHK:
case BUILT_IN_MEMPCPY_CHK:
case BUILT_IN_STPCPY_CHK:
+ case BUILT_IN_STPNCPY_CHK:
case BUILT_IN_STRCAT_CHK:
case BUILT_IN_STRNCAT_CHK:
{
struct constraint_expr *rhsp, *lhsp;
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (lhs, *lhsp));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
lhs = get_function_part_constraint (fi, fi_uses);
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
return;
}
/* The following function clobbers memory pointed to by
ce_s *lhsp;
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (lhs, *lhsp));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
return;
}
/* The following functions clobber their second and third
continue;
get_constraint_for_address_of (arg, &rhsc);
- FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
/* Build constraints for propagating clobbers/uses along the
anything_id);
}
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
};
typedef struct fieldoff fieldoff_s;
-DEF_VEC_O(fieldoff_s);
-DEF_VEC_ALLOC_O(fieldoff_s,heap);
/* qsort comparison function for two fieldoff's PA and PB */
/* Sort a fieldstack according to the field offset and sizes. */
static void
-sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
+sort_fieldstack (vec<fieldoff_s> fieldstack)
+{
+ fieldstack.qsort (fieldoff_compare);
+}
+
+/* Return true if T is a type that can have subvars. */
+
+static inline bool
+type_can_have_subvars (const_tree t)
{
- VEC_qsort (fieldoff_s, fieldstack, fieldoff_compare);
+ /* Aggregates without overlapping fields can have subvars. */
+ return TREE_CODE (t) == RECORD_TYPE;
}
/* Return true if V is a tree that we can have subvars for.
if (!DECL_P (v))
return false;
- /* Aggregates without overlapping fields can have subvars. */
- if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
- return true;
-
- return false;
+ return type_can_have_subvars (TREE_TYPE (v));
}
/* Return true if T is a type that does contain pointers. */
recursed for. */
static bool
-push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
+push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
HOST_WIDE_INT offset)
{
tree field;
return false;
/* If the vector of fields is growing too big, bail out early.
- Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
+ Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
sure this fails. */
- if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
+ if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
return false;
for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
bool has_unknown_size = false;
bool must_have_pointers_p;
- if (!VEC_empty (fieldoff_s, *fieldstack))
- pair = VEC_last (fieldoff_s, *fieldstack);
+ if (!fieldstack->is_empty ())
+ pair = &fieldstack->last ();
/* If there isn't anything at offset zero, create sth. */
if (!pair
&& offset + foff != 0)
{
- pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
- pair->offset = 0;
- pair->size = offset + foff;
- pair->has_unknown_size = false;
- pair->must_have_pointers = false;
- pair->may_have_pointers = false;
- pair->only_restrict_pointers = false;
+ fieldoff_s e = {0, offset + foff, false, false, false, false};
+ pair = fieldstack->safe_push (e);
}
if (!DECL_SIZE (field)
}
else
{
- pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
- pair->offset = offset + foff;
- pair->has_unknown_size = has_unknown_size;
+ fieldoff_s e;
+ e.offset = offset + foff;
+ e.has_unknown_size = has_unknown_size;
if (!has_unknown_size)
- pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
+ e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
else
- pair->size = -1;
- pair->must_have_pointers = must_have_pointers_p;
- pair->may_have_pointers = true;
- pair->only_restrict_pointers
+ e.size = -1;
+ e.must_have_pointers = must_have_pointers_p;
+ e.may_have_pointers = true;
+ e.only_restrict_pointers
= (!has_unknown_size
&& POINTER_TYPE_P (TREE_TYPE (field))
&& TYPE_RESTRICT (TREE_TYPE (field)));
+ fieldstack->safe_push (e);
}
}
FIELDSTACK is assumed to be sorted by offset. */
static bool
-check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
+check_for_overlaps (vec<fieldoff_s> fieldstack)
{
fieldoff_s *fo = NULL;
unsigned int i;
HOST_WIDE_INT lastoffset = -1;
- FOR_EACH_VEC_ELT (fieldoff_s, fieldstack, i, fo)
+ FOR_EACH_VEC_ELT (fieldstack, i, fo)
{
if (fo->offset == lastoffset)
return true;
varinfo_t vi, newvi;
tree decl_type = TREE_TYPE (decl);
tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
- VEC (fieldoff_s,heap) *fieldstack = NULL;
+ vec<fieldoff_s> fieldstack = vNULL;
fieldoff_s *fo;
unsigned int i;
push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
- for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
+ for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
if (fo->has_unknown_size
|| fo->offset < 0)
{
}
if (notokay)
- VEC_free (fieldoff_s, heap, fieldstack);
+ fieldstack.release ();
}
/* If we didn't end up collecting sub-variables create a full
variable for the decl. */
- if (VEC_length (fieldoff_s, fieldstack) <= 1
- || VEC_length (fieldoff_s, fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
+ if (fieldstack.length () <= 1
+ || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
{
vi = new_var_info (decl, name);
vi->offset = 0;
vi->fullsize = TREE_INT_CST_LOW (declsize);
vi->size = vi->fullsize;
vi->is_full_var = true;
- VEC_free (fieldoff_s, heap, fieldstack);
+ fieldstack.release ();
return vi;
}
vi = new_var_info (decl, name);
vi->fullsize = TREE_INT_CST_LOW (declsize);
for (i = 0, newvi = vi;
- VEC_iterate (fieldoff_s, fieldstack, i, fo);
+ fieldstack.iterate (i, &fo);
++i, newvi = newvi->next)
{
const char *newname = "NULL";
newvi->fullsize = vi->fullsize;
newvi->may_have_pointers = fo->may_have_pointers;
newvi->only_restrict_pointers = fo->only_restrict_pointers;
- if (i + 1 < VEC_length (fieldoff_s, fieldstack))
+ if (i + 1 < fieldstack.length ())
newvi->next = new_var_info (decl, name);
}
- VEC_free (fieldoff_s, heap, fieldstack);
+ fieldstack.release ();
return vi;
}
if ((POINTER_TYPE_P (TREE_TYPE (decl))
&& TYPE_RESTRICT (TREE_TYPE (decl)))
|| vi->only_restrict_pointers)
- make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
+ {
+ make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
+ continue;
+ }
/* In non-IPA mode the initializer from nonlocal is all we need. */
if (!in_ipa_mode
|| DECL_HARD_REGISTER (decl))
make_copy_constraint (vi, nonlocal_id);
+ /* In IPA mode parse the initializer and generate proper constraints
+ for it. */
else
{
struct varpool_node *vnode = varpool_get_node (decl);
/* If this is a global variable with an initializer and we are in
IPA mode generate constraints for it. */
- if (DECL_INITIAL (decl))
+ if (DECL_INITIAL (decl)
+ && vnode->analyzed)
{
- VEC (ce_s, heap) *rhsc = NULL;
+ vec<ce_s> rhsc = vNULL;
struct constraint_expr lhs, *rhsp;
unsigned i;
get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
lhs.var = vi->id;
lhs.offset = 0;
lhs.type = SCALAR;
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
/* If this is a variable that escapes from the unit
the initializer escapes as well. */
lhs.var = escaped_id;
lhs.offset = 0;
lhs.type = SCALAR;
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
}
}
passed-by-reference argument. */
for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
{
- varinfo_t p;
+ varinfo_t p = get_vi_for_tree (t);
/* For restrict qualified pointers to objects passed by
- reference build a real representative for the pointed-to object. */
- if (DECL_BY_REFERENCE (t)
- && POINTER_TYPE_P (TREE_TYPE (t))
- && TYPE_RESTRICT (TREE_TYPE (t)))
+ reference build a real representative for the pointed-to object.
+ Treat restrict qualified references the same. */
+ if (TYPE_RESTRICT (TREE_TYPE (t))
+ && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
+ || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
+ && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
{
struct constraint_expr lhsc, rhsc;
varinfo_t vi;
DECL_EXTERNAL (heapvar) = 1;
vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
insert_vi_for_tree (heapvar, vi);
- lhsc.var = get_vi_for_tree (t)->id;
+ lhsc.var = p->id;
lhsc.type = SCALAR;
lhsc.offset = 0;
rhsc.var = vi->id;
rhsc.type = ADDRESSOF;
rhsc.offset = 0;
process_constraint (new_constraint (lhsc, rhsc));
- vi->is_restrict_var = 1;
for (; vi; vi = vi->next)
if (vi->may_have_pointers)
{
if (vi->only_restrict_pointers)
- make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
- make_copy_constraint (vi, nonlocal_id);
+ make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
+ else
+ make_copy_constraint (vi, nonlocal_id);
}
continue;
}
- for (p = get_vi_for_tree (t); p; p = p->next)
- {
- if (p->may_have_pointers)
- make_constraint_from (p, nonlocal_id);
- if (p->only_restrict_pointers)
- make_constraint_from_restrict (p, "PARM_RESTRICT");
- }
if (POINTER_TYPE_P (TREE_TYPE (t))
&& TYPE_RESTRICT (TREE_TYPE (t)))
- make_constraint_from_restrict (get_vi_for_tree (t), "PARM_RESTRICT");
+ make_constraint_from_global_restrict (p, "PARM_RESTRICT");
+ else
+ {
+ for (; p; p = p->next)
+ {
+ if (p->only_restrict_pointers)
+ make_constraint_from_global_restrict (p, "PARM_RESTRICT");
+ else if (p->may_have_pointers)
+ make_constraint_from (p, nonlocal_id);
+ }
+ }
}
/* Add a constraint for a result decl that is passed by reference. */
/* Compute the points-to solution *PT for the variable VI. */
-static void
-find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
+static struct pt_solution
+find_what_var_points_to (varinfo_t orig_vi)
{
unsigned int i;
bitmap_iterator bi;
bitmap finished_solution;
bitmap result;
varinfo_t vi;
-
- memset (pt, 0, sizeof (struct pt_solution));
+ void **slot;
+ struct pt_solution *pt;
/* This variable may have been collapsed, let's get the real
variable. */
vi = get_varinfo (find (orig_vi->id));
+ /* See if we have already computed the solution and return it. */
+ slot = pointer_map_insert (final_solutions, vi);
+ if (*slot != NULL)
+ return *(struct pt_solution *)*slot;
+
+ *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
+ memset (pt, 0, sizeof (struct pt_solution));
+
/* Translate artificial variables into SSA_NAME_PTR_INFO
attributes. */
EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
|| vi->id == integer_id)
pt->anything = 1;
}
- if (vi->is_restrict_var)
- pt->vars_contains_restrict = true;
}
/* Instead of doing extra work, simply do not create
elaborate points-to information for pt_anything pointers. */
- if (pt->anything
- && (orig_vi->is_artificial_var
- || !pt->vars_contains_restrict))
- return;
+ if (pt->anything)
+ return *pt;
/* Share the final set of variables when possible. */
finished_solution = BITMAP_GGC_ALLOC ();
pt->vars = result;
bitmap_clear (finished_solution);
}
+
+ return *pt;
}
/* Given a pointer variable P, fill in its points-to set. */
/* For parameters, get at the points-to set for the actual parm
decl. */
if (TREE_CODE (p) == SSA_NAME
+ && SSA_NAME_IS_DEFAULT_DEF (p)
&& (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
- || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL)
- && SSA_NAME_IS_DEFAULT_DEF (p))
+ || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
lookup_p = SSA_NAME_VAR (p);
vi = lookup_vi_for_tree (lookup_p);
return;
pi = get_ptr_info (p);
- find_what_var_points_to (vi, &pi->pt);
+ pi->pt = find_what_var_points_to (vi);
}
it contains restrict tag variables. */
void
-pt_solution_set (struct pt_solution *pt, bitmap vars,
- bool vars_contains_global, bool vars_contains_restrict)
+pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
{
memset (pt, 0, sizeof (struct pt_solution));
pt->vars = vars;
pt->vars_contains_global = vars_contains_global;
- pt->vars_contains_restrict = vars_contains_restrict;
}
/* Set the points-to solution *PT to point only to the variable VAR. */
dest->ipa_escaped |= src->ipa_escaped;
dest->null |= src->null;
dest->vars_contains_global |= src->vars_contains_global;
- dest->vars_contains_restrict |= src->vars_contains_restrict;
if (!src->vars)
return;
return true;
}
+/* Return true if the points-to solution *PT only point to a single var, and
+ return the var uid in *UID. */
+
+bool
+pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
+{
+ if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
+ || pt->null || pt->vars == NULL
+ || !bitmap_single_bit_set_p (pt->vars))
+ return false;
+
+ *uid = bitmap_first_set_bit (pt->vars);
+ return true;
+}
+
/* Return true if the points-to solution *PT includes global memory. */
bool
return res;
}
-/* Return true if both points-to solutions PT1 and PT2 for two restrict
- qualified pointers are possibly based on the same pointer. */
-
-bool
-pt_solutions_same_restrict_base (struct pt_solution *pt1,
- struct pt_solution *pt2)
-{
- /* If we deal with points-to solutions of two restrict qualified
- pointers solely rely on the pointed-to variable bitmap intersection.
- For two pointers that are based on each other the bitmaps will
- intersect. */
- if (pt1->vars_contains_restrict
- && pt2->vars_contains_restrict)
- {
- gcc_assert (pt1->vars && pt2->vars);
- return bitmap_intersect_p (pt1->vars, pt2->vars);
- }
-
- return true;
-}
-
/* Dump points-to information to OUTFILE. */
stats.num_implicit_edges);
}
- for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
+ for (i = 0; i < varmap.length (); i++)
{
varinfo_t vi = get_varinfo (i);
if (!vi->may_have_pointers)
/* This specifically does not use process_constraint because
process_constraint ignores all anything = anything constraints, since all
but this one are redundant. */
- VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
+ constraints.safe_push (new_constraint (lhs, rhs));
/* Create the READONLY variable, used to represent that a variable
points to readonly memory. */
sizeof (struct constraint), 30);
variable_info_pool = create_alloc_pool ("Variable info pool",
sizeof (struct variable_info), 30);
- constraints = VEC_alloc (constraint_t, heap, 8);
- varmap = VEC_alloc (varinfo_t, heap, 8);
+ constraints.create (8);
+ varmap.create (8);
vi_for_tree = pointer_map_create ();
call_stmt_vars = pointer_map_create ();
init_base_vars ();
gcc_obstack_init (&fake_var_decl_obstack);
+
+ final_solutions = pointer_map_create ();
+ gcc_obstack_init (&final_solutions_obstack);
}
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
/* Now reallocate the size of the successor list as, and blow away
the predecessor bitmaps. */
- graph->size = VEC_length (varinfo_t, varmap);
+ graph->size = varmap.length ();
graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
free (graph->implicit_preds);
"\nCollapsing static cycles and doing variable "
"substitution\n");
- init_graph (VEC_length (varinfo_t, varmap) * 2);
+ init_graph (varmap.length () * 2);
if (dump_file)
fprintf (dump_file, "Building predecessor graph\n");
{
gimple phi = gsi_stmt (gsi);
- if (is_gimple_reg (gimple_phi_result (phi)))
+ if (! virtual_operand_p (gimple_phi_result (phi)))
find_func_aliases (phi);
}
solve_constraints ();
/* Compute the points-to set for ESCAPED used for call-clobber analysis. */
- find_what_var_points_to (get_varinfo (escaped_id),
- &cfun->gimple_df->escaped);
+ cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));
/* Make sure the ESCAPED solution (which is used as placeholder in
other solutions) does not reference itself. This simplifies
cfun->gimple_df->escaped.escaped = 0;
/* Mark escaped HEAP variables as global. */
- FOR_EACH_VEC_ELT (varinfo_t, varmap, i, vi)
+ FOR_EACH_VEC_ELT (varmap, i, vi)
if (vi->is_heap_var
- && !vi->is_restrict_var
&& !vi->is_global_var)
DECL_EXTERNAL (vi->decl) = vi->is_global_var
= pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
memset (pt, 0, sizeof (struct pt_solution));
else if ((vi = lookup_call_use_vi (stmt)) != NULL)
{
- find_what_var_points_to (vi, pt);
+ *pt = find_what_var_points_to (vi);
/* Escaped (and thus nonlocal) variables are always
implicitly used by calls. */
/* ??? ESCAPED can be empty even though NONLOCAL
memset (pt, 0, sizeof (struct pt_solution));
else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
{
- find_what_var_points_to (vi, pt);
+ *pt = find_what_var_points_to (vi);
/* Escaped (and thus nonlocal) variables are always
implicitly clobbered by calls. */
/* ??? ESCAPED can be empty even though NONLOCAL
pointer_map_destroy (vi_for_tree);
pointer_map_destroy (call_stmt_vars);
bitmap_obstack_release (&pta_obstack);
- VEC_free (constraint_t, heap, constraints);
+ constraints.release ();
for (i = 0; i < graph->size; i++)
- VEC_free (constraint_t, heap, graph->complex[i]);
+ graph->complex[i].release ();
free (graph->complex);
free (graph->rep);
free (graph->indirect_cycles);
free (graph);
- VEC_free (varinfo_t, heap, varmap);
+ varmap.release ();
free_alloc_pool (variable_info_pool);
free_alloc_pool (constraint_pool);
obstack_free (&fake_var_decl_obstack, NULL);
+
+ pointer_map_destroy (final_solutions);
+ obstack_free (&final_solutions_obstack, NULL);
}
/* But still dump what we have remaining it. */
dump_alias_info (dump_file);
-
- if (dump_flags & TDF_DETAILS)
- dump_referenced_vars (dump_file);
}
return 0;
/* Debugging dumps. */
if (dump_file)
- {
- dump_alias_info (dump_file);
-
- if (dump_flags & TDF_DETAILS)
- dump_referenced_vars (dump_file);
- }
+ dump_alias_info (dump_file);
/* Deallocate memory used by aliasing data structures and the internal
points-to solution. */
{
GIMPLE_PASS,
"alias", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
gate_tree_pta, /* gate */
NULL, /* execute */
NULL, /* sub */
{
GIMPLE_PASS,
"ealias", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
gate_tree_pta, /* gate */
NULL, /* execute */
NULL, /* sub */
/* IPA PTA solutions for ESCAPED. */
struct pt_solution ipa_escaped_pt
- = { true, false, false, false, false, false, false, NULL };
+ = { true, false, false, false, false, false, NULL };
/* Associate node with varinfo DATA. Worker for
cgraph_for_node_and_aliases. */
associate_varinfo_to_alias (struct cgraph_node *node, void *data)
{
if (node->alias || node->thunk.thunk_p)
- insert_vi_for_tree (node->decl, (varinfo_t)data);
+ insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
return false;
}
if (dump_file && (dump_flags & TDF_DETAILS))
{
- dump_cgraph (dump_file);
+ dump_symtab (dump_file);
fprintf (dump_file, "\n");
}
/* Build the constraints. */
- for (node = cgraph_nodes; node; node = node->next)
+ FOR_EACH_DEFINED_FUNCTION (node)
{
varinfo_t vi;
/* Nodes without a body are not interesting. Especially do not
gcc_assert (!node->clone_of);
- vi = create_function_info_for (node->decl,
- alias_get_name (node->decl));
+ vi = create_function_info_for (node->symbol.decl,
+ alias_get_name (node->symbol.decl));
cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
}
/* Create constraints for global variables and their initializers. */
- for (var = varpool_nodes; var; var = var->next)
+ FOR_EACH_VARIABLE (var)
{
if (var->alias)
continue;
- get_vi_for_tree (var->decl);
+ get_vi_for_tree (var->symbol.decl);
}
if (dump_file)
dump_constraints (dump_file, 0);
fprintf (dump_file, "\n");
}
- from = VEC_length (constraint_t, constraints);
+ from = constraints.length ();
- for (node = cgraph_nodes; node; node = node->next)
+ FOR_EACH_DEFINED_FUNCTION (node)
{
struct function *func;
basic_block bb;
- tree old_func_decl;
/* Nodes without a body are not interesting. */
if (!cgraph_function_with_gimple_body_p (node))
{
fprintf (dump_file,
"Generating constraints for %s", cgraph_node_name (node));
- if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
+ if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
fprintf (dump_file, " (%s)",
- IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
+ IDENTIFIER_POINTER
+ (DECL_ASSEMBLER_NAME (node->symbol.decl)));
fprintf (dump_file, "\n");
}
- func = DECL_STRUCT_FUNCTION (node->decl);
- old_func_decl = current_function_decl;
+ func = DECL_STRUCT_FUNCTION (node->symbol.decl);
push_cfun (func);
- current_function_decl = node->decl;
/* For externally visible or attribute used annotated functions use
local constraints for their arguments.
For local functions we see all callers and thus do not need initial
constraints for parameters. */
- if (node->reachable_from_other_partition
- || node->local.externally_visible
- || node->needed)
+ if (node->symbol.used_from_other_partition
+ || node->symbol.externally_visible
+ || node->symbol.force_output)
{
intra_create_variable_infos ();
/* We also need to make function return values escape. Nothing
escapes by returning from main though. */
- if (!MAIN_NAME_P (DECL_NAME (node->decl)))
+ if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
{
varinfo_t fi, rvi;
- fi = lookup_vi_for_tree (node->decl);
+ fi = lookup_vi_for_tree (node->symbol.decl);
rvi = first_vi_for_offset (fi, fi_result);
if (rvi && rvi->offset == fi_result)
{
{
gimple phi = gsi_stmt (gsi);
- if (is_gimple_reg (gimple_phi_result (phi)))
+ if (! virtual_operand_p (gimple_phi_result (phi)))
find_func_aliases (phi);
}
}
}
- current_function_decl = old_func_decl;
pop_cfun ();
if (dump_file)
dump_constraints (dump_file, from);
fprintf (dump_file, "\n");
}
- from = VEC_length (constraint_t, constraints);
+ from = constraints.length ();
}
/* From the constraints compute the points-to sets. */
??? Note that the computed escape set is not correct
for the whole unit as we fail to consider graph edges to
externally visible functions. */
- find_what_var_points_to (get_varinfo (escaped_id), &ipa_escaped_pt);
+ ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));
/* Make sure the ESCAPED solution (which is used as placeholder in
other solutions) does not reference itself. This simplifies
ipa_escaped_pt.ipa_escaped = 0;
/* Assign the points-to sets to the SSA names in the unit. */
- for (node = cgraph_nodes; node; node = node->next)
+ FOR_EACH_DEFINED_FUNCTION (node)
{
tree ptr;
struct function *fn;
if (!cgraph_function_with_gimple_body_p (node))
continue;
- fn = DECL_STRUCT_FUNCTION (node->decl);
+ fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
/* Compute the points-to sets for pointer SSA_NAMEs. */
- FOR_EACH_VEC_ELT (tree, fn->gimple_df->ssa_names, i, ptr)
+ FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
{
if (ptr
&& POINTER_TYPE_P (TREE_TYPE (ptr)))
}
/* Compute the call-use and call-clobber sets for all direct calls. */
- fi = lookup_vi_for_tree (node->decl);
+ fi = lookup_vi_for_tree (node->symbol.decl);
gcc_assert (fi->is_fn_info);
- find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
- &clobbers);
- find_what_var_points_to (first_vi_for_offset (fi, fi_uses), &uses);
+ clobbers
+ = find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
+ uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
for (e = node->callers; e; e = e->next_caller)
{
if (!e->call_stmt)
memset (pt, 0, sizeof (struct pt_solution));
else if ((vi = lookup_call_use_vi (stmt)) != NULL)
{
- find_what_var_points_to (vi, pt);
+ *pt = find_what_var_points_to (vi);
/* Escaped (and thus nonlocal) variables are always
implicitly used by calls. */
/* ??? ESCAPED can be empty even though NONLOCAL
memset (pt, 0, sizeof (struct pt_solution));
else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
{
- find_what_var_points_to (vi, pt);
+ *pt = find_what_var_points_to (vi);
/* Escaped (and thus nonlocal) variables are always
implicitly clobbered by calls. */
/* ??? ESCAPED can be empty even though NONLOCAL
if (!uses->anything)
{
- find_what_var_points_to
- (first_vi_for_offset (vi, fi_uses), &sol);
+ sol = find_what_var_points_to
+ (first_vi_for_offset (vi, fi_uses));
pt_solution_ior_into (uses, &sol);
}
if (!clobbers->anything)
{
- find_what_var_points_to
- (first_vi_for_offset (vi, fi_clobbers), &sol);
+ sol = find_what_var_points_to
+ (first_vi_for_offset (vi, fi_clobbers));
pt_solution_ior_into (clobbers, &sol);
}
}
{
SIMPLE_IPA_PASS,
"pta", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
gate_ipa_pta, /* gate */
ipa_pta_execute, /* execute */
NULL, /* sub */