#include "obstack.h"
#include "bitmap.h"
#include "flags.h"
-#include "rtl.h"
-#include "tm_p.h"
-#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "tree.h"
#include "tree-flow.h"
#include "tree-inline.h"
-#include "diagnostic.h"
+#include "diagnostic-core.h"
#include "toplev.h"
#include "gimple.h"
#include "hashtab.h"
keep the set of called functions for indirect calls.
And probably more. */
-
-static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
+static GTY ((if_marked ("tree_map_marked_p"), param_is (struct heapvar_map)))
htab_t heapvar_for_stmt;
static bool use_field_sensitive = true;
struct heapvar_map *h;
void **loc;
- h = GGC_NEW (struct heapvar_map);
+ h = ggc_alloc_heapvar_map ();
h->map.base.from = from;
h->offset = offset;
h->map.hash = heapvar_map_hash (h);
typedef struct constraint_expr ce_s;
DEF_VEC_O(ce_s);
DEF_VEC_ALLOC_O(ce_s, heap);
-static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
+static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool, bool);
static void get_constraint_for (tree, VEC(ce_s, heap) **);
+static void get_constraint_for_rhs (tree, VEC(ce_s, heap) **);
static void do_deref (VEC (ce_s, heap) **);
/* Our set constraints are made up of two constraint expressions, one
/* Print out constraint C to stderr. */
-void
+DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
dump_constraint (stderr, c);
/* Print out all constraints to stderr. */
-void
+DEBUG_FUNCTION void
debug_constraints (void)
{
dump_constraints (stderr, 0);
/* Go over the list of constraints printing the edges in the constraint
graph. */
fprintf (file, "\n // The constraint edges:\n");
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
if (c)
dump_constraint_edge (file, c);
/* Print out the constraint graph to stderr. */
-void
+DEBUG_FUNCTION void
debug_constraint_graph (void)
{
dump_constraint_graph (stderr);
int i;
constraint_t c;
- for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, *from, i, c)
{
if (constraint_vec_find (*to, *c) == NULL)
{
gcc_assert (find (from) == to);
/* Move all complex constraints from src node into to node */
- for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, graph->complex[from], i, c)
{
/* In complex constraints for node src, we may have either
a = *src, and *src = a, or an offseted constraint which are
for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
graph->indirect_cycles[j] = -1;
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
unsigned i, t;
constraint_t c;
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
{
struct constraint_expr lhs;
struct constraint_expr rhs;
int i;
constraint_t c;
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
{
if (c)
{
for (j = 0; j < graph->size; j++)
gcc_assert (find (j) == j);
- for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
solution_empty = bitmap_empty_p (solution);
/* Process the complex constraints */
- for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
+ FOR_EACH_VEC_ELT (constraint_t, complex, j, c)
{
/* XXX: This is going to unsort the constraints in
some cases, which will occasionally add duplicate
static const char *
alias_get_name (tree decl)
{
- const char *res = get_name (decl);
+ const char *res;
char *temp;
int num_printed = 0;
+ if (DECL_ASSEMBLER_NAME_SET_P (decl))
+ res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
+ else
+    res = get_name (decl);
if (res != NULL)
return res;
/* For parameters, get at the points-to set for the actual parm
decl. */
if (TREE_CODE (t) == SSA_NAME
- && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
+ && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
&& SSA_NAME_IS_DEFAULT_DEF (t))
{
get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
}
}
-/* Return true if T is a type that could contain pointers. */
-
-static bool
-type_could_have_pointers (tree type)
-{
- if (POINTER_TYPE_P (type))
- return true;
-
- if (TREE_CODE (type) == ARRAY_TYPE)
- return type_could_have_pointers (TREE_TYPE (type));
-
- return AGGREGATE_TYPE_P (type);
-}
-
-/* Return true if T is a variable of a type that could contain
- pointers. */
-
-static bool
-could_have_pointers (tree t)
-{
- return type_could_have_pointers (TREE_TYPE (t));
-}
/* Return the position, in bits, of FIELD_DECL from the beginning of its
structure. */
does not change the points-to solution. */
if (!use_field_sensitive)
{
- get_constraint_for (ptr, results);
+ get_constraint_for_rhs (ptr, results);
return;
}
rhsoffset = UNKNOWN_OFFSET;
}
- get_constraint_for (ptr, results);
+ get_constraint_for_rhs (ptr, results);
if (rhsoffset == 0)
return;
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
- If address_p is true the result will be taken its address of. */
+   If address_p is true the result will have its address taken.
+ If lhs_p is true then the constraint expression is assumed to be used
+ as the lhs. */
static void
get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
- bool address_p)
+ bool address_p, bool lhs_p)
{
tree orig_t = t;
HOST_WIDE_INT bitsize = -1;
&0->a.b */
forzero = t;
while (handled_component_p (forzero)
- || INDIRECT_REF_P (forzero))
+ || INDIRECT_REF_P (forzero)
+ || TREE_CODE (forzero) == MEM_REF)
forzero = TREE_OPERAND (forzero, 0);
if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
return;
}
+ /* Handle type-punning through unions. If we are extracting a pointer
+ from a union via a possibly type-punning access that pointer
+ points to anything, similar to a conversion of an integer to
+ a pointer. */
+ if (!lhs_p)
+ {
+ tree u;
+ for (u = t;
+ TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
+ u = TREE_OPERAND (u, 0))
+ if (TREE_CODE (u) == COMPONENT_REF
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
+ {
+ struct constraint_expr temp;
+
+ temp.offset = 0;
+ temp.var = anything_id;
+ temp.type = ADDRESSOF;
+ VEC_safe_push (ce_s, heap, *results, &temp);
+ return;
+ }
+ }
+
t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
/* Pretend to take the address of the base, we'll take care of
adding the required subset of sub-fields below. */
- get_constraint_for_1 (t, results, true);
+ get_constraint_for_1 (t, results, true, lhs_p);
gcc_assert (VEC_length (ce_s, *results) == 1);
result = VEC_last (ce_s, *results);
cexpr.var = curr->id;
VEC_safe_push (ce_s, heap, *results, &cexpr);
}
- else
+ else if (VEC_length (ce_s, *results) == 0)
/* Assert that we found *some* field there. The user couldn't be
accessing *only* padding. */
/* Still the user could access one past the end of an array
embedded in a struct resulting in accessing *only* padding. */
- gcc_assert (VEC_length (ce_s, *results) >= 1
- || ref_contains_array_ref (orig_t));
+ /* Or accessing only padding via type-punning to a type
+      that has a field just in padding space. */
+ {
+ cexpr.type = SCALAR;
+ cexpr.var = anything_id;
+ cexpr.offset = 0;
+ VEC_safe_push (ce_s, heap, *results, &cexpr);
+ }
}
else if (bitmaxsize == 0)
{
at most one subfiled of any variable. */
if (bitpos == -1
|| bitsize != bitmaxsize
- || AGGREGATE_TYPE_P (TREE_TYPE (orig_t)))
+ || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
+ || result->offset == UNKNOWN_OFFSET)
result->offset = UNKNOWN_OFFSET;
else
- result->offset = bitpos;
+ result->offset += bitpos;
}
else if (result->type == ADDRESSOF)
{
struct constraint_expr *c;
unsigned int i = 0;
- for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
+ FOR_EACH_VEC_ELT (ce_s, *constraints, i, c)
{
if (c->type == SCALAR)
c->type = DEREF;
}
}
-static void get_constraint_for_1 (tree, VEC (ce_s, heap) **, bool);
-
/* Given a tree T, return the constraint expression for taking the
address of it. */
struct constraint_expr *c;
unsigned int i;
- get_constraint_for_1 (t, results, true);
+ get_constraint_for_1 (t, results, true, true);
- for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
+ FOR_EACH_VEC_ELT (ce_s, *results, i, c)
{
if (c->type == DEREF)
c->type = SCALAR;
/* Given a tree T, return the constraint expression for it. */
static void
-get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
+get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
+ bool lhs_p)
{
struct constraint_expr temp;
in that case *NULL does not fail, so it _should_ alias *anything.
It is not worth adding a new option or renaming the existing one,
since this case is relatively obscure. */
- if (flag_delete_null_pointer_checks
- && ((TREE_CODE (t) == INTEGER_CST
- && integer_zerop (t))
- /* The only valid CONSTRUCTORs in gimple with pointer typed
- elements are zero-initializer. But in IPA mode we also
- process global initializers, so verify at least. */
- || (TREE_CODE (t) == CONSTRUCTOR
- && CONSTRUCTOR_NELTS (t) == 0)))
- {
- temp.var = nothing_id;
+ if ((TREE_CODE (t) == INTEGER_CST
+ && integer_zerop (t))
+ /* The only valid CONSTRUCTORs in gimple with pointer typed
+ elements are zero-initializer. But in IPA mode we also
+ process global initializers, so verify at least. */
+ || (TREE_CODE (t) == CONSTRUCTOR
+ && CONSTRUCTOR_NELTS (t) == 0))
+ {
+ if (flag_delete_null_pointer_checks)
+ temp.var = nothing_id;
+ else
+ temp.var = nonlocal_id;
temp.type = ADDRESSOF;
temp.offset = 0;
VEC_safe_push (ce_s, heap, *results, &temp);
{
switch (TREE_CODE (t))
{
- case INDIRECT_REF:
+ case MEM_REF:
{
- get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
+ struct constraint_expr cs;
+ varinfo_t vi, curr;
+ tree off = double_int_to_tree (sizetype, mem_ref_offset (t));
+ get_constraint_for_ptr_offset (TREE_OPERAND (t, 0), off, results);
do_deref (results);
+
+ /* If we are not taking the address then make sure to process
+ all subvariables we might access. */
+ cs = *VEC_last (ce_s, *results);
+ if (address_p
+ || cs.type != SCALAR)
+ return;
+
+ vi = get_varinfo (cs.var);
+ curr = vi->next;
+ if (!vi->is_full_var
+ && curr)
+ {
+ unsigned HOST_WIDE_INT size;
+ if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
+ size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
+ else
+ size = -1;
+ for (; curr; curr = curr->next)
+ {
+ if (curr->offset - vi->offset < size)
+ {
+ cs.var = curr->id;
+ VEC_safe_push (ce_s, heap, *results, &cs);
+ }
+ else
+ break;
+ }
+ }
return;
}
case ARRAY_REF:
case ARRAY_RANGE_REF:
case COMPONENT_REF:
- get_constraint_for_component_ref (t, results, address_p);
+ get_constraint_for_component_ref (t, results, address_p, lhs_p);
return;
case VIEW_CONVERT_EXPR:
- get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
+ get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
+ lhs_p);
return;
/* We are missing handling for TARGET_MEM_REF here. */
default:;
get_constraint_for_ssa_var (t, results, address_p);
return;
}
+ case CONSTRUCTOR:
+ {
+ unsigned int i;
+ tree val;
+ VEC (ce_s, heap) *tmp = NULL;
+ FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
+ {
+ struct constraint_expr *rhsp;
+ unsigned j;
+ get_constraint_for_1 (val, &tmp, address_p, lhs_p);
+ FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
+ VEC_safe_push (ce_s, heap, *results, rhsp);
+ VEC_truncate (ce_s, tmp, 0);
+ }
+ VEC_free (ce_s, heap, tmp);
+ /* We do not know whether the constructor was complete,
+ so technically we have to add &NOTHING or &ANYTHING
+ like we do for an empty constructor as well. */
+ return;
+ }
default:;
}
break;
get_constraint_for_ssa_var (t, results, address_p);
return;
}
+ case tcc_constant:
+ {
+ /* We cannot refer to automatic variables through constants. */
+ temp.type = ADDRESSOF;
+ temp.var = nonlocal_id;
+ temp.offset = 0;
+ VEC_safe_push (ce_s, heap, *results, &temp);
+ return;
+ }
default:;
}
{
gcc_assert (VEC_length (ce_s, *results) == 0);
- get_constraint_for_1 (t, results, false);
+ get_constraint_for_1 (t, results, false, true);
+}
+
+/* Given a gimple tree T, return the constraint expression vector for it
+ to be used as the rhs of a constraint. */
+
+static void
+get_constraint_for_rhs (tree t, VEC (ce_s, heap) **results)
+{
+ gcc_assert (VEC_length (ce_s, *results) == 0);
+
+ get_constraint_for_1 (t, results, false, false);
}
if (VEC_length (ce_s, lhsc) <= 1
|| VEC_length (ce_s, rhsc) <= 1)
{
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
- for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
process_constraint (new_constraint (*lhsp, *rhsp));
}
else
{
struct constraint_expr tmp;
tmp = new_scalar_tmp_constraint_exp ("allalltmp");
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (tmp, *rhsp));
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, tmp));
}
}
unsigned j;
get_constraint_for (lhsop, &lhsc);
- get_constraint_for (rhsop, &rhsc);
+ get_constraint_for_rhs (rhsop, &rhsc);
lhsp = VEC_index (ce_s, lhsc, 0);
rhsp = VEC_index (ce_s, rhsc, 0);
if (lhsp->type == DEREF
lhsv = get_varinfo (lhsp->var);
rhsv = get_varinfo (rhsp->var);
if (lhsv->may_have_pointers
- && ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
- rhsv->offset + lhsoffset, rhsv->size))
+ && (lhsv->is_full_var
+ || rhsv->is_full_var
+ || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
+ rhsv->offset + lhsoffset, rhsv->size)))
process_constraint (new_constraint (*lhsp, *rhsp));
- if (lhsv->offset + rhsoffset + lhsv->size
- > rhsv->offset + lhsoffset + rhsv->size)
+ if (!rhsv->is_full_var
+ && (lhsv->is_full_var
+ || (lhsv->offset + rhsoffset + lhsv->size
+ > rhsv->offset + lhsoffset + rhsv->size)))
{
++k;
if (k >= VEC_length (ce_s, rhsc))
VEC_free (ce_s, heap, rhsc);
}
-/* Create a constraint ID = OP. */
+/* Create constraints ID = { rhsc }. */
static void
-make_constraint_to (unsigned id, tree op)
+make_constraints_to (unsigned id, VEC(ce_s, heap) *rhsc)
{
- VEC(ce_s, heap) *rhsc = NULL;
struct constraint_expr *c;
struct constraint_expr includes;
unsigned int j;
includes.offset = 0;
includes.type = SCALAR;
- get_constraint_for (op, &rhsc);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, j, c)
process_constraint (new_constraint (includes, *c));
+}
+
+/* Create a constraint ID = OP. */
+
+static void
+make_constraint_to (unsigned id, tree op)
+{
+ VEC(ce_s, heap) *rhsc = NULL;
+ get_constraint_for_rhs (op, &rhsc);
+ make_constraints_to (id, rhsc);
VEC_free (ce_s, heap, rhsc);
}
process_constraint (new_constraint (lhs, rhs));
}
-/* Create a new artificial heap variable with NAME and make a
- constraint from it to LHS. Return the created variable. */
+/* Create a new artificial heap variable with NAME.
+ Return the created variable. */
static varinfo_t
-make_constraint_from_heapvar (varinfo_t lhs, const char *name)
+make_heapvar_for (varinfo_t lhs, const char *name)
{
varinfo_t vi;
tree heapvar = heapvar_lookup (lhs->decl, lhs->offset);
vi->is_full_var = true;
insert_vi_for_tree (heapvar, vi);
+ return vi;
+}
+
+/* Create a new artificial heap variable with NAME and make a
+ constraint from it to LHS. Return the created variable. */
+
+static varinfo_t
+make_constraint_from_heapvar (varinfo_t lhs, const char *name)
+{
+ varinfo_t vi = make_heapvar_for (lhs, name);
make_constraint_from (lhs, vi->id);
return vi;
{
struct constraint_expr rhsc;
unsigned i;
+ bool returns_uses = false;
for (i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree arg = gimple_call_arg (stmt, i);
+ int flags = gimple_call_arg_flags (stmt, i);
+
+ /* If the argument is not used we can ignore it. */
+ if (flags & EAF_UNUSED)
+ continue;
- /* Find those pointers being passed, and make sure they end up
- pointing to anything. */
- if (could_have_pointers (arg))
+ /* As we compute ESCAPED context-insensitive we do not gain
+ any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
+ set. The argument would still get clobbered through the
+ escape solution.
+ ??? We might get away with less (and more precise) constraints
+ if using a temporary for transitively closing things. */
+ if ((flags & EAF_NOCLOBBER)
+ && (flags & EAF_NOESCAPE))
+ {
+ varinfo_t uses = get_call_use_vi (stmt);
+ if (!(flags & EAF_DIRECT))
+ make_transitive_closure_constraints (uses);
+ make_constraint_to (uses->id, arg);
+ returns_uses = true;
+ }
+ else if (flags & EAF_NOESCAPE)
+ {
+ varinfo_t uses = get_call_use_vi (stmt);
+ varinfo_t clobbers = get_call_clobber_vi (stmt);
+ if (!(flags & EAF_DIRECT))
+ {
+ make_transitive_closure_constraints (uses);
+ make_transitive_closure_constraints (clobbers);
+ }
+ make_constraint_to (uses->id, arg);
+ make_constraint_to (clobbers->id, arg);
+ returns_uses = true;
+ }
+ else
make_escape_constraint (arg);
}
+ /* If we added to the calls uses solution make sure we account for
+ pointers to it to be returned. */
+ if (returns_uses)
+ {
+ rhsc.var = get_call_use_vi (stmt)->id;
+ rhsc.offset = 0;
+ rhsc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, *results, &rhsc);
+ }
+
/* The static chain escapes as well. */
if (gimple_call_chain (stmt))
make_escape_constraint (gimple_call_chain (stmt));
lhsc.var = escaped_id;
lhsc.offset = 0;
lhsc.type = SCALAR;
- for (i = 0; VEC_iterate (ce_s, tmpc, i, c); ++i)
+ FOR_EACH_VEC_ELT (ce_s, tmpc, i, c)
process_constraint (new_constraint (lhsc, *c));
VEC_free(ce_s, heap, tmpc);
}
the LHS point to global and escaped variables. */
static void
-handle_lhs_call (tree lhs, int flags, VEC(ce_s, heap) *rhsc, tree fndecl)
+handle_lhs_call (gimple stmt, tree lhs, int flags, VEC(ce_s, heap) *rhsc,
+ tree fndecl)
{
VEC(ce_s, heap) *lhsc = NULL;
get_constraint_for (lhs, &lhsc);
-
- if (flags & ECF_MALLOC)
+ /* If the store is to a global decl make sure to
+ add proper escape constraints. */
+ lhs = get_base_address (lhs);
+ if (lhs
+ && DECL_P (lhs)
+ && is_global_var (lhs))
+ {
+ struct constraint_expr tmpc;
+ tmpc.var = escaped_id;
+ tmpc.offset = 0;
+ tmpc.type = SCALAR;
+ VEC_safe_push (ce_s, heap, lhsc, &tmpc);
+ }
+
+ /* If the call returns an argument unmodified override the rhs
+ constraints. */
+ flags = gimple_call_return_flags (stmt);
+ if (flags & ERF_RETURNS_ARG
+ && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
+ {
+ tree arg;
+ rhsc = NULL;
+ arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
+ get_constraint_for (arg, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, rhsc);
+ }
+ else if (flags & ERF_NOALIAS)
{
varinfo_t vi;
- vi = make_constraint_from_heapvar (get_vi_for_tree (lhs), "HEAP");
+ struct constraint_expr tmpc;
+ rhsc = NULL;
+ vi = make_heapvar_for (get_vi_for_tree (lhs), "HEAP");
/* We delay marking allocated storage global until we know if
it escapes. */
DECL_EXTERNAL (vi->decl) = 0;
vi->is_global_var = 0;
/* If this is not a real malloc call assume the memory was
- initialized and thus may point to global memory. All
+ initialized and thus may point to global memory. All
builtin functions with the malloc attribute behave in a sane way. */
if (!fndecl
|| DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
make_constraint_from (vi, nonlocal_id);
+ tmpc.var = vi->id;
+ tmpc.offset = 0;
+ tmpc.type = ADDRESSOF;
+ VEC_safe_push (ce_s, heap, rhsc, &tmpc);
}
- else if (VEC_length (ce_s, rhsc) > 0)
- {
- /* If the store is to a global decl make sure to
- add proper escape constraints. */
- lhs = get_base_address (lhs);
- if (lhs
- && DECL_P (lhs)
- && is_global_var (lhs))
- {
- struct constraint_expr tmpc;
- tmpc.var = escaped_id;
- tmpc.offset = 0;
- tmpc.type = SCALAR;
- VEC_safe_push (ce_s, heap, lhsc, &tmpc);
- }
- process_all_all_constraints (lhsc, rhsc);
- }
+
+ process_all_all_constraints (lhsc, rhsc);
+
VEC_free (ce_s, heap, lhsc);
}
for (k = 0; k < gimple_call_num_args (stmt); ++k)
{
tree arg = gimple_call_arg (stmt, k);
-
- if (could_have_pointers (arg))
- {
- VEC(ce_s, heap) *argc = NULL;
- unsigned i;
- struct constraint_expr *argp;
- get_constraint_for (arg, &argc);
- for (i = 0; VEC_iterate (ce_s, argc, i, argp); ++i)
- VEC_safe_push (ce_s, heap, *results, argp);
- VEC_free(ce_s, heap, argc);
- }
+ VEC(ce_s, heap) *argc = NULL;
+ unsigned i;
+ struct constraint_expr *argp;
+ get_constraint_for_rhs (arg, &argc);
+ FOR_EACH_VEC_ELT (ce_s, argc, i, argp)
+ VEC_safe_push (ce_s, heap, *results, argp);
+ VEC_free(ce_s, heap, argc);
}
/* May return addresses of globals. */
for (i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree arg = gimple_call_arg (stmt, i);
-
- if (could_have_pointers (arg))
+ if (!uses)
{
- if (!uses)
- {
- uses = get_call_use_vi (stmt);
- make_transitive_closure_constraints (uses);
- }
- make_constraint_to (uses->id, arg);
+ uses = get_call_use_vi (stmt);
+ make_transitive_closure_constraints (uses);
}
+ make_constraint_to (uses->id, arg);
}
/* The static chain is used as well. */
if (TREE_CODE (decl) == SSA_NAME)
{
if (TREE_CODE (decl) == SSA_NAME
- && TREE_CODE (SSA_NAME_VAR (decl)) == PARM_DECL
+ && (TREE_CODE (SSA_NAME_VAR (decl)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (decl)) == RESULT_DECL)
&& SSA_NAME_IS_DEFAULT_DEF (decl))
decl = SSA_NAME_VAR (decl);
return get_vi_for_tree (decl);
/* Now build constraints expressions. */
if (gimple_code (t) == GIMPLE_PHI)
{
- gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
+ size_t i;
+ unsigned int j;
- /* Only care about pointers and structures containing
- pointers. */
- if (could_have_pointers (gimple_phi_result (t)))
+ /* For a phi node, assign all the arguments to
+ the result. */
+ get_constraint_for (gimple_phi_result (t), &lhsc);
+ for (i = 0; i < gimple_phi_num_args (t); i++)
{
- size_t i;
- unsigned int j;
+ tree strippedrhs = PHI_ARG_DEF (t, i);
- /* For a phi node, assign all the arguments to
- the result. */
- get_constraint_for (gimple_phi_result (t), &lhsc);
- for (i = 0; i < gimple_phi_num_args (t); i++)
- {
- tree strippedrhs = PHI_ARG_DEF (t, i);
-
- STRIP_NOPS (strippedrhs);
- get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
+ STRIP_NOPS (strippedrhs);
+ get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
- for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, j, c)
+ {
+ struct constraint_expr *c2;
+ while (VEC_length (ce_s, rhsc) > 0)
{
- struct constraint_expr *c2;
- while (VEC_length (ce_s, rhsc) > 0)
- {
- c2 = VEC_last (ce_s, rhsc);
- process_constraint (new_constraint (*c, *c2));
- VEC_pop (ce_s, rhsc);
- }
+ c2 = VEC_last (ce_s, rhsc);
+ process_constraint (new_constraint (*c, *c2));
+ VEC_pop (ce_s, rhsc);
}
}
}
ac.var = integer_id;
}
ac.offset = 0;
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, ac));
VEC_free (ce_s, heap, lhsc);
return;
{
lhs = get_function_part_constraint (nfi, fi_static_chain);
get_constraint_for (frame, &rhsc);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
VEC_free (ce_s, heap, rhsc);
do_deref (&lhsc);
rhs = get_function_part_constraint (fi, ~0);
rhs.type = ADDRESSOF;
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, rhs));
VEC_free (ce_s, heap, lhsc);
/* va_list is clobbered. */
/* va_end doesn't have any effect that matters. */
case BUILT_IN_VA_END:
return;
+ /* Alternate return. Simply give up for now. */
+ case BUILT_IN_RETURN:
+ {
+ fi = NULL;
+ if (!in_ipa_mode
+ || !(fi = get_vi_for_tree (cfun->decl)))
+ make_constraint_from (get_varinfo (escaped_id), anything_id);
+ else if (in_ipa_mode
+ && fi != NULL)
+ {
+ struct constraint_expr lhs, rhs;
+ lhs = get_function_part_constraint (fi, fi_result);
+ rhs.var = anything_id;
+ rhs.offset = 0;
+ rhs.type = SCALAR;
+ process_constraint (new_constraint (lhs, rhs));
+ }
+ return;
+ }
/* printf-style functions may have hooks to set pointers to
point to somewhere into the generated string. Leave them
for a later excercise... */
of global memory but not of escaped memory. */
if (flags & (ECF_CONST|ECF_NOVOPS))
{
- if (gimple_call_lhs (t)
- && could_have_pointers (gimple_call_lhs (t)))
+ if (gimple_call_lhs (t))
handle_const_call (t, &rhsc);
}
/* Pure functions can return addresses in and of memory
handle_pure_call (t, &rhsc);
else
handle_rhs_call (t, &rhsc);
- if (gimple_call_lhs (t)
- && could_have_pointers (gimple_call_lhs (t)))
- handle_lhs_call (gimple_call_lhs (t), flags, rhsc, fndecl);
+ if (gimple_call_lhs (t))
+ handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
VEC_free (ce_s, heap, rhsc);
}
else
struct constraint_expr *rhsp;
tree arg = gimple_call_arg (t, j);
- if (!could_have_pointers (arg))
- continue;
-
- get_constraint_for (arg, &rhsc);
+ get_constraint_for_rhs (arg, &rhsc);
lhs = get_function_part_constraint (fi, fi_parm_base + j);
while (VEC_length (ce_s, rhsc) != 0)
{
/* If we are returning a value, assign it to the result. */
lhsop = gimple_call_lhs (t);
- if (lhsop
- && could_have_pointers (lhsop))
+ if (lhsop)
{
struct constraint_expr rhs;
struct constraint_expr *lhsp;
rhs = *VEC_index (ce_s, tem, 0);
VEC_free(ce_s, heap, tem);
}
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
process_constraint (new_constraint (*lhsp, rhs));
}
get_constraint_for_address_of (lhsop, &rhsc);
lhs = get_function_part_constraint (fi, fi_result);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
VEC_free (ce_s, heap, rhsc);
}
get_constraint_for (gimple_call_chain (t), &rhsc);
lhs = get_function_part_constraint (fi, fi_static_chain);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
}
/* Otherwise, just a regular assignment statement. Only care about
operations with pointer result, others are dealt with as escape
points if they have pointer operands. */
- else if (is_gimple_assign (t)
- && could_have_pointers (gimple_assign_lhs (t)))
+ else if (is_gimple_assign (t))
{
/* Otherwise, just a regular assignment statement. */
tree lhsop = gimple_assign_lhs (t);
do_structure_copy (lhsop, rhsop);
else
{
- struct constraint_expr temp;
get_constraint_for (lhsop, &lhsc);
if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
gimple_assign_rhs2 (t), &rhsc);
+ else if (gimple_assign_rhs_code (t) == BIT_AND_EXPR
+ && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
+ {
+ /* Aligning a pointer via a BIT_AND_EXPR is offsetting
+ the pointer. Handle it by offsetting it by UNKNOWN. */
+ get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
+ NULL_TREE, &rhsc);
+ }
else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
&& !(POINTER_TYPE_P (gimple_expr_type (t))
&& !POINTER_TYPE_P (TREE_TYPE (rhsop))))
|| gimple_assign_single_p (t))
- get_constraint_for (rhsop, &rhsc);
+ get_constraint_for_rhs (rhsop, &rhsc);
else
{
- temp.type = ADDRESSOF;
- temp.var = anything_id;
- temp.offset = 0;
- VEC_safe_push (ce_s, heap, rhsc, &temp);
+ /* All other operations are merges. */
+ VEC (ce_s, heap) *tmp = NULL;
+ struct constraint_expr *rhsp;
+ unsigned i, j;
+ get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
+ for (i = 2; i < gimple_num_ops (t); ++i)
+ {
+ get_constraint_for_rhs (gimple_op (t, i), &tmp);
+ FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
+ VEC_safe_push (ce_s, heap, rhsc, rhsp);
+ VEC_truncate (ce_s, tmp, 0);
+ }
+ VEC_free (ce_s, heap, tmp);
}
process_all_all_constraints (lhsc, rhsc);
}
make_constraint_from_restrict (get_vi_for_tree (lhsop),
"CAST_RESTRICT");
}
- /* For conversions of pointers to non-pointers the pointer escapes. */
- else if (gimple_assign_cast_p (t)
- && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (t)))
- && !POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (t))))
- {
- make_escape_constraint (gimple_assign_rhs1 (t));
- }
/* Handle escapes through return. */
else if (gimple_code (t) == GIMPLE_RETURN
- && gimple_return_retval (t) != NULL_TREE
- && could_have_pointers (gimple_return_retval (t)))
+ && gimple_return_retval (t) != NULL_TREE)
{
fi = NULL;
if (!in_ipa_mode
unsigned i;
lhs = get_function_part_constraint (fi, fi_result);
- get_constraint_for (gimple_return_retval (t), &rhsc);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
}
/* The asm may read global memory, so outputs may point to
any global memory. */
- if (op && could_have_pointers (op))
+ if (op)
{
VEC(ce_s, heap) *lhsc = NULL;
struct constraint_expr rhsc, *lhsp;
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
process_constraint (new_constraint (*lhsp, rhsc));
VEC_free (ce_s, heap, lhsc);
}
/* Strictly we'd only need the constraint to ESCAPED if
the asm clobbers memory, otherwise using something
along the lines of per-call clobbers/uses would be enough. */
- else if (op && could_have_pointers (op))
+ else if (op)
make_escape_constraint (op);
}
}
VEC(ce_s, heap) *ptrc = NULL;
struct constraint_expr *c, lhs;
unsigned i;
- get_constraint_for (ptr, &ptrc);
+ get_constraint_for_rhs (ptr, &ptrc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- for (i = 0; VEC_iterate (ce_s, ptrc, i, c); i++)
+ FOR_EACH_VEC_ELT (ce_s, ptrc, i, c)
process_constraint (new_constraint (lhs, *c));
VEC_free (ce_s, heap, ptrc);
}
tem = TREE_OPERAND (tem, 0);
if ((DECL_P (tem)
&& !auto_var_in_fn_p (tem, cfun->decl))
- || INDIRECT_REF_P (tem))
+ || INDIRECT_REF_P (tem)
+ || (TREE_CODE (tem) == MEM_REF
+ && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
+ && auto_var_in_fn_p
+ (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
{
struct constraint_expr lhsc, *rhsp;
unsigned i;
lhsc = get_function_part_constraint (fi, fi_clobbers);
get_constraint_for_address_of (lhs, &rhsc);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhsc, *rhsp));
VEC_free (ce_s, heap, rhsc);
}
tem = TREE_OPERAND (tem, 0);
if ((DECL_P (tem)
&& !auto_var_in_fn_p (tem, cfun->decl))
- || INDIRECT_REF_P (tem))
+ || INDIRECT_REF_P (tem)
+ || (TREE_CODE (tem) == MEM_REF
+ && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
+ && auto_var_in_fn_p
+ (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
{
struct constraint_expr lhs, *rhsp;
unsigned i;
lhs = get_function_part_constraint (fi, fi_uses);
get_constraint_for_address_of (rhs, &rhsc);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
VEC_free (ce_s, heap, rhsc);
}
struct constraint_expr *rhsp, *lhsp;
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
process_constraint (new_constraint (lhs, *lhsp));
VEC_free (ce_s, heap, lhsc);
get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
lhs = get_function_part_constraint (fi, fi_uses);
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
VEC_free (ce_s, heap, rhsc);
return;
ce_s *lhsp;
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
+ FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
process_constraint (new_constraint (lhs, *lhsp));
VEC_free (ce_s, heap, lhsc);
return;
continue;
get_constraint_for_address_of (arg, &rhsc);
- for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
VEC_free (ce_s, heap, rhsc);
}
unsigned has_unknown_size : 1;
+ unsigned must_have_pointers : 1;
+
unsigned may_have_pointers : 1;
unsigned only_restrict_pointers : 1;
static void
sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
{
- qsort (VEC_address (fieldoff_s, fieldstack),
- VEC_length (fieldoff_s, fieldstack),
- sizeof (fieldoff_s),
- fieldoff_compare);
+ VEC_qsort (fieldoff_s, fieldstack, fieldoff_compare);
}
/* Return true if V is a tree that we can have subvars for.
return false;
}
+/* Return true if T is a type that must contain pointers: a pointer
+   type, an array thereof, or a function/method type (whose arguments
+   may be pointers).  */
+static bool
+type_must_have_pointers (tree type)
+{
+ if (POINTER_TYPE_P (type))
+ return true;
+
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ return type_must_have_pointers (TREE_TYPE (type));
+
+ /* A function or method can have pointers as arguments, so track
+ those separately. */
+ if (TREE_CODE (type) == FUNCTION_TYPE
+ || TREE_CODE (type) == METHOD_TYPE)
+ return true;
+
+ return false;
+}
+
+static bool
+field_must_have_pointers (tree t)
+{
+ return type_must_have_pointers (TREE_TYPE (t));
+}
+
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
the fields of TYPE onto fieldstack, recording their offsets along
the way.
if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
return false;
- for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
if (TREE_CODE (field) == FIELD_DECL)
{
bool push = false;
{
fieldoff_s *pair = NULL;
bool has_unknown_size = false;
+ bool must_have_pointers_p;
if (!VEC_empty (fieldoff_s, *fieldstack))
pair = VEC_last (fieldoff_s, *fieldstack);
has_unknown_size = true;
/* If adjacent fields do not contain pointers merge them. */
+ must_have_pointers_p = field_must_have_pointers (field);
if (pair
- && !pair->may_have_pointers
- && !pair->has_unknown_size
&& !has_unknown_size
- && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff
- && !could_have_pointers (field))
+ && !must_have_pointers_p
+ && !pair->must_have_pointers
+ && !pair->has_unknown_size
+ && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
{
pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
}
pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
else
pair->size = -1;
- pair->may_have_pointers = could_have_pointers (field);
+ pair->must_have_pointers = must_have_pointers_p;
+ pair->may_have_pointers = true;
pair->only_restrict_pointers
= (!has_unknown_size
&& POINTER_TYPE_P (TREE_TYPE (field))
/* Capture named arguments for K&R functions. They do not
have a prototype and thus no TYPE_ARG_TYPES. */
- for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
+ for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
++num;
/* Check if the function has variadic arguments. */
/* Creation function node for DECL, using NAME, and return the index
of the variable we've created for the function. */
-static unsigned int
+static varinfo_t
create_function_info_for (tree decl, const char *name)
{
struct function *fn = DECL_STRUCT_FUNCTION (decl);
resultvi->fullsize = vi->fullsize;
resultvi->is_full_var = true;
if (DECL_RESULT (decl))
- resultvi->may_have_pointers = could_have_pointers (DECL_RESULT (decl));
+ resultvi->may_have_pointers = true;
gcc_assert (prev_vi->offset < resultvi->offset);
prev_vi->next = resultvi;
prev_vi = resultvi;
argvi->is_full_var = true;
argvi->fullsize = vi->fullsize;
if (arg)
- argvi->may_have_pointers = could_have_pointers (arg);
+ argvi->may_have_pointers = true;
gcc_assert (prev_vi->offset < argvi->offset);
prev_vi->next = argvi;
prev_vi = argvi;
if (arg)
{
insert_vi_for_tree (arg, argvi);
- arg = TREE_CHAIN (arg);
+ arg = DECL_CHAIN (arg);
}
}
prev_vi = argvi;
}
- return vi->id;
+ return vi;
}
unsigned int i;
HOST_WIDE_INT lastoffset = -1;
- for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
+ FOR_EACH_VEC_ELT (fieldoff_s, fieldstack, i, fo)
{
if (fo->offset == lastoffset)
return true;
vi->fullsize = ~0;
vi->is_unknown_size_var = true;
vi->is_full_var = true;
- vi->may_have_pointers = could_have_pointers (decl);
+ vi->may_have_pointers = true;
return vi;
}
{
vi = new_var_info (decl, name);
vi->offset = 0;
- vi->may_have_pointers = could_have_pointers (decl);
+ vi->may_have_pointers = true;
vi->fullsize = TREE_INT_CST_LOW (declsize);
vi->size = vi->fullsize;
vi->is_full_var = true;
VEC (ce_s, heap) *rhsc = NULL;
struct constraint_expr lhs, *rhsp;
unsigned i;
- get_constraint_for (DECL_INITIAL (decl), &rhsc);
+ get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
lhs.var = vi->id;
lhs.offset = 0;
lhs.type = SCALAR;
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
/* If this is a variable that escapes from the unit
the initializer escapes as well. */
lhs.var = escaped_id;
lhs.offset = 0;
lhs.type = SCALAR;
- for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
+ FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
VEC_free (ce_s, heap, rhsc);
/* Print the points-to solution for VAR to stdout. */
-void
+DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
dump_solution_for_var (stdout, var);
/* For each incoming pointer argument arg, create the constraint ARG
= NONLOCAL or a dummy variable if it is a restrict qualified
passed-by-reference argument. */
- for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
+ for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
{
varinfo_t p;
- if (!could_have_pointers (t))
- continue;
-
/* For restrict qualified pointers to objects passed by
reference build a real representative for the pointed-to object. */
if (DECL_BY_REFERENCE (t)
/* For parameters, get at the points-to set for the actual parm
decl. */
if (TREE_CODE (p) == SSA_NAME
- && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
+ && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
+ || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL)
&& SSA_NAME_IS_DEFAULT_DEF (p))
lookup_p = SSA_NAME_VAR (p);
pt->vars_contains_restrict = vars_contains_restrict;
}
+/* Set the points-to solution *PT to point only to the variable VAR. */
+
+void
+pt_solution_set_var (struct pt_solution *pt, tree var)
+{
+ memset (pt, 0, sizeof (struct pt_solution));
+ pt->vars = BITMAP_GGC_ALLOC ();
+ bitmap_set_bit (pt->vars, DECL_UID (var));
+ pt->vars_contains_global = is_global_var (var);
+}
+
/* Computes the union of the points-to solutions *DEST and *SRC and
stores the result in *DEST. This changes the points-to bitmap
of *DEST and thus may not be used if that might be shared.
/* Debug points-to information to stderr. */
-void
+DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
dump_sa_points_to_info (stderr);
cfun->gimple_df->escaped.escaped = 0;
/* Mark escaped HEAP variables as global. */
- for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); ++i)
+ FOR_EACH_VEC_ELT (varinfo_t, varmap, i, vi)
if (vi->is_heap_var
&& !vi->is_restrict_var
&& !vi->is_global_var)
return (optimize
&& flag_ipa_pta
/* Don't bother doing anything if the program has errors. */
- && !(errorcount || sorrycount));
+ && !seen_error ());
}
/* IPA PTA solutions for ESCAPED. */
/* Build the constraints. */
for (node = cgraph_nodes; node; node = node->next)
{
+ struct cgraph_node *alias;
+ varinfo_t vi;
+
/* Nodes without a body are not interesting. Especially do not
visit clones at this point for now - we get duplicate decls
there for inline clones at least. */
|| node->clone_of)
continue;
- create_function_info_for (node->decl,
- cgraph_node_name (node));
+ vi = create_function_info_for (node->decl,
+ alias_get_name (node->decl));
+
+ /* Associate the varinfo node with all aliases. */
+ for (alias = node->same_body; alias; alias = alias->next)
+ insert_vi_for_tree (alias->decl, vi);
}
/* Create constraints for global variables and their initializers. */
for (var = varpool_nodes; var; var = var->next)
- get_vi_for_tree (var->decl);
+ {
+ struct varpool_node *alias;
+ varinfo_t vi;
+
+ vi = get_vi_for_tree (var->decl);
+
+ /* Associate the varinfo node with all aliases. */
+ for (alias = var->extra_name; alias; alias = alias->next)
+ insert_vi_for_tree (alias->decl, vi);
+ }
if (dump_file)
{
continue;
if (dump_file)
- fprintf (dump_file,
- "Generating constraints for %s\n",
- cgraph_node_name (node));
+ {
+ fprintf (dump_file,
+ "Generating constraints for %s", cgraph_node_name (node));
+ if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
+ fprintf (dump_file, " (%s)",
+ IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
+ fprintf (dump_file, "\n");
+ }
func = DECL_STRUCT_FUNCTION (node->decl);
old_func_decl = current_function_decl;
fn = DECL_STRUCT_FUNCTION (node->decl);
/* Compute the points-to sets for pointer SSA_NAMEs. */
- for (i = 0; VEC_iterate (tree, fn->gimple_df->ssa_names, i, ptr); ++i)
+ FOR_EACH_VEC_ELT (tree, fn->gimple_df->ssa_names, i, ptr)
{
if (ptr
&& POINTER_TYPE_P (TREE_TYPE (ptr)))