1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dberlin@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
36 #include "tree-flow.h"
37 #include "tree-inline.h"
39 #include "diagnostic.h"
45 #include "tree-pass.h"
47 #include "alloc-pool.h"
48 #include "splay-tree.h"
52 #include "pointer-set.h"
54 /* The idea behind this analyzer is to generate set constraints from the
55 program, then solve the resulting constraints in order to generate the
58 Set constraints are a way of modeling program analysis problems that
59 involve sets. They consist of an inclusion constraint language,
60 describing the variables (each variable is a set) and operations that
61 are involved on the variables, and a set of rules that derive facts
62 from these operations. To solve a system of set constraints, you derive
63 all possible facts under the rules, which gives you the correct sets
66 See "Efficient Field-sensitive pointer analysis for C" by "David
67 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
68 http://citeseer.ist.psu.edu/pearce04efficient.html
70 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
71 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
72 http://citeseer.ist.psu.edu/heintze01ultrafast.html
74 There are three types of real constraint expressions, DEREF,
75 ADDRESSOF, and SCALAR. Each constraint expression consists
76 of a constraint type, a variable, and an offset.
78 SCALAR is a constraint expression type used to represent x, whether
79 it appears on the LHS or the RHS of a statement.
80 DEREF is a constraint expression type used to represent *x, whether
81 it appears on the LHS or the RHS of a statement.
82 ADDRESSOF is a constraint expression used to represent &x, whether
83 it appears on the LHS or the RHS of a statement.
85 Each pointer variable in the program is assigned an integer id, and
86 each field of a structure variable is assigned an integer id as well.
88 Structure variables are linked to their list of fields through a "next
89 field" in each variable that points to the next field in offset
91 Each variable for a structure field has
93 1. "size", that tells the size in bits of that field.
94 2. "fullsize, that tells the size in bits of the entire structure.
95 3. "offset", that tells the offset in bits from the beginning of the
96 structure to this field.
108 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
109 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
110 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
113 In order to solve the system of set constraints, the following is
116 1. Each constraint variable x has a solution set associated with it,
119 2. Constraints are separated into direct, copy, and complex.
120 Direct constraints are ADDRESSOF constraints that require no extra
121 processing, such as P = &Q
122 Copy constraints are those of the form P = Q.
123 Complex constraints are all the constraints involving dereferences
124 and offsets (including offsetted copies).
126 3. All direct constraints of the form P = &Q are processed, such
127 that Q is added to Sol(P)
129 4. All complex constraints for a given constraint variable are stored in a
130 linked list attached to that variable's node.
132 5. A directed graph is built out of the copy constraints. Each
133 constraint variable is a node in the graph, and an edge from
134 Q to P is added for each copy constraint of the form P = Q
136 6. The graph is then walked, and solution sets are
137 propagated along the copy edges, such that an edge from Q to P
138 causes Sol(P) <- Sol(P) union Sol(Q).
140 7. As we visit each node, all complex constraints associated with
141 that node are processed by adding appropriate copy edges to the graph, or the
142 appropriate variables to the solution set.
144 8. The process of walking the graph is iterated until no solution
147 Prior to walking the graph in steps 6 and 7, We perform static
148 cycle elimination on the constraint graph, as well
149 as off-line variable substitution.
151 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
152 on and turned into anything), but isn't. You can just see what offset
153 inside the pointed-to struct it's going to access.
155 TODO: Constant bounded arrays can be handled as if they were structs of the
156 same number of elements.
158 TODO: Modeling heap and incoming pointers becomes much better if we
159 add fields to them as we discover them, which we could do.
161 TODO: We could handle unions, but to be honest, it's probably not
162 worth the pain or slowdown. */
164 /* IPA-PTA optimizations possible.
166 When the indirect function called is ANYTHING we can add disambiguation
167 based on the function signatures (or simply the parameter count which
168 is the varinfo size). We also do not need to consider functions that
169 do not have their address taken.
171 The is_global_var bit which marks escape points is overly conservative
172 in IPA mode. Split it to is_escape_point and is_global_var - only
173 externally visible globals are escape points in IPA mode. This is
174 also needed to fix the pt_solution_includes_global predicate
175 (and thus ptr_deref_may_alias_global_p).
177 The way we introduce DECL_PT_UID to avoid fixing up all points-to
178 sets in the translation unit when we copy a DECL during inlining
179 pessimizes precision. The advantage is that the DECL_PT_UID keeps
180 compile-time and memory usage overhead low - the points-to sets
181 do not grow or get unshared as they would during a fixup phase.
182 An alternative solution is to delay IPA PTA until after all
183 inlining transformations have been applied.
185 The way we propagate clobber/use information isn't optimized.
186 It should use a new complex constraint that properly filters
187 out local variables of the callee (though that would make
188 the sets invalid after inlining). OTOH we might as well
189 admit defeat to WHOPR and simply do all the clobber/use analysis
190 and propagation after PTA finished but before we threw away
191 points-to information for memory variables. WHOPR and PTA
192 do not play along well anyway - the whole constraint solving
193 would need to be done in WPA phase and it will be very interesting
194 to apply the results to local SSA names during LTRANS phase.
196 We probably should compute a per-function unit-ESCAPE solution
197 propagating it simply like the clobber / uses solutions. The
198 solution can go alongside the non-IPA espaced solution and be
199 used to query which vars escape the unit through a function.
201 We never put function decls in points-to sets so we do not
202 keep the set of called functions for indirect calls.
204 And probably more. */
206 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
207 htab_t heapvar_for_stmt;
209 static bool use_field_sensitive = true;
210 static int in_ipa_mode = 0;
212 /* Used for predecessor bitmaps. */
213 static bitmap_obstack predbitmap_obstack;
215 /* Used for points-to sets. */
216 static bitmap_obstack pta_obstack;
218 /* Used for oldsolution members of variables. */
219 static bitmap_obstack oldpta_obstack;
221 /* Used for per-solver-iteration bitmaps. */
222 static bitmap_obstack iteration_obstack;
224 static unsigned int create_variable_info_for (tree, const char *);
225 typedef struct constraint_graph *constraint_graph_t;
226 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
229 typedef struct constraint *constraint_t;
231 DEF_VEC_P(constraint_t);
232 DEF_VEC_ALLOC_P(constraint_t,heap);
234 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
236 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
238 static struct constraint_stats
240 unsigned int total_vars;
241 unsigned int nonpointer_vars;
242 unsigned int unified_vars_static;
243 unsigned int unified_vars_dynamic;
244 unsigned int iterations;
245 unsigned int num_edges;
246 unsigned int num_implicit_edges;
247 unsigned int points_to_sets_created;
252 /* ID of this variable */
255 /* True if this is a variable created by the constraint analysis, such as
256 heap variables and constraints we had to break up. */
257 unsigned int is_artificial_var : 1;
259 /* True if this is a special variable whose solution set should not be
261 unsigned int is_special_var : 1;
263 /* True for variables whose size is not known or variable. */
264 unsigned int is_unknown_size_var : 1;
266 /* True for (sub-)fields that represent a whole variable. */
267 unsigned int is_full_var : 1;
269 /* True if this is a heap variable. */
270 unsigned int is_heap_var : 1;
272 /* True if this is a variable tracking a restrict pointer source. */
273 unsigned int is_restrict_var : 1;
275 /* True if this field may contain pointers. */
276 unsigned int may_have_pointers : 1;
278 /* True if this represents a global variable. */
279 unsigned int is_global_var : 1;
281 /* True if this represents a IPA function info. */
282 unsigned int is_fn_info : 1;
284 /* A link to the variable for the next field in this structure. */
285 struct variable_info *next;
287 /* Offset of this variable, in bits, from the base variable */
288 unsigned HOST_WIDE_INT offset;
290 /* Size of the variable, in bits. */
291 unsigned HOST_WIDE_INT size;
293 /* Full size of the base variable, in bits. */
294 unsigned HOST_WIDE_INT fullsize;
296 /* Name of this variable */
299 /* Tree that this variable is associated with. */
302 /* Points-to set for this variable. */
305 /* Old points-to set for this variable. */
308 typedef struct variable_info *varinfo_t;
310 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
311 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
312 unsigned HOST_WIDE_INT);
313 static varinfo_t lookup_vi_for_tree (tree);
315 /* Pool of variable info structures. */
316 static alloc_pool variable_info_pool;
318 DEF_VEC_P(varinfo_t);
320 DEF_VEC_ALLOC_P(varinfo_t, heap);
322 /* Table of variable info structures for constraint variables.
323 Indexed directly by variable info id. */
324 static VEC(varinfo_t,heap) *varmap;
326 /* Return the varmap element N */
328 static inline varinfo_t
329 get_varinfo (unsigned int n)
331 return VEC_index (varinfo_t, varmap, n);
334 /* Static IDs for the special variables. */
335 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
336 escaped_id = 3, nonlocal_id = 4,
337 storedanything_id = 5, integer_id = 6 };
339 struct GTY(()) heapvar_map {
341 unsigned HOST_WIDE_INT offset;
345 heapvar_map_eq (const void *p1, const void *p2)
347 const struct heapvar_map *h1 = (const struct heapvar_map *)p1;
348 const struct heapvar_map *h2 = (const struct heapvar_map *)p2;
349 return (h1->map.base.from == h2->map.base.from
350 && h1->offset == h2->offset);
354 heapvar_map_hash (struct heapvar_map *h)
356 return iterative_hash_host_wide_int (h->offset,
357 htab_hash_pointer (h->map.base.from));
360 /* Lookup a heap var for FROM, and return it if we find one. */
363 heapvar_lookup (tree from, unsigned HOST_WIDE_INT offset)
365 struct heapvar_map *h, in;
366 in.map.base.from = from;
368 h = (struct heapvar_map *) htab_find_with_hash (heapvar_for_stmt, &in,
369 heapvar_map_hash (&in));
375 /* Insert a mapping FROM->TO in the heap var for statement
379 heapvar_insert (tree from, unsigned HOST_WIDE_INT offset, tree to)
381 struct heapvar_map *h;
384 h = GGC_NEW (struct heapvar_map);
385 h->map.base.from = from;
387 h->map.hash = heapvar_map_hash (h);
389 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->map.hash, INSERT);
390 gcc_assert (*loc == NULL);
391 *(struct heapvar_map **) loc = h;
394 /* Return a new variable info structure consisting for a variable
395 named NAME, and using constraint graph node NODE. Append it
396 to the vector of variable info structures. */
399 new_var_info (tree t, const char *name)
401 unsigned index = VEC_length (varinfo_t, varmap);
402 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
407 /* Vars without decl are artificial and do not have sub-variables. */
408 ret->is_artificial_var = (t == NULL_TREE);
409 ret->is_special_var = false;
410 ret->is_unknown_size_var = false;
411 ret->is_full_var = (t == NULL_TREE);
412 ret->is_heap_var = false;
413 ret->is_restrict_var = false;
414 ret->may_have_pointers = true;
415 ret->is_global_var = (t == NULL_TREE);
416 ret->is_fn_info = false;
418 ret->is_global_var = is_global_var (t);
419 ret->solution = BITMAP_ALLOC (&pta_obstack);
420 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
423 VEC_safe_push (varinfo_t, heap, varmap, ret);
429 /* A map mapping call statements to per-stmt variables for uses
430 and clobbers specific to the call. */
431 struct pointer_map_t *call_stmt_vars;
433 /* Lookup or create the variable for the call statement CALL. */
436 get_call_vi (gimple call)
441 slot_p = pointer_map_insert (call_stmt_vars, call);
443 return (varinfo_t) *slot_p;
445 vi = new_var_info (NULL_TREE, "CALLUSED");
449 vi->is_full_var = true;
451 vi->next = vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
455 vi2->is_full_var = true;
457 *slot_p = (void *) vi;
461 /* Lookup the variable for the call statement CALL representing
462 the uses. Returns NULL if there is nothing special about this call. */
465 lookup_call_use_vi (gimple call)
469 slot_p = pointer_map_contains (call_stmt_vars, call);
471 return (varinfo_t) *slot_p;
476 /* Lookup the variable for the call statement CALL representing
477 the clobbers. Returns NULL if there is nothing special about this call. */
480 lookup_call_clobber_vi (gimple call)
482 varinfo_t uses = lookup_call_use_vi (call);
489 /* Lookup or create the variable for the call statement CALL representing
493 get_call_use_vi (gimple call)
495 return get_call_vi (call);
498 /* Lookup or create the variable for the call statement CALL representing
501 static varinfo_t ATTRIBUTE_UNUSED
502 get_call_clobber_vi (gimple call)
504 return get_call_vi (call)->next;
508 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
510 /* An expression that appears in a constraint. */
512 struct constraint_expr
514 /* Constraint type. */
515 constraint_expr_type type;
517 /* Variable we are referring to in the constraint. */
520 /* Offset, in bits, of this constraint from the beginning of
521 variables it ends up referring to.
523 IOW, in a deref constraint, we would deref, get the result set,
524 then add OFFSET to each member. */
525 HOST_WIDE_INT offset;
528 /* Use 0x8000... as special unknown offset. */
529 #define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
531 typedef struct constraint_expr ce_s;
533 DEF_VEC_ALLOC_O(ce_s, heap);
534 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
535 static void get_constraint_for (tree, VEC(ce_s, heap) **);
536 static void do_deref (VEC (ce_s, heap) **);
538 /* Our set constraints are made up of two constraint expressions, one
541 As described in the introduction, our set constraints each represent an
542 operation between set valued variables.
546 struct constraint_expr lhs;
547 struct constraint_expr rhs;
550 /* List of constraints that we use to build the constraint graph from. */
552 static VEC(constraint_t,heap) *constraints;
553 static alloc_pool constraint_pool;
555 /* The constraint graph is represented as an array of bitmaps
556 containing successor nodes. */
558 struct constraint_graph
560 /* Size of this graph, which may be different than the number of
561 nodes in the variable map. */
564 /* Explicit successors of each node. */
567 /* Implicit predecessors of each node (Used for variable
569 bitmap *implicit_preds;
571 /* Explicit predecessors of each node (Used for variable substitution). */
574 /* Indirect cycle representatives, or -1 if the node has no indirect
576 int *indirect_cycles;
578 /* Representative node for a node. rep[a] == a unless the node has
582 /* Equivalence class representative for a label. This is used for
583 variable substitution. */
586 /* Pointer equivalence label for a node. All nodes with the same
587 pointer equivalence label can be unified together at some point
588 (either during constraint optimization or after the constraint
592 /* Pointer equivalence representative for a label. This is used to
593 handle nodes that are pointer equivalent but not location
594 equivalent. We can unite these once the addressof constraints
595 are transformed into initial points-to sets. */
598 /* Pointer equivalence label for each node, used during variable
600 unsigned int *pointer_label;
602 /* Location equivalence label for each node, used during location
603 equivalence finding. */
604 unsigned int *loc_label;
606 /* Pointed-by set for each node, used during location equivalence
607 finding. This is pointed-by rather than pointed-to, because it
608 is constructed using the predecessor graph. */
611 /* Points to sets for pointer equivalence. This is *not* the actual
612 points-to sets for nodes. */
615 /* Bitmap of nodes where the bit is set if the node is a direct
616 node. Used for variable substitution. */
617 sbitmap direct_nodes;
619 /* Bitmap of nodes where the bit is set if the node is address
620 taken. Used for variable substitution. */
621 bitmap address_taken;
623 /* Vector of complex constraints for each graph node. Complex
624 constraints are those involving dereferences or offsets that are
626 VEC(constraint_t,heap) **complex;
629 static constraint_graph_t graph;
631 /* During variable substitution and the offline version of indirect
632 cycle finding, we create nodes to represent dereferences and
633 address taken constraints. These represent where these start and
635 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
636 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
638 /* Return the representative node for NODE, if NODE has been unioned
640 This function performs path compression along the way to finding
641 the representative. */
644 find (unsigned int node)
646 gcc_assert (node < graph->size);
647 if (graph->rep[node] != node)
648 return graph->rep[node] = find (graph->rep[node]);
652 /* Union the TO and FROM nodes to the TO nodes.
653 Note that at some point in the future, we may want to do
654 union-by-rank, in which case we are going to have to return the
655 node we unified to. */
658 unite (unsigned int to, unsigned int from)
660 gcc_assert (to < graph->size && from < graph->size);
661 if (to != from && graph->rep[from] != to)
663 graph->rep[from] = to;
669 /* Create a new constraint consisting of LHS and RHS expressions. */
672 new_constraint (const struct constraint_expr lhs,
673 const struct constraint_expr rhs)
675 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
681 /* Print out constraint C to FILE. */
684 dump_constraint (FILE *file, constraint_t c)
686 if (c->lhs.type == ADDRESSOF)
688 else if (c->lhs.type == DEREF)
690 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
691 if (c->lhs.offset == UNKNOWN_OFFSET)
692 fprintf (file, " + UNKNOWN");
693 else if (c->lhs.offset != 0)
694 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
695 fprintf (file, " = ");
696 if (c->rhs.type == ADDRESSOF)
698 else if (c->rhs.type == DEREF)
700 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
701 if (c->rhs.offset == UNKNOWN_OFFSET)
702 fprintf (file, " + UNKNOWN");
703 else if (c->rhs.offset != 0)
704 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
705 fprintf (file, "\n");
709 void debug_constraint (constraint_t);
710 void debug_constraints (void);
711 void debug_constraint_graph (void);
712 void debug_solution_for_var (unsigned int);
713 void debug_sa_points_to_info (void);
715 /* Print out constraint C to stderr. */
718 debug_constraint (constraint_t c)
720 dump_constraint (stderr, c);
723 /* Print out all constraints to FILE */
726 dump_constraints (FILE *file, int from)
730 for (i = from; VEC_iterate (constraint_t, constraints, i, c); i++)
731 dump_constraint (file, c);
734 /* Print out all constraints to stderr. */
737 debug_constraints (void)
739 dump_constraints (stderr, 0);
742 /* Print out to FILE the edge in the constraint graph that is created by
743 constraint c. The edge may have a label, depending on the type of
744 constraint that it represents. If complex1, e.g: a = *b, then the label
745 is "=*", if complex2, e.g: *a = b, then the label is "*=", if
746 complex with an offset, e.g: a = b + 8, then the label is "+".
747 Otherwise the edge has no label. */
750 dump_constraint_edge (FILE *file, constraint_t c)
752 if (c->rhs.type != ADDRESSOF)
754 const char *src = get_varinfo (c->rhs.var)->name;
755 const char *dst = get_varinfo (c->lhs.var)->name;
756 fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
757 /* Due to preprocessing of constraints, instructions like *a = *b are
758 illegal; thus, we do not have to handle such cases. */
759 if (c->lhs.type == DEREF)
760 fprintf (file, " [ label=\"*=\" ] ;\n");
761 else if (c->rhs.type == DEREF)
762 fprintf (file, " [ label=\"=*\" ] ;\n");
765 /* We must check the case where the constraint is an offset.
766 In this case, it is treated as a complex constraint. */
767 if (c->rhs.offset != c->lhs.offset)
768 fprintf (file, " [ label=\"+\" ] ;\n");
770 fprintf (file, " ;\n");
775 /* Print the constraint graph in dot format. */
778 dump_constraint_graph (FILE *file)
780 unsigned int i=0, size;
783 /* Only print the graph if it has already been initialized: */
787 /* Print the constraints used to produce the constraint graph. The
788 constraints will be printed as comments in the dot file: */
789 fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
790 dump_constraints (file, 0);
791 fprintf (file, "*/\n");
793 /* Prints the header of the dot file: */
794 fprintf (file, "\n\n// The constraint graph in dot format:\n");
795 fprintf (file, "strict digraph {\n");
796 fprintf (file, " node [\n shape = box\n ]\n");
797 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
798 fprintf (file, "\n // List of nodes in the constraint graph:\n");
800 /* The next lines print the nodes in the graph. In order to get the
801 number of nodes in the graph, we must choose the minimum between the
802 vector VEC (varinfo_t, varmap) and graph->size. If the graph has not
803 yet been initialized, then graph->size == 0, otherwise we must only
804 read nodes that have an entry in VEC (varinfo_t, varmap). */
805 size = VEC_length (varinfo_t, varmap);
806 size = size < graph->size ? size : graph->size;
807 for (i = 0; i < size; i++)
809 const char *name = get_varinfo (graph->rep[i])->name;
810 fprintf (file, " \"%s\" ;\n", name);
813 /* Go over the list of constraints printing the edges in the constraint
815 fprintf (file, "\n // The constraint edges:\n");
816 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
818 dump_constraint_edge (file, c);
820 /* Prints the tail of the dot file. By now, only the closing bracket. */
821 fprintf (file, "}\n\n\n");
824 /* Print out the constraint graph to stderr. */
827 debug_constraint_graph (void)
829 dump_constraint_graph (stderr);
834 The solver is a simple worklist solver, that works on the following
837 sbitmap changed_nodes = all zeroes;
839 For each node that is not already collapsed:
841 set bit in changed nodes
843 while (changed_count > 0)
845 compute topological ordering for constraint graph
847 find and collapse cycles in the constraint graph (updating
848 changed if necessary)
850 for each node (n) in the graph in topological order:
853 Process each complex constraint associated with the node,
854 updating changed if necessary.
856 For each outgoing edge from n, propagate the solution from n to
857 the destination of the edge, updating changed as necessary.
861 /* Return true if two constraint expressions A and B are equal. */
864 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
866 return a.type == b.type && a.var == b.var && a.offset == b.offset;
869 /* Return true if constraint expression A is less than constraint expression
870 B. This is just arbitrary, but consistent, in order to give them an
874 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
876 if (a.type == b.type)
879 return a.offset < b.offset;
881 return a.var < b.var;
884 return a.type < b.type;
887 /* Return true if constraint A is less than constraint B. This is just
888 arbitrary, but consistent, in order to give them an ordering. */
891 constraint_less (const constraint_t a, const constraint_t b)
893 if (constraint_expr_less (a->lhs, b->lhs))
895 else if (constraint_expr_less (b->lhs, a->lhs))
898 return constraint_expr_less (a->rhs, b->rhs);
901 /* Return true if two constraints A and B are equal. */
904 constraint_equal (struct constraint a, struct constraint b)
906 return constraint_expr_equal (a.lhs, b.lhs)
907 && constraint_expr_equal (a.rhs, b.rhs);
911 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
914 constraint_vec_find (VEC(constraint_t,heap) *vec,
915 struct constraint lookfor)
923 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
924 if (place >= VEC_length (constraint_t, vec))
926 found = VEC_index (constraint_t, vec, place);
927 if (!constraint_equal (*found, lookfor))
932 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
935 constraint_set_union (VEC(constraint_t,heap) **to,
936 VEC(constraint_t,heap) **from)
941 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
943 if (constraint_vec_find (*to, *c) == NULL)
945 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
947 VEC_safe_insert (constraint_t, heap, *to, place, c);
952 /* Expands the solution in SET to all sub-fields of variables included.
953 Union the expanded result into RESULT. */
956 solution_set_expand (bitmap result, bitmap set)
962 /* In a first pass record all variables we need to add all
963 sub-fields off. This avoids quadratic behavior. */
964 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
966 varinfo_t v = get_varinfo (j);
967 if (v->is_artificial_var
970 v = lookup_vi_for_tree (v->decl);
972 vars = BITMAP_ALLOC (NULL);
973 bitmap_set_bit (vars, v->id);
976 /* In the second pass now do the addition to the solution and
977 to speed up solving add it to the delta as well. */
980 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
982 varinfo_t v = get_varinfo (j);
983 for (; v != NULL; v = v->next)
984 bitmap_set_bit (result, v->id);
990 /* Take a solution set SET, add OFFSET to each member of the set, and
991 overwrite SET with the result when done. */
994 solution_set_add (bitmap set, HOST_WIDE_INT offset)
996 bitmap result = BITMAP_ALLOC (&iteration_obstack);
1000 /* If the offset is unknown we have to expand the solution to
1002 if (offset == UNKNOWN_OFFSET)
1004 solution_set_expand (set, set);
1008 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
1010 varinfo_t vi = get_varinfo (i);
1012 /* If this is a variable with just one field just set its bit
1014 if (vi->is_artificial_var
1015 || vi->is_unknown_size_var
1017 bitmap_set_bit (result, i);
1020 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
1022 /* If the offset makes the pointer point to before the
1023 variable use offset zero for the field lookup. */
1025 && fieldoffset > vi->offset)
1029 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1031 bitmap_set_bit (result, vi->id);
1032 /* If the result is not exactly at fieldoffset include the next
1033 field as well. See get_constraint_for_ptr_offset for more
1035 if (vi->offset != fieldoffset
1036 && vi->next != NULL)
1037 bitmap_set_bit (result, vi->next->id);
1041 bitmap_copy (set, result);
1042 BITMAP_FREE (result);
1045 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
1049 set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
1052 return bitmap_ior_into (to, from);
1058 tmp = BITMAP_ALLOC (&iteration_obstack);
1059 bitmap_copy (tmp, from);
1060 solution_set_add (tmp, inc);
1061 res = bitmap_ior_into (to, tmp);
1067 /* Insert constraint C into the list of complex constraints for graph
1071 insert_into_complex (constraint_graph_t graph,
1072 unsigned int var, constraint_t c)
1074 VEC (constraint_t, heap) *complex = graph->complex[var];
1075 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
1078 /* Only insert constraints that do not already exist. */
1079 if (place >= VEC_length (constraint_t, complex)
1080 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
1081 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
1085 /* Condense two variable nodes into a single variable node, by moving
1086 all associated info from SRC to TO. */
1089 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1095 gcc_assert (find (from) == to);
1097 /* Move all complex constraints from src node into to node */
1098 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
1100 /* In complex constraints for node src, we may have either
1101 a = *src, and *src = a, or an offseted constraint which are
1102 always added to the rhs node's constraints. */
1104 if (c->rhs.type == DEREF)
1106 else if (c->lhs.type == DEREF)
1111 constraint_set_union (&graph->complex[to], &graph->complex[from]);
1112 VEC_free (constraint_t, heap, graph->complex[from]);
1113 graph->complex[from] = NULL;
1117 /* Remove edges involving NODE from GRAPH. */
1120 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1122 if (graph->succs[node])
1123 BITMAP_FREE (graph->succs[node]);
1126 /* Merge GRAPH nodes FROM and TO into node TO. */
1129 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1132 if (graph->indirect_cycles[from] != -1)
1134 /* If we have indirect cycles with the from node, and we have
1135 none on the to node, the to node has indirect cycles from the
1136 from node now that they are unified.
1137 If indirect cycles exist on both, unify the nodes that they
1138 are in a cycle with, since we know they are in a cycle with
1140 if (graph->indirect_cycles[to] == -1)
1141 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1144 /* Merge all the successor edges. */
1145 if (graph->succs[from])
1147 if (!graph->succs[to])
1148 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1149 bitmap_ior_into (graph->succs[to],
1150 graph->succs[from]);
1153 clear_edges_for_node (graph, from);
1157 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1158 it doesn't exist in the graph already. */
1161 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1167 if (!graph->implicit_preds[to])
1168 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1170 if (bitmap_set_bit (graph->implicit_preds[to], from))
1171 stats.num_implicit_edges++;
1174 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1175 it doesn't exist in the graph already.
1176 Return false if the edge already existed, true otherwise. */
1179 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1182 if (!graph->preds[to])
1183 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1184 bitmap_set_bit (graph->preds[to], from);
1187 /* Add a graph edge to GRAPH, going from FROM to TO if
1188 it doesn't exist in the graph already.
1189 Return false if the edge already existed, true otherwise. */
1192 add_graph_edge (constraint_graph_t graph, unsigned int to,
1203 if (!graph->succs[from])
1204 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1205 if (bitmap_set_bit (graph->succs[from], to))
1208 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1216 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
1219 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1222 return (graph->succs[dest]
1223 && bitmap_bit_p (graph->succs[dest], src));
1226 /* Initialize the constraint graph structure to contain SIZE nodes. */
1229 init_graph (unsigned int size)
1233 graph = XCNEW (struct constraint_graph);
1235 graph->succs = XCNEWVEC (bitmap, graph->size);
1236 graph->indirect_cycles = XNEWVEC (int, graph->size);
1237 graph->rep = XNEWVEC (unsigned int, graph->size);
1238 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
1239 graph->pe = XCNEWVEC (unsigned int, graph->size);
1240 graph->pe_rep = XNEWVEC (int, graph->size);
1242 for (j = 0; j < graph->size; j++)
1245 graph->pe_rep[j] = -1;
1246 graph->indirect_cycles[j] = -1;
1250 /* Build the constraint graph, adding only predecessor edges right now. */
1253 build_pred_graph (void)
1259 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1260 graph->preds = XCNEWVEC (bitmap, graph->size);
1261 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1262 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1263 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1264 graph->points_to = XCNEWVEC (bitmap, graph->size);
1265 graph->eq_rep = XNEWVEC (int, graph->size);
1266 graph->direct_nodes = sbitmap_alloc (graph->size);
1267 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1268 sbitmap_zero (graph->direct_nodes);
1270 for (j = 0; j < FIRST_REF_NODE; j++)
1272 if (!get_varinfo (j)->is_special_var)
1273 SET_BIT (graph->direct_nodes, j);
1276 for (j = 0; j < graph->size; j++)
1277 graph->eq_rep[j] = -1;
1279 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1280 graph->indirect_cycles[j] = -1;
1282 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1284 struct constraint_expr lhs = c->lhs;
1285 struct constraint_expr rhs = c->rhs;
1286 unsigned int lhsvar = lhs.var;
1287 unsigned int rhsvar = rhs.var;
1289 if (lhs.type == DEREF)
1292 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1293 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1295 else if (rhs.type == DEREF)
1298 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1299 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1301 RESET_BIT (graph->direct_nodes, lhsvar);
1303 else if (rhs.type == ADDRESSOF)
1308 if (graph->points_to[lhsvar] == NULL)
1309 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1310 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1312 if (graph->pointed_by[rhsvar] == NULL)
1313 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1314 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1316 /* Implicitly, *x = y */
1317 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1319 /* All related variables are no longer direct nodes. */
1320 RESET_BIT (graph->direct_nodes, rhsvar);
1321 v = get_varinfo (rhsvar);
1322 if (!v->is_full_var)
1324 v = lookup_vi_for_tree (v->decl);
1327 RESET_BIT (graph->direct_nodes, v->id);
1332 bitmap_set_bit (graph->address_taken, rhsvar);
1334 else if (lhsvar > anything_id
1335 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1338 add_pred_graph_edge (graph, lhsvar, rhsvar);
1339 /* Implicitly, *x = *y */
1340 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1341 FIRST_REF_NODE + rhsvar);
1343 else if (lhs.offset != 0 || rhs.offset != 0)
1345 if (rhs.offset != 0)
1346 RESET_BIT (graph->direct_nodes, lhs.var);
1347 else if (lhs.offset != 0)
1348 RESET_BIT (graph->direct_nodes, rhs.var);
1353 /* Build the constraint graph, adding successor edges. */
1356 build_succ_graph (void)
1361 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1363 struct constraint_expr lhs;
1364 struct constraint_expr rhs;
1365 unsigned int lhsvar;
1366 unsigned int rhsvar;
1373 lhsvar = find (lhs.var);
1374 rhsvar = find (rhs.var);
1376 if (lhs.type == DEREF)
1378 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1379 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1381 else if (rhs.type == DEREF)
1383 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1384 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1386 else if (rhs.type == ADDRESSOF)
1389 gcc_assert (find (rhs.var) == rhs.var);
1390 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1392 else if (lhsvar > anything_id
1393 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1395 add_graph_edge (graph, lhsvar, rhsvar);
1399 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1400 receive pointers. */
1401 t = find (storedanything_id);
1402 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1404 if (!TEST_BIT (graph->direct_nodes, i)
1405 && get_varinfo (i)->may_have_pointers)
1406 add_graph_edge (graph, find (i), t);
1409 /* Everything stored to ANYTHING also potentially escapes. */
1410 add_graph_edge (graph, find (escaped_id), t);
1414 /* Changed variables on the last iteration. */
1415 static unsigned int changed_count;
1416 static sbitmap changed;
1418 /* Strongly Connected Component visitation info. */
1425 unsigned int *node_mapping;
1427 VEC(unsigned,heap) *scc_stack;
1431 /* Recursive routine to find strongly connected components in GRAPH.
1432 SI is the SCC info to store the information in, and N is the id of current
1433 graph node we are processing.
1435 This is Tarjan's strongly connected component finding algorithm, as
1436 modified by Nuutila to keep only non-root nodes on the stack.
1437 The algorithm can be found in "On finding the strongly connected
1438 connected components in a directed graph" by Esko Nuutila and Eljas
1439 Soisalon-Soininen, in Information Processing Letters volume 49,
1440 number 1, pages 9-14. */
1443 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1447 unsigned int my_dfs;
1449 SET_BIT (si->visited, n);
1450 si->dfs[n] = si->current_index ++;
1451 my_dfs = si->dfs[n];
1453 /* Visit all the successors. */
1454 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1458 if (i > LAST_REF_NODE)
1462 if (TEST_BIT (si->deleted, w))
1465 if (!TEST_BIT (si->visited, w))
1466 scc_visit (graph, si, w);
1468 unsigned int t = find (w);
1469 unsigned int nnode = find (n);
1470 gcc_assert (nnode == n);
1472 if (si->dfs[t] < si->dfs[nnode])
1473 si->dfs[n] = si->dfs[t];
1477 /* See if any components have been identified. */
1478 if (si->dfs[n] == my_dfs)
1480 if (VEC_length (unsigned, si->scc_stack) > 0
1481 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1483 bitmap scc = BITMAP_ALLOC (NULL);
1484 unsigned int lowest_node;
1487 bitmap_set_bit (scc, n);
1489 while (VEC_length (unsigned, si->scc_stack) != 0
1490 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1492 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1494 bitmap_set_bit (scc, w);
1497 lowest_node = bitmap_first_set_bit (scc);
1498 gcc_assert (lowest_node < FIRST_REF_NODE);
1500 /* Collapse the SCC nodes into a single node, and mark the
1502 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1504 if (i < FIRST_REF_NODE)
1506 if (unite (lowest_node, i))
1507 unify_nodes (graph, lowest_node, i, false);
1511 unite (lowest_node, i);
1512 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1516 SET_BIT (si->deleted, n);
1519 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1522 /* Unify node FROM into node TO, updating the changed count if
1523 necessary when UPDATE_CHANGED is true. */
1526 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1527 bool update_changed)
1530 gcc_assert (to != from && find (to) == to);
1531 if (dump_file && (dump_flags & TDF_DETAILS))
1532 fprintf (dump_file, "Unifying %s to %s\n",
1533 get_varinfo (from)->name,
1534 get_varinfo (to)->name);
1537 stats.unified_vars_dynamic++;
1539 stats.unified_vars_static++;
1541 merge_graph_nodes (graph, to, from);
1542 merge_node_constraints (graph, to, from);
1544 /* Mark TO as changed if FROM was changed. If TO was already marked
1545 as changed, decrease the changed count. */
1547 if (update_changed && TEST_BIT (changed, from))
1549 RESET_BIT (changed, from);
1550 if (!TEST_BIT (changed, to))
1551 SET_BIT (changed, to);
1554 gcc_assert (changed_count > 0);
1558 if (get_varinfo (from)->solution)
1560 /* If the solution changes because of the merging, we need to mark
1561 the variable as changed. */
1562 if (bitmap_ior_into (get_varinfo (to)->solution,
1563 get_varinfo (from)->solution))
1565 if (update_changed && !TEST_BIT (changed, to))
1567 SET_BIT (changed, to);
1572 BITMAP_FREE (get_varinfo (from)->solution);
1573 BITMAP_FREE (get_varinfo (from)->oldsolution);
1575 if (stats.iterations > 0)
1577 BITMAP_FREE (get_varinfo (to)->oldsolution);
1578 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1581 if (valid_graph_edge (graph, to, to))
1583 if (graph->succs[to])
1584 bitmap_clear_bit (graph->succs[to], to);
1588 /* Information needed to compute the topological ordering of a graph. */
1592 /* sbitmap of visited nodes. */
1594 /* Array that stores the topological order of the graph, *in
1596 VEC(unsigned,heap) *topo_order;
1600 /* Initialize and return a topological info structure. */
1602 static struct topo_info *
1603 init_topo_info (void)
1605 size_t size = graph->size;
1606 struct topo_info *ti = XNEW (struct topo_info);
1607 ti->visited = sbitmap_alloc (size);
1608 sbitmap_zero (ti->visited);
1609 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1614 /* Free the topological sort info pointed to by TI. */
1617 free_topo_info (struct topo_info *ti)
1619 sbitmap_free (ti->visited);
1620 VEC_free (unsigned, heap, ti->topo_order);
1624 /* Visit the graph in topological order, and store the order in the
1625 topo_info structure. */
1628 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1634 SET_BIT (ti->visited, n);
1636 if (graph->succs[n])
1637 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1639 if (!TEST_BIT (ti->visited, j))
1640 topo_visit (graph, ti, j);
1643 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1646 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1647 starting solution for y. */
1650 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1653 unsigned int lhs = c->lhs.var;
1655 bitmap sol = get_varinfo (lhs)->solution;
1658 HOST_WIDE_INT roffset = c->rhs.offset;
1660 /* Our IL does not allow this. */
1661 gcc_assert (c->lhs.offset == 0);
1663 /* If the solution of Y contains anything it is good enough to transfer
1665 if (bitmap_bit_p (delta, anything_id))
1667 flag |= bitmap_set_bit (sol, anything_id);
1671 /* If we do not know at with offset the rhs is dereferenced compute
1672 the reachability set of DELTA, conservatively assuming it is
1673 dereferenced at all valid offsets. */
1674 if (roffset == UNKNOWN_OFFSET)
1676 solution_set_expand (delta, delta);
1677 /* No further offset processing is necessary. */
1681 /* For each variable j in delta (Sol(y)), add
1682 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1683 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1685 varinfo_t v = get_varinfo (j);
1686 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1690 fieldoffset = v->offset;
1691 else if (roffset != 0)
1692 v = first_vi_for_offset (v, fieldoffset);
1693 /* If the access is outside of the variable we can ignore it. */
1701 /* Adding edges from the special vars is pointless.
1702 They don't have sets that can change. */
1703 if (get_varinfo (t)->is_special_var)
1704 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1705 /* Merging the solution from ESCAPED needlessly increases
1706 the set. Use ESCAPED as representative instead. */
1707 else if (v->id == escaped_id)
1708 flag |= bitmap_set_bit (sol, escaped_id);
1709 else if (v->may_have_pointers
1710 && add_graph_edge (graph, lhs, t))
1711 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1713 /* If the variable is not exactly at the requested offset
1714 we have to include the next one. */
1715 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1720 fieldoffset = v->offset;
1726 /* If the LHS solution changed, mark the var as changed. */
1729 get_varinfo (lhs)->solution = sol;
1730 if (!TEST_BIT (changed, lhs))
1732 SET_BIT (changed, lhs);
1738 /* Process a constraint C that represents *(x + off) = y using DELTA
1739 as the starting solution for x. */
1742 do_ds_constraint (constraint_t c, bitmap delta)
1744 unsigned int rhs = c->rhs.var;
1745 bitmap sol = get_varinfo (rhs)->solution;
1748 HOST_WIDE_INT loff = c->lhs.offset;
1750 /* Our IL does not allow this. */
1751 gcc_assert (c->rhs.offset == 0);
1753 /* If the solution of y contains ANYTHING simply use the ANYTHING
1754 solution. This avoids needlessly increasing the points-to sets. */
1755 if (bitmap_bit_p (sol, anything_id))
1756 sol = get_varinfo (find (anything_id))->solution;
1758 /* If the solution for x contains ANYTHING we have to merge the
1759 solution of y into all pointer variables which we do via
1761 if (bitmap_bit_p (delta, anything_id))
1763 unsigned t = find (storedanything_id);
1764 if (add_graph_edge (graph, t, rhs))
1766 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1768 if (!TEST_BIT (changed, t))
1770 SET_BIT (changed, t);
1778 /* If we do not know at with offset the rhs is dereferenced compute
1779 the reachability set of DELTA, conservatively assuming it is
1780 dereferenced at all valid offsets. */
1781 if (loff == UNKNOWN_OFFSET)
1783 solution_set_expand (delta, delta);
1787 /* For each member j of delta (Sol(x)), add an edge from y to j and
1788 union Sol(y) into Sol(j) */
1789 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1791 varinfo_t v = get_varinfo (j);
1793 HOST_WIDE_INT fieldoffset = v->offset + loff;
1795 /* If v is a global variable then this is an escape point. */
1796 if (v->is_global_var)
1798 t = find (escaped_id);
1799 if (add_graph_edge (graph, t, rhs)
1800 && bitmap_ior_into (get_varinfo (t)->solution, sol)
1801 && !TEST_BIT (changed, t))
1803 SET_BIT (changed, t);
1808 if (v->is_special_var)
1812 fieldoffset = v->offset;
1814 v = first_vi_for_offset (v, fieldoffset);
1815 /* If the access is outside of the variable we can ignore it. */
1821 if (v->may_have_pointers)
1824 if (add_graph_edge (graph, t, rhs)
1825 && bitmap_ior_into (get_varinfo (t)->solution, sol)
1826 && !TEST_BIT (changed, t))
1828 SET_BIT (changed, t);
1833 /* If the variable is not exactly at the requested offset
1834 we have to include the next one. */
1835 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1840 fieldoffset = v->offset;
1846 /* Handle a non-simple (simple meaning requires no iteration),
1847 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1850 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1852 if (c->lhs.type == DEREF)
1854 if (c->rhs.type == ADDRESSOF)
1861 do_ds_constraint (c, delta);
1864 else if (c->rhs.type == DEREF)
1867 if (!(get_varinfo (c->lhs.var)->is_special_var))
1868 do_sd_constraint (graph, c, delta);
1876 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1877 solution = get_varinfo (c->rhs.var)->solution;
1878 tmp = get_varinfo (c->lhs.var)->solution;
1880 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1884 get_varinfo (c->lhs.var)->solution = tmp;
1885 if (!TEST_BIT (changed, c->lhs.var))
1887 SET_BIT (changed, c->lhs.var);
1894 /* Initialize and return a new SCC info structure. */
1896 static struct scc_info *
1897 init_scc_info (size_t size)
1899 struct scc_info *si = XNEW (struct scc_info);
1902 si->current_index = 0;
1903 si->visited = sbitmap_alloc (size);
1904 sbitmap_zero (si->visited);
1905 si->deleted = sbitmap_alloc (size);
1906 sbitmap_zero (si->deleted);
1907 si->node_mapping = XNEWVEC (unsigned int, size);
1908 si->dfs = XCNEWVEC (unsigned int, size);
1910 for (i = 0; i < size; i++)
1911 si->node_mapping[i] = i;
1913 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1917 /* Free an SCC info structure pointed to by SI */
1920 free_scc_info (struct scc_info *si)
1922 sbitmap_free (si->visited);
1923 sbitmap_free (si->deleted);
1924 free (si->node_mapping);
1926 VEC_free (unsigned, heap, si->scc_stack);
1931 /* Find indirect cycles in GRAPH that occur, using strongly connected
1932 components, and note them in the indirect cycles map.
1934 This technique comes from Ben Hardekopf and Calvin Lin,
1935 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1936 Lines of Code", submitted to PLDI 2007. */
1939 find_indirect_cycles (constraint_graph_t graph)
1942 unsigned int size = graph->size;
1943 struct scc_info *si = init_scc_info (size);
1945 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1946 if (!TEST_BIT (si->visited, i) && find (i) == i)
1947 scc_visit (graph, si, i);
1952 /* Compute a topological ordering for GRAPH, and store the result in the
1953 topo_info structure TI. */
1956 compute_topo_order (constraint_graph_t graph,
1957 struct topo_info *ti)
1960 unsigned int size = graph->size;
1962 for (i = 0; i != size; ++i)
1963 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1964 topo_visit (graph, ti, i);
1967 /* Structure used to for hash value numbering of pointer equivalence
1970 typedef struct equiv_class_label
1973 unsigned int equivalence_class;
1975 } *equiv_class_label_t;
1976 typedef const struct equiv_class_label *const_equiv_class_label_t;
1978 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1980 static htab_t pointer_equiv_class_table;
1982 /* A hashtable for mapping a bitmap of labels->location equivalence
1984 static htab_t location_equiv_class_table;
1986 /* Hash function for a equiv_class_label_t */
1989 equiv_class_label_hash (const void *p)
1991 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1992 return ecl->hashcode;
1995 /* Equality function for two equiv_class_label_t's. */
1998 equiv_class_label_eq (const void *p1, const void *p2)
2000 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
2001 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
2002 return (eql1->hashcode == eql2->hashcode
2003 && bitmap_equal_p (eql1->labels, eql2->labels));
2006 /* Lookup a equivalence class in TABLE by the bitmap of LABELS it
2010 equiv_class_lookup (htab_t table, bitmap labels)
2013 struct equiv_class_label ecl;
2015 ecl.labels = labels;
2016 ecl.hashcode = bitmap_hash (labels);
2018 slot = htab_find_slot_with_hash (table, &ecl,
2019 ecl.hashcode, NO_INSERT);
2023 return ((equiv_class_label_t) *slot)->equivalence_class;
2027 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
2031 equiv_class_add (htab_t table, unsigned int equivalence_class,
2035 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
2037 ecl->labels = labels;
2038 ecl->equivalence_class = equivalence_class;
2039 ecl->hashcode = bitmap_hash (labels);
2041 slot = htab_find_slot_with_hash (table, ecl,
2042 ecl->hashcode, INSERT);
2043 gcc_assert (!*slot);
2044 *slot = (void *) ecl;
2047 /* Perform offline variable substitution.
2049 This is a worst case quadratic time way of identifying variables
2050 that must have equivalent points-to sets, including those caused by
2051 static cycles, and single entry subgraphs, in the constraint graph.
2053 The technique is described in "Exploiting Pointer and Location
2054 Equivalence to Optimize Pointer Analysis. In the 14th International
2055 Static Analysis Symposium (SAS), August 2007." It is known as the
2056 "HU" algorithm, and is equivalent to value numbering the collapsed
2057 constraint graph including evaluating unions.
2059 The general method of finding equivalence classes is as follows:
2060 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
2061 Initialize all non-REF nodes to be direct nodes.
2062 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
2064 For each constraint containing the dereference, we also do the same
2067 We then compute SCC's in the graph and unify nodes in the same SCC,
2070 For each non-collapsed node x:
2071 Visit all unvisited explicit incoming edges.
2072 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
2074 Lookup the equivalence class for pts(x).
2075 If we found one, equivalence_class(x) = found class.
2076 Otherwise, equivalence_class(x) = new class, and new_class is
2077 added to the lookup table.
2079 All direct nodes with the same equivalence class can be replaced
2080 with a single representative node.
2081 All unlabeled nodes (label == 0) are not pointers and all edges
2082 involving them can be eliminated.
2083 We perform these optimizations during rewrite_constraints
2085 In addition to pointer equivalence class finding, we also perform
2086 location equivalence class finding. This is the set of variables
2087 that always appear together in points-to sets. We use this to
2088 compress the size of the points-to sets. */
2090 /* Current maximum pointer equivalence class id. */
2091 static int pointer_equiv_class;
2093 /* Current maximum location equivalence class id. */
2094 static int location_equiv_class;
2096 /* Recursive routine to find strongly connected components in GRAPH,
2097 and label it's nodes with DFS numbers. */
2100 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2104 unsigned int my_dfs;
2106 gcc_assert (si->node_mapping[n] == n);
2107 SET_BIT (si->visited, n);
2108 si->dfs[n] = si->current_index ++;
2109 my_dfs = si->dfs[n];
2111 /* Visit all the successors. */
2112 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2114 unsigned int w = si->node_mapping[i];
2116 if (TEST_BIT (si->deleted, w))
2119 if (!TEST_BIT (si->visited, w))
2120 condense_visit (graph, si, w);
2122 unsigned int t = si->node_mapping[w];
2123 unsigned int nnode = si->node_mapping[n];
2124 gcc_assert (nnode == n);
2126 if (si->dfs[t] < si->dfs[nnode])
2127 si->dfs[n] = si->dfs[t];
2131 /* Visit all the implicit predecessors. */
2132 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2134 unsigned int w = si->node_mapping[i];
2136 if (TEST_BIT (si->deleted, w))
2139 if (!TEST_BIT (si->visited, w))
2140 condense_visit (graph, si, w);
2142 unsigned int t = si->node_mapping[w];
2143 unsigned int nnode = si->node_mapping[n];
2144 gcc_assert (nnode == n);
2146 if (si->dfs[t] < si->dfs[nnode])
2147 si->dfs[n] = si->dfs[t];
2151 /* See if any components have been identified. */
2152 if (si->dfs[n] == my_dfs)
2154 while (VEC_length (unsigned, si->scc_stack) != 0
2155 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
2157 unsigned int w = VEC_pop (unsigned, si->scc_stack);
2158 si->node_mapping[w] = n;
2160 if (!TEST_BIT (graph->direct_nodes, w))
2161 RESET_BIT (graph->direct_nodes, n);
2163 /* Unify our nodes. */
2164 if (graph->preds[w])
2166 if (!graph->preds[n])
2167 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2168 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2170 if (graph->implicit_preds[w])
2172 if (!graph->implicit_preds[n])
2173 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2174 bitmap_ior_into (graph->implicit_preds[n],
2175 graph->implicit_preds[w]);
2177 if (graph->points_to[w])
2179 if (!graph->points_to[n])
2180 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2181 bitmap_ior_into (graph->points_to[n],
2182 graph->points_to[w]);
2185 SET_BIT (si->deleted, n);
2188 VEC_safe_push (unsigned, heap, si->scc_stack, n);
2191 /* Label pointer equivalences. */
2194 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2198 SET_BIT (si->visited, n);
2200 if (!graph->points_to[n])
2201 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2203 /* Label and union our incoming edges's points to sets. */
2204 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2206 unsigned int w = si->node_mapping[i];
2207 if (!TEST_BIT (si->visited, w))
2208 label_visit (graph, si, w);
2210 /* Skip unused edges */
2211 if (w == n || graph->pointer_label[w] == 0)
2214 if (graph->points_to[w])
2215 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
2217 /* Indirect nodes get fresh variables. */
2218 if (!TEST_BIT (graph->direct_nodes, n))
2219 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2221 if (!bitmap_empty_p (graph->points_to[n]))
2223 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
2224 graph->points_to[n]);
2227 label = pointer_equiv_class++;
2228 equiv_class_add (pointer_equiv_class_table,
2229 label, graph->points_to[n]);
2231 graph->pointer_label[n] = label;
2235 /* Perform offline variable substitution, discovering equivalence
2236 classes, and eliminating non-pointer variables. */
2238 static struct scc_info *
2239 perform_var_substitution (constraint_graph_t graph)
2242 unsigned int size = graph->size;
2243 struct scc_info *si = init_scc_info (size);
2245 bitmap_obstack_initialize (&iteration_obstack);
2246 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2247 equiv_class_label_eq, free);
2248 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2249 equiv_class_label_eq, free);
2250 pointer_equiv_class = 1;
2251 location_equiv_class = 1;
2253 /* Condense the nodes, which means to find SCC's, count incoming
2254 predecessors, and unite nodes in SCC's. */
2255 for (i = 0; i < FIRST_REF_NODE; i++)
2256 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2257 condense_visit (graph, si, si->node_mapping[i]);
2259 sbitmap_zero (si->visited);
2260 /* Actually the label the nodes for pointer equivalences */
2261 for (i = 0; i < FIRST_REF_NODE; i++)
2262 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2263 label_visit (graph, si, si->node_mapping[i]);
2265 /* Calculate location equivalence labels. */
2266 for (i = 0; i < FIRST_REF_NODE; i++)
2273 if (!graph->pointed_by[i])
2275 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2277 /* Translate the pointed-by mapping for pointer equivalence
2279 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2281 bitmap_set_bit (pointed_by,
2282 graph->pointer_label[si->node_mapping[j]]);
2284 /* The original pointed_by is now dead. */
2285 BITMAP_FREE (graph->pointed_by[i]);
2287 /* Look up the location equivalence label if one exists, or make
2289 label = equiv_class_lookup (location_equiv_class_table,
2293 label = location_equiv_class++;
2294 equiv_class_add (location_equiv_class_table,
2299 if (dump_file && (dump_flags & TDF_DETAILS))
2300 fprintf (dump_file, "Found location equivalence for node %s\n",
2301 get_varinfo (i)->name);
2302 BITMAP_FREE (pointed_by);
2304 graph->loc_label[i] = label;
2308 if (dump_file && (dump_flags & TDF_DETAILS))
2309 for (i = 0; i < FIRST_REF_NODE; i++)
2311 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2313 "Equivalence classes for %s node id %d:%s are pointer: %d"
2315 direct_node ? "Direct node" : "Indirect node", i,
2316 get_varinfo (i)->name,
2317 graph->pointer_label[si->node_mapping[i]],
2318 graph->loc_label[si->node_mapping[i]]);
2321 /* Quickly eliminate our non-pointer variables. */
2323 for (i = 0; i < FIRST_REF_NODE; i++)
2325 unsigned int node = si->node_mapping[i];
2327 if (graph->pointer_label[node] == 0)
2329 if (dump_file && (dump_flags & TDF_DETAILS))
2331 "%s is a non-pointer variable, eliminating edges.\n",
2332 get_varinfo (node)->name);
2333 stats.nonpointer_vars++;
2334 clear_edges_for_node (graph, node);
2341 /* Free information that was only necessary for variable
2345 free_var_substitution_info (struct scc_info *si)
2348 free (graph->pointer_label);
2349 free (graph->loc_label);
2350 free (graph->pointed_by);
2351 free (graph->points_to);
2352 free (graph->eq_rep);
2353 sbitmap_free (graph->direct_nodes);
2354 htab_delete (pointer_equiv_class_table);
2355 htab_delete (location_equiv_class_table);
2356 bitmap_obstack_release (&iteration_obstack);
2359 /* Return an existing node that is equivalent to NODE, which has
2360 equivalence class LABEL, if one exists. Return NODE otherwise. */
2363 find_equivalent_node (constraint_graph_t graph,
2364 unsigned int node, unsigned int label)
2366 /* If the address version of this variable is unused, we can
2367 substitute it for anything else with the same label.
2368 Otherwise, we know the pointers are equivalent, but not the
2369 locations, and we can unite them later. */
2371 if (!bitmap_bit_p (graph->address_taken, node))
2373 gcc_assert (label < graph->size);
2375 if (graph->eq_rep[label] != -1)
2377 /* Unify the two variables since we know they are equivalent. */
2378 if (unite (graph->eq_rep[label], node))
2379 unify_nodes (graph, graph->eq_rep[label], node, false);
2380 return graph->eq_rep[label];
2384 graph->eq_rep[label] = node;
2385 graph->pe_rep[label] = node;
2390 gcc_assert (label < graph->size);
2391 graph->pe[node] = label;
2392 if (graph->pe_rep[label] == -1)
2393 graph->pe_rep[label] = node;
2399 /* Unite pointer equivalent but not location equivalent nodes in
2400 GRAPH. This may only be performed once variable substitution is
2404 unite_pointer_equivalences (constraint_graph_t graph)
2408 /* Go through the pointer equivalences and unite them to their
2409 representative, if they aren't already. */
2410 for (i = 0; i < FIRST_REF_NODE; i++)
2412 unsigned int label = graph->pe[i];
2415 int label_rep = graph->pe_rep[label];
2417 if (label_rep == -1)
2420 label_rep = find (label_rep);
2421 if (label_rep >= 0 && unite (label_rep, find (i)))
2422 unify_nodes (graph, label_rep, i, false);
2427 /* Move complex constraints to the GRAPH nodes they belong to. */
2430 move_complex_constraints (constraint_graph_t graph)
2435 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2439 struct constraint_expr lhs = c->lhs;
2440 struct constraint_expr rhs = c->rhs;
2442 if (lhs.type == DEREF)
2444 insert_into_complex (graph, lhs.var, c);
2446 else if (rhs.type == DEREF)
2448 if (!(get_varinfo (lhs.var)->is_special_var))
2449 insert_into_complex (graph, rhs.var, c);
2451 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2452 && (lhs.offset != 0 || rhs.offset != 0))
2454 insert_into_complex (graph, rhs.var, c);
2461 /* Optimize and rewrite complex constraints while performing
2462 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2463 result of perform_variable_substitution. */
2466 rewrite_constraints (constraint_graph_t graph,
2467 struct scc_info *si)
2473 for (j = 0; j < graph->size; j++)
2474 gcc_assert (find (j) == j);
2476 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2478 struct constraint_expr lhs = c->lhs;
2479 struct constraint_expr rhs = c->rhs;
2480 unsigned int lhsvar = find (lhs.var);
2481 unsigned int rhsvar = find (rhs.var);
2482 unsigned int lhsnode, rhsnode;
2483 unsigned int lhslabel, rhslabel;
2485 lhsnode = si->node_mapping[lhsvar];
2486 rhsnode = si->node_mapping[rhsvar];
2487 lhslabel = graph->pointer_label[lhsnode];
2488 rhslabel = graph->pointer_label[rhsnode];
2490 /* See if it is really a non-pointer variable, and if so, ignore
2494 if (dump_file && (dump_flags & TDF_DETAILS))
2497 fprintf (dump_file, "%s is a non-pointer variable,"
2498 "ignoring constraint:",
2499 get_varinfo (lhs.var)->name);
2500 dump_constraint (dump_file, c);
2502 VEC_replace (constraint_t, constraints, i, NULL);
2508 if (dump_file && (dump_flags & TDF_DETAILS))
2511 fprintf (dump_file, "%s is a non-pointer variable,"
2512 "ignoring constraint:",
2513 get_varinfo (rhs.var)->name);
2514 dump_constraint (dump_file, c);
2516 VEC_replace (constraint_t, constraints, i, NULL);
2520 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2521 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2522 c->lhs.var = lhsvar;
2523 c->rhs.var = rhsvar;
2528 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2529 part of an SCC, false otherwise. */
2532 eliminate_indirect_cycles (unsigned int node)
2534 if (graph->indirect_cycles[node] != -1
2535 && !bitmap_empty_p (get_varinfo (node)->solution))
2538 VEC(unsigned,heap) *queue = NULL;
2540 unsigned int to = find (graph->indirect_cycles[node]);
2543 /* We can't touch the solution set and call unify_nodes
2544 at the same time, because unify_nodes is going to do
2545 bitmap unions into it. */
2547 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2549 if (find (i) == i && i != to)
2552 VEC_safe_push (unsigned, heap, queue, i);
2557 VEC_iterate (unsigned, queue, queuepos, i);
2560 unify_nodes (graph, to, i, true);
2562 VEC_free (unsigned, heap, queue);
2568 /* Solve the constraint graph GRAPH using our worklist solver.
2569 This is based on the PW* family of solvers from the "Efficient Field
2570 Sensitive Pointer Analysis for C" paper.
2571 It works by iterating over all the graph nodes, processing the complex
2572 constraints and propagating the copy constraints, until everything stops
2573 changed. This corresponds to steps 6-8 in the solving list given above. */
2576 solve_graph (constraint_graph_t graph)
2578 unsigned int size = graph->size;
2583 changed = sbitmap_alloc (size);
2584 sbitmap_zero (changed);
2586 /* Mark all initial non-collapsed nodes as changed. */
2587 for (i = 0; i < size; i++)
2589 varinfo_t ivi = get_varinfo (i);
2590 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2591 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2592 || VEC_length (constraint_t, graph->complex[i]) > 0))
2594 SET_BIT (changed, i);
2599 /* Allocate a bitmap to be used to store the changed bits. */
2600 pts = BITMAP_ALLOC (&pta_obstack);
2602 while (changed_count > 0)
2605 struct topo_info *ti = init_topo_info ();
2608 bitmap_obstack_initialize (&iteration_obstack);
2610 compute_topo_order (graph, ti);
2612 while (VEC_length (unsigned, ti->topo_order) != 0)
2615 i = VEC_pop (unsigned, ti->topo_order);
2617 /* If this variable is not a representative, skip it. */
2621 /* In certain indirect cycle cases, we may merge this
2622 variable to another. */
2623 if (eliminate_indirect_cycles (i) && find (i) != i)
2626 /* If the node has changed, we need to process the
2627 complex constraints and outgoing edges again. */
2628 if (TEST_BIT (changed, i))
2633 VEC(constraint_t,heap) *complex = graph->complex[i];
2634 bool solution_empty;
2636 RESET_BIT (changed, i);
2639 /* Compute the changed set of solution bits. */
2640 bitmap_and_compl (pts, get_varinfo (i)->solution,
2641 get_varinfo (i)->oldsolution);
2643 if (bitmap_empty_p (pts))
2646 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2648 solution = get_varinfo (i)->solution;
2649 solution_empty = bitmap_empty_p (solution);
2651 /* Process the complex constraints */
2652 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2654 /* XXX: This is going to unsort the constraints in
2655 some cases, which will occasionally add duplicate
2656 constraints during unification. This does not
2657 affect correctness. */
2658 c->lhs.var = find (c->lhs.var);
2659 c->rhs.var = find (c->rhs.var);
2661 /* The only complex constraint that can change our
2662 solution to non-empty, given an empty solution,
2663 is a constraint where the lhs side is receiving
2664 some set from elsewhere. */
2665 if (!solution_empty || c->lhs.type != DEREF)
2666 do_complex_constraint (graph, c, pts);
2669 solution_empty = bitmap_empty_p (solution);
2671 if (!solution_empty)
2674 unsigned eff_escaped_id = find (escaped_id);
2676 /* Propagate solution to all successors. */
2677 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2683 unsigned int to = find (j);
2684 tmp = get_varinfo (to)->solution;
2687 /* Don't try to propagate to ourselves. */
2691 /* If we propagate from ESCAPED use ESCAPED as
2693 if (i == eff_escaped_id)
2694 flag = bitmap_set_bit (tmp, escaped_id);
2696 flag = set_union_with_increment (tmp, pts, 0);
2700 get_varinfo (to)->solution = tmp;
2701 if (!TEST_BIT (changed, to))
2703 SET_BIT (changed, to);
2711 free_topo_info (ti);
2712 bitmap_obstack_release (&iteration_obstack);
2716 sbitmap_free (changed);
2717 bitmap_obstack_release (&oldpta_obstack);
2720 /* Map from trees to variable infos. */
2721 static struct pointer_map_t *vi_for_tree;
2724 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2727 insert_vi_for_tree (tree t, varinfo_t vi)
2729 void **slot = pointer_map_insert (vi_for_tree, t);
2731 gcc_assert (*slot == NULL);
2735 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2736 exist in the map, return NULL, otherwise, return the varinfo we found. */
2739 lookup_vi_for_tree (tree t)
2741 void **slot = pointer_map_contains (vi_for_tree, t);
2745 return (varinfo_t) *slot;
2748 /* Return a printable name for DECL */
2751 alias_get_name (tree decl)
2753 const char *res = get_name (decl);
2755 int num_printed = 0;
2764 if (TREE_CODE (decl) == SSA_NAME)
2766 num_printed = asprintf (&temp, "%s_%u",
2767 alias_get_name (SSA_NAME_VAR (decl)),
2768 SSA_NAME_VERSION (decl));
2770 else if (DECL_P (decl))
2772 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2774 if (num_printed > 0)
2776 res = ggc_strdup (temp);
2782 /* Find the variable id for tree T in the map.
2783 If T doesn't exist in the map, create an entry for it and return it. */
2786 get_vi_for_tree (tree t)
2788 void **slot = pointer_map_contains (vi_for_tree, t);
2790 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2792 return (varinfo_t) *slot;
2795 /* Get a scalar constraint expression for a new temporary variable. */
2797 static struct constraint_expr
2798 new_scalar_tmp_constraint_exp (const char *name)
2800 struct constraint_expr tmp;
2803 vi = new_var_info (NULL_TREE, name);
2807 vi->is_full_var = 1;
2816 /* Get a constraint expression vector from an SSA_VAR_P node.
2817 If address_p is true, the result will be taken its address of. */
2820 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2822 struct constraint_expr cexpr;
2825 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2826 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2828 /* For parameters, get at the points-to set for the actual parm
2830 if (TREE_CODE (t) == SSA_NAME
2831 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2832 && SSA_NAME_IS_DEFAULT_DEF (t))
2834 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2838 vi = get_vi_for_tree (t);
2840 cexpr.type = SCALAR;
2842 /* If we determine the result is "anything", and we know this is readonly,
2843 say it points to readonly memory instead. */
2844 if (cexpr.var == anything_id && TREE_READONLY (t))
2847 cexpr.type = ADDRESSOF;
2848 cexpr.var = readonly_id;
2851 /* If we are not taking the address of the constraint expr, add all
2852 sub-fiels of the variable as well. */
2854 && !vi->is_full_var)
2856 for (; vi; vi = vi->next)
2859 VEC_safe_push (ce_s, heap, *results, &cexpr);
2864 VEC_safe_push (ce_s, heap, *results, &cexpr);
2867 /* Process constraint T, performing various simplifications and then
2868 adding it to our list of overall constraints. */
2871 process_constraint (constraint_t t)
2873 struct constraint_expr rhs = t->rhs;
2874 struct constraint_expr lhs = t->lhs;
2876 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2877 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2879 /* If we didn't get any useful constraint from the lhs we get
2880 &ANYTHING as fallback from get_constraint_for. Deal with
2881 it here by turning it into *ANYTHING. */
2882 if (lhs.type == ADDRESSOF
2883 && lhs.var == anything_id)
2886 /* ADDRESSOF on the lhs is invalid. */
2887 gcc_assert (lhs.type != ADDRESSOF);
2889 /* We shouldn't add constraints from things that cannot have pointers.
2890 It's not completely trivial to avoid in the callers, so do it here. */
2891 if (rhs.type != ADDRESSOF
2892 && !get_varinfo (rhs.var)->may_have_pointers)
2895 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2896 if (!get_varinfo (lhs.var)->may_have_pointers)
2899 /* This can happen in our IR with things like n->a = *p */
2900 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2902 /* Split into tmp = *rhs, *lhs = tmp */
2903 struct constraint_expr tmplhs;
2904 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2905 process_constraint (new_constraint (tmplhs, rhs));
2906 process_constraint (new_constraint (lhs, tmplhs));
2908 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2910 /* Split into tmp = &rhs, *lhs = tmp */
2911 struct constraint_expr tmplhs;
2912 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2913 process_constraint (new_constraint (tmplhs, rhs));
2914 process_constraint (new_constraint (lhs, tmplhs));
2918 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2919 VEC_safe_push (constraint_t, heap, constraints, t);
2923 /* Return true if T is a type that could contain pointers. */
2926 type_could_have_pointers (tree type)
2928 if (POINTER_TYPE_P (type))
2931 if (TREE_CODE (type) == ARRAY_TYPE)
2932 return type_could_have_pointers (TREE_TYPE (type));
2934 return AGGREGATE_TYPE_P (type);
2937 /* Return true if T is a variable of a type that could contain
2941 could_have_pointers (tree t)
2943 return type_could_have_pointers (TREE_TYPE (t));
2946 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2949 static HOST_WIDE_INT
2950 bitpos_of_field (const tree fdecl)
2953 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2954 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2957 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
2958 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2962 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2963 resulting constraint expressions in *RESULTS. */
2966 get_constraint_for_ptr_offset (tree ptr, tree offset,
2967 VEC (ce_s, heap) **results)
2969 struct constraint_expr c;
2971 HOST_WIDE_INT rhsunitoffset, rhsoffset;
2973 /* If we do not do field-sensitive PTA adding offsets to pointers
2974 does not change the points-to solution. */
2975 if (!use_field_sensitive)
2977 get_constraint_for (ptr, results);
2981 /* If the offset is not a non-negative integer constant that fits
2982 in a HOST_WIDE_INT, we have to fall back to a conservative
2983 solution which includes all sub-fields of all pointed-to
2984 variables of ptr. */
2985 if (offset == NULL_TREE
2986 || !host_integerp (offset, 0))
2987 rhsoffset = UNKNOWN_OFFSET;
2990 /* Make sure the bit-offset also fits. */
2991 rhsunitoffset = TREE_INT_CST_LOW (offset);
2992 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
2993 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
2994 rhsoffset = UNKNOWN_OFFSET;
2997 get_constraint_for (ptr, results);
3001 /* As we are eventually appending to the solution do not use
3002 VEC_iterate here. */
3003 n = VEC_length (ce_s, *results);
3004 for (j = 0; j < n; j++)
3007 c = *VEC_index (ce_s, *results, j);
3008 curr = get_varinfo (c.var);
3010 if (c.type == ADDRESSOF
3011 /* If this varinfo represents a full variable just use it. */
3012 && curr->is_full_var)
3014 else if (c.type == ADDRESSOF
3015 /* If we do not know the offset add all subfields. */
3016 && rhsoffset == UNKNOWN_OFFSET)
3018 varinfo_t temp = lookup_vi_for_tree (curr->decl);
3021 struct constraint_expr c2;
3023 c2.type = ADDRESSOF;
3025 if (c2.var != c.var)
3026 VEC_safe_push (ce_s, heap, *results, &c2);
3031 else if (c.type == ADDRESSOF)
3034 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3036 /* Search the sub-field which overlaps with the
3037 pointed-to offset. If the result is outside of the variable
3038 we have to provide a conservative result, as the variable is
3039 still reachable from the resulting pointer (even though it
3040 technically cannot point to anything). The last and first
3041 sub-fields are such conservative results.
3042 ??? If we always had a sub-field for &object + 1 then
3043 we could represent this in a more precise way. */
3045 && curr->offset < offset)
3047 temp = first_or_preceding_vi_for_offset (curr, offset);
3049 /* If the found variable is not exactly at the pointed to
3050 result, we have to include the next variable in the
3051 solution as well. Otherwise two increments by offset / 2
3052 do not result in the same or a conservative superset
3054 if (temp->offset != offset
3055 && temp->next != NULL)
3057 struct constraint_expr c2;
3058 c2.var = temp->next->id;
3059 c2.type = ADDRESSOF;
3061 VEC_safe_push (ce_s, heap, *results, &c2);
3067 c.offset = rhsoffset;
3069 VEC_replace (ce_s, *results, j, &c);
3074 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3075 If address_p is true the result will be taken its address of. */
3078 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
3082 HOST_WIDE_INT bitsize = -1;
3083 HOST_WIDE_INT bitmaxsize = -1;
3084 HOST_WIDE_INT bitpos;
3086 struct constraint_expr *result;
3088 /* Some people like to do cute things like take the address of
3091 while (handled_component_p (forzero)
3092 || INDIRECT_REF_P (forzero))
3093 forzero = TREE_OPERAND (forzero, 0);
3095 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3097 struct constraint_expr temp;
3100 temp.var = integer_id;
3102 VEC_safe_push (ce_s, heap, *results, &temp);
3106 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3108 /* Pretend to take the address of the base, we'll take care of
3109 adding the required subset of sub-fields below. */
3110 get_constraint_for_1 (t, results, true);
3111 gcc_assert (VEC_length (ce_s, *results) == 1);
3112 result = VEC_last (ce_s, *results);
3114 if (result->type == SCALAR
3115 && get_varinfo (result->var)->is_full_var)
3116 /* For single-field vars do not bother about the offset. */
3118 else if (result->type == SCALAR)
3120 /* In languages like C, you can access one past the end of an
3121 array. You aren't allowed to dereference it, so we can
3122 ignore this constraint. When we handle pointer subtraction,
3123 we may have to do something cute here. */
3125 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
3128 /* It's also not true that the constraint will actually start at the
3129 right offset, it may start in some padding. We only care about
3130 setting the constraint to the first actual field it touches, so
3132 struct constraint_expr cexpr = *result;
3134 VEC_pop (ce_s, *results);
3136 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
3138 if (ranges_overlap_p (curr->offset, curr->size,
3139 bitpos, bitmaxsize))
3141 cexpr.var = curr->id;
3142 VEC_safe_push (ce_s, heap, *results, &cexpr);
3147 /* If we are going to take the address of this field then
3148 to be able to compute reachability correctly add at least
3149 the last field of the variable. */
3151 && VEC_length (ce_s, *results) == 0)
3153 curr = get_varinfo (cexpr.var);
3154 while (curr->next != NULL)
3156 cexpr.var = curr->id;
3157 VEC_safe_push (ce_s, heap, *results, &cexpr);
3160 /* Assert that we found *some* field there. The user couldn't be
3161 accessing *only* padding. */
3162 /* Still the user could access one past the end of an array
3163 embedded in a struct resulting in accessing *only* padding. */
3164 gcc_assert (VEC_length (ce_s, *results) >= 1
3165 || ref_contains_array_ref (orig_t));
3167 else if (bitmaxsize == 0)
3169 if (dump_file && (dump_flags & TDF_DETAILS))
3170 fprintf (dump_file, "Access to zero-sized part of variable,"
3174 if (dump_file && (dump_flags & TDF_DETAILS))
3175 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3177 else if (result->type == DEREF)
3179 /* If we do not know exactly where the access goes say so. Note
3180 that only for non-structure accesses we know that we access
3181 at most one subfiled of any variable. */
3183 || bitsize != bitmaxsize
3184 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t)))
3185 result->offset = UNKNOWN_OFFSET;
3187 result->offset = bitpos;
3189 else if (result->type == ADDRESSOF)
3191 /* We can end up here for component references on a
3192 VIEW_CONVERT_EXPR <>(&foobar). */
3193 result->type = SCALAR;
3194 result->var = anything_id;
3202 /* Dereference the constraint expression CONS, and return the result.
3203 DEREF (ADDRESSOF) = SCALAR
3204 DEREF (SCALAR) = DEREF
3205 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3206 This is needed so that we can handle dereferencing DEREF constraints. */
3209 do_deref (VEC (ce_s, heap) **constraints)
3211 struct constraint_expr *c;
3214 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
3216 if (c->type == SCALAR)
3218 else if (c->type == ADDRESSOF)
3220 else if (c->type == DEREF)
3222 struct constraint_expr tmplhs;
3223 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3224 process_constraint (new_constraint (tmplhs, *c));
3225 c->var = tmplhs.var;
3232 static void get_constraint_for_1 (tree, VEC (ce_s, heap) **, bool);
3234 /* Given a tree T, return the constraint expression for taking the
3238 get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
3240 struct constraint_expr *c;
3243 get_constraint_for_1 (t, results, true);
3245 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
3247 if (c->type == DEREF)
3250 c->type = ADDRESSOF;
3254 /* Given a tree T, return the constraint expression for it. */
3257 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
3259 struct constraint_expr temp;
3261 /* x = integer is all glommed to a single variable, which doesn't
3262 point to anything by itself. That is, of course, unless it is an
3263 integer constant being treated as a pointer, in which case, we
3264 will return that this is really the addressof anything. This
3265 happens below, since it will fall into the default case. The only
3266 case we know something about an integer treated like a pointer is
3267 when it is the NULL pointer, and then we just say it points to
3270 Do not do that if -fno-delete-null-pointer-checks though, because
3271 in that case *NULL does not fail, so it _should_ alias *anything.
3272 It is not worth adding a new option or renaming the existing one,
3273 since this case is relatively obscure. */
3274 if (flag_delete_null_pointer_checks
3275 && ((TREE_CODE (t) == INTEGER_CST
3276 && integer_zerop (t))
3277 /* The only valid CONSTRUCTORs in gimple with pointer typed
3278 elements are zero-initializer. */
3279 || TREE_CODE (t) == CONSTRUCTOR))
3281 temp.var = nothing_id;
3282 temp.type = ADDRESSOF;
3284 VEC_safe_push (ce_s, heap, *results, &temp);
3288 /* String constants are read-only. */
3289 if (TREE_CODE (t) == STRING_CST)
3291 temp.var = readonly_id;
3294 VEC_safe_push (ce_s, heap, *results, &temp);
3298 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3300 case tcc_expression:
3302 switch (TREE_CODE (t))
3305 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3313 switch (TREE_CODE (t))
3317 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3322 case ARRAY_RANGE_REF:
3324 get_constraint_for_component_ref (t, results, address_p);
3326 case VIEW_CONVERT_EXPR:
3327 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3329 /* We are missing handling for TARGET_MEM_REF here. */
3334 case tcc_exceptional:
3336 switch (TREE_CODE (t))
3340 get_constraint_for_ssa_var (t, results, address_p);
3347 case tcc_declaration:
3349 get_constraint_for_ssa_var (t, results, address_p);
3355 /* The default fallback is a constraint from anything. */
3356 temp.type = ADDRESSOF;
3357 temp.var = anything_id;
3359 VEC_safe_push (ce_s, heap, *results, &temp);
3362 /* Given a gimple tree T, return the constraint expression vector for it. */
3365 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3367 gcc_assert (VEC_length (ce_s, *results) == 0);
3369 get_constraint_for_1 (t, results, false);
3373 /* Efficiently generates constraints from all entries in *RHSC to all
3374 entries in *LHSC. */
3377 process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc)
3379 struct constraint_expr *lhsp, *rhsp;
3382 if (VEC_length (ce_s, lhsc) <= 1
3383 || VEC_length (ce_s, rhsc) <= 1)
3385 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
3386 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j)
3387 process_constraint (new_constraint (*lhsp, *rhsp));
3391 struct constraint_expr tmp;
3392 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3393 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
3394 process_constraint (new_constraint (tmp, *rhsp));
3395 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
3396 process_constraint (new_constraint (*lhsp, tmp));
3400 /* Handle aggregate copies by expanding into copies of the respective
3401 fields of the structures. */
3404 do_structure_copy (tree lhsop, tree rhsop)
3406 struct constraint_expr *lhsp, *rhsp;
3407 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3410 get_constraint_for (lhsop, &lhsc);
3411 get_constraint_for (rhsop, &rhsc);
3412 lhsp = VEC_index (ce_s, lhsc, 0);
3413 rhsp = VEC_index (ce_s, rhsc, 0);
3414 if (lhsp->type == DEREF
3415 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3416 || rhsp->type == DEREF)
3418 if (lhsp->type == DEREF)
3420 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3421 lhsp->offset = UNKNOWN_OFFSET;
3423 if (rhsp->type == DEREF)
3425 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3426 rhsp->offset = UNKNOWN_OFFSET;
3428 process_all_all_constraints (lhsc, rhsc);
3430 else if (lhsp->type == SCALAR
3431 && (rhsp->type == SCALAR
3432 || rhsp->type == ADDRESSOF))
3434 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3435 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3437 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
3438 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
3439 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
3441 varinfo_t lhsv, rhsv;
3442 rhsp = VEC_index (ce_s, rhsc, k);
3443 lhsv = get_varinfo (lhsp->var);
3444 rhsv = get_varinfo (rhsp->var);
3445 if (lhsv->may_have_pointers
3446 && ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3447 rhsv->offset + lhsoffset, rhsv->size))
3448 process_constraint (new_constraint (*lhsp, *rhsp));
3449 if (lhsv->offset + rhsoffset + lhsv->size
3450 > rhsv->offset + lhsoffset + rhsv->size)
3453 if (k >= VEC_length (ce_s, rhsc))
3463 VEC_free (ce_s, heap, lhsc);
3464 VEC_free (ce_s, heap, rhsc);
3467 /* Create a constraint ID = OP. */
3470 make_constraint_to (unsigned id, tree op)
3472 VEC(ce_s, heap) *rhsc = NULL;
3473 struct constraint_expr *c;
3474 struct constraint_expr includes;
3478 includes.offset = 0;
3479 includes.type = SCALAR;
3481 get_constraint_for (op, &rhsc);
3482 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3483 process_constraint (new_constraint (includes, *c));
3484 VEC_free (ce_s, heap, rhsc);
3487 /* Create a constraint ID = &FROM. */
3490 make_constraint_from (varinfo_t vi, int from)
3492 struct constraint_expr lhs, rhs;
3500 rhs.type = ADDRESSOF;
3501 process_constraint (new_constraint (lhs, rhs));
3504 /* Create a constraint ID = FROM. */
3507 make_copy_constraint (varinfo_t vi, int from)
3509 struct constraint_expr lhs, rhs;
3518 process_constraint (new_constraint (lhs, rhs));
3521 /* Make constraints necessary to make OP escape. */
3524 make_escape_constraint (tree op)
3526 make_constraint_to (escaped_id, op);
3529 /* Add constraints to that the solution of VI is transitively closed. */
3532 make_transitive_closure_constraints (varinfo_t vi)
3534 struct constraint_expr lhs, rhs;
3543 process_constraint (new_constraint (lhs, rhs));
3545 /* VAR = VAR + UNKNOWN; */
3551 rhs.offset = UNKNOWN_OFFSET;
3552 process_constraint (new_constraint (lhs, rhs));
3555 /* Create a new artificial heap variable with NAME and make a
3556 constraint from it to LHS. Return the created variable. */
3559 make_constraint_from_heapvar (varinfo_t lhs, const char *name)
3562 tree heapvar = heapvar_lookup (lhs->decl, lhs->offset);
3564 if (heapvar == NULL_TREE)
3567 heapvar = create_tmp_var_raw (ptr_type_node, name);
3568 DECL_EXTERNAL (heapvar) = 1;
3570 heapvar_insert (lhs->decl, lhs->offset, heapvar);
3572 ann = get_var_ann (heapvar);
3573 ann->is_heapvar = 1;
3576 /* For global vars we need to add a heapvar to the list of referenced
3577 vars of a different function than it was created for originally. */
3578 if (cfun && gimple_referenced_vars (cfun))
3579 add_referenced_var (heapvar);
3581 vi = new_var_info (heapvar, name);
3582 vi->is_artificial_var = true;
3583 vi->is_heap_var = true;
3584 vi->is_unknown_size_var = true;
3588 vi->is_full_var = true;
3589 insert_vi_for_tree (heapvar, vi);
3591 make_constraint_from (lhs, vi->id);
3596 /* Create a new artificial heap variable with NAME and make a
3597 constraint from it to LHS. Set flags according to a tag used
3598 for tracking restrict pointers. */
3601 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3604 vi = make_constraint_from_heapvar (lhs, name);
3605 vi->is_restrict_var = 1;
3606 vi->is_global_var = 0;
3607 vi->is_special_var = 1;
3608 vi->may_have_pointers = 0;
3611 /* In IPA mode there are varinfos for different aspects of reach
3612 function designator. One for the points-to set of the return
3613 value, one for the variables that are clobbered by the function,
3614 one for its uses and one for each parameter (including a single
3615 glob for remaining variadic arguments). */
3617 enum { fi_clobbers = 1, fi_uses = 2,
3618 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3620 /* Get a constraint for the requested part of a function designator FI
3621 when operating in IPA mode. */
3623 static struct constraint_expr
3624 get_function_part_constraint (varinfo_t fi, unsigned part)
3626 struct constraint_expr c;
3628 gcc_assert (in_ipa_mode);
3630 if (fi->id == anything_id)
3632 /* ??? We probably should have a ANYFN special variable. */
3633 c.var = anything_id;
3637 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3639 varinfo_t ai = first_vi_for_offset (fi, part);
3640 c.var = ai ? ai->id : anything_id;
3654 /* For non-IPA mode, generate constraints necessary for a call on the
3658 handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
3660 struct constraint_expr rhsc;
3663 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3665 tree arg = gimple_call_arg (stmt, i);
3667 /* Find those pointers being passed, and make sure they end up
3668 pointing to anything. */
3669 if (could_have_pointers (arg))
3670 make_escape_constraint (arg);
3673 /* The static chain escapes as well. */
3674 if (gimple_call_chain (stmt))
3675 make_escape_constraint (gimple_call_chain (stmt));
3677 /* And if we applied NRV the address of the return slot escapes as well. */
3678 if (gimple_call_return_slot_opt_p (stmt)
3679 && gimple_call_lhs (stmt) != NULL_TREE
3680 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3682 VEC(ce_s, heap) *tmpc = NULL;
3683 struct constraint_expr lhsc, *c;
3684 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3685 lhsc.var = escaped_id;
3688 for (i = 0; VEC_iterate (ce_s, tmpc, i, c); ++i)
3689 process_constraint (new_constraint (lhsc, *c));
3690 VEC_free(ce_s, heap, tmpc);
3693 /* Regular functions return nonlocal memory. */
3694 rhsc.var = nonlocal_id;
3697 VEC_safe_push (ce_s, heap, *results, &rhsc);
3700 /* For non-IPA mode, generate constraints necessary for a call
3701 that returns a pointer and assigns it to LHS. This simply makes
3702 the LHS point to global and escaped variables. */
3705 handle_lhs_call (tree lhs, int flags, VEC(ce_s, heap) *rhsc, tree fndecl)
3707 VEC(ce_s, heap) *lhsc = NULL;
3709 get_constraint_for (lhs, &lhsc);
3711 if (flags & ECF_MALLOC)
3714 vi = make_constraint_from_heapvar (get_vi_for_tree (lhs), "HEAP");
3715 /* We delay marking allocated storage global until we know if
3717 DECL_EXTERNAL (vi->decl) = 0;
3718 vi->is_global_var = 0;
3719 /* If this is not a real malloc call assume the memory was
3720 initialized and thus may point to global memory. All
3721 builtin functions with the malloc attribute behave in a sane way. */
3723 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
3724 make_constraint_from (vi, nonlocal_id);
3726 else if (VEC_length (ce_s, rhsc) > 0)
3728 /* If the store is to a global decl make sure to
3729 add proper escape constraints. */
3730 lhs = get_base_address (lhs);
3733 && is_global_var (lhs))
3735 struct constraint_expr tmpc;
3736 tmpc.var = escaped_id;
3739 VEC_safe_push (ce_s, heap, lhsc, &tmpc);
3741 process_all_all_constraints (lhsc, rhsc);
3743 VEC_free (ce_s, heap, lhsc);
3746 /* For non-IPA mode, generate constraints necessary for a call of a
3747 const function that returns a pointer in the statement STMT. */
3750 handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
3752 struct constraint_expr rhsc;
3755 /* Treat nested const functions the same as pure functions as far
3756 as the static chain is concerned. */
3757 if (gimple_call_chain (stmt))
3759 varinfo_t uses = get_call_use_vi (stmt);
3760 make_transitive_closure_constraints (uses);
3761 make_constraint_to (uses->id, gimple_call_chain (stmt));
3762 rhsc.var = uses->id;
3765 VEC_safe_push (ce_s, heap, *results, &rhsc);
3768 /* May return arguments. */
3769 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3771 tree arg = gimple_call_arg (stmt, k);
3773 if (could_have_pointers (arg))
3775 VEC(ce_s, heap) *argc = NULL;
3777 struct constraint_expr *argp;
3778 get_constraint_for (arg, &argc);
3779 for (i = 0; VEC_iterate (ce_s, argc, i, argp); ++i)
3780 VEC_safe_push (ce_s, heap, *results, argp);
3781 VEC_free(ce_s, heap, argc);
3785 /* May return addresses of globals. */
3786 rhsc.var = nonlocal_id;
3788 rhsc.type = ADDRESSOF;
3789 VEC_safe_push (ce_s, heap, *results, &rhsc);
3792 /* For non-IPA mode, generate constraints necessary for a call to a
3793 pure function in statement STMT. */
3796 handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
3798 struct constraint_expr rhsc;
3800 varinfo_t uses = NULL;
3802 /* Memory reached from pointer arguments is call-used. */
3803 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3805 tree arg = gimple_call_arg (stmt, i);
3807 if (could_have_pointers (arg))
3811 uses = get_call_use_vi (stmt);
3812 make_transitive_closure_constraints (uses);
3814 make_constraint_to (uses->id, arg);
3818 /* The static chain is used as well. */
3819 if (gimple_call_chain (stmt))
3823 uses = get_call_use_vi (stmt);
3824 make_transitive_closure_constraints (uses);
3826 make_constraint_to (uses->id, gimple_call_chain (stmt));
3829 /* Pure functions may return call-used and nonlocal memory. */
3832 rhsc.var = uses->id;
3835 VEC_safe_push (ce_s, heap, *results, &rhsc);
3837 rhsc.var = nonlocal_id;
3840 VEC_safe_push (ce_s, heap, *results, &rhsc);
3844 /* Return the varinfo for the callee of CALL. */
3847 get_fi_for_callee (gimple call)
3851 /* If we can directly resolve the function being called, do so.
3852 Otherwise, it must be some sort of indirect expression that
3853 we should still be able to handle. */
3854 decl = gimple_call_fndecl (call);
3856 return get_vi_for_tree (decl);
3858 decl = gimple_call_fn (call);
3859 /* The function can be either an SSA name pointer or,
3860 worse, an OBJ_TYPE_REF. In this case we have no
3861 clue and should be getting ANYFN (well, ANYTHING for now). */
3862 if (TREE_CODE (decl) == SSA_NAME)
3864 if (TREE_CODE (decl) == SSA_NAME
3865 && TREE_CODE (SSA_NAME_VAR (decl)) == PARM_DECL
3866 && SSA_NAME_IS_DEFAULT_DEF (decl))
3867 decl = SSA_NAME_VAR (decl);
3868 return get_vi_for_tree (decl);
3870 else if (TREE_CODE (decl) == INTEGER_CST
3871 || TREE_CODE (decl) == OBJ_TYPE_REF)
3872 return get_varinfo (anything_id);
3877 /* Walk statement T setting up aliasing constraints according to the
3878 references found in T. This function is the main part of the
3879 constraint builder. AI points to auxiliary alias information used
3880 when building alias sets and computing alias grouping heuristics. */
3883 find_func_aliases (gimple origt)
3886 VEC(ce_s, heap) *lhsc = NULL;
3887 VEC(ce_s, heap) *rhsc = NULL;
3888 struct constraint_expr *c;
3891 /* Now build constraints expressions. */
3892 if (gimple_code (t) == GIMPLE_PHI)
3894 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
3896 /* Only care about pointers and structures containing
3898 if (could_have_pointers (gimple_phi_result (t)))
3903 /* For a phi node, assign all the arguments to
3905 get_constraint_for (gimple_phi_result (t), &lhsc);
3906 for (i = 0; i < gimple_phi_num_args (t); i++)
3908 tree strippedrhs = PHI_ARG_DEF (t, i);
3910 STRIP_NOPS (strippedrhs);
3911 get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
3913 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3915 struct constraint_expr *c2;
3916 while (VEC_length (ce_s, rhsc) > 0)
3918 c2 = VEC_last (ce_s, rhsc);
3919 process_constraint (new_constraint (*c, *c2));
3920 VEC_pop (ce_s, rhsc);
3926 /* In IPA mode, we need to generate constraints to pass call
3927 arguments through their calls. There are two cases,
3928 either a GIMPLE_CALL returning a value, or just a plain
3929 GIMPLE_CALL when we are not.
3931 In non-ipa mode, we need to generate constraints for each
3932 pointer passed by address. */
3933 else if (is_gimple_call (t))
3935 tree fndecl = gimple_call_fndecl (t);
3936 if (fndecl != NULL_TREE
3937 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3938 /* ??? All builtins that are handled here need to be handled
3939 in the alias-oracle query functions explicitly! */
3940 switch (DECL_FUNCTION_CODE (fndecl))
3942 /* All the following functions return a pointer to the same object
3943 as their first argument points to. The functions do not add
3944 to the ESCAPED solution. The functions make the first argument
3945 pointed to memory point to what the second argument pointed to
3946 memory points to. */
3947 case BUILT_IN_STRCPY:
3948 case BUILT_IN_STRNCPY:
3949 case BUILT_IN_BCOPY:
3950 case BUILT_IN_MEMCPY:
3951 case BUILT_IN_MEMMOVE:
3952 case BUILT_IN_MEMPCPY:
3953 case BUILT_IN_STPCPY:
3954 case BUILT_IN_STPNCPY:
3955 case BUILT_IN_STRCAT:
3956 case BUILT_IN_STRNCAT:
3958 tree res = gimple_call_lhs (t);
3959 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
3960 == BUILT_IN_BCOPY ? 1 : 0));
3961 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
3962 == BUILT_IN_BCOPY ? 0 : 1));
3963 if (res != NULL_TREE)
3965 get_constraint_for (res, &lhsc);
3966 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
3967 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
3968 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
3969 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
3971 get_constraint_for (dest, &rhsc);
3972 process_all_all_constraints (lhsc, rhsc);
3973 VEC_free (ce_s, heap, lhsc);
3974 VEC_free (ce_s, heap, rhsc);
3976 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
3977 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
3980 process_all_all_constraints (lhsc, rhsc);
3981 VEC_free (ce_s, heap, lhsc);
3982 VEC_free (ce_s, heap, rhsc);
3985 case BUILT_IN_MEMSET:
3987 tree res = gimple_call_lhs (t);
3988 tree dest = gimple_call_arg (t, 0);
3991 struct constraint_expr ac;
3992 if (res != NULL_TREE)
3994 get_constraint_for (res, &lhsc);
3995 get_constraint_for (dest, &rhsc);
3996 process_all_all_constraints (lhsc, rhsc);
3997 VEC_free (ce_s, heap, lhsc);
3998 VEC_free (ce_s, heap, rhsc);
4000 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4002 if (flag_delete_null_pointer_checks
4003 && integer_zerop (gimple_call_arg (t, 1)))
4005 ac.type = ADDRESSOF;
4006 ac.var = nothing_id;
4011 ac.var = integer_id;
4014 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
4015 process_constraint (new_constraint (*lhsp, ac));
4016 VEC_free (ce_s, heap, lhsc);
4019 /* All the following functions do not return pointers, do not
4020 modify the points-to sets of memory reachable from their
4021 arguments and do not add to the ESCAPED solution. */
4022 case BUILT_IN_SINCOS:
4023 case BUILT_IN_SINCOSF:
4024 case BUILT_IN_SINCOSL:
4025 case BUILT_IN_FREXP:
4026 case BUILT_IN_FREXPF:
4027 case BUILT_IN_FREXPL:
4028 case BUILT_IN_GAMMA_R:
4029 case BUILT_IN_GAMMAF_R:
4030 case BUILT_IN_GAMMAL_R:
4031 case BUILT_IN_LGAMMA_R:
4032 case BUILT_IN_LGAMMAF_R:
4033 case BUILT_IN_LGAMMAL_R:
4035 case BUILT_IN_MODFF:
4036 case BUILT_IN_MODFL:
4037 case BUILT_IN_REMQUO:
4038 case BUILT_IN_REMQUOF:
4039 case BUILT_IN_REMQUOL:
4042 /* Trampolines are special - they set up passing the static
4044 case BUILT_IN_INIT_TRAMPOLINE:
4046 tree tramp = gimple_call_arg (t, 0);
4047 tree nfunc = gimple_call_arg (t, 1);
4048 tree frame = gimple_call_arg (t, 2);
4050 struct constraint_expr lhs, *rhsp;
4053 varinfo_t nfi = NULL;
4054 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4055 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4058 lhs = get_function_part_constraint (nfi, fi_static_chain);
4059 get_constraint_for (frame, &rhsc);
4060 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
4061 process_constraint (new_constraint (lhs, *rhsp));
4062 VEC_free (ce_s, heap, rhsc);
4064 /* Make the frame point to the function for
4065 the trampoline adjustment call. */
4066 get_constraint_for (tramp, &lhsc);
4068 get_constraint_for (nfunc, &rhsc);
4069 process_all_all_constraints (lhsc, rhsc);
4070 VEC_free (ce_s, heap, rhsc);
4071 VEC_free (ce_s, heap, lhsc);
4076 /* Else fallthru to generic handling which will let
4077 the frame escape. */
4080 case BUILT_IN_ADJUST_TRAMPOLINE:
4082 tree tramp = gimple_call_arg (t, 0);
4083 tree res = gimple_call_lhs (t);
4084 if (in_ipa_mode && res)
4086 get_constraint_for (res, &lhsc);
4087 get_constraint_for (tramp, &rhsc);
4089 process_all_all_constraints (lhsc, rhsc);
4090 VEC_free (ce_s, heap, rhsc);
4091 VEC_free (ce_s, heap, lhsc);
4095 /* Variadic argument handling needs to be handled in IPA
4097 case BUILT_IN_VA_START:
4101 tree valist = gimple_call_arg (t, 0);
4102 struct constraint_expr rhs, *lhsp;
4104 /* The va_list gets access to pointers in variadic
4106 fi = lookup_vi_for_tree (cfun->decl);
4107 gcc_assert (fi != NULL);
4108 get_constraint_for (valist, &lhsc);
4110 rhs = get_function_part_constraint (fi, ~0);
4111 rhs.type = ADDRESSOF;
4112 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
4113 process_constraint (new_constraint (*lhsp, rhs));
4114 VEC_free (ce_s, heap, lhsc);
4115 /* va_list is clobbered. */
4116 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4121 /* va_end doesn't have any effect that matters. */
4122 case BUILT_IN_VA_END:
4124 /* printf-style functions may have hooks to set pointers to
4125 point to somewhere into the generated string. Leave them
4126 for a later excercise... */
4128 /* Fallthru to general call handling. */;
4132 && (!(fi = lookup_vi_for_tree (fndecl))
4133 || !fi->is_fn_info)))
4135 VEC(ce_s, heap) *rhsc = NULL;
4136 int flags = gimple_call_flags (t);
4138 /* Const functions can return their arguments and addresses
4139 of global memory but not of escaped memory. */
4140 if (flags & (ECF_CONST|ECF_NOVOPS))
4142 if (gimple_call_lhs (t)
4143 && could_have_pointers (gimple_call_lhs (t)))
4144 handle_const_call (t, &rhsc);
4146 /* Pure functions can return addresses in and of memory
4147 reachable from their arguments, but they are not an escape
4148 point for reachable memory of their arguments. */
4149 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4150 handle_pure_call (t, &rhsc);
4152 handle_rhs_call (t, &rhsc);
4153 if (gimple_call_lhs (t)
4154 && could_have_pointers (gimple_call_lhs (t)))
4155 handle_lhs_call (gimple_call_lhs (t), flags, rhsc, fndecl);
4156 VEC_free (ce_s, heap, rhsc);
4163 fi = get_fi_for_callee (t);
4165 /* Assign all the passed arguments to the appropriate incoming
4166 parameters of the function. */
4167 for (j = 0; j < gimple_call_num_args (t); j++)
4169 struct constraint_expr lhs ;
4170 struct constraint_expr *rhsp;
4171 tree arg = gimple_call_arg (t, j);
4173 if (!could_have_pointers (arg))
4176 get_constraint_for (arg, &rhsc);
4177 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4178 while (VEC_length (ce_s, rhsc) != 0)
4180 rhsp = VEC_last (ce_s, rhsc);
4181 process_constraint (new_constraint (lhs, *rhsp));
4182 VEC_pop (ce_s, rhsc);
4186 /* If we are returning a value, assign it to the result. */
4187 lhsop = gimple_call_lhs (t);
4189 && could_have_pointers (lhsop))
4191 struct constraint_expr rhs;
4192 struct constraint_expr *lhsp;
4194 get_constraint_for (lhsop, &lhsc);
4195 rhs = get_function_part_constraint (fi, fi_result);
4197 && DECL_RESULT (fndecl)
4198 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4200 VEC(ce_s, heap) *tem = NULL;
4201 VEC_safe_push (ce_s, heap, tem, &rhs);
4203 rhs = *VEC_index (ce_s, tem, 0);
4204 VEC_free(ce_s, heap, tem);
4206 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
4207 process_constraint (new_constraint (*lhsp, rhs));
4210 /* If we pass the result decl by reference, honor that. */
4213 && DECL_RESULT (fndecl)
4214 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4216 struct constraint_expr lhs;
4217 struct constraint_expr *rhsp;
4219 get_constraint_for_address_of (lhsop, &rhsc);
4220 lhs = get_function_part_constraint (fi, fi_result);
4221 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
4222 process_constraint (new_constraint (lhs, *rhsp));
4223 VEC_free (ce_s, heap, rhsc);
4226 /* If we use a static chain, pass it along. */
4227 if (gimple_call_chain (t))
4229 struct constraint_expr lhs;
4230 struct constraint_expr *rhsp;
4232 get_constraint_for (gimple_call_chain (t), &rhsc);
4233 lhs = get_function_part_constraint (fi, fi_static_chain);
4234 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
4235 process_constraint (new_constraint (lhs, *rhsp));
4239 /* Otherwise, just a regular assignment statement. Only care about
4240 operations with pointer result, others are dealt with as escape
4241 points if they have pointer operands. */
4242 else if (is_gimple_assign (t)
4243 && could_have_pointers (gimple_assign_lhs (t)))
4245 /* Otherwise, just a regular assignment statement. */
4246 tree lhsop = gimple_assign_lhs (t);
4247 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4249 if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4250 do_structure_copy (lhsop, rhsop);
4253 struct constraint_expr temp;
4254 get_constraint_for (lhsop, &lhsc);
4256 if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
4257 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4258 gimple_assign_rhs2 (t), &rhsc);
4259 else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
4260 && !(POINTER_TYPE_P (gimple_expr_type (t))
4261 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4262 || gimple_assign_single_p (t))
4263 get_constraint_for (rhsop, &rhsc);
4266 temp.type = ADDRESSOF;
4267 temp.var = anything_id;
4269 VEC_safe_push (ce_s, heap, rhsc, &temp);
4271 process_all_all_constraints (lhsc, rhsc);
4273 /* If there is a store to a global variable the rhs escapes. */
4274 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4276 && is_global_var (lhsop)
4278 || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4279 make_escape_constraint (rhsop);
4280 /* If this is a conversion of a non-restrict pointer to a
4281 restrict pointer track it with a new heapvar. */
4282 else if (gimple_assign_cast_p (t)
4283 && POINTER_TYPE_P (TREE_TYPE (rhsop))
4284 && POINTER_TYPE_P (TREE_TYPE (lhsop))
4285 && !TYPE_RESTRICT (TREE_TYPE (rhsop))
4286 && TYPE_RESTRICT (TREE_TYPE (lhsop)))
4287 make_constraint_from_restrict (get_vi_for_tree (lhsop),
4290 /* For conversions of pointers to non-pointers the pointer escapes. */
4291 else if (gimple_assign_cast_p (t)
4292 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (t)))
4293 && !POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (t))))
4295 make_escape_constraint (gimple_assign_rhs1 (t));
4297 /* Handle escapes through return. */
4298 else if (gimple_code (t) == GIMPLE_RETURN
4299 && gimple_return_retval (t) != NULL_TREE
4300 && could_have_pointers (gimple_return_retval (t)))
4304 || !(fi = get_vi_for_tree (cfun->decl)))
4305 make_escape_constraint (gimple_return_retval (t));
4306 else if (in_ipa_mode
4309 struct constraint_expr lhs ;
4310 struct constraint_expr *rhsp;
4313 lhs = get_function_part_constraint (fi, fi_result);
4314 get_constraint_for (gimple_return_retval (t), &rhsc);
4315 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4316 process_constraint (new_constraint (lhs, *rhsp));
4319 /* Handle asms conservatively by adding escape constraints to everything. */
4320 else if (gimple_code (t) == GIMPLE_ASM)
4322 unsigned i, noutputs;
4323 const char **oconstraints;
4324 const char *constraint;
4325 bool allows_mem, allows_reg, is_inout;
4327 noutputs = gimple_asm_noutputs (t);
4328 oconstraints = XALLOCAVEC (const char *, noutputs);
4330 for (i = 0; i < noutputs; ++i)
4332 tree link = gimple_asm_output_op (t, i);
4333 tree op = TREE_VALUE (link);
4335 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4336 oconstraints[i] = constraint;
4337 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4338 &allows_reg, &is_inout);
4340 /* A memory constraint makes the address of the operand escape. */
4341 if (!allows_reg && allows_mem)
4342 make_escape_constraint (build_fold_addr_expr (op));
4344 /* The asm may read global memory, so outputs may point to
4345 any global memory. */
4346 if (op && could_have_pointers (op))
4348 VEC(ce_s, heap) *lhsc = NULL;
4349 struct constraint_expr rhsc, *lhsp;
4351 get_constraint_for (op, &lhsc);
4352 rhsc.var = nonlocal_id;
4355 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
4356 process_constraint (new_constraint (*lhsp, rhsc));
4357 VEC_free (ce_s, heap, lhsc);
4360 for (i = 0; i < gimple_asm_ninputs (t); ++i)
4362 tree link = gimple_asm_input_op (t, i);
4363 tree op = TREE_VALUE (link);
4365 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4367 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4368 &allows_mem, &allows_reg);
4370 /* A memory constraint makes the address of the operand escape. */
4371 if (!allows_reg && allows_mem)
4372 make_escape_constraint (build_fold_addr_expr (op));
4373 /* Strictly we'd only need the constraint to ESCAPED if
4374 the asm clobbers memory, otherwise using something
4375 along the lines of per-call clobbers/uses would be enough. */
4376 else if (op && could_have_pointers (op))
4377 make_escape_constraint (op);
4381 VEC_free (ce_s, heap, rhsc);
4382 VEC_free (ce_s, heap, lhsc);
4386 /* Create a constraint adding to the clobber set of FI the memory
4387 pointed to by PTR. */
4390 process_ipa_clobber (varinfo_t fi, tree ptr)
4392 VEC(ce_s, heap) *ptrc = NULL;
4393 struct constraint_expr *c, lhs;
4395 get_constraint_for (ptr, &ptrc);
4396 lhs = get_function_part_constraint (fi, fi_clobbers);
4397 for (i = 0; VEC_iterate (ce_s, ptrc, i, c); i++)
4398 process_constraint (new_constraint (lhs, *c));
4399 VEC_free (ce_s, heap, ptrc);
4402 /* Walk statement T setting up clobber and use constraints according to the
4403 references found in T. This function is a main part of the
4404 IPA constraint builder. */
4407 find_func_clobbers (gimple origt)
4410 VEC(ce_s, heap) *lhsc = NULL;
4411 VEC(ce_s, heap) *rhsc = NULL;
4414 /* Add constraints for clobbered/used in IPA mode.
4415 We are not interested in what automatic variables are clobbered
4416 or used as we only use the information in the caller to which
4417 they do not escape. */
4418 gcc_assert (in_ipa_mode);
4420 /* If the stmt refers to memory in any way it better had a VUSE. */
4421 if (gimple_vuse (t) == NULL_TREE)
4424 /* We'd better have function information for the current function. */
4425 fi = lookup_vi_for_tree (cfun->decl);
4426 gcc_assert (fi != NULL);
4428 /* Account for stores in assignments and calls. */
4429 if (gimple_vdef (t) != NULL_TREE
4430 && gimple_has_lhs (t))
4432 tree lhs = gimple_get_lhs (t);
4434 while (handled_component_p (tem))
4435 tem = TREE_OPERAND (tem, 0);
4437 && !auto_var_in_fn_p (tem, cfun->decl))
4438 || INDIRECT_REF_P (tem))
4440 struct constraint_expr lhsc, *rhsp;
4442 lhsc = get_function_part_constraint (fi, fi_clobbers);
4443 get_constraint_for_address_of (lhs, &rhsc);
4444 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4445 process_constraint (new_constraint (lhsc, *rhsp));
4446 VEC_free (ce_s, heap, rhsc);
4450 /* Account for uses in assigments and returns. */
4451 if (gimple_assign_single_p (t)
4452 || (gimple_code (t) == GIMPLE_RETURN
4453 && gimple_return_retval (t) != NULL_TREE))
4455 tree rhs = (gimple_assign_single_p (t)
4456 ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
4458 while (handled_component_p (tem))
4459 tem = TREE_OPERAND (tem, 0);
4461 && !auto_var_in_fn_p (tem, cfun->decl))
4462 || INDIRECT_REF_P (tem))
4464 struct constraint_expr lhs, *rhsp;
4466 lhs = get_function_part_constraint (fi, fi_uses);
4467 get_constraint_for_address_of (rhs, &rhsc);
4468 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4469 process_constraint (new_constraint (lhs, *rhsp));
4470 VEC_free (ce_s, heap, rhsc);
4474 if (is_gimple_call (t))
4476 varinfo_t cfi = NULL;
4477 tree decl = gimple_call_fndecl (t);
4478 struct constraint_expr lhs, rhs;
4481 /* For builtins we do not have separate function info. For those
4482 we do not generate escapes for we have to generate clobbers/uses. */
4484 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
4485 switch (DECL_FUNCTION_CODE (decl))
4487 /* The following functions use and clobber memory pointed to
4488 by their arguments. */
4489 case BUILT_IN_STRCPY:
4490 case BUILT_IN_STRNCPY:
4491 case BUILT_IN_BCOPY:
4492 case BUILT_IN_MEMCPY:
4493 case BUILT_IN_MEMMOVE:
4494 case BUILT_IN_MEMPCPY:
4495 case BUILT_IN_STPCPY:
4496 case BUILT_IN_STPNCPY:
4497 case BUILT_IN_STRCAT:
4498 case BUILT_IN_STRNCAT:
4500 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4501 == BUILT_IN_BCOPY ? 1 : 0));
4502 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4503 == BUILT_IN_BCOPY ? 0 : 1));
4505 struct constraint_expr *rhsp, *lhsp;
4506 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4507 lhs = get_function_part_constraint (fi, fi_clobbers);
4508 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
4509 process_constraint (new_constraint (lhs, *lhsp));
4510 VEC_free (ce_s, heap, lhsc);
4511 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4512 lhs = get_function_part_constraint (fi, fi_uses);
4513 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4514 process_constraint (new_constraint (lhs, *rhsp));
4515 VEC_free (ce_s, heap, rhsc);
4518 /* The following function clobbers memory pointed to by
4520 case BUILT_IN_MEMSET:
4522 tree dest = gimple_call_arg (t, 0);
4525 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4526 lhs = get_function_part_constraint (fi, fi_clobbers);
4527 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
4528 process_constraint (new_constraint (lhs, *lhsp));
4529 VEC_free (ce_s, heap, lhsc);
4532 /* The following functions clobber their second and third
4534 case BUILT_IN_SINCOS:
4535 case BUILT_IN_SINCOSF:
4536 case BUILT_IN_SINCOSL:
4538 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4539 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4542 /* The following functions clobber their second argument. */
4543 case BUILT_IN_FREXP:
4544 case BUILT_IN_FREXPF:
4545 case BUILT_IN_FREXPL:
4546 case BUILT_IN_LGAMMA_R:
4547 case BUILT_IN_LGAMMAF_R:
4548 case BUILT_IN_LGAMMAL_R:
4549 case BUILT_IN_GAMMA_R:
4550 case BUILT_IN_GAMMAF_R:
4551 case BUILT_IN_GAMMAL_R:
4553 case BUILT_IN_MODFF:
4554 case BUILT_IN_MODFL:
4556 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4559 /* The following functions clobber their third argument. */
4560 case BUILT_IN_REMQUO:
4561 case BUILT_IN_REMQUOF:
4562 case BUILT_IN_REMQUOL:
4564 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4567 /* The following functions neither read nor clobber memory. */
4570 /* Trampolines are of no interest to us. */
4571 case BUILT_IN_INIT_TRAMPOLINE:
4572 case BUILT_IN_ADJUST_TRAMPOLINE:
4574 case BUILT_IN_VA_START:
4575 case BUILT_IN_VA_END:
4577 /* printf-style functions may have hooks to set pointers to
4578 point to somewhere into the generated string. Leave them
4579 for a later excercise... */
4581 /* Fallthru to general call handling. */;
4584 /* Parameters passed by value are used. */
4585 lhs = get_function_part_constraint (fi, fi_uses);
4586 for (i = 0; i < gimple_call_num_args (t); i++)
4588 struct constraint_expr *rhsp;
4589 tree arg = gimple_call_arg (t, i);
4591 if (TREE_CODE (arg) == SSA_NAME
4592 || is_gimple_min_invariant (arg))
4595 get_constraint_for_address_of (arg, &rhsc);
4596 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
4597 process_constraint (new_constraint (lhs, *rhsp));
4598 VEC_free (ce_s, heap, rhsc);
4601 /* Build constraints for propagating clobbers/uses along the
4603 cfi = get_fi_for_callee (t);
4604 if (cfi->id == anything_id)
4606 if (gimple_vdef (t))
4607 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4609 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4614 /* For callees without function info (that's external functions),
4615 ESCAPED is clobbered and used. */
4616 if (gimple_call_fndecl (t)
4617 && !cfi->is_fn_info)
4621 if (gimple_vdef (t))
4622 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4624 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
4626 /* Also honor the call statement use/clobber info. */
4627 if ((vi = lookup_call_clobber_vi (t)) != NULL)
4628 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4630 if ((vi = lookup_call_use_vi (t)) != NULL)
4631 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
4636 /* Otherwise the caller clobbers and uses what the callee does.
4637 ??? This should use a new complex constraint that filters
4638 local variables of the callee. */
4639 if (gimple_vdef (t))
4641 lhs = get_function_part_constraint (fi, fi_clobbers);
4642 rhs = get_function_part_constraint (cfi, fi_clobbers);
4643 process_constraint (new_constraint (lhs, rhs));
4645 lhs = get_function_part_constraint (fi, fi_uses);
4646 rhs = get_function_part_constraint (cfi, fi_uses);
4647 process_constraint (new_constraint (lhs, rhs));
4649 else if (gimple_code (t) == GIMPLE_ASM)
4651 /* ??? Ick. We can do better. */
4652 if (gimple_vdef (t))
4653 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4655 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4659 VEC_free (ce_s, heap, rhsc);
4663 /* Find the first varinfo in the same variable as START that overlaps with
4664 OFFSET. Return NULL if we can't find one. */
4667 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
4669 /* If the offset is outside of the variable, bail out. */
4670 if (offset >= start->fullsize)
4673 /* If we cannot reach offset from start, lookup the first field
4674 and start from there. */
4675 if (start->offset > offset)
4676 start = lookup_vi_for_tree (start->decl);
4680 /* We may not find a variable in the field list with the actual
4681 offset when when we have glommed a structure to a variable.
4682 In that case, however, offset should still be within the size
4684 if (offset >= start->offset
4685 && (offset - start->offset) < start->size)
4694 /* Find the first varinfo in the same variable as START that overlaps with
4695 OFFSET. If there is no such varinfo the varinfo directly preceding
4696 OFFSET is returned. */
4699 first_or_preceding_vi_for_offset (varinfo_t start,
4700 unsigned HOST_WIDE_INT offset)
4702 /* If we cannot reach offset from start, lookup the first field
4703 and start from there. */
4704 if (start->offset > offset)
4705 start = lookup_vi_for_tree (start->decl);
4707 /* We may not find a variable in the field list with the actual
4708 offset when when we have glommed a structure to a variable.
4709 In that case, however, offset should still be within the size
4711 If we got beyond the offset we look for return the field
4712 directly preceding offset which may be the last field. */
4714 && offset >= start->offset
4715 && !((offset - start->offset) < start->size))
4716 start = start->next;
4722 /* Insert the varinfo FIELD into the field list for BASE, at the front
4726 insert_into_field_list (varinfo_t base, varinfo_t field)
4728 varinfo_t prev = base;
4729 varinfo_t curr = base->next;
4735 /* This structure is used during pushing fields onto the fieldstack
4736 to track the offset of the field, since bitpos_of_field gives it
4737 relative to its immediate containing type, and we want it relative
4738 to the ultimate containing object. */
4742 /* Offset from the base of the base containing object to this field. */
4743 HOST_WIDE_INT offset;
4745 /* Size, in bits, of the field. */
4746 unsigned HOST_WIDE_INT size;
4748 unsigned has_unknown_size : 1;
4750 unsigned may_have_pointers : 1;
4752 unsigned only_restrict_pointers : 1;
4754 typedef struct fieldoff fieldoff_s;
4756 DEF_VEC_O(fieldoff_s);
4757 DEF_VEC_ALLOC_O(fieldoff_s,heap);
4759 /* qsort comparison function for two fieldoff's PA and PB */
4762 fieldoff_compare (const void *pa, const void *pb)
4764 const fieldoff_s *foa = (const fieldoff_s *)pa;
4765 const fieldoff_s *fob = (const fieldoff_s *)pb;
4766 unsigned HOST_WIDE_INT foasize, fobsize;
4768 if (foa->offset < fob->offset)
4770 else if (foa->offset > fob->offset)
4773 foasize = foa->size;
4774 fobsize = fob->size;
4775 if (foasize < fobsize)
4777 else if (foasize > fobsize)
4782 /* Sort a fieldstack according to the field offset and sizes. */
4784 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4786 qsort (VEC_address (fieldoff_s, fieldstack),
4787 VEC_length (fieldoff_s, fieldstack),
4788 sizeof (fieldoff_s),
4792 /* Return true if V is a tree that we can have subvars for.
4793 Normally, this is any aggregate type. Also complex
4794 types which are not gimple registers can have subvars. */
4797 var_can_have_subvars (const_tree v)
4799 /* Volatile variables should never have subvars. */
4800 if (TREE_THIS_VOLATILE (v))
4803 /* Non decls or memory tags can never have subvars. */
4807 /* Aggregates without overlapping fields can have subvars. */
4808 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4814 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4815 the fields of TYPE onto fieldstack, recording their offsets along
4818 OFFSET is used to keep track of the offset in this entire
4819 structure, rather than just the immediately containing structure.
4820 Returns the number of fields pushed. */
4823 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4824 HOST_WIDE_INT offset)
4829 if (TREE_CODE (type) != RECORD_TYPE)
4832 /* If the vector of fields is growing too big, bail out early.
4833 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
4835 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4838 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4839 if (TREE_CODE (field) == FIELD_DECL)
4843 HOST_WIDE_INT foff = bitpos_of_field (field);
4845 if (!var_can_have_subvars (field)
4846 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4847 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
4849 else if (!(pushed = push_fields_onto_fieldstack
4850 (TREE_TYPE (field), fieldstack, offset + foff))
4851 && (DECL_SIZE (field)
4852 && !integer_zerop (DECL_SIZE (field))))
4853 /* Empty structures may have actual size, like in C++. So
4854 see if we didn't push any subfields and the size is
4855 nonzero, push the field onto the stack. */
4860 fieldoff_s *pair = NULL;
4861 bool has_unknown_size = false;
4863 if (!VEC_empty (fieldoff_s, *fieldstack))
4864 pair = VEC_last (fieldoff_s, *fieldstack);
4866 if (!DECL_SIZE (field)
4867 || !host_integerp (DECL_SIZE (field), 1))
4868 has_unknown_size = true;
4870 /* If adjacent fields do not contain pointers merge them. */
4872 && !pair->may_have_pointers
4873 && !could_have_pointers (field)
4874 && !pair->has_unknown_size
4875 && !has_unknown_size
4876 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
4878 pair = VEC_last (fieldoff_s, *fieldstack);
4879 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
4883 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4884 pair->offset = offset + foff;
4885 pair->has_unknown_size = has_unknown_size;
4886 if (!has_unknown_size)
4887 pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
4890 pair->may_have_pointers = could_have_pointers (field);
4891 pair->only_restrict_pointers
4892 = (!has_unknown_size
4893 && POINTER_TYPE_P (TREE_TYPE (field))
4894 && TYPE_RESTRICT (TREE_TYPE (field)));
4905 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4906 if it is a varargs function. */
4909 count_num_arguments (tree decl, bool *is_varargs)
4911 unsigned int num = 0;
4914 /* Capture named arguments for K&R functions. They do not
4915 have a prototype and thus no TYPE_ARG_TYPES. */
4916 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
4919 /* Check if the function has variadic arguments. */
4920 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
4921 if (TREE_VALUE (t) == void_type_node)
4929 /* Creation function node for DECL, using NAME, and return the index
4930 of the variable we've created for the function. */
4933 create_function_info_for (tree decl, const char *name)
4935 struct function *fn = DECL_STRUCT_FUNCTION (decl);
4936 varinfo_t vi, prev_vi;
4939 bool is_varargs = false;
4940 unsigned int num_args = count_num_arguments (decl, &is_varargs);
4942 /* Create the variable info. */
4944 vi = new_var_info (decl, name);
4947 vi->fullsize = fi_parm_base + num_args;
4949 vi->may_have_pointers = false;
4952 insert_vi_for_tree (vi->decl, vi);
4958 /* Create a variable for things the function clobbers and one for
4959 things the function uses. */
4961 varinfo_t clobbervi, usevi;
4962 const char *newname;
4965 asprintf (&tempname, "%s.clobber", name);
4966 newname = ggc_strdup (tempname);
4969 clobbervi = new_var_info (NULL, newname);
4970 clobbervi->offset = fi_clobbers;
4971 clobbervi->size = 1;
4972 clobbervi->fullsize = vi->fullsize;
4973 clobbervi->is_full_var = true;
4974 clobbervi->is_global_var = false;
4975 gcc_assert (prev_vi->offset < clobbervi->offset);
4976 prev_vi->next = clobbervi;
4977 prev_vi = clobbervi;
4980 asprintf (&tempname, "%s.use", name);
4981 newname = ggc_strdup (tempname);
4984 usevi = new_var_info (NULL, newname);
4985 usevi->offset = fi_uses;
4987 usevi->fullsize = vi->fullsize;
4988 usevi->is_full_var = true;
4989 usevi->is_global_var = false;
4990 gcc_assert (prev_vi->offset < usevi->offset);
4991 prev_vi->next = usevi;
4996 /* And one for the static chain. */
4997 if (fn->static_chain_decl != NULL_TREE)
5000 const char *newname;
5003 asprintf (&tempname, "%s.chain", name);
5004 newname = ggc_strdup (tempname);
5007 chainvi = new_var_info (fn->static_chain_decl, newname);
5008 chainvi->offset = fi_static_chain;
5010 chainvi->fullsize = vi->fullsize;
5011 chainvi->is_full_var = true;
5012 chainvi->is_global_var = false;
5013 gcc_assert (prev_vi->offset < chainvi->offset);
5014 prev_vi->next = chainvi;
5017 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5020 /* Create a variable for the return var. */
5021 if (DECL_RESULT (decl) != NULL
5022 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5025 const char *newname;
5027 tree resultdecl = decl;
5029 if (DECL_RESULT (decl))
5030 resultdecl = DECL_RESULT (decl);
5032 asprintf (&tempname, "%s.result", name);
5033 newname = ggc_strdup (tempname);
5036 resultvi = new_var_info (resultdecl, newname);
5037 resultvi->offset = fi_result;
5039 resultvi->fullsize = vi->fullsize;
5040 resultvi->is_full_var = true;
5041 if (DECL_RESULT (decl))
5042 resultvi->may_have_pointers = could_have_pointers (DECL_RESULT (decl));
5043 gcc_assert (prev_vi->offset < resultvi->offset);
5044 prev_vi->next = resultvi;
5047 if (DECL_RESULT (decl))
5048 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5051 /* Set up variables for each argument. */
5052 arg = DECL_ARGUMENTS (decl);
5053 for (i = 0; i < num_args; i++)
5056 const char *newname;
5058 tree argdecl = decl;
5063 asprintf (&tempname, "%s.arg%d", name, i);
5064 newname = ggc_strdup (tempname);
5067 argvi = new_var_info (argdecl, newname);
5068 argvi->offset = fi_parm_base + i;
5070 argvi->is_full_var = true;
5071 argvi->fullsize = vi->fullsize;
5073 argvi->may_have_pointers = could_have_pointers (arg);
5074 gcc_assert (prev_vi->offset < argvi->offset);
5075 prev_vi->next = argvi;
5080 insert_vi_for_tree (arg, argvi);
5081 arg = TREE_CHAIN (arg);
5085 /* Add one representative for all further args. */
5089 const char *newname;
5093 asprintf (&tempname, "%s.varargs", name);
5094 newname = ggc_strdup (tempname);
5097 /* We need sth that can be pointed to for va_start. */
5098 decl = create_tmp_var_raw (ptr_type_node, name);
5101 argvi = new_var_info (decl, newname);
5102 argvi->offset = fi_parm_base + num_args;
5104 argvi->is_full_var = true;
5105 argvi->is_heap_var = true;
5106 argvi->fullsize = vi->fullsize;
5107 gcc_assert (prev_vi->offset < argvi->offset);
5108 prev_vi->next = argvi;
5117 /* Return true if FIELDSTACK contains fields that overlap.
5118 FIELDSTACK is assumed to be sorted by offset. */
5121 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
5123 fieldoff_s *fo = NULL;
5125 HOST_WIDE_INT lastoffset = -1;
5127 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
5129 if (fo->offset == lastoffset)
5131 lastoffset = fo->offset;
5136 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5137 This will also create any varinfo structures necessary for fields
5141 create_variable_info_for (tree decl, const char *name)
5144 tree decl_type = TREE_TYPE (decl);
5145 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5146 VEC (fieldoff_s,heap) *fieldstack = NULL;
5148 if (var_can_have_subvars (decl) && use_field_sensitive)
5149 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5151 /* If the variable doesn't have subvars, we may end up needing to
5152 sort the field list and create fake variables for all the
5154 vi = new_var_info (decl, name);
5156 vi->may_have_pointers = could_have_pointers (decl);
5158 || !host_integerp (declsize, 1))
5160 vi->is_unknown_size_var = true;
5166 vi->fullsize = TREE_INT_CST_LOW (declsize);
5167 vi->size = vi->fullsize;
5170 insert_vi_for_tree (vi->decl, vi);
5172 /* ??? The setting of vi->may_have_pointers is too conservative here
5173 and may get refined below. Thus we have superfluous constraints
5174 here sometimes which triggers the commented assert in
5175 dump_sa_points_to_info. */
5176 if (vi->is_global_var
5177 && vi->may_have_pointers)
5179 /* Mark global restrict qualified pointers. */
5180 if (POINTER_TYPE_P (TREE_TYPE (decl))
5181 && TYPE_RESTRICT (TREE_TYPE (decl)))
5182 make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
5184 /* For escaped variables initialize them from nonlocal. */
5186 || DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
5187 make_copy_constraint (vi, nonlocal_id);
5189 /* If this is a global variable with an initializer and we are in
5190 IPA mode generate constraints for it. In non-IPA mode
5191 the initializer from nonlocal is all we need. */
5193 && DECL_INITIAL (vi->decl))
5195 VEC (ce_s, heap) *rhsc = NULL;
5196 struct constraint_expr lhs, *rhsp;
5198 get_constraint_for (DECL_INITIAL (vi->decl), &rhsc);
5202 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
5203 process_constraint (new_constraint (lhs, *rhsp));
5204 /* If this is a variable that escapes from the unit
5205 the initializer escapes as well. */
5206 if (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
5208 lhs.var = escaped_id;
5211 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
5212 process_constraint (new_constraint (lhs, *rhsp));
5214 VEC_free (ce_s, heap, rhsc);
5215 /* ??? Force us to not use subfields. Else we'd have to parse
5216 arbitrary initializers. */
5217 VEC_free (fieldoff_s, heap, fieldstack);
5222 if (use_field_sensitive
5223 && !vi->is_unknown_size_var
5224 && var_can_have_subvars (decl)
5225 && VEC_length (fieldoff_s, fieldstack) > 1
5226 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
5228 fieldoff_s *fo = NULL;
5229 bool notokay = false;
5232 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
5234 if (fo->has_unknown_size
5242 /* We can't sort them if we have a field with a variable sized type,
5243 which will make notokay = true. In that case, we are going to return
5244 without creating varinfos for the fields anyway, so sorting them is a
5248 sort_fieldstack (fieldstack);
5249 /* Due to some C++ FE issues, like PR 22488, we might end up
5250 what appear to be overlapping fields even though they,
5251 in reality, do not overlap. Until the C++ FE is fixed,
5252 we will simply disable field-sensitivity for these cases. */
5253 notokay = check_for_overlaps (fieldstack);
5257 if (VEC_length (fieldoff_s, fieldstack) != 0)
5258 fo = VEC_index (fieldoff_s, fieldstack, 0);
5260 if (fo == NULL || notokay)
5262 vi->is_unknown_size_var = 1;
5265 vi->is_full_var = true;
5266 VEC_free (fieldoff_s, heap, fieldstack);
5270 vi->size = fo->size;
5271 vi->offset = fo->offset;
5272 vi->may_have_pointers = fo->may_have_pointers;
5273 if (vi->is_global_var
5274 && vi->may_have_pointers)
5276 if (fo->only_restrict_pointers)
5277 make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
5279 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
5280 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
5284 const char *newname = "NULL";
5289 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5290 "+" HOST_WIDE_INT_PRINT_DEC,
5291 vi->name, fo->offset, fo->size);
5292 newname = ggc_strdup (tempname);
5295 newvi = new_var_info (decl, newname);
5296 newvi->offset = fo->offset;
5297 newvi->size = fo->size;
5298 newvi->fullsize = vi->fullsize;
5299 newvi->may_have_pointers = fo->may_have_pointers;
5300 insert_into_field_list (vi, newvi);
5301 if ((newvi->is_global_var || TREE_CODE (decl) == PARM_DECL)
5302 && newvi->may_have_pointers)
5304 if (fo->only_restrict_pointers)
5305 make_constraint_from_restrict (newvi, "GLOBAL_RESTRICT");
5306 if (newvi->is_global_var && !in_ipa_mode)
5307 make_copy_constraint (newvi, nonlocal_id);
5314 vi->is_full_var = true;
5316 VEC_free (fieldoff_s, heap, fieldstack);
5321 /* Print out the points-to solution for VAR to FILE. */
5324 dump_solution_for_var (FILE *file, unsigned int var)
5326 varinfo_t vi = get_varinfo (var);
5330 /* Dump the solution for unified vars anyway, this avoids difficulties
5331 in scanning dumps in the testsuite. */
5332 fprintf (file, "%s = { ", vi->name);
5333 vi = get_varinfo (find (var));
5334 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5335 fprintf (file, "%s ", get_varinfo (i)->name);
5336 fprintf (file, "}");
5338 /* But note when the variable was unified. */
5340 fprintf (file, " same as %s", vi->name);
5342 fprintf (file, "\n");
5345 /* Print the points-to solution for VAR to stdout. */
5348 debug_solution_for_var (unsigned int var)
5350 dump_solution_for_var (stdout, var);
5353 /* Create varinfo structures for all of the variables in the
5354 function for intraprocedural mode. */
5357 intra_create_variable_infos (void)
5361 /* For each incoming pointer argument arg, create the constraint ARG
5362 = NONLOCAL or a dummy variable if it is a restrict qualified
5363 passed-by-reference argument. */
5364 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
5368 if (!could_have_pointers (t))
5371 /* For restrict qualified pointers to objects passed by
5372 reference build a real representative for the pointed-to object. */
5373 if (DECL_BY_REFERENCE (t)
5374 && POINTER_TYPE_P (TREE_TYPE (t))
5375 && TYPE_RESTRICT (TREE_TYPE (t)))
5377 struct constraint_expr lhsc, rhsc;
5379 tree heapvar = heapvar_lookup (t, 0);
5380 if (heapvar == NULL_TREE)
5383 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
5385 DECL_EXTERNAL (heapvar) = 1;
5386 heapvar_insert (t, 0, heapvar);
5387 ann = get_var_ann (heapvar);
5388 ann->is_heapvar = 1;
5390 if (gimple_referenced_vars (cfun))
5391 add_referenced_var (heapvar);
5392 lhsc.var = get_vi_for_tree (t)->id;
5395 rhsc.var = (vi = get_vi_for_tree (heapvar))->id;
5396 rhsc.type = ADDRESSOF;
5398 process_constraint (new_constraint (lhsc, rhsc));
5399 vi->is_restrict_var = 1;
5403 for (p = get_vi_for_tree (t); p; p = p->next)
5404 if (p->may_have_pointers)
5405 make_constraint_from (p, nonlocal_id);
5406 if (POINTER_TYPE_P (TREE_TYPE (t))
5407 && TYPE_RESTRICT (TREE_TYPE (t)))
5408 make_constraint_from_restrict (get_vi_for_tree (t), "PARM_RESTRICT");
5411 /* Add a constraint for a result decl that is passed by reference. */
5412 if (DECL_RESULT (cfun->decl)
5413 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
5415 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
5417 for (p = result_vi; p; p = p->next)
5418 make_constraint_from (p, nonlocal_id);
5421 /* Add a constraint for the incoming static chain parameter. */
5422 if (cfun->static_chain_decl != NULL_TREE)
5424 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
5426 for (p = chain_vi; p; p = p->next)
5427 make_constraint_from (p, nonlocal_id);
5431 /* Structure used to put solution bitmaps in a hashtable so they can
5432 be shared among variables with the same points-to set. */
5434 typedef struct shared_bitmap_info
5438 } *shared_bitmap_info_t;
5439 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5441 static htab_t shared_bitmap_table;
5443 /* Hash function for a shared_bitmap_info_t */
5446 shared_bitmap_hash (const void *p)
5448 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5449 return bi->hashcode;
5452 /* Equality function for two shared_bitmap_info_t's. */
5455 shared_bitmap_eq (const void *p1, const void *p2)
5457 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5458 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5459 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5462 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5463 existing instance if there is one, NULL otherwise. */
5466 shared_bitmap_lookup (bitmap pt_vars)
5469 struct shared_bitmap_info sbi;
5471 sbi.pt_vars = pt_vars;
5472 sbi.hashcode = bitmap_hash (pt_vars);
5474 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5475 sbi.hashcode, NO_INSERT);
5479 return ((shared_bitmap_info_t) *slot)->pt_vars;
5483 /* Add a bitmap to the shared bitmap hashtable. */
5486 shared_bitmap_add (bitmap pt_vars)
5489 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5491 sbi->pt_vars = pt_vars;
5492 sbi->hashcode = bitmap_hash (pt_vars);
5494 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
5495 sbi->hashcode, INSERT);
5496 gcc_assert (!*slot);
5497 *slot = (void *) sbi;
5501 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5504 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
5509 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
5511 varinfo_t vi = get_varinfo (i);
5513 /* The only artificial variables that are allowed in a may-alias
5514 set are heap variables. */
5515 if (vi->is_artificial_var && !vi->is_heap_var)
5518 if (TREE_CODE (vi->decl) == VAR_DECL
5519 || TREE_CODE (vi->decl) == PARM_DECL
5520 || TREE_CODE (vi->decl) == RESULT_DECL)
5522 /* If we are in IPA mode we will not recompute points-to
5523 sets after inlining so make sure they stay valid. */
5525 && !DECL_PT_UID_SET_P (vi->decl))
5526 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
5528 /* Add the decl to the points-to set. Note that the points-to
5529 set contains global variables. */
5530 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
5531 if (vi->is_global_var)
5532 pt->vars_contains_global = true;
5538 /* Compute the points-to solution *PT for the variable VI. */
5541 find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
5545 bitmap finished_solution;
5549 memset (pt, 0, sizeof (struct pt_solution));
5551 /* This variable may have been collapsed, let's get the real
5553 vi = get_varinfo (find (orig_vi->id));
5555 /* Translate artificial variables into SSA_NAME_PTR_INFO
5557 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5559 varinfo_t vi = get_varinfo (i);
5561 if (vi->is_artificial_var)
5563 if (vi->id == nothing_id)
5565 else if (vi->id == escaped_id)
5568 pt->ipa_escaped = 1;
5572 else if (vi->id == nonlocal_id)
5574 else if (vi->is_heap_var)
5575 /* We represent heapvars in the points-to set properly. */
5577 else if (vi->id == readonly_id)
5580 else if (vi->id == anything_id
5581 || vi->id == integer_id)
5584 if (vi->is_restrict_var)
5585 pt->vars_contains_restrict = true;
5588 /* Instead of doing extra work, simply do not create
5589 elaborate points-to information for pt_anything pointers. */
5591 && (orig_vi->is_artificial_var
5592 || !pt->vars_contains_restrict))
5595 /* Share the final set of variables when possible. */
5596 finished_solution = BITMAP_GGC_ALLOC ();
5597 stats.points_to_sets_created++;
5599 set_uids_in_ptset (finished_solution, vi->solution, pt);
5600 result = shared_bitmap_lookup (finished_solution);
5603 shared_bitmap_add (finished_solution);
5604 pt->vars = finished_solution;
5609 bitmap_clear (finished_solution);
5613 /* Given a pointer variable P, fill in its points-to set. */
5616 find_what_p_points_to (tree p)
5618 struct ptr_info_def *pi;
5622 /* For parameters, get at the points-to set for the actual parm
5624 if (TREE_CODE (p) == SSA_NAME
5625 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
5626 && SSA_NAME_IS_DEFAULT_DEF (p))
5627 lookup_p = SSA_NAME_VAR (p);
5629 vi = lookup_vi_for_tree (lookup_p);
5633 pi = get_ptr_info (p);
5634 find_what_var_points_to (vi, &pi->pt);
5638 /* Query statistics for points-to solutions. */
5641 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
5642 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
5643 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
5644 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
5648 dump_pta_stats (FILE *s)
5650 fprintf (s, "\nPTA query stats:\n");
5651 fprintf (s, " pt_solution_includes: "
5652 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5653 HOST_WIDE_INT_PRINT_DEC" queries\n",
5654 pta_stats.pt_solution_includes_no_alias,
5655 pta_stats.pt_solution_includes_no_alias
5656 + pta_stats.pt_solution_includes_may_alias);
5657 fprintf (s, " pt_solutions_intersect: "
5658 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5659 HOST_WIDE_INT_PRINT_DEC" queries\n",
5660 pta_stats.pt_solutions_intersect_no_alias,
5661 pta_stats.pt_solutions_intersect_no_alias
5662 + pta_stats.pt_solutions_intersect_may_alias);
5666 /* Reset the points-to solution *PT to a conservative default
5667 (point to anything). */
5670 pt_solution_reset (struct pt_solution *pt)
5672 memset (pt, 0, sizeof (struct pt_solution));
5673 pt->anything = true;
5676 /* Set the points-to solution *PT to point only to the variables
5677 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
5678 global variables and VARS_CONTAINS_RESTRICT specifies whether
5679 it contains restrict tag variables. */
5682 pt_solution_set (struct pt_solution *pt, bitmap vars,
5683 bool vars_contains_global, bool vars_contains_restrict)
5685 memset (pt, 0, sizeof (struct pt_solution));
5687 pt->vars_contains_global = vars_contains_global;
5688 pt->vars_contains_restrict = vars_contains_restrict;
5691 /* Computes the union of the points-to solutions *DEST and *SRC and
5692 stores the result in *DEST. This changes the points-to bitmap
5693 of *DEST and thus may not be used if that might be shared.
5694 The points-to bitmap of *SRC and *DEST will not be shared after
5695 this function if they were not before. */
5698 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
5700 dest->anything |= src->anything;
5703 pt_solution_reset (dest);
5707 dest->nonlocal |= src->nonlocal;
5708 dest->escaped |= src->escaped;
5709 dest->ipa_escaped |= src->ipa_escaped;
5710 dest->null |= src->null;
5711 dest->vars_contains_global |= src->vars_contains_global;
5712 dest->vars_contains_restrict |= src->vars_contains_restrict;
5717 dest->vars = BITMAP_GGC_ALLOC ();
5718 bitmap_ior_into (dest->vars, src->vars);
5721 /* Return true if the points-to solution *PT is empty. */
5724 pt_solution_empty_p (struct pt_solution *pt)
5731 && !bitmap_empty_p (pt->vars))
5734 /* If the solution includes ESCAPED, check if that is empty. */
5736 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
5739 /* If the solution includes ESCAPED, check if that is empty. */
5741 && !pt_solution_empty_p (&ipa_escaped_pt))
5747 /* Return true if the points-to solution *PT includes global memory. */
5750 pt_solution_includes_global (struct pt_solution *pt)
5754 || pt->vars_contains_global)
5758 return pt_solution_includes_global (&cfun->gimple_df->escaped);
5760 if (pt->ipa_escaped)
5761 return pt_solution_includes_global (&ipa_escaped_pt);
5763 /* ??? This predicate is not correct for the IPA-PTA solution
5764 as we do not properly distinguish between unit escape points
5765 and global variables. */
5766 if (cfun->gimple_df->ipa_pta)
5772 /* Return true if the points-to solution *PT includes the variable
5773 declaration DECL. */
5776 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
5782 && is_global_var (decl))
5786 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
5789 /* If the solution includes ESCAPED, check it. */
5791 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
5794 /* If the solution includes ESCAPED, check it. */
5796 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
5803 pt_solution_includes (struct pt_solution *pt, const_tree decl)
5805 bool res = pt_solution_includes_1 (pt, decl);
5807 ++pta_stats.pt_solution_includes_may_alias;
5809 ++pta_stats.pt_solution_includes_no_alias;
5813 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
5817 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
5819 if (pt1->anything || pt2->anything)
5822 /* If either points to unknown global memory and the other points to
5823 any global memory they alias. */
5826 || pt2->vars_contains_global))
5828 && pt1->vars_contains_global))
5831 /* Check the escaped solution if required. */
5832 if ((pt1->escaped || pt2->escaped)
5833 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
5835 /* If both point to escaped memory and that solution
5836 is not empty they alias. */
5837 if (pt1->escaped && pt2->escaped)
5840 /* If either points to escaped memory see if the escaped solution
5841 intersects with the other. */
5843 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
5845 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
5849 /* Check the escaped solution if required.
5850 ??? Do we need to check the local against the IPA escaped sets? */
5851 if ((pt1->ipa_escaped || pt2->ipa_escaped)
5852 && !pt_solution_empty_p (&ipa_escaped_pt))
5854 /* If both point to escaped memory and that solution
5855 is not empty they alias. */
5856 if (pt1->ipa_escaped && pt2->ipa_escaped)
5859 /* If either points to escaped memory see if the escaped solution
5860 intersects with the other. */
5861 if ((pt1->ipa_escaped
5862 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
5863 || (pt2->ipa_escaped
5864 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
5868 /* Now both pointers alias if their points-to solution intersects. */
5871 && bitmap_intersect_p (pt1->vars, pt2->vars));
5875 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
5877 bool res = pt_solutions_intersect_1 (pt1, pt2);
5879 ++pta_stats.pt_solutions_intersect_may_alias;
5881 ++pta_stats.pt_solutions_intersect_no_alias;
5885 /* Return true if both points-to solutions PT1 and PT2 for two restrict
5886 qualified pointers are possibly based on the same pointer. */
5889 pt_solutions_same_restrict_base (struct pt_solution *pt1,
5890 struct pt_solution *pt2)
5892 /* If we deal with points-to solutions of two restrict qualified
5893 pointers solely rely on the pointed-to variable bitmap intersection.
5894 For two pointers that are based on each other the bitmaps will
5896 if (pt1->vars_contains_restrict
5897 && pt2->vars_contains_restrict)
5899 gcc_assert (pt1->vars && pt2->vars);
5900 return bitmap_intersect_p (pt1->vars, pt2->vars);
5907 /* Dump points-to information to OUTFILE. */
5910 dump_sa_points_to_info (FILE *outfile)
5914 fprintf (outfile, "\nPoints-to sets\n\n");
5916 if (dump_flags & TDF_STATS)
5918 fprintf (outfile, "Stats:\n");
5919 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
5920 fprintf (outfile, "Non-pointer vars: %d\n",
5921 stats.nonpointer_vars);
5922 fprintf (outfile, "Statically unified vars: %d\n",
5923 stats.unified_vars_static);
5924 fprintf (outfile, "Dynamically unified vars: %d\n",
5925 stats.unified_vars_dynamic);
5926 fprintf (outfile, "Iterations: %d\n", stats.iterations);
5927 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
5928 fprintf (outfile, "Number of implicit edges: %d\n",
5929 stats.num_implicit_edges);
5932 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5934 varinfo_t vi = get_varinfo (i);
5935 if (!vi->may_have_pointers)
5937 dump_solution_for_var (outfile, i);
5942 /* Debug points-to information to stderr. */
5945 debug_sa_points_to_info (void)
5947 dump_sa_points_to_info (stderr);
5951 /* Initialize the always-existing constraint variables for NULL
5952 ANYTHING, READONLY, and INTEGER */
5955 init_base_vars (void)
5957 struct constraint_expr lhs, rhs;
5958 varinfo_t var_anything;
5959 varinfo_t var_nothing;
5960 varinfo_t var_readonly;
5961 varinfo_t var_escaped;
5962 varinfo_t var_nonlocal;
5963 varinfo_t var_storedanything;
5964 varinfo_t var_integer;
5966 /* Create the NULL variable, used to represent that a variable points
5968 var_nothing = new_var_info (NULL_TREE, "NULL");
5969 gcc_assert (var_nothing->id == nothing_id);
5970 var_nothing->is_artificial_var = 1;
5971 var_nothing->offset = 0;
5972 var_nothing->size = ~0;
5973 var_nothing->fullsize = ~0;
5974 var_nothing->is_special_var = 1;
5975 var_nothing->may_have_pointers = 0;
5976 var_nothing->is_global_var = 0;
5978 /* Create the ANYTHING variable, used to represent that a variable
5979 points to some unknown piece of memory. */
5980 var_anything = new_var_info (NULL_TREE, "ANYTHING");
5981 gcc_assert (var_anything->id == anything_id);
5982 var_anything->is_artificial_var = 1;
5983 var_anything->size = ~0;
5984 var_anything->offset = 0;
5985 var_anything->next = NULL;
5986 var_anything->fullsize = ~0;
5987 var_anything->is_special_var = 1;
5989 /* Anything points to anything. This makes deref constraints just
5990 work in the presence of linked list and other p = *p type loops,
5991 by saying that *ANYTHING = ANYTHING. */
5993 lhs.var = anything_id;
5995 rhs.type = ADDRESSOF;
5996 rhs.var = anything_id;
5999 /* This specifically does not use process_constraint because
6000 process_constraint ignores all anything = anything constraints, since all
6001 but this one are redundant. */
6002 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
6004 /* Create the READONLY variable, used to represent that a variable
6005 points to readonly memory. */
6006 var_readonly = new_var_info (NULL_TREE, "READONLY");
6007 gcc_assert (var_readonly->id == readonly_id);
6008 var_readonly->is_artificial_var = 1;
6009 var_readonly->offset = 0;
6010 var_readonly->size = ~0;
6011 var_readonly->fullsize = ~0;
6012 var_readonly->next = NULL;
6013 var_readonly->is_special_var = 1;
6015 /* readonly memory points to anything, in order to make deref
6016 easier. In reality, it points to anything the particular
6017 readonly variable can point to, but we don't track this
6020 lhs.var = readonly_id;
6022 rhs.type = ADDRESSOF;
6023 rhs.var = readonly_id; /* FIXME */
6025 process_constraint (new_constraint (lhs, rhs));
6027 /* Create the ESCAPED variable, used to represent the set of escaped
6029 var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6030 gcc_assert (var_escaped->id == escaped_id);
6031 var_escaped->is_artificial_var = 1;
6032 var_escaped->offset = 0;
6033 var_escaped->size = ~0;
6034 var_escaped->fullsize = ~0;
6035 var_escaped->is_special_var = 0;
6037 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6039 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6040 gcc_assert (var_nonlocal->id == nonlocal_id);
6041 var_nonlocal->is_artificial_var = 1;
6042 var_nonlocal->offset = 0;
6043 var_nonlocal->size = ~0;
6044 var_nonlocal->fullsize = ~0;
6045 var_nonlocal->is_special_var = 1;
6047 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6049 lhs.var = escaped_id;
6052 rhs.var = escaped_id;
6054 process_constraint (new_constraint (lhs, rhs));
6056 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6057 whole variable escapes. */
6059 lhs.var = escaped_id;
6062 rhs.var = escaped_id;
6063 rhs.offset = UNKNOWN_OFFSET;
6064 process_constraint (new_constraint (lhs, rhs));
6066 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6067 everything pointed to by escaped points to what global memory can
6070 lhs.var = escaped_id;
6073 rhs.var = nonlocal_id;
6075 process_constraint (new_constraint (lhs, rhs));
6077 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6078 global memory may point to global memory and escaped memory. */
6080 lhs.var = nonlocal_id;
6082 rhs.type = ADDRESSOF;
6083 rhs.var = nonlocal_id;
6085 process_constraint (new_constraint (lhs, rhs));
6086 rhs.type = ADDRESSOF;
6087 rhs.var = escaped_id;
6089 process_constraint (new_constraint (lhs, rhs));
6091 /* Create the STOREDANYTHING variable, used to represent the set of
6092 variables stored to *ANYTHING. */
6093 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6094 gcc_assert (var_storedanything->id == storedanything_id);
6095 var_storedanything->is_artificial_var = 1;
6096 var_storedanything->offset = 0;
6097 var_storedanything->size = ~0;
6098 var_storedanything->fullsize = ~0;
6099 var_storedanything->is_special_var = 0;
6101 /* Create the INTEGER variable, used to represent that a variable points
6102 to what an INTEGER "points to". */
6103 var_integer = new_var_info (NULL_TREE, "INTEGER");
6104 gcc_assert (var_integer->id == integer_id);
6105 var_integer->is_artificial_var = 1;
6106 var_integer->size = ~0;
6107 var_integer->fullsize = ~0;
6108 var_integer->offset = 0;
6109 var_integer->next = NULL;
6110 var_integer->is_special_var = 1;
6112 /* INTEGER = ANYTHING, because we don't know where a dereference of
6113 a random integer will point to. */
6115 lhs.var = integer_id;
6117 rhs.type = ADDRESSOF;
6118 rhs.var = anything_id;
6120 process_constraint (new_constraint (lhs, rhs));
6123 /* Initialize things necessary to perform PTA */
6126 init_alias_vars (void)
6128 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6130 bitmap_obstack_initialize (&pta_obstack);
6131 bitmap_obstack_initialize (&oldpta_obstack);
6132 bitmap_obstack_initialize (&predbitmap_obstack);
6134 constraint_pool = create_alloc_pool ("Constraint pool",
6135 sizeof (struct constraint), 30);
6136 variable_info_pool = create_alloc_pool ("Variable info pool",
6137 sizeof (struct variable_info), 30);
6138 constraints = VEC_alloc (constraint_t, heap, 8);
6139 varmap = VEC_alloc (varinfo_t, heap, 8);
6140 vi_for_tree = pointer_map_create ();
6141 call_stmt_vars = pointer_map_create ();
6143 memset (&stats, 0, sizeof (stats));
6144 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
6145 shared_bitmap_eq, free);
6149 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6150 predecessor edges. */
6153 remove_preds_and_fake_succs (constraint_graph_t graph)
6157 /* Clear the implicit ref and address nodes from the successor
6159 for (i = 0; i < FIRST_REF_NODE; i++)
6161 if (graph->succs[i])
6162 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6163 FIRST_REF_NODE * 2);
6166 /* Free the successor list for the non-ref nodes. */
6167 for (i = FIRST_REF_NODE; i < graph->size; i++)
6169 if (graph->succs[i])
6170 BITMAP_FREE (graph->succs[i]);
6173 /* Now reallocate the size of the successor list as, and blow away
6174 the predecessor bitmaps. */
6175 graph->size = VEC_length (varinfo_t, varmap);
6176 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6178 free (graph->implicit_preds);
6179 graph->implicit_preds = NULL;
6180 free (graph->preds);
6181 graph->preds = NULL;
6182 bitmap_obstack_release (&predbitmap_obstack);
6185 /* Initialize the heapvar for statement mapping. */
6188 init_alias_heapvars (void)
6190 if (!heapvar_for_stmt)
6191 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, heapvar_map_eq,
6195 /* Delete the heapvar for statement mapping. */
6198 delete_alias_heapvars (void)
6200 if (heapvar_for_stmt)
6201 htab_delete (heapvar_for_stmt);
6202 heapvar_for_stmt = NULL;
6205 /* Solve the constraint set. */
6208 solve_constraints (void)
6210 struct scc_info *si;
6214 "\nCollapsing static cycles and doing variable "
6217 init_graph (VEC_length (varinfo_t, varmap) * 2);
6220 fprintf (dump_file, "Building predecessor graph\n");
6221 build_pred_graph ();
6224 fprintf (dump_file, "Detecting pointer and location "
6226 si = perform_var_substitution (graph);
6229 fprintf (dump_file, "Rewriting constraints and unifying "
6231 rewrite_constraints (graph, si);
6233 build_succ_graph ();
6234 free_var_substitution_info (si);
6236 if (dump_file && (dump_flags & TDF_GRAPH))
6237 dump_constraint_graph (dump_file);
6239 move_complex_constraints (graph);
6242 fprintf (dump_file, "Uniting pointer but not location equivalent "
6244 unite_pointer_equivalences (graph);
6247 fprintf (dump_file, "Finding indirect cycles\n");
6248 find_indirect_cycles (graph);
6250 /* Implicit nodes and predecessors are no longer necessary at this
6252 remove_preds_and_fake_succs (graph);
6255 fprintf (dump_file, "Solving graph\n");
6257 solve_graph (graph);
6260 dump_sa_points_to_info (dump_file);
6263 /* Create points-to sets for the current function. See the comments
6264 at the start of the file for an algorithmic overview. */
6267 compute_points_to_sets (void)
6273 timevar_push (TV_TREE_PTA);
6276 init_alias_heapvars ();
6278 intra_create_variable_infos ();
6280 /* Now walk all statements and build the constraint set. */
6283 gimple_stmt_iterator gsi;
6285 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6287 gimple phi = gsi_stmt (gsi);
6289 if (is_gimple_reg (gimple_phi_result (phi)))
6290 find_func_aliases (phi);
6293 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6295 gimple stmt = gsi_stmt (gsi);
6297 find_func_aliases (stmt);
6303 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6304 dump_constraints (dump_file, 0);
6307 /* From the constraints compute the points-to sets. */
6308 solve_constraints ();
6310 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6311 find_what_var_points_to (get_varinfo (escaped_id),
6312 &cfun->gimple_df->escaped);
6314 /* Make sure the ESCAPED solution (which is used as placeholder in
6315 other solutions) does not reference itself. This simplifies
6316 points-to solution queries. */
6317 cfun->gimple_df->escaped.escaped = 0;
6319 /* Mark escaped HEAP variables as global. */
6320 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); ++i)
6322 && !vi->is_restrict_var
6323 && !vi->is_global_var)
6324 DECL_EXTERNAL (vi->decl) = vi->is_global_var
6325 = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
6327 /* Compute the points-to sets for pointer SSA_NAMEs. */
6328 for (i = 0; i < num_ssa_names; ++i)
6330 tree ptr = ssa_name (i);
6332 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6333 find_what_p_points_to (ptr);
6336 /* Compute the call-used/clobbered sets. */
6339 gimple_stmt_iterator gsi;
6341 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6343 gimple stmt = gsi_stmt (gsi);
6344 struct pt_solution *pt;
6345 if (!is_gimple_call (stmt))
6348 pt = gimple_call_use_set (stmt);
6349 if (gimple_call_flags (stmt) & ECF_CONST)
6350 memset (pt, 0, sizeof (struct pt_solution));
6351 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6353 find_what_var_points_to (vi, pt);
6354 /* Escaped (and thus nonlocal) variables are always
6355 implicitly used by calls. */
6356 /* ??? ESCAPED can be empty even though NONLOCAL
6363 /* If there is nothing special about this call then
6364 we have made everything that is used also escape. */
6365 *pt = cfun->gimple_df->escaped;
6369 pt = gimple_call_clobber_set (stmt);
6370 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6371 memset (pt, 0, sizeof (struct pt_solution));
6372 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6374 find_what_var_points_to (vi, pt);
6375 /* Escaped (and thus nonlocal) variables are always
6376 implicitly clobbered by calls. */
6377 /* ??? ESCAPED can be empty even though NONLOCAL
6384 /* If there is nothing special about this call then
6385 we have made everything that is used also escape. */
6386 *pt = cfun->gimple_df->escaped;
6392 timevar_pop (TV_TREE_PTA);
6396 /* Delete created points-to sets. */
6399 delete_points_to_sets (void)
6403 htab_delete (shared_bitmap_table);
6404 if (dump_file && (dump_flags & TDF_STATS))
6405 fprintf (dump_file, "Points to sets created:%d\n",
6406 stats.points_to_sets_created);
6408 pointer_map_destroy (vi_for_tree);
6409 pointer_map_destroy (call_stmt_vars);
6410 bitmap_obstack_release (&pta_obstack);
6411 VEC_free (constraint_t, heap, constraints);
6413 for (i = 0; i < graph->size; i++)
6414 VEC_free (constraint_t, heap, graph->complex[i]);
6415 free (graph->complex);
6418 free (graph->succs);
6420 free (graph->pe_rep);
6421 free (graph->indirect_cycles);
6424 VEC_free (varinfo_t, heap, varmap);
6425 free_alloc_pool (variable_info_pool);
6426 free_alloc_pool (constraint_pool);
6430 /* Compute points-to information for every SSA_NAME pointer in the
6431 current function and compute the transitive closure of escaped
6432 variables to re-initialize the call-clobber states of local variables. */
6435 compute_may_aliases (void)
6437 if (cfun->gimple_df->ipa_pta)
6441 fprintf (dump_file, "\nNot re-computing points-to information "
6442 "because IPA points-to information is available.\n\n");
6444 /* But still dump what we have remaining it. */
6445 dump_alias_info (dump_file);
6447 if (dump_flags & TDF_DETAILS)
6448 dump_referenced_vars (dump_file);
6454 /* For each pointer P_i, determine the sets of variables that P_i may
6455 point-to. Compute the reachability set of escaped and call-used
6457 compute_points_to_sets ();
6459 /* Debugging dumps. */
6462 dump_alias_info (dump_file);
6464 if (dump_flags & TDF_DETAILS)
6465 dump_referenced_vars (dump_file);
6468 /* Deallocate memory used by aliasing data structures and the internal
6469 points-to solution. */
6470 delete_points_to_sets ();
6472 gcc_assert (!need_ssa_update_p (cfun));
6478 gate_tree_pta (void)
6480 return flag_tree_pta;
6483 /* A dummy pass to cause points-to information to be computed via
6484 TODO_rebuild_alias. */
6486 struct gimple_opt_pass pass_build_alias =
6491 gate_tree_pta, /* gate */
6495 0, /* static_pass_number */
6496 TV_NONE, /* tv_id */
6497 PROP_cfg | PROP_ssa, /* properties_required */
6498 0, /* properties_provided */
6499 0, /* properties_destroyed */
6500 0, /* todo_flags_start */
6501 TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
6505 /* A dummy pass to cause points-to information to be computed via
6506 TODO_rebuild_alias. */
6508 struct gimple_opt_pass pass_build_ealias =
6512 "ealias", /* name */
6513 gate_tree_pta, /* gate */
6517 0, /* static_pass_number */
6518 TV_NONE, /* tv_id */
6519 PROP_cfg | PROP_ssa, /* properties_required */
6520 0, /* properties_provided */
6521 0, /* properties_destroyed */
6522 0, /* todo_flags_start */
6523 TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
6528 /* Return true if we should execute IPA PTA. */
6534 /* Don't bother doing anything if the program has errors. */
6535 && !(errorcount || sorrycount));
6538 /* IPA PTA solutions for ESCAPED. */
6539 struct pt_solution ipa_escaped_pt
6540 = { true, false, false, false, false, false, false, NULL };
6542 /* Execute the driver for IPA PTA. */
6544 ipa_pta_execute (void)
6546 struct cgraph_node *node;
6547 struct varpool_node *var;
6552 init_alias_heapvars ();
6555 /* Build the constraints. */
6556 for (node = cgraph_nodes; node; node = node->next)
6558 /* Nodes without a body are not interesting. Especially do not
6559 visit clones at this point for now - we get duplicate decls
6560 there for inline clones at least. */
6561 if (!gimple_has_body_p (node->decl)
6565 create_function_info_for (node->decl,
6566 cgraph_node_name (node));
6569 /* Create constraints for global variables and their initializers. */
6570 for (var = varpool_nodes; var; var = var->next)
6571 get_vi_for_tree (var->decl);
6576 "Generating constraints for global initializers\n\n");
6577 dump_constraints (dump_file, 0);
6578 fprintf (dump_file, "\n");
6580 from = VEC_length (constraint_t, constraints);
6582 for (node = cgraph_nodes; node; node = node->next)
6584 struct function *func;
6588 /* Nodes without a body are not interesting. */
6589 if (!gimple_has_body_p (node->decl)
6595 "Generating constraints for %s\n",
6596 cgraph_node_name (node));
6598 func = DECL_STRUCT_FUNCTION (node->decl);
6599 old_func_decl = current_function_decl;
6601 current_function_decl = node->decl;
6603 /* For externally visible functions use local constraints for
6604 their arguments. For local functions we see all callers
6605 and thus do not need initial constraints for parameters. */
6606 if (node->local.externally_visible)
6607 intra_create_variable_infos ();
6609 /* Build constriants for the function body. */
6610 FOR_EACH_BB_FN (bb, func)
6612 gimple_stmt_iterator gsi;
6614 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6617 gimple phi = gsi_stmt (gsi);
6619 if (is_gimple_reg (gimple_phi_result (phi)))
6620 find_func_aliases (phi);
6623 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6625 gimple stmt = gsi_stmt (gsi);
6627 find_func_aliases (stmt);
6628 find_func_clobbers (stmt);
6632 current_function_decl = old_func_decl;
6637 fprintf (dump_file, "\n");
6638 dump_constraints (dump_file, from);
6639 fprintf (dump_file, "\n");
6641 from = VEC_length (constraint_t, constraints);
6644 /* From the constraints compute the points-to sets. */
6645 solve_constraints ();
6647 /* Compute the global points-to sets for ESCAPED.
6648 ??? Note that the computed escape set is not correct
6649 for the whole unit as we fail to consider graph edges to
6650 externally visible functions. */
6651 find_what_var_points_to (get_varinfo (escaped_id), &ipa_escaped_pt);
6653 /* Make sure the ESCAPED solution (which is used as placeholder in
6654 other solutions) does not reference itself. This simplifies
6655 points-to solution queries. */
6656 ipa_escaped_pt.ipa_escaped = 0;
6658 /* Assign the points-to sets to the SSA names in the unit. */
6659 for (node = cgraph_nodes; node; node = node->next)
6662 struct function *fn;
6666 struct pt_solution uses, clobbers;
6667 struct cgraph_edge *e;
6669 /* Nodes without a body are not interesting. */
6670 if (!gimple_has_body_p (node->decl)
6674 fn = DECL_STRUCT_FUNCTION (node->decl);
6676 /* Compute the points-to sets for pointer SSA_NAMEs. */
6677 for (i = 0; VEC_iterate (tree, fn->gimple_df->ssa_names, i, ptr); ++i)
6680 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6681 find_what_p_points_to (ptr);
6684 /* Compute the call-use and call-clobber sets for all direct calls. */
6685 fi = lookup_vi_for_tree (node->decl);
6686 gcc_assert (fi->is_fn_info);
6687 find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
6689 find_what_var_points_to (first_vi_for_offset (fi, fi_uses), &uses);
6690 for (e = node->callers; e; e = e->next_caller)
6695 *gimple_call_clobber_set (e->call_stmt) = clobbers;
6696 *gimple_call_use_set (e->call_stmt) = uses;
6699 /* Compute the call-use and call-clobber sets for indirect calls
6700 and calls to external functions. */
6701 FOR_EACH_BB_FN (bb, fn)
6703 gimple_stmt_iterator gsi;
6705 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6707 gimple stmt = gsi_stmt (gsi);
6708 struct pt_solution *pt;
6712 if (!is_gimple_call (stmt))
6715 /* Handle direct calls to external functions. */
6716 decl = gimple_call_fndecl (stmt);
6718 && (!(fi = lookup_vi_for_tree (decl))
6719 || !fi->is_fn_info))
6721 pt = gimple_call_use_set (stmt);
6722 if (gimple_call_flags (stmt) & ECF_CONST)
6723 memset (pt, 0, sizeof (struct pt_solution));
6724 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6726 find_what_var_points_to (vi, pt);
6727 /* Escaped (and thus nonlocal) variables are always
6728 implicitly used by calls. */
6729 /* ??? ESCAPED can be empty even though NONLOCAL
6732 pt->ipa_escaped = 1;
6736 /* If there is nothing special about this call then
6737 we have made everything that is used also escape. */
6738 *pt = ipa_escaped_pt;
6742 pt = gimple_call_clobber_set (stmt);
6743 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6744 memset (pt, 0, sizeof (struct pt_solution));
6745 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6747 find_what_var_points_to (vi, pt);
6748 /* Escaped (and thus nonlocal) variables are always
6749 implicitly clobbered by calls. */
6750 /* ??? ESCAPED can be empty even though NONLOCAL
6753 pt->ipa_escaped = 1;
6757 /* If there is nothing special about this call then
6758 we have made everything that is used also escape. */
6759 *pt = ipa_escaped_pt;
6764 /* Handle indirect calls. */
6766 && (fi = get_fi_for_callee (stmt)))
6768 /* We need to accumulate all clobbers/uses of all possible
6770 fi = get_varinfo (find (fi->id));
6771 /* If we cannot constrain the set of functions we'll end up
6772 calling we end up using/clobbering everything. */
6773 if (bitmap_bit_p (fi->solution, anything_id)
6774 || bitmap_bit_p (fi->solution, nonlocal_id)
6775 || bitmap_bit_p (fi->solution, escaped_id))
6777 pt_solution_reset (gimple_call_clobber_set (stmt));
6778 pt_solution_reset (gimple_call_use_set (stmt));
6784 struct pt_solution *uses, *clobbers;
6786 uses = gimple_call_use_set (stmt);
6787 clobbers = gimple_call_clobber_set (stmt);
6788 memset (uses, 0, sizeof (struct pt_solution));
6789 memset (clobbers, 0, sizeof (struct pt_solution));
6790 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
6792 struct pt_solution sol;
6794 vi = get_varinfo (i);
6795 if (!vi->is_fn_info)
6797 /* ??? We could be more precise here? */
6799 uses->ipa_escaped = 1;
6800 clobbers->nonlocal = 1;
6801 clobbers->ipa_escaped = 1;
6805 if (!uses->anything)
6807 find_what_var_points_to
6808 (first_vi_for_offset (vi, fi_uses), &sol);
6809 pt_solution_ior_into (uses, &sol);
6811 if (!clobbers->anything)
6813 find_what_var_points_to
6814 (first_vi_for_offset (vi, fi_clobbers), &sol);
6815 pt_solution_ior_into (clobbers, &sol);
6823 fn->gimple_df->ipa_pta = true;
6826 delete_points_to_sets ();
6833 struct simple_ipa_opt_pass pass_ipa_pta =
6838 gate_ipa_pta, /* gate */
6839 ipa_pta_execute, /* execute */
6842 0, /* static_pass_number */
6843 TV_IPA_PTA, /* tv_id */
6844 0, /* properties_required */
6845 0, /* properties_provided */
6846 0, /* properties_destroyed */
6847 0, /* todo_flags_start */
6848 TODO_update_ssa /* todo_flags_finish */
6853 #include "gt-tree-ssa-structalias.h"