1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dberlin@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
36 #include "tree-flow.h"
37 #include "tree-inline.h"
39 #include "diagnostic.h"
45 #include "tree-pass.h"
47 #include "alloc-pool.h"
48 #include "splay-tree.h"
52 #include "pointer-set.h"
54 /* The idea behind this analyzer is to generate set constraints from the
55 program, then solve the resulting constraints in order to generate the
58 Set constraints are a way of modeling program analysis problems that
59 involve sets. They consist of an inclusion constraint language,
60 describing the variables (each variable is a set) and operations that
61 are involved on the variables, and a set of rules that derive facts
62 from these operations. To solve a system of set constraints, you derive
63 all possible facts under the rules, which gives you the correct sets
66 See "Efficient Field-sensitive pointer analysis for C" by "David
67 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
68 http://citeseer.ist.psu.edu/pearce04efficient.html
70 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
71 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
72 http://citeseer.ist.psu.edu/heintze01ultrafast.html
74 There are three types of real constraint expressions, DEREF,
75 ADDRESSOF, and SCALAR. Each constraint expression consists
76 of a constraint type, a variable, and an offset.
78 SCALAR is a constraint expression type used to represent x, whether
79 it appears on the LHS or the RHS of a statement.
80 DEREF is a constraint expression type used to represent *x, whether
81 it appears on the LHS or the RHS of a statement.
82 ADDRESSOF is a constraint expression used to represent &x, whether
83 it appears on the LHS or the RHS of a statement.
85 Each pointer variable in the program is assigned an integer id, and
86 each field of a structure variable is assigned an integer id as well.
88 Structure variables are linked to their list of fields through a "next
89 field" in each variable that points to the next field in offset
91 Each variable for a structure field has
93 1. "size", that tells the size in bits of that field.
94 2. "fullsize, that tells the size in bits of the entire structure.
95 3. "offset", that tells the offset in bits from the beginning of the
96 structure to this field.
108 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
109 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
110 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
113 In order to solve the system of set constraints, the following is
116 1. Each constraint variable x has a solution set associated with it,
119 2. Constraints are separated into direct, copy, and complex.
120 Direct constraints are ADDRESSOF constraints that require no extra
121 processing, such as P = &Q
122 Copy constraints are those of the form P = Q.
123 Complex constraints are all the constraints involving dereferences
124 and offsets (including offsetted copies).
126 3. All direct constraints of the form P = &Q are processed, such
127 that Q is added to Sol(P)
129 4. All complex constraints for a given constraint variable are stored in a
130 linked list attached to that variable's node.
132 5. A directed graph is built out of the copy constraints. Each
133 constraint variable is a node in the graph, and an edge from
134 Q to P is added for each copy constraint of the form P = Q
136 6. The graph is then walked, and solution sets are
137 propagated along the copy edges, such that an edge from Q to P
138 causes Sol(P) <- Sol(P) union Sol(Q).
140 7. As we visit each node, all complex constraints associated with
141 that node are processed by adding appropriate copy edges to the graph, or the
142 appropriate variables to the solution set.
144 8. The process of walking the graph is iterated until no solution
147 Prior to walking the graph in steps 6 and 7, We perform static
148 cycle elimination on the constraint graph, as well
149 as off-line variable substitution.
151 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
152 on and turned into anything), but isn't. You can just see what offset
153 inside the pointed-to struct it's going to access.
155 TODO: Constant bounded arrays can be handled as if they were structs of the
156 same number of elements.
158 TODO: Modeling heap and incoming pointers becomes much better if we
159 add fields to them as we discover them, which we could do.
161 TODO: We could handle unions, but to be honest, it's probably not
162 worth the pain or slowdown. */
164 /* IPA-PTA optimizations possible.
166 When the indirect function called is ANYTHING we can add disambiguation
167 based on the function signatures (or simply the parameter count which
168 is the varinfo size). We also do not need to consider functions that
169 do not have their address taken.
171 The is_global_var bit which marks escape points is overly conservative
172 in IPA mode. Split it to is_escape_point and is_global_var - only
173 externally visible globals are escape points in IPA mode. This is
174 also needed to fix the pt_solution_includes_global predicate
175 (and thus ptr_deref_may_alias_global_p).
177 The way we introduce DECL_PT_UID to avoid fixing up all points-to
178 sets in the translation unit when we copy a DECL during inlining
179 pessimizes precision. The advantage is that the DECL_PT_UID keeps
180 compile-time and memory usage overhead low - the points-to sets
181 do not grow or get unshared as they would during a fixup phase.
182 An alternative solution is to delay IPA PTA until after all
183 inlining transformations have been applied.
185 The way we propagate clobber/use information isn't optimized.
186 It should use a new complex constraint that properly filters
187 out local variables of the callee (though that would make
188 the sets invalid after inlining). OTOH we might as well
189 admit defeat to WHOPR and simply do all the clobber/use analysis
190 and propagation after PTA finished but before we threw away
191 points-to information for memory variables. WHOPR and PTA
192 do not play along well anyway - the whole constraint solving
193 would need to be done in WPA phase and it will be very interesting
194 to apply the results to local SSA names during LTRANS phase.
196 We probably should compute a per-function unit-ESCAPE solution
197 propagating it simply like the clobber / uses solutions. The
198 solution can go alongside the non-IPA espaced solution and be
199 used to query which vars escape the unit through a function.
201 We never put function decls in points-to sets so we do not
202 keep the set of called functions for indirect calls.
204 And probably more. */
206 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
207 htab_t heapvar_for_stmt;
209 static bool use_field_sensitive = true;
210 static int in_ipa_mode = 0;
212 /* Used for predecessor bitmaps. */
213 static bitmap_obstack predbitmap_obstack;
215 /* Used for points-to sets. */
216 static bitmap_obstack pta_obstack;
218 /* Used for oldsolution members of variables. */
219 static bitmap_obstack oldpta_obstack;
221 /* Used for per-solver-iteration bitmaps. */
222 static bitmap_obstack iteration_obstack;
224 static unsigned int create_variable_info_for (tree, const char *);
225 typedef struct constraint_graph *constraint_graph_t;
226 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
229 typedef struct constraint *constraint_t;
231 DEF_VEC_P(constraint_t);
232 DEF_VEC_ALLOC_P(constraint_t,heap);
234 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
236 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
238 static struct constraint_stats
240 unsigned int total_vars;
241 unsigned int nonpointer_vars;
242 unsigned int unified_vars_static;
243 unsigned int unified_vars_dynamic;
244 unsigned int iterations;
245 unsigned int num_edges;
246 unsigned int num_implicit_edges;
247 unsigned int points_to_sets_created;
252 /* ID of this variable */
255 /* True if this is a variable created by the constraint analysis, such as
256 heap variables and constraints we had to break up. */
257 unsigned int is_artificial_var : 1;
259 /* True if this is a special variable whose solution set should not be
261 unsigned int is_special_var : 1;
263 /* True for variables whose size is not known or variable. */
264 unsigned int is_unknown_size_var : 1;
266 /* True for (sub-)fields that represent a whole variable. */
267 unsigned int is_full_var : 1;
269 /* True if this is a heap variable. */
270 unsigned int is_heap_var : 1;
272 /* True if this is a variable tracking a restrict pointer source. */
273 unsigned int is_restrict_var : 1;
275 /* True if this field may contain pointers. */
276 unsigned int may_have_pointers : 1;
278 /* True if this represents a global variable. */
279 unsigned int is_global_var : 1;
281 /* True if this represents a IPA function info. */
282 unsigned int is_fn_info : 1;
284 /* A link to the variable for the next field in this structure. */
285 struct variable_info *next;
287 /* Offset of this variable, in bits, from the base variable */
288 unsigned HOST_WIDE_INT offset;
290 /* Size of the variable, in bits. */
291 unsigned HOST_WIDE_INT size;
293 /* Full size of the base variable, in bits. */
294 unsigned HOST_WIDE_INT fullsize;
296 /* Name of this variable */
299 /* Tree that this variable is associated with. */
302 /* Points-to set for this variable. */
305 /* Old points-to set for this variable. */
308 typedef struct variable_info *varinfo_t;
310 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
311 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
312 unsigned HOST_WIDE_INT);
313 static varinfo_t lookup_vi_for_tree (tree);
315 /* Pool of variable info structures. */
316 static alloc_pool variable_info_pool;
318 DEF_VEC_P(varinfo_t);
320 DEF_VEC_ALLOC_P(varinfo_t, heap);
322 /* Table of variable info structures for constraint variables.
323 Indexed directly by variable info id. */
324 static VEC(varinfo_t,heap) *varmap;
326 /* Return the varmap element N */
328 static inline varinfo_t
329 get_varinfo (unsigned int n)
331 return VEC_index (varinfo_t, varmap, n);
334 /* Static IDs for the special variables. */
335 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
336 escaped_id = 3, nonlocal_id = 4,
337 storedanything_id = 5, integer_id = 6 };
339 struct GTY(()) heapvar_map {
341 unsigned HOST_WIDE_INT offset;
345 heapvar_map_eq (const void *p1, const void *p2)
347 const struct heapvar_map *h1 = (const struct heapvar_map *)p1;
348 const struct heapvar_map *h2 = (const struct heapvar_map *)p2;
349 return (h1->map.base.from == h2->map.base.from
350 && h1->offset == h2->offset);
354 heapvar_map_hash (struct heapvar_map *h)
356 return iterative_hash_host_wide_int (h->offset,
357 htab_hash_pointer (h->map.base.from));
360 /* Lookup a heap var for FROM, and return it if we find one. */
363 heapvar_lookup (tree from, unsigned HOST_WIDE_INT offset)
365 struct heapvar_map *h, in;
366 in.map.base.from = from;
368 h = (struct heapvar_map *) htab_find_with_hash (heapvar_for_stmt, &in,
369 heapvar_map_hash (&in));
375 /* Insert a mapping FROM->TO in the heap var for statement
379 heapvar_insert (tree from, unsigned HOST_WIDE_INT offset, tree to)
381 struct heapvar_map *h;
384 h = GGC_NEW (struct heapvar_map);
385 h->map.base.from = from;
387 h->map.hash = heapvar_map_hash (h);
389 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->map.hash, INSERT);
390 gcc_assert (*loc == NULL);
391 *(struct heapvar_map **) loc = h;
394 /* Return a new variable info structure consisting for a variable
395 named NAME, and using constraint graph node NODE. Append it
396 to the vector of variable info structures. */
399 new_var_info (tree t, const char *name)
401 unsigned index = VEC_length (varinfo_t, varmap);
402 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
407 /* Vars without decl are artificial and do not have sub-variables. */
408 ret->is_artificial_var = (t == NULL_TREE);
409 ret->is_special_var = false;
410 ret->is_unknown_size_var = false;
411 ret->is_full_var = (t == NULL_TREE);
412 ret->is_heap_var = false;
413 ret->is_restrict_var = false;
414 ret->may_have_pointers = true;
415 ret->is_global_var = (t == NULL_TREE);
416 ret->is_fn_info = false;
418 ret->is_global_var = is_global_var (t);
419 ret->solution = BITMAP_ALLOC (&pta_obstack);
420 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
423 VEC_safe_push (varinfo_t, heap, varmap, ret);
429 /* A map mapping call statements to per-stmt variables for uses
430 and clobbers specific to the call. */
431 struct pointer_map_t *call_stmt_vars;
433 /* Lookup or create the variable for the call statement CALL. */
436 get_call_vi (gimple call)
441 slot_p = pointer_map_insert (call_stmt_vars, call);
443 return (varinfo_t) *slot_p;
445 vi = new_var_info (NULL_TREE, "CALLUSED");
449 vi->is_full_var = true;
451 vi->next = vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
455 vi2->is_full_var = true;
457 *slot_p = (void *) vi;
461 /* Lookup the variable for the call statement CALL representing
462 the uses. Returns NULL if there is nothing special about this call. */
465 lookup_call_use_vi (gimple call)
469 slot_p = pointer_map_contains (call_stmt_vars, call);
471 return (varinfo_t) *slot_p;
476 /* Lookup the variable for the call statement CALL representing
477 the clobbers. Returns NULL if there is nothing special about this call. */
480 lookup_call_clobber_vi (gimple call)
482 varinfo_t uses = lookup_call_use_vi (call);
489 /* Lookup or create the variable for the call statement CALL representing
493 get_call_use_vi (gimple call)
495 return get_call_vi (call);
498 /* Lookup or create the variable for the call statement CALL representing
501 static varinfo_t ATTRIBUTE_UNUSED
502 get_call_clobber_vi (gimple call)
504 return get_call_vi (call)->next;
508 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
510 /* An expression that appears in a constraint. */
512 struct constraint_expr
514 /* Constraint type. */
515 constraint_expr_type type;
517 /* Variable we are referring to in the constraint. */
520 /* Offset, in bits, of this constraint from the beginning of
521 variables it ends up referring to.
523 IOW, in a deref constraint, we would deref, get the result set,
524 then add OFFSET to each member. */
525 HOST_WIDE_INT offset;
528 /* Use 0x8000... as special unknown offset. */
529 #define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
531 typedef struct constraint_expr ce_s;
533 DEF_VEC_ALLOC_O(ce_s, heap);
534 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
535 static void get_constraint_for (tree, VEC(ce_s, heap) **);
536 static void do_deref (VEC (ce_s, heap) **);
538 /* Our set constraints are made up of two constraint expressions, one
541 As described in the introduction, our set constraints each represent an
542 operation between set valued variables.
546 struct constraint_expr lhs;
547 struct constraint_expr rhs;
550 /* List of constraints that we use to build the constraint graph from. */
552 static VEC(constraint_t,heap) *constraints;
553 static alloc_pool constraint_pool;
555 /* The constraint graph is represented as an array of bitmaps
556 containing successor nodes. */
558 struct constraint_graph
560 /* Size of this graph, which may be different than the number of
561 nodes in the variable map. */
564 /* Explicit successors of each node. */
567 /* Implicit predecessors of each node (Used for variable
569 bitmap *implicit_preds;
571 /* Explicit predecessors of each node (Used for variable substitution). */
574 /* Indirect cycle representatives, or -1 if the node has no indirect
576 int *indirect_cycles;
578 /* Representative node for a node. rep[a] == a unless the node has
582 /* Equivalence class representative for a label. This is used for
583 variable substitution. */
586 /* Pointer equivalence label for a node. All nodes with the same
587 pointer equivalence label can be unified together at some point
588 (either during constraint optimization or after the constraint
592 /* Pointer equivalence representative for a label. This is used to
593 handle nodes that are pointer equivalent but not location
594 equivalent. We can unite these once the addressof constraints
595 are transformed into initial points-to sets. */
598 /* Pointer equivalence label for each node, used during variable
600 unsigned int *pointer_label;
602 /* Location equivalence label for each node, used during location
603 equivalence finding. */
604 unsigned int *loc_label;
606 /* Pointed-by set for each node, used during location equivalence
607 finding. This is pointed-by rather than pointed-to, because it
608 is constructed using the predecessor graph. */
611 /* Points to sets for pointer equivalence. This is *not* the actual
612 points-to sets for nodes. */
615 /* Bitmap of nodes where the bit is set if the node is a direct
616 node. Used for variable substitution. */
617 sbitmap direct_nodes;
619 /* Bitmap of nodes where the bit is set if the node is address
620 taken. Used for variable substitution. */
621 bitmap address_taken;
623 /* Vector of complex constraints for each graph node. Complex
624 constraints are those involving dereferences or offsets that are
626 VEC(constraint_t,heap) **complex;
629 static constraint_graph_t graph;
631 /* During variable substitution and the offline version of indirect
632 cycle finding, we create nodes to represent dereferences and
633 address taken constraints. These represent where these start and
635 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
636 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
638 /* Return the representative node for NODE, if NODE has been unioned
640 This function performs path compression along the way to finding
641 the representative. */
644 find (unsigned int node)
646 gcc_assert (node < graph->size);
647 if (graph->rep[node] != node)
648 return graph->rep[node] = find (graph->rep[node]);
652 /* Union the TO and FROM nodes to the TO nodes.
653 Note that at some point in the future, we may want to do
654 union-by-rank, in which case we are going to have to return the
655 node we unified to. */
658 unite (unsigned int to, unsigned int from)
660 gcc_assert (to < graph->size && from < graph->size);
661 if (to != from && graph->rep[from] != to)
663 graph->rep[from] = to;
669 /* Create a new constraint consisting of LHS and RHS expressions. */
672 new_constraint (const struct constraint_expr lhs,
673 const struct constraint_expr rhs)
675 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
681 /* Print out constraint C to FILE. */
684 dump_constraint (FILE *file, constraint_t c)
686 if (c->lhs.type == ADDRESSOF)
688 else if (c->lhs.type == DEREF)
690 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
691 if (c->lhs.offset == UNKNOWN_OFFSET)
692 fprintf (file, " + UNKNOWN");
693 else if (c->lhs.offset != 0)
694 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
695 fprintf (file, " = ");
696 if (c->rhs.type == ADDRESSOF)
698 else if (c->rhs.type == DEREF)
700 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
701 if (c->rhs.offset == UNKNOWN_OFFSET)
702 fprintf (file, " + UNKNOWN");
703 else if (c->rhs.offset != 0)
704 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
705 fprintf (file, "\n");
709 void debug_constraint (constraint_t);
710 void debug_constraints (void);
711 void debug_constraint_graph (void);
712 void debug_solution_for_var (unsigned int);
713 void debug_sa_points_to_info (void);
715 /* Print out constraint C to stderr. */
718 debug_constraint (constraint_t c)
720 dump_constraint (stderr, c);
723 /* Print out all constraints to FILE */
726 dump_constraints (FILE *file, int from)
730 for (i = from; VEC_iterate (constraint_t, constraints, i, c); i++)
731 dump_constraint (file, c);
734 /* Print out all constraints to stderr. */
737 debug_constraints (void)
739 dump_constraints (stderr, 0);
742 /* Print out to FILE the edge in the constraint graph that is created by
743 constraint c. The edge may have a label, depending on the type of
744 constraint that it represents. If complex1, e.g: a = *b, then the label
745 is "=*", if complex2, e.g: *a = b, then the label is "*=", if
746 complex with an offset, e.g: a = b + 8, then the label is "+".
747 Otherwise the edge has no label. */
750 dump_constraint_edge (FILE *file, constraint_t c)
752 if (c->rhs.type != ADDRESSOF)
754 const char *src = get_varinfo (c->rhs.var)->name;
755 const char *dst = get_varinfo (c->lhs.var)->name;
756 fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
757 /* Due to preprocessing of constraints, instructions like *a = *b are
758 illegal; thus, we do not have to handle such cases. */
759 if (c->lhs.type == DEREF)
760 fprintf (file, " [ label=\"*=\" ] ;\n");
761 else if (c->rhs.type == DEREF)
762 fprintf (file, " [ label=\"=*\" ] ;\n");
765 /* We must check the case where the constraint is an offset.
766 In this case, it is treated as a complex constraint. */
767 if (c->rhs.offset != c->lhs.offset)
768 fprintf (file, " [ label=\"+\" ] ;\n");
770 fprintf (file, " ;\n");
775 /* Print the constraint graph in dot format. */
778 dump_constraint_graph (FILE *file)
780 unsigned int i=0, size;
783 /* Only print the graph if it has already been initialized: */
787 /* Print the constraints used to produce the constraint graph. The
788 constraints will be printed as comments in the dot file: */
789 fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
790 dump_constraints (file, 0);
791 fprintf (file, "*/\n");
793 /* Prints the header of the dot file: */
794 fprintf (file, "\n\n// The constraint graph in dot format:\n");
795 fprintf (file, "strict digraph {\n");
796 fprintf (file, " node [\n shape = box\n ]\n");
797 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
798 fprintf (file, "\n // List of nodes in the constraint graph:\n");
800 /* The next lines print the nodes in the graph. In order to get the
801 number of nodes in the graph, we must choose the minimum between the
802 vector VEC (varinfo_t, varmap) and graph->size. If the graph has not
803 yet been initialized, then graph->size == 0, otherwise we must only
804 read nodes that have an entry in VEC (varinfo_t, varmap). */
805 size = VEC_length (varinfo_t, varmap);
806 size = size < graph->size ? size : graph->size;
807 for (i = 0; i < size; i++)
809 const char *name = get_varinfo (graph->rep[i])->name;
810 fprintf (file, " \"%s\" ;\n", name);
813 /* Go over the list of constraints printing the edges in the constraint
815 fprintf (file, "\n // The constraint edges:\n");
816 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
818 dump_constraint_edge (file, c);
820 /* Prints the tail of the dot file. By now, only the closing bracket. */
821 fprintf (file, "}\n\n\n");
824 /* Print out the constraint graph to stderr. */
827 debug_constraint_graph (void)
829 dump_constraint_graph (stderr);
834 The solver is a simple worklist solver, that works on the following
837 sbitmap changed_nodes = all zeroes;
839 For each node that is not already collapsed:
841 set bit in changed nodes
843 while (changed_count > 0)
845 compute topological ordering for constraint graph
847 find and collapse cycles in the constraint graph (updating
848 changed if necessary)
850 for each node (n) in the graph in topological order:
853 Process each complex constraint associated with the node,
854 updating changed if necessary.
856 For each outgoing edge from n, propagate the solution from n to
857 the destination of the edge, updating changed as necessary.
861 /* Return true if two constraint expressions A and B are equal. */
864 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
866 return a.type == b.type && a.var == b.var && a.offset == b.offset;
869 /* Return true if constraint expression A is less than constraint expression
870 B. This is just arbitrary, but consistent, in order to give them an
874 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
876 if (a.type == b.type)
879 return a.offset < b.offset;
881 return a.var < b.var;
884 return a.type < b.type;
887 /* Return true if constraint A is less than constraint B. This is just
888 arbitrary, but consistent, in order to give them an ordering. */
891 constraint_less (const constraint_t a, const constraint_t b)
893 if (constraint_expr_less (a->lhs, b->lhs))
895 else if (constraint_expr_less (b->lhs, a->lhs))
898 return constraint_expr_less (a->rhs, b->rhs);
901 /* Return true if two constraints A and B are equal. */
904 constraint_equal (struct constraint a, struct constraint b)
906 return constraint_expr_equal (a.lhs, b.lhs)
907 && constraint_expr_equal (a.rhs, b.rhs);
911 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
914 constraint_vec_find (VEC(constraint_t,heap) *vec,
915 struct constraint lookfor)
923 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
924 if (place >= VEC_length (constraint_t, vec))
926 found = VEC_index (constraint_t, vec, place);
927 if (!constraint_equal (*found, lookfor))
932 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
935 constraint_set_union (VEC(constraint_t,heap) **to,
936 VEC(constraint_t,heap) **from)
941 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
943 if (constraint_vec_find (*to, *c) == NULL)
945 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
947 VEC_safe_insert (constraint_t, heap, *to, place, c);
952 /* Expands the solution in SET to all sub-fields of variables included.
953 Union the expanded result into RESULT. */
956 solution_set_expand (bitmap result, bitmap set)
962 /* In a first pass record all variables we need to add all
963 sub-fields off. This avoids quadratic behavior. */
964 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
966 varinfo_t v = get_varinfo (j);
967 if (v->is_artificial_var
970 v = lookup_vi_for_tree (v->decl);
972 vars = BITMAP_ALLOC (NULL);
973 bitmap_set_bit (vars, v->id);
976 /* In the second pass now do the addition to the solution and
977 to speed up solving add it to the delta as well. */
980 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
982 varinfo_t v = get_varinfo (j);
983 for (; v != NULL; v = v->next)
984 bitmap_set_bit (result, v->id);
990 /* Take a solution set SET, add OFFSET to each member of the set, and
991 overwrite SET with the result when done. */
994 solution_set_add (bitmap set, HOST_WIDE_INT offset)
996 bitmap result = BITMAP_ALLOC (&iteration_obstack);
1000 /* If the offset is unknown we have to expand the solution to
1002 if (offset == UNKNOWN_OFFSET)
1004 solution_set_expand (set, set);
1008 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
1010 varinfo_t vi = get_varinfo (i);
1012 /* If this is a variable with just one field just set its bit
1014 if (vi->is_artificial_var
1015 || vi->is_unknown_size_var
1017 bitmap_set_bit (result, i);
1020 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
1022 /* If the offset makes the pointer point to before the
1023 variable use offset zero for the field lookup. */
1025 && fieldoffset > vi->offset)
1029 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1031 bitmap_set_bit (result, vi->id);
1032 /* If the result is not exactly at fieldoffset include the next
1033 field as well. See get_constraint_for_ptr_offset for more
1035 if (vi->offset != fieldoffset
1036 && vi->next != NULL)
1037 bitmap_set_bit (result, vi->next->id);
1041 bitmap_copy (set, result);
1042 BITMAP_FREE (result);
1045 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
1049 set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
1052 return bitmap_ior_into (to, from);
1058 tmp = BITMAP_ALLOC (&iteration_obstack);
1059 bitmap_copy (tmp, from);
1060 solution_set_add (tmp, inc);
1061 res = bitmap_ior_into (to, tmp);
1067 /* Insert constraint C into the list of complex constraints for graph
1071 insert_into_complex (constraint_graph_t graph,
1072 unsigned int var, constraint_t c)
1074 VEC (constraint_t, heap) *complex = graph->complex[var];
1075 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
1078 /* Only insert constraints that do not already exist. */
1079 if (place >= VEC_length (constraint_t, complex)
1080 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
1081 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
1085 /* Condense two variable nodes into a single variable node, by moving
1086 all associated info from SRC to TO. */
1089 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1095 gcc_assert (find (from) == to);
1097 /* Move all complex constraints from src node into to node */
1098 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
1100 /* In complex constraints for node src, we may have either
1101 a = *src, and *src = a, or an offseted constraint which are
1102 always added to the rhs node's constraints. */
1104 if (c->rhs.type == DEREF)
1106 else if (c->lhs.type == DEREF)
1111 constraint_set_union (&graph->complex[to], &graph->complex[from]);
1112 VEC_free (constraint_t, heap, graph->complex[from]);
1113 graph->complex[from] = NULL;
1117 /* Remove edges involving NODE from GRAPH. */
1120 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1122 if (graph->succs[node])
1123 BITMAP_FREE (graph->succs[node]);
1126 /* Merge GRAPH nodes FROM and TO into node TO. */
1129 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1132 if (graph->indirect_cycles[from] != -1)
1134 /* If we have indirect cycles with the from node, and we have
1135 none on the to node, the to node has indirect cycles from the
1136 from node now that they are unified.
1137 If indirect cycles exist on both, unify the nodes that they
1138 are in a cycle with, since we know they are in a cycle with
1140 if (graph->indirect_cycles[to] == -1)
1141 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1144 /* Merge all the successor edges. */
1145 if (graph->succs[from])
1147 if (!graph->succs[to])
1148 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1149 bitmap_ior_into (graph->succs[to],
1150 graph->succs[from]);
1153 clear_edges_for_node (graph, from);
1157 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1158 it doesn't exist in the graph already. */
1161 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1167 if (!graph->implicit_preds[to])
1168 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1170 if (bitmap_set_bit (graph->implicit_preds[to], from))
1171 stats.num_implicit_edges++;
1174 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1175 it doesn't exist in the graph already.
1176 Return false if the edge already existed, true otherwise. */
1179 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1182 if (!graph->preds[to])
1183 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1184 bitmap_set_bit (graph->preds[to], from);
1187 /* Add a graph edge to GRAPH, going from FROM to TO if
1188 it doesn't exist in the graph already.
1189 Return false if the edge already existed, true otherwise. */
1192 add_graph_edge (constraint_graph_t graph, unsigned int to,
1203 if (!graph->succs[from])
1204 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1205 if (bitmap_set_bit (graph->succs[from], to))
1208 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1216 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
1219 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1222 return (graph->succs[dest]
1223 && bitmap_bit_p (graph->succs[dest], src));
1226 /* Initialize the constraint graph structure to contain SIZE nodes. */
1229 init_graph (unsigned int size)
1233 graph = XCNEW (struct constraint_graph);
1235 graph->succs = XCNEWVEC (bitmap, graph->size);
1236 graph->indirect_cycles = XNEWVEC (int, graph->size);
1237 graph->rep = XNEWVEC (unsigned int, graph->size);
1238 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
1239 graph->pe = XCNEWVEC (unsigned int, graph->size);
1240 graph->pe_rep = XNEWVEC (int, graph->size);
1242 for (j = 0; j < graph->size; j++)
1245 graph->pe_rep[j] = -1;
1246 graph->indirect_cycles[j] = -1;
1250 /* Build the constraint graph, adding only predecessor edges right now. */
1253 build_pred_graph (void)
1259 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1260 graph->preds = XCNEWVEC (bitmap, graph->size);
1261 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1262 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1263 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1264 graph->points_to = XCNEWVEC (bitmap, graph->size);
1265 graph->eq_rep = XNEWVEC (int, graph->size);
1266 graph->direct_nodes = sbitmap_alloc (graph->size);
1267 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1268 sbitmap_zero (graph->direct_nodes);
1270 for (j = 0; j < FIRST_REF_NODE; j++)
1272 if (!get_varinfo (j)->is_special_var)
1273 SET_BIT (graph->direct_nodes, j);
1276 for (j = 0; j < graph->size; j++)
1277 graph->eq_rep[j] = -1;
1279 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1280 graph->indirect_cycles[j] = -1;
1282 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1284 struct constraint_expr lhs = c->lhs;
1285 struct constraint_expr rhs = c->rhs;
1286 unsigned int lhsvar = lhs.var;
1287 unsigned int rhsvar = rhs.var;
1289 if (lhs.type == DEREF)
1292 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1293 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1295 else if (rhs.type == DEREF)
1298 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1299 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1301 RESET_BIT (graph->direct_nodes, lhsvar);
1303 else if (rhs.type == ADDRESSOF)
1308 if (graph->points_to[lhsvar] == NULL)
1309 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1310 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1312 if (graph->pointed_by[rhsvar] == NULL)
1313 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1314 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1316 /* Implicitly, *x = y */
1317 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1319 /* All related variables are no longer direct nodes. */
1320 RESET_BIT (graph->direct_nodes, rhsvar);
1321 v = get_varinfo (rhsvar);
1322 if (!v->is_full_var)
1324 v = lookup_vi_for_tree (v->decl);
1327 RESET_BIT (graph->direct_nodes, v->id);
1332 bitmap_set_bit (graph->address_taken, rhsvar);
1334 else if (lhsvar > anything_id
1335 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1338 add_pred_graph_edge (graph, lhsvar, rhsvar);
1339 /* Implicitly, *x = *y */
1340 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1341 FIRST_REF_NODE + rhsvar);
1343 else if (lhs.offset != 0 || rhs.offset != 0)
1345 if (rhs.offset != 0)
1346 RESET_BIT (graph->direct_nodes, lhs.var);
1347 else if (lhs.offset != 0)
1348 RESET_BIT (graph->direct_nodes, rhs.var);
1353 /* Build the constraint graph, adding successor edges. */
1356 build_succ_graph (void)
1361 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1363 struct constraint_expr lhs;
1364 struct constraint_expr rhs;
1365 unsigned int lhsvar;
1366 unsigned int rhsvar;
1373 lhsvar = find (lhs.var);
1374 rhsvar = find (rhs.var);
1376 if (lhs.type == DEREF)
1378 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1379 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1381 else if (rhs.type == DEREF)
1383 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1384 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1386 else if (rhs.type == ADDRESSOF)
1389 gcc_assert (find (rhs.var) == rhs.var);
1390 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1392 else if (lhsvar > anything_id
1393 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1395 add_graph_edge (graph, lhsvar, rhsvar);
1399 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1400 receive pointers. */
1401 t = find (storedanything_id);
1402 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1404 if (!TEST_BIT (graph->direct_nodes, i)
1405 && get_varinfo (i)->may_have_pointers)
1406 add_graph_edge (graph, find (i), t);
1409 /* Everything stored to ANYTHING also potentially escapes. */
1410 add_graph_edge (graph, find (escaped_id), t);
1414 /* Changed variables on the last iteration. */
1415 static unsigned int changed_count;
1416 static sbitmap changed;
1418 /* Strongly Connected Component visitation info. */
1425 unsigned int *node_mapping;
1427 VEC(unsigned,heap) *scc_stack;
1431 /* Recursive routine to find strongly connected components in GRAPH.
1432 SI is the SCC info to store the information in, and N is the id of current
1433 graph node we are processing.
1435 This is Tarjan's strongly connected component finding algorithm, as
1436 modified by Nuutila to keep only non-root nodes on the stack.
1437 The algorithm can be found in "On finding the strongly connected
1438 connected components in a directed graph" by Esko Nuutila and Eljas
1439 Soisalon-Soininen, in Information Processing Letters volume 49,
1440 number 1, pages 9-14. */
1443 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1447 unsigned int my_dfs;
1449 SET_BIT (si->visited, n);
1450 si->dfs[n] = si->current_index ++;
1451 my_dfs = si->dfs[n];
1453 /* Visit all the successors. */
1454 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1458 if (i > LAST_REF_NODE)
1462 if (TEST_BIT (si->deleted, w))
1465 if (!TEST_BIT (si->visited, w))
1466 scc_visit (graph, si, w);
1468 unsigned int t = find (w);
1469 unsigned int nnode = find (n);
1470 gcc_assert (nnode == n);
1472 if (si->dfs[t] < si->dfs[nnode])
1473 si->dfs[n] = si->dfs[t];
1477 /* See if any components have been identified. */
1478 if (si->dfs[n] == my_dfs)
1480 if (VEC_length (unsigned, si->scc_stack) > 0
1481 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1483 bitmap scc = BITMAP_ALLOC (NULL);
1484 unsigned int lowest_node;
1487 bitmap_set_bit (scc, n);
1489 while (VEC_length (unsigned, si->scc_stack) != 0
1490 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1492 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1494 bitmap_set_bit (scc, w);
1497 lowest_node = bitmap_first_set_bit (scc);
1498 gcc_assert (lowest_node < FIRST_REF_NODE);
1500 /* Collapse the SCC nodes into a single node, and mark the
1502 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1504 if (i < FIRST_REF_NODE)
1506 if (unite (lowest_node, i))
1507 unify_nodes (graph, lowest_node, i, false);
1511 unite (lowest_node, i);
1512 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1516 SET_BIT (si->deleted, n);
1519 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1522 /* Unify node FROM into node TO, updating the changed count if
1523 necessary when UPDATE_CHANGED is true. */
1526 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1527 bool update_changed)
1530 gcc_assert (to != from && find (to) == to);
1531 if (dump_file && (dump_flags & TDF_DETAILS))
1532 fprintf (dump_file, "Unifying %s to %s\n",
1533 get_varinfo (from)->name,
1534 get_varinfo (to)->name);
1537 stats.unified_vars_dynamic++;
1539 stats.unified_vars_static++;
1541 merge_graph_nodes (graph, to, from);
1542 merge_node_constraints (graph, to, from);
1544 /* Mark TO as changed if FROM was changed. If TO was already marked
1545 as changed, decrease the changed count. */
1547 if (update_changed && TEST_BIT (changed, from))
1549 RESET_BIT (changed, from);
1550 if (!TEST_BIT (changed, to))
1551 SET_BIT (changed, to);
1554 gcc_assert (changed_count > 0);
1558 if (get_varinfo (from)->solution)
1560 /* If the solution changes because of the merging, we need to mark
1561 the variable as changed. */
1562 if (bitmap_ior_into (get_varinfo (to)->solution,
1563 get_varinfo (from)->solution))
1565 if (update_changed && !TEST_BIT (changed, to))
1567 SET_BIT (changed, to);
1572 BITMAP_FREE (get_varinfo (from)->solution);
1573 BITMAP_FREE (get_varinfo (from)->oldsolution);
1575 if (stats.iterations > 0)
1577 BITMAP_FREE (get_varinfo (to)->oldsolution);
1578 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1581 if (valid_graph_edge (graph, to, to))
1583 if (graph->succs[to])
1584 bitmap_clear_bit (graph->succs[to], to);
1588 /* Information needed to compute the topological ordering of a graph. */
1592 /* sbitmap of visited nodes. */
1594 /* Array that stores the topological order of the graph, *in
1596 VEC(unsigned,heap) *topo_order;
1600 /* Initialize and return a topological info structure. */
1602 static struct topo_info *
1603 init_topo_info (void)
1605 size_t size = graph->size;
1606 struct topo_info *ti = XNEW (struct topo_info);
1607 ti->visited = sbitmap_alloc (size);
1608 sbitmap_zero (ti->visited);
1609 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1614 /* Free the topological sort info pointed to by TI. */
1617 free_topo_info (struct topo_info *ti)
1619 sbitmap_free (ti->visited);
1620 VEC_free (unsigned, heap, ti->topo_order);
1624 /* Visit the graph in topological order, and store the order in the
1625 topo_info structure. */
1628 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1634 SET_BIT (ti->visited, n);
1636 if (graph->succs[n])
1637 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1639 if (!TEST_BIT (ti->visited, j))
1640 topo_visit (graph, ti, j);
1643 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1646 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1647 starting solution for y. */
1650 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1653 unsigned int lhs = c->lhs.var;
1655 bitmap sol = get_varinfo (lhs)->solution;
1658 HOST_WIDE_INT roffset = c->rhs.offset;
1660 /* Our IL does not allow this. */
1661 gcc_assert (c->lhs.offset == 0);
1663 /* If the solution of Y contains anything it is good enough to transfer
1665 if (bitmap_bit_p (delta, anything_id))
1667 flag |= bitmap_set_bit (sol, anything_id);
1671 /* If we do not know at with offset the rhs is dereferenced compute
1672 the reachability set of DELTA, conservatively assuming it is
1673 dereferenced at all valid offsets. */
1674 if (roffset == UNKNOWN_OFFSET)
1676 solution_set_expand (delta, delta);
1677 /* No further offset processing is necessary. */
1681 /* For each variable j in delta (Sol(y)), add
1682 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1683 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1685 varinfo_t v = get_varinfo (j);
1686 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1690 fieldoffset = v->offset;
1691 else if (roffset != 0)
1692 v = first_vi_for_offset (v, fieldoffset);
1693 /* If the access is outside of the variable we can ignore it. */
1701 /* Adding edges from the special vars is pointless.
1702 They don't have sets that can change. */
1703 if (get_varinfo (t)->is_special_var)
1704 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1705 /* Merging the solution from ESCAPED needlessly increases
1706 the set. Use ESCAPED as representative instead. */
1707 else if (v->id == escaped_id)
1708 flag |= bitmap_set_bit (sol, escaped_id);
1709 else if (v->may_have_pointers
1710 && add_graph_edge (graph, lhs, t))
1711 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1713 /* If the variable is not exactly at the requested offset
1714 we have to include the next one. */
1715 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1720 fieldoffset = v->offset;
1726 /* If the LHS solution changed, mark the var as changed. */
1729 get_varinfo (lhs)->solution = sol;
1730 if (!TEST_BIT (changed, lhs))
1732 SET_BIT (changed, lhs);
1738 /* Process a constraint C that represents *(x + off) = y using DELTA
1739 as the starting solution for x. */
1742 do_ds_constraint (constraint_t c, bitmap delta)
1744 unsigned int rhs = c->rhs.var;
1745 bitmap sol = get_varinfo (rhs)->solution;
1748 HOST_WIDE_INT loff = c->lhs.offset;
1749 bool escaped_p = false;
1751 /* Our IL does not allow this. */
1752 gcc_assert (c->rhs.offset == 0);
1754 /* If the solution of y contains ANYTHING simply use the ANYTHING
1755 solution. This avoids needlessly increasing the points-to sets. */
1756 if (bitmap_bit_p (sol, anything_id))
1757 sol = get_varinfo (find (anything_id))->solution;
1759 /* If the solution for x contains ANYTHING we have to merge the
1760 solution of y into all pointer variables which we do via
1762 if (bitmap_bit_p (delta, anything_id))
1764 unsigned t = find (storedanything_id);
1765 if (add_graph_edge (graph, t, rhs))
1767 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1769 if (!TEST_BIT (changed, t))
1771 SET_BIT (changed, t);
1779 /* If we do not know at with offset the rhs is dereferenced compute
1780 the reachability set of DELTA, conservatively assuming it is
1781 dereferenced at all valid offsets. */
1782 if (loff == UNKNOWN_OFFSET)
1784 solution_set_expand (delta, delta);
1788 /* For each member j of delta (Sol(x)), add an edge from y to j and
1789 union Sol(y) into Sol(j) */
1790 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1792 varinfo_t v = get_varinfo (j);
1794 HOST_WIDE_INT fieldoffset = v->offset + loff;
1797 fieldoffset = v->offset;
1799 v = first_vi_for_offset (v, fieldoffset);
1800 /* If the access is outside of the variable we can ignore it. */
1806 if (v->may_have_pointers)
1808 /* If v is a global variable then this is an escape point. */
1809 if (v->is_global_var
1812 t = find (escaped_id);
1813 if (add_graph_edge (graph, t, rhs)
1814 && bitmap_ior_into (get_varinfo (t)->solution, sol)
1815 && !TEST_BIT (changed, t))
1817 SET_BIT (changed, t);
1820 /* Enough to let rhs escape once. */
1824 if (v->is_special_var)
1828 if (add_graph_edge (graph, t, rhs)
1829 && bitmap_ior_into (get_varinfo (t)->solution, sol)
1830 && !TEST_BIT (changed, t))
1832 SET_BIT (changed, t);
1837 /* If the variable is not exactly at the requested offset
1838 we have to include the next one. */
1839 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1844 fieldoffset = v->offset;
1850 /* Handle a non-simple (simple meaning requires no iteration),
1851 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1854 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1856 if (c->lhs.type == DEREF)
1858 if (c->rhs.type == ADDRESSOF)
1865 do_ds_constraint (c, delta);
1868 else if (c->rhs.type == DEREF)
1871 if (!(get_varinfo (c->lhs.var)->is_special_var))
1872 do_sd_constraint (graph, c, delta);
1880 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1881 solution = get_varinfo (c->rhs.var)->solution;
1882 tmp = get_varinfo (c->lhs.var)->solution;
1884 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1888 get_varinfo (c->lhs.var)->solution = tmp;
1889 if (!TEST_BIT (changed, c->lhs.var))
1891 SET_BIT (changed, c->lhs.var);
1898 /* Initialize and return a new SCC info structure. */
1900 static struct scc_info *
1901 init_scc_info (size_t size)
1903 struct scc_info *si = XNEW (struct scc_info);
1906 si->current_index = 0;
1907 si->visited = sbitmap_alloc (size);
1908 sbitmap_zero (si->visited);
1909 si->deleted = sbitmap_alloc (size);
1910 sbitmap_zero (si->deleted);
1911 si->node_mapping = XNEWVEC (unsigned int, size);
1912 si->dfs = XCNEWVEC (unsigned int, size);
1914 for (i = 0; i < size; i++)
1915 si->node_mapping[i] = i;
1917 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1921 /* Free an SCC info structure pointed to by SI */
1924 free_scc_info (struct scc_info *si)
1926 sbitmap_free (si->visited);
1927 sbitmap_free (si->deleted);
1928 free (si->node_mapping);
1930 VEC_free (unsigned, heap, si->scc_stack);
1935 /* Find indirect cycles in GRAPH that occur, using strongly connected
1936 components, and note them in the indirect cycles map.
1938 This technique comes from Ben Hardekopf and Calvin Lin,
1939 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1940 Lines of Code", submitted to PLDI 2007. */
1943 find_indirect_cycles (constraint_graph_t graph)
1946 unsigned int size = graph->size;
1947 struct scc_info *si = init_scc_info (size);
1949 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1950 if (!TEST_BIT (si->visited, i) && find (i) == i)
1951 scc_visit (graph, si, i);
1956 /* Compute a topological ordering for GRAPH, and store the result in the
1957 topo_info structure TI. */
1960 compute_topo_order (constraint_graph_t graph,
1961 struct topo_info *ti)
1964 unsigned int size = graph->size;
1966 for (i = 0; i != size; ++i)
1967 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1968 topo_visit (graph, ti, i);
1971 /* Structure used to for hash value numbering of pointer equivalence
1974 typedef struct equiv_class_label
1977 unsigned int equivalence_class;
1979 } *equiv_class_label_t;
1980 typedef const struct equiv_class_label *const_equiv_class_label_t;
1982 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1984 static htab_t pointer_equiv_class_table;
1986 /* A hashtable for mapping a bitmap of labels->location equivalence
1988 static htab_t location_equiv_class_table;
1990 /* Hash function for a equiv_class_label_t */
1993 equiv_class_label_hash (const void *p)
1995 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1996 return ecl->hashcode;
1999 /* Equality function for two equiv_class_label_t's. */
2002 equiv_class_label_eq (const void *p1, const void *p2)
2004 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
2005 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
2006 return (eql1->hashcode == eql2->hashcode
2007 && bitmap_equal_p (eql1->labels, eql2->labels));
2010 /* Lookup a equivalence class in TABLE by the bitmap of LABELS it
2014 equiv_class_lookup (htab_t table, bitmap labels)
2017 struct equiv_class_label ecl;
2019 ecl.labels = labels;
2020 ecl.hashcode = bitmap_hash (labels);
2022 slot = htab_find_slot_with_hash (table, &ecl,
2023 ecl.hashcode, NO_INSERT);
2027 return ((equiv_class_label_t) *slot)->equivalence_class;
2031 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
2035 equiv_class_add (htab_t table, unsigned int equivalence_class,
2039 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
2041 ecl->labels = labels;
2042 ecl->equivalence_class = equivalence_class;
2043 ecl->hashcode = bitmap_hash (labels);
2045 slot = htab_find_slot_with_hash (table, ecl,
2046 ecl->hashcode, INSERT);
2047 gcc_assert (!*slot);
2048 *slot = (void *) ecl;
2051 /* Perform offline variable substitution.
2053 This is a worst case quadratic time way of identifying variables
2054 that must have equivalent points-to sets, including those caused by
2055 static cycles, and single entry subgraphs, in the constraint graph.
2057 The technique is described in "Exploiting Pointer and Location
2058 Equivalence to Optimize Pointer Analysis. In the 14th International
2059 Static Analysis Symposium (SAS), August 2007." It is known as the
2060 "HU" algorithm, and is equivalent to value numbering the collapsed
2061 constraint graph including evaluating unions.
2063 The general method of finding equivalence classes is as follows:
2064 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
2065 Initialize all non-REF nodes to be direct nodes.
2066 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
2068 For each constraint containing the dereference, we also do the same
2071 We then compute SCC's in the graph and unify nodes in the same SCC,
2074 For each non-collapsed node x:
2075 Visit all unvisited explicit incoming edges.
2076 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
2078 Lookup the equivalence class for pts(x).
2079 If we found one, equivalence_class(x) = found class.
2080 Otherwise, equivalence_class(x) = new class, and new_class is
2081 added to the lookup table.
2083 All direct nodes with the same equivalence class can be replaced
2084 with a single representative node.
2085 All unlabeled nodes (label == 0) are not pointers and all edges
2086 involving them can be eliminated.
2087 We perform these optimizations during rewrite_constraints
2089 In addition to pointer equivalence class finding, we also perform
2090 location equivalence class finding. This is the set of variables
2091 that always appear together in points-to sets. We use this to
2092 compress the size of the points-to sets. */
2094 /* Current maximum pointer equivalence class id. */
2095 static int pointer_equiv_class;
2097 /* Current maximum location equivalence class id. */
2098 static int location_equiv_class;
2100 /* Recursive routine to find strongly connected components in GRAPH,
2101 and label it's nodes with DFS numbers. */
2104 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2108 unsigned int my_dfs;
2110 gcc_assert (si->node_mapping[n] == n);
2111 SET_BIT (si->visited, n);
2112 si->dfs[n] = si->current_index ++;
2113 my_dfs = si->dfs[n];
2115 /* Visit all the successors. */
2116 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2118 unsigned int w = si->node_mapping[i];
2120 if (TEST_BIT (si->deleted, w))
2123 if (!TEST_BIT (si->visited, w))
2124 condense_visit (graph, si, w);
2126 unsigned int t = si->node_mapping[w];
2127 unsigned int nnode = si->node_mapping[n];
2128 gcc_assert (nnode == n);
2130 if (si->dfs[t] < si->dfs[nnode])
2131 si->dfs[n] = si->dfs[t];
2135 /* Visit all the implicit predecessors. */
2136 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2138 unsigned int w = si->node_mapping[i];
2140 if (TEST_BIT (si->deleted, w))
2143 if (!TEST_BIT (si->visited, w))
2144 condense_visit (graph, si, w);
2146 unsigned int t = si->node_mapping[w];
2147 unsigned int nnode = si->node_mapping[n];
2148 gcc_assert (nnode == n);
2150 if (si->dfs[t] < si->dfs[nnode])
2151 si->dfs[n] = si->dfs[t];
2155 /* See if any components have been identified. */
2156 if (si->dfs[n] == my_dfs)
2158 while (VEC_length (unsigned, si->scc_stack) != 0
2159 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
2161 unsigned int w = VEC_pop (unsigned, si->scc_stack);
2162 si->node_mapping[w] = n;
2164 if (!TEST_BIT (graph->direct_nodes, w))
2165 RESET_BIT (graph->direct_nodes, n);
2167 /* Unify our nodes. */
2168 if (graph->preds[w])
2170 if (!graph->preds[n])
2171 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2172 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2174 if (graph->implicit_preds[w])
2176 if (!graph->implicit_preds[n])
2177 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2178 bitmap_ior_into (graph->implicit_preds[n],
2179 graph->implicit_preds[w]);
2181 if (graph->points_to[w])
2183 if (!graph->points_to[n])
2184 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2185 bitmap_ior_into (graph->points_to[n],
2186 graph->points_to[w]);
2189 SET_BIT (si->deleted, n);
2192 VEC_safe_push (unsigned, heap, si->scc_stack, n);
2195 /* Label pointer equivalences. */
2198 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2202 SET_BIT (si->visited, n);
2204 if (!graph->points_to[n])
2205 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2207 /* Label and union our incoming edges's points to sets. */
2208 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2210 unsigned int w = si->node_mapping[i];
2211 if (!TEST_BIT (si->visited, w))
2212 label_visit (graph, si, w);
2214 /* Skip unused edges */
2215 if (w == n || graph->pointer_label[w] == 0)
2218 if (graph->points_to[w])
2219 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
2221 /* Indirect nodes get fresh variables. */
2222 if (!TEST_BIT (graph->direct_nodes, n))
2223 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2225 if (!bitmap_empty_p (graph->points_to[n]))
2227 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
2228 graph->points_to[n]);
2231 label = pointer_equiv_class++;
2232 equiv_class_add (pointer_equiv_class_table,
2233 label, graph->points_to[n]);
2235 graph->pointer_label[n] = label;
2239 /* Perform offline variable substitution, discovering equivalence
2240 classes, and eliminating non-pointer variables. */
2242 static struct scc_info *
2243 perform_var_substitution (constraint_graph_t graph)
2246 unsigned int size = graph->size;
2247 struct scc_info *si = init_scc_info (size);
2249 bitmap_obstack_initialize (&iteration_obstack);
2250 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2251 equiv_class_label_eq, free);
2252 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2253 equiv_class_label_eq, free);
2254 pointer_equiv_class = 1;
2255 location_equiv_class = 1;
2257 /* Condense the nodes, which means to find SCC's, count incoming
2258 predecessors, and unite nodes in SCC's. */
2259 for (i = 0; i < FIRST_REF_NODE; i++)
2260 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2261 condense_visit (graph, si, si->node_mapping[i]);
2263 sbitmap_zero (si->visited);
2264 /* Actually the label the nodes for pointer equivalences */
2265 for (i = 0; i < FIRST_REF_NODE; i++)
2266 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2267 label_visit (graph, si, si->node_mapping[i]);
2269 /* Calculate location equivalence labels. */
2270 for (i = 0; i < FIRST_REF_NODE; i++)
2277 if (!graph->pointed_by[i])
2279 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2281 /* Translate the pointed-by mapping for pointer equivalence
2283 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2285 bitmap_set_bit (pointed_by,
2286 graph->pointer_label[si->node_mapping[j]]);
2288 /* The original pointed_by is now dead. */
2289 BITMAP_FREE (graph->pointed_by[i]);
2291 /* Look up the location equivalence label if one exists, or make
2293 label = equiv_class_lookup (location_equiv_class_table,
2297 label = location_equiv_class++;
2298 equiv_class_add (location_equiv_class_table,
2303 if (dump_file && (dump_flags & TDF_DETAILS))
2304 fprintf (dump_file, "Found location equivalence for node %s\n",
2305 get_varinfo (i)->name);
2306 BITMAP_FREE (pointed_by);
2308 graph->loc_label[i] = label;
2312 if (dump_file && (dump_flags & TDF_DETAILS))
2313 for (i = 0; i < FIRST_REF_NODE; i++)
2315 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2317 "Equivalence classes for %s node id %d:%s are pointer: %d"
2319 direct_node ? "Direct node" : "Indirect node", i,
2320 get_varinfo (i)->name,
2321 graph->pointer_label[si->node_mapping[i]],
2322 graph->loc_label[si->node_mapping[i]]);
2325 /* Quickly eliminate our non-pointer variables. */
2327 for (i = 0; i < FIRST_REF_NODE; i++)
2329 unsigned int node = si->node_mapping[i];
2331 if (graph->pointer_label[node] == 0)
2333 if (dump_file && (dump_flags & TDF_DETAILS))
2335 "%s is a non-pointer variable, eliminating edges.\n",
2336 get_varinfo (node)->name);
2337 stats.nonpointer_vars++;
2338 clear_edges_for_node (graph, node);
2345 /* Free information that was only necessary for variable
2349 free_var_substitution_info (struct scc_info *si)
2352 free (graph->pointer_label);
2353 free (graph->loc_label);
2354 free (graph->pointed_by);
2355 free (graph->points_to);
2356 free (graph->eq_rep);
2357 sbitmap_free (graph->direct_nodes);
2358 htab_delete (pointer_equiv_class_table);
2359 htab_delete (location_equiv_class_table);
2360 bitmap_obstack_release (&iteration_obstack);
2363 /* Return an existing node that is equivalent to NODE, which has
2364 equivalence class LABEL, if one exists. Return NODE otherwise. */
2367 find_equivalent_node (constraint_graph_t graph,
2368 unsigned int node, unsigned int label)
2370 /* If the address version of this variable is unused, we can
2371 substitute it for anything else with the same label.
2372 Otherwise, we know the pointers are equivalent, but not the
2373 locations, and we can unite them later. */
2375 if (!bitmap_bit_p (graph->address_taken, node))
2377 gcc_assert (label < graph->size);
2379 if (graph->eq_rep[label] != -1)
2381 /* Unify the two variables since we know they are equivalent. */
2382 if (unite (graph->eq_rep[label], node))
2383 unify_nodes (graph, graph->eq_rep[label], node, false);
2384 return graph->eq_rep[label];
2388 graph->eq_rep[label] = node;
2389 graph->pe_rep[label] = node;
2394 gcc_assert (label < graph->size);
2395 graph->pe[node] = label;
2396 if (graph->pe_rep[label] == -1)
2397 graph->pe_rep[label] = node;
2403 /* Unite pointer equivalent but not location equivalent nodes in
2404 GRAPH. This may only be performed once variable substitution is
2408 unite_pointer_equivalences (constraint_graph_t graph)
2412 /* Go through the pointer equivalences and unite them to their
2413 representative, if they aren't already. */
2414 for (i = 0; i < FIRST_REF_NODE; i++)
2416 unsigned int label = graph->pe[i];
2419 int label_rep = graph->pe_rep[label];
2421 if (label_rep == -1)
2424 label_rep = find (label_rep);
2425 if (label_rep >= 0 && unite (label_rep, find (i)))
2426 unify_nodes (graph, label_rep, i, false);
2431 /* Move complex constraints to the GRAPH nodes they belong to. */
2434 move_complex_constraints (constraint_graph_t graph)
2439 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2443 struct constraint_expr lhs = c->lhs;
2444 struct constraint_expr rhs = c->rhs;
2446 if (lhs.type == DEREF)
2448 insert_into_complex (graph, lhs.var, c);
2450 else if (rhs.type == DEREF)
2452 if (!(get_varinfo (lhs.var)->is_special_var))
2453 insert_into_complex (graph, rhs.var, c);
2455 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2456 && (lhs.offset != 0 || rhs.offset != 0))
2458 insert_into_complex (graph, rhs.var, c);
2465 /* Optimize and rewrite complex constraints while performing
2466 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2467 result of perform_variable_substitution. */
2470 rewrite_constraints (constraint_graph_t graph,
2471 struct scc_info *si)
2477 for (j = 0; j < graph->size; j++)
2478 gcc_assert (find (j) == j);
2480 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2482 struct constraint_expr lhs = c->lhs;
2483 struct constraint_expr rhs = c->rhs;
2484 unsigned int lhsvar = find (lhs.var);
2485 unsigned int rhsvar = find (rhs.var);
2486 unsigned int lhsnode, rhsnode;
2487 unsigned int lhslabel, rhslabel;
2489 lhsnode = si->node_mapping[lhsvar];
2490 rhsnode = si->node_mapping[rhsvar];
2491 lhslabel = graph->pointer_label[lhsnode];
2492 rhslabel = graph->pointer_label[rhsnode];
2494 /* See if it is really a non-pointer variable, and if so, ignore
2498 if (dump_file && (dump_flags & TDF_DETAILS))
2501 fprintf (dump_file, "%s is a non-pointer variable,"
2502 "ignoring constraint:",
2503 get_varinfo (lhs.var)->name);
2504 dump_constraint (dump_file, c);
2506 VEC_replace (constraint_t, constraints, i, NULL);
2512 if (dump_file && (dump_flags & TDF_DETAILS))
2515 fprintf (dump_file, "%s is a non-pointer variable,"
2516 "ignoring constraint:",
2517 get_varinfo (rhs.var)->name);
2518 dump_constraint (dump_file, c);
2520 VEC_replace (constraint_t, constraints, i, NULL);
2524 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2525 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2526 c->lhs.var = lhsvar;
2527 c->rhs.var = rhsvar;
2532 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2533 part of an SCC, false otherwise. */
2536 eliminate_indirect_cycles (unsigned int node)
2538 if (graph->indirect_cycles[node] != -1
2539 && !bitmap_empty_p (get_varinfo (node)->solution))
2542 VEC(unsigned,heap) *queue = NULL;
2544 unsigned int to = find (graph->indirect_cycles[node]);
2547 /* We can't touch the solution set and call unify_nodes
2548 at the same time, because unify_nodes is going to do
2549 bitmap unions into it. */
2551 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2553 if (find (i) == i && i != to)
2556 VEC_safe_push (unsigned, heap, queue, i);
2561 VEC_iterate (unsigned, queue, queuepos, i);
2564 unify_nodes (graph, to, i, true);
2566 VEC_free (unsigned, heap, queue);
2572 /* Solve the constraint graph GRAPH using our worklist solver.
2573 This is based on the PW* family of solvers from the "Efficient Field
2574 Sensitive Pointer Analysis for C" paper.
2575 It works by iterating over all the graph nodes, processing the complex
2576 constraints and propagating the copy constraints, until everything stops
2577 changed. This corresponds to steps 6-8 in the solving list given above. */
2580 solve_graph (constraint_graph_t graph)
2582 unsigned int size = graph->size;
2587 changed = sbitmap_alloc (size);
2588 sbitmap_zero (changed);
2590 /* Mark all initial non-collapsed nodes as changed. */
2591 for (i = 0; i < size; i++)
2593 varinfo_t ivi = get_varinfo (i);
2594 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2595 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2596 || VEC_length (constraint_t, graph->complex[i]) > 0))
2598 SET_BIT (changed, i);
2603 /* Allocate a bitmap to be used to store the changed bits. */
2604 pts = BITMAP_ALLOC (&pta_obstack);
2606 while (changed_count > 0)
2609 struct topo_info *ti = init_topo_info ();
2612 bitmap_obstack_initialize (&iteration_obstack);
2614 compute_topo_order (graph, ti);
2616 while (VEC_length (unsigned, ti->topo_order) != 0)
2619 i = VEC_pop (unsigned, ti->topo_order);
2621 /* If this variable is not a representative, skip it. */
2625 /* In certain indirect cycle cases, we may merge this
2626 variable to another. */
2627 if (eliminate_indirect_cycles (i) && find (i) != i)
2630 /* If the node has changed, we need to process the
2631 complex constraints and outgoing edges again. */
2632 if (TEST_BIT (changed, i))
2637 VEC(constraint_t,heap) *complex = graph->complex[i];
2638 bool solution_empty;
2640 RESET_BIT (changed, i);
2643 /* Compute the changed set of solution bits. */
2644 bitmap_and_compl (pts, get_varinfo (i)->solution,
2645 get_varinfo (i)->oldsolution);
2647 if (bitmap_empty_p (pts))
2650 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2652 solution = get_varinfo (i)->solution;
2653 solution_empty = bitmap_empty_p (solution);
2655 /* Process the complex constraints */
2656 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2658 /* XXX: This is going to unsort the constraints in
2659 some cases, which will occasionally add duplicate
2660 constraints during unification. This does not
2661 affect correctness. */
2662 c->lhs.var = find (c->lhs.var);
2663 c->rhs.var = find (c->rhs.var);
2665 /* The only complex constraint that can change our
2666 solution to non-empty, given an empty solution,
2667 is a constraint where the lhs side is receiving
2668 some set from elsewhere. */
2669 if (!solution_empty || c->lhs.type != DEREF)
2670 do_complex_constraint (graph, c, pts);
2673 solution_empty = bitmap_empty_p (solution);
2675 if (!solution_empty)
2678 unsigned eff_escaped_id = find (escaped_id);
2680 /* Propagate solution to all successors. */
2681 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2687 unsigned int to = find (j);
2688 tmp = get_varinfo (to)->solution;
2691 /* Don't try to propagate to ourselves. */
2695 /* If we propagate from ESCAPED use ESCAPED as
2697 if (i == eff_escaped_id)
2698 flag = bitmap_set_bit (tmp, escaped_id);
2700 flag = set_union_with_increment (tmp, pts, 0);
2704 get_varinfo (to)->solution = tmp;
2705 if (!TEST_BIT (changed, to))
2707 SET_BIT (changed, to);
2715 free_topo_info (ti);
2716 bitmap_obstack_release (&iteration_obstack);
2720 sbitmap_free (changed);
2721 bitmap_obstack_release (&oldpta_obstack);
2724 /* Map from trees to variable infos. */
2725 static struct pointer_map_t *vi_for_tree;
2728 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2731 insert_vi_for_tree (tree t, varinfo_t vi)
2733 void **slot = pointer_map_insert (vi_for_tree, t);
2735 gcc_assert (*slot == NULL);
2739 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2740 exist in the map, return NULL, otherwise, return the varinfo we found. */
2743 lookup_vi_for_tree (tree t)
2745 void **slot = pointer_map_contains (vi_for_tree, t);
2749 return (varinfo_t) *slot;
2752 /* Return a printable name for DECL */
2755 alias_get_name (tree decl)
2757 const char *res = get_name (decl);
2759 int num_printed = 0;
2768 if (TREE_CODE (decl) == SSA_NAME)
2770 num_printed = asprintf (&temp, "%s_%u",
2771 alias_get_name (SSA_NAME_VAR (decl)),
2772 SSA_NAME_VERSION (decl));
2774 else if (DECL_P (decl))
2776 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2778 if (num_printed > 0)
2780 res = ggc_strdup (temp);
2786 /* Find the variable id for tree T in the map.
2787 If T doesn't exist in the map, create an entry for it and return it. */
2790 get_vi_for_tree (tree t)
2792 void **slot = pointer_map_contains (vi_for_tree, t);
2794 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2796 return (varinfo_t) *slot;
2799 /* Get a scalar constraint expression for a new temporary variable. */
2801 static struct constraint_expr
2802 new_scalar_tmp_constraint_exp (const char *name)
2804 struct constraint_expr tmp;
2807 vi = new_var_info (NULL_TREE, name);
2811 vi->is_full_var = 1;
2820 /* Get a constraint expression vector from an SSA_VAR_P node.
2821 If address_p is true, the result will be taken its address of. */
2824 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2826 struct constraint_expr cexpr;
2829 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2830 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2832 /* For parameters, get at the points-to set for the actual parm
2834 if (TREE_CODE (t) == SSA_NAME
2835 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2836 && SSA_NAME_IS_DEFAULT_DEF (t))
2838 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2842 vi = get_vi_for_tree (t);
2844 cexpr.type = SCALAR;
2846 /* If we determine the result is "anything", and we know this is readonly,
2847 say it points to readonly memory instead. */
2848 if (cexpr.var == anything_id && TREE_READONLY (t))
2851 cexpr.type = ADDRESSOF;
2852 cexpr.var = readonly_id;
2855 /* If we are not taking the address of the constraint expr, add all
2856 sub-fiels of the variable as well. */
2858 && !vi->is_full_var)
2860 for (; vi; vi = vi->next)
2863 VEC_safe_push (ce_s, heap, *results, &cexpr);
2868 VEC_safe_push (ce_s, heap, *results, &cexpr);
2871 /* Process constraint T, performing various simplifications and then
2872 adding it to our list of overall constraints. */
2875 process_constraint (constraint_t t)
2877 struct constraint_expr rhs = t->rhs;
2878 struct constraint_expr lhs = t->lhs;
2880 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2881 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2883 /* If we didn't get any useful constraint from the lhs we get
2884 &ANYTHING as fallback from get_constraint_for. Deal with
2885 it here by turning it into *ANYTHING. */
2886 if (lhs.type == ADDRESSOF
2887 && lhs.var == anything_id)
2890 /* ADDRESSOF on the lhs is invalid. */
2891 gcc_assert (lhs.type != ADDRESSOF);
2893 /* We shouldn't add constraints from things that cannot have pointers.
2894 It's not completely trivial to avoid in the callers, so do it here. */
2895 if (rhs.type != ADDRESSOF
2896 && !get_varinfo (rhs.var)->may_have_pointers)
2899 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2900 if (!get_varinfo (lhs.var)->may_have_pointers)
2903 /* This can happen in our IR with things like n->a = *p */
2904 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2906 /* Split into tmp = *rhs, *lhs = tmp */
2907 struct constraint_expr tmplhs;
2908 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2909 process_constraint (new_constraint (tmplhs, rhs));
2910 process_constraint (new_constraint (lhs, tmplhs));
2912 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2914 /* Split into tmp = &rhs, *lhs = tmp */
2915 struct constraint_expr tmplhs;
2916 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2917 process_constraint (new_constraint (tmplhs, rhs));
2918 process_constraint (new_constraint (lhs, tmplhs));
2922 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2923 VEC_safe_push (constraint_t, heap, constraints, t);
2927 /* Return true if T is a type that could contain pointers. */
2930 type_could_have_pointers (tree type)
2932 if (POINTER_TYPE_P (type))
2935 if (TREE_CODE (type) == ARRAY_TYPE)
2936 return type_could_have_pointers (TREE_TYPE (type));
2938 return AGGREGATE_TYPE_P (type);
2941 /* Return true if T is a variable of a type that could contain
2945 could_have_pointers (tree t)
2947 return type_could_have_pointers (TREE_TYPE (t));
2950 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2953 static HOST_WIDE_INT
2954 bitpos_of_field (const tree fdecl)
2957 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2958 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2961 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
2962 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2966 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2967 resulting constraint expressions in *RESULTS. */
2970 get_constraint_for_ptr_offset (tree ptr, tree offset,
2971 VEC (ce_s, heap) **results)
2973 struct constraint_expr c;
2975 HOST_WIDE_INT rhsunitoffset, rhsoffset;
2977 /* If we do not do field-sensitive PTA adding offsets to pointers
2978 does not change the points-to solution. */
2979 if (!use_field_sensitive)
2981 get_constraint_for (ptr, results);
2985 /* If the offset is not a non-negative integer constant that fits
2986 in a HOST_WIDE_INT, we have to fall back to a conservative
2987 solution which includes all sub-fields of all pointed-to
2988 variables of ptr. */
2989 if (offset == NULL_TREE
2990 || !host_integerp (offset, 0))
2991 rhsoffset = UNKNOWN_OFFSET;
2994 /* Make sure the bit-offset also fits. */
2995 rhsunitoffset = TREE_INT_CST_LOW (offset);
2996 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
2997 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
2998 rhsoffset = UNKNOWN_OFFSET;
3001 get_constraint_for (ptr, results);
3005 /* As we are eventually appending to the solution do not use
3006 VEC_iterate here. */
3007 n = VEC_length (ce_s, *results);
3008 for (j = 0; j < n; j++)
3011 c = *VEC_index (ce_s, *results, j);
3012 curr = get_varinfo (c.var);
3014 if (c.type == ADDRESSOF
3015 /* If this varinfo represents a full variable just use it. */
3016 && curr->is_full_var)
3018 else if (c.type == ADDRESSOF
3019 /* If we do not know the offset add all subfields. */
3020 && rhsoffset == UNKNOWN_OFFSET)
3022 varinfo_t temp = lookup_vi_for_tree (curr->decl);
3025 struct constraint_expr c2;
3027 c2.type = ADDRESSOF;
3029 if (c2.var != c.var)
3030 VEC_safe_push (ce_s, heap, *results, &c2);
3035 else if (c.type == ADDRESSOF)
3038 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3040 /* Search the sub-field which overlaps with the
3041 pointed-to offset. If the result is outside of the variable
3042 we have to provide a conservative result, as the variable is
3043 still reachable from the resulting pointer (even though it
3044 technically cannot point to anything). The last and first
3045 sub-fields are such conservative results.
3046 ??? If we always had a sub-field for &object + 1 then
3047 we could represent this in a more precise way. */
3049 && curr->offset < offset)
3051 temp = first_or_preceding_vi_for_offset (curr, offset);
3053 /* If the found variable is not exactly at the pointed to
3054 result, we have to include the next variable in the
3055 solution as well. Otherwise two increments by offset / 2
3056 do not result in the same or a conservative superset
3058 if (temp->offset != offset
3059 && temp->next != NULL)
3061 struct constraint_expr c2;
3062 c2.var = temp->next->id;
3063 c2.type = ADDRESSOF;
3065 VEC_safe_push (ce_s, heap, *results, &c2);
3071 c.offset = rhsoffset;
3073 VEC_replace (ce_s, *results, j, &c);
3078 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3079 If address_p is true the result will be taken its address of. */
3082 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
3086 HOST_WIDE_INT bitsize = -1;
3087 HOST_WIDE_INT bitmaxsize = -1;
3088 HOST_WIDE_INT bitpos;
3090 struct constraint_expr *result;
3092 /* Some people like to do cute things like take the address of
3095 while (handled_component_p (forzero)
3096 || INDIRECT_REF_P (forzero))
3097 forzero = TREE_OPERAND (forzero, 0);
3099 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3101 struct constraint_expr temp;
3104 temp.var = integer_id;
3106 VEC_safe_push (ce_s, heap, *results, &temp);
3110 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3112 /* Pretend to take the address of the base, we'll take care of
3113 adding the required subset of sub-fields below. */
3114 get_constraint_for_1 (t, results, true);
3115 gcc_assert (VEC_length (ce_s, *results) == 1);
3116 result = VEC_last (ce_s, *results);
3118 if (result->type == SCALAR
3119 && get_varinfo (result->var)->is_full_var)
3120 /* For single-field vars do not bother about the offset. */
3122 else if (result->type == SCALAR)
3124 /* In languages like C, you can access one past the end of an
3125 array. You aren't allowed to dereference it, so we can
3126 ignore this constraint. When we handle pointer subtraction,
3127 we may have to do something cute here. */
3129 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
3132 /* It's also not true that the constraint will actually start at the
3133 right offset, it may start in some padding. We only care about
3134 setting the constraint to the first actual field it touches, so
3136 struct constraint_expr cexpr = *result;
3138 VEC_pop (ce_s, *results);
3140 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
3142 if (ranges_overlap_p (curr->offset, curr->size,
3143 bitpos, bitmaxsize))
3145 cexpr.var = curr->id;
3146 VEC_safe_push (ce_s, heap, *results, &cexpr);
3151 /* If we are going to take the address of this field then
3152 to be able to compute reachability correctly add at least
3153 the last field of the variable. */
3155 && VEC_length (ce_s, *results) == 0)
3157 curr = get_varinfo (cexpr.var);
3158 while (curr->next != NULL)
3160 cexpr.var = curr->id;
3161 VEC_safe_push (ce_s, heap, *results, &cexpr);
3164 /* Assert that we found *some* field there. The user couldn't be
3165 accessing *only* padding. */
3166 /* Still the user could access one past the end of an array
3167 embedded in a struct resulting in accessing *only* padding. */
3168 gcc_assert (VEC_length (ce_s, *results) >= 1
3169 || ref_contains_array_ref (orig_t));
3171 else if (bitmaxsize == 0)
3173 if (dump_file && (dump_flags & TDF_DETAILS))
3174 fprintf (dump_file, "Access to zero-sized part of variable,"
3178 if (dump_file && (dump_flags & TDF_DETAILS))
3179 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3181 else if (result->type == DEREF)
3183 /* If we do not know exactly where the access goes say so. Note
3184 that only for non-structure accesses we know that we access
3185 at most one subfiled of any variable. */
3187 || bitsize != bitmaxsize
3188 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t)))
3189 result->offset = UNKNOWN_OFFSET;
3191 result->offset = bitpos;
3193 else if (result->type == ADDRESSOF)
3195 /* We can end up here for component references on a
3196 VIEW_CONVERT_EXPR <>(&foobar). */
3197 result->type = SCALAR;
3198 result->var = anything_id;
3206 /* Dereference the constraint expression CONS, and return the result.
3207 DEREF (ADDRESSOF) = SCALAR
3208 DEREF (SCALAR) = DEREF
3209 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3210 This is needed so that we can handle dereferencing DEREF constraints. */
3213 do_deref (VEC (ce_s, heap) **constraints)
3215 struct constraint_expr *c;
3218 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
3220 if (c->type == SCALAR)
3222 else if (c->type == ADDRESSOF)
3224 else if (c->type == DEREF)
3226 struct constraint_expr tmplhs;
3227 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3228 process_constraint (new_constraint (tmplhs, *c));
3229 c->var = tmplhs.var;
3236 static void get_constraint_for_1 (tree, VEC (ce_s, heap) **, bool);
3238 /* Given a tree T, return the constraint expression for taking the
3242 get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
3244 struct constraint_expr *c;
3247 get_constraint_for_1 (t, results, true);
3249 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
3251 if (c->type == DEREF)
3254 c->type = ADDRESSOF;
3258 /* Given a tree T, return the constraint expression for it. */
3261 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
3263 struct constraint_expr temp;
3265 /* x = integer is all glommed to a single variable, which doesn't
3266 point to anything by itself. That is, of course, unless it is an
3267 integer constant being treated as a pointer, in which case, we
3268 will return that this is really the addressof anything. This
3269 happens below, since it will fall into the default case. The only
3270 case we know something about an integer treated like a pointer is
3271 when it is the NULL pointer, and then we just say it points to
3274 Do not do that if -fno-delete-null-pointer-checks though, because
3275 in that case *NULL does not fail, so it _should_ alias *anything.
3276 It is not worth adding a new option or renaming the existing one,
3277 since this case is relatively obscure. */
3278 if (flag_delete_null_pointer_checks
3279 && ((TREE_CODE (t) == INTEGER_CST
3280 && integer_zerop (t))
3281 /* The only valid CONSTRUCTORs in gimple with pointer typed
3282 elements are zero-initializer. */
3283 || TREE_CODE (t) == CONSTRUCTOR))
3285 temp.var = nothing_id;
3286 temp.type = ADDRESSOF;
3288 VEC_safe_push (ce_s, heap, *results, &temp);
3292 /* String constants are read-only. */
3293 if (TREE_CODE (t) == STRING_CST)
3295 temp.var = readonly_id;
3298 VEC_safe_push (ce_s, heap, *results, &temp);
3302 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3304 case tcc_expression:
3306 switch (TREE_CODE (t))
3309 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3317 switch (TREE_CODE (t))
3321 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3326 case ARRAY_RANGE_REF:
3328 get_constraint_for_component_ref (t, results, address_p);
3330 case VIEW_CONVERT_EXPR:
3331 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3333 /* We are missing handling for TARGET_MEM_REF here. */
3338 case tcc_exceptional:
3340 switch (TREE_CODE (t))
3344 get_constraint_for_ssa_var (t, results, address_p);
3351 case tcc_declaration:
3353 get_constraint_for_ssa_var (t, results, address_p);
3359 /* The default fallback is a constraint from anything. */
3360 temp.type = ADDRESSOF;
3361 temp.var = anything_id;
3363 VEC_safe_push (ce_s, heap, *results, &temp);
3366 /* Given a gimple tree T, return the constraint expression vector for it. */
3369 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3371 gcc_assert (VEC_length (ce_s, *results) == 0);
3373 get_constraint_for_1 (t, results, false);
3377 /* Efficiently generates constraints from all entries in *RHSC to all
3378 entries in *LHSC. */
3381 process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc)
3383 struct constraint_expr *lhsp, *rhsp;
3386 if (VEC_length (ce_s, lhsc) <= 1
3387 || VEC_length (ce_s, rhsc) <= 1)
3389 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
3390 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j)
3391 process_constraint (new_constraint (*lhsp, *rhsp));
3395 struct constraint_expr tmp;
3396 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3397 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
3398 process_constraint (new_constraint (tmp, *rhsp));
3399 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
3400 process_constraint (new_constraint (*lhsp, tmp));
3404 /* Handle aggregate copies by expanding into copies of the respective
3405 fields of the structures. */
3408 do_structure_copy (tree lhsop, tree rhsop)
3410 struct constraint_expr *lhsp, *rhsp;
3411 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3414 get_constraint_for (lhsop, &lhsc);
3415 get_constraint_for (rhsop, &rhsc);
3416 lhsp = VEC_index (ce_s, lhsc, 0);
3417 rhsp = VEC_index (ce_s, rhsc, 0);
3418 if (lhsp->type == DEREF
3419 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3420 || rhsp->type == DEREF)
3422 if (lhsp->type == DEREF)
3424 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3425 lhsp->offset = UNKNOWN_OFFSET;
3427 if (rhsp->type == DEREF)
3429 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3430 rhsp->offset = UNKNOWN_OFFSET;
3432 process_all_all_constraints (lhsc, rhsc);
3434 else if (lhsp->type == SCALAR
3435 && (rhsp->type == SCALAR
3436 || rhsp->type == ADDRESSOF))
3438 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3439 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3441 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
3442 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
3443 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
3445 varinfo_t lhsv, rhsv;
3446 rhsp = VEC_index (ce_s, rhsc, k);
3447 lhsv = get_varinfo (lhsp->var);
3448 rhsv = get_varinfo (rhsp->var);
3449 if (lhsv->may_have_pointers
3450 && ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3451 rhsv->offset + lhsoffset, rhsv->size))
3452 process_constraint (new_constraint (*lhsp, *rhsp));
3453 if (lhsv->offset + rhsoffset + lhsv->size
3454 > rhsv->offset + lhsoffset + rhsv->size)
3457 if (k >= VEC_length (ce_s, rhsc))
3467 VEC_free (ce_s, heap, lhsc);
3468 VEC_free (ce_s, heap, rhsc);
3471 /* Create a constraint ID = OP. */
3474 make_constraint_to (unsigned id, tree op)
3476 VEC(ce_s, heap) *rhsc = NULL;
3477 struct constraint_expr *c;
3478 struct constraint_expr includes;
3482 includes.offset = 0;
3483 includes.type = SCALAR;
3485 get_constraint_for (op, &rhsc);
3486 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3487 process_constraint (new_constraint (includes, *c));
3488 VEC_free (ce_s, heap, rhsc);
3491 /* Create a constraint ID = &FROM. */
3494 make_constraint_from (varinfo_t vi, int from)
3496 struct constraint_expr lhs, rhs;
3504 rhs.type = ADDRESSOF;
3505 process_constraint (new_constraint (lhs, rhs));
3508 /* Create a constraint ID = FROM. */
3511 make_copy_constraint (varinfo_t vi, int from)
3513 struct constraint_expr lhs, rhs;
3522 process_constraint (new_constraint (lhs, rhs));
3525 /* Make constraints necessary to make OP escape. */
3528 make_escape_constraint (tree op)
3530 make_constraint_to (escaped_id, op);
3533 /* Add constraints to that the solution of VI is transitively closed. */
3536 make_transitive_closure_constraints (varinfo_t vi)
3538 struct constraint_expr lhs, rhs;
3547 process_constraint (new_constraint (lhs, rhs));
3549 /* VAR = VAR + UNKNOWN; */
3555 rhs.offset = UNKNOWN_OFFSET;
3556 process_constraint (new_constraint (lhs, rhs));
3559 /* Create a new artificial heap variable with NAME and make a
3560 constraint from it to LHS. Return the created variable. */
3563 make_constraint_from_heapvar (varinfo_t lhs, const char *name)
3566 tree heapvar = heapvar_lookup (lhs->decl, lhs->offset);
3568 if (heapvar == NULL_TREE)
3571 heapvar = create_tmp_var_raw (ptr_type_node, name);
3572 DECL_EXTERNAL (heapvar) = 1;
3574 heapvar_insert (lhs->decl, lhs->offset, heapvar);
3576 ann = get_var_ann (heapvar);
3577 ann->is_heapvar = 1;
3580 /* For global vars we need to add a heapvar to the list of referenced
3581 vars of a different function than it was created for originally. */
3582 if (cfun && gimple_referenced_vars (cfun))
3583 add_referenced_var (heapvar);
3585 vi = new_var_info (heapvar, name);
3586 vi->is_artificial_var = true;
3587 vi->is_heap_var = true;
3588 vi->is_unknown_size_var = true;
3592 vi->is_full_var = true;
3593 insert_vi_for_tree (heapvar, vi);
3595 make_constraint_from (lhs, vi->id);
3600 /* Create a new artificial heap variable with NAME and make a
3601 constraint from it to LHS. Set flags according to a tag used
3602 for tracking restrict pointers. */
3605 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3608 vi = make_constraint_from_heapvar (lhs, name);
3609 vi->is_restrict_var = 1;
3610 vi->is_global_var = 0;
3611 vi->is_special_var = 1;
3612 vi->may_have_pointers = 0;
3615 /* In IPA mode there are varinfos for different aspects of reach
3616 function designator. One for the points-to set of the return
3617 value, one for the variables that are clobbered by the function,
3618 one for its uses and one for each parameter (including a single
3619 glob for remaining variadic arguments). */
3621 enum { fi_clobbers = 1, fi_uses = 2,
3622 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3624 /* Get a constraint for the requested part of a function designator FI
3625 when operating in IPA mode. */
3627 static struct constraint_expr
3628 get_function_part_constraint (varinfo_t fi, unsigned part)
3630 struct constraint_expr c;
3632 gcc_assert (in_ipa_mode);
3634 if (fi->id == anything_id)
3636 /* ??? We probably should have a ANYFN special variable. */
3637 c.var = anything_id;
3641 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3643 varinfo_t ai = first_vi_for_offset (fi, part);
3644 c.var = ai ? ai->id : anything_id;
3658 /* For non-IPA mode, generate constraints necessary for a call on the
3662 handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
3664 struct constraint_expr rhsc;
3667 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3669 tree arg = gimple_call_arg (stmt, i);
3671 /* Find those pointers being passed, and make sure they end up
3672 pointing to anything. */
3673 if (could_have_pointers (arg))
3674 make_escape_constraint (arg);
3677 /* The static chain escapes as well. */
3678 if (gimple_call_chain (stmt))
3679 make_escape_constraint (gimple_call_chain (stmt));
3681 /* And if we applied NRV the address of the return slot escapes as well. */
3682 if (gimple_call_return_slot_opt_p (stmt)
3683 && gimple_call_lhs (stmt) != NULL_TREE
3684 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3686 VEC(ce_s, heap) *tmpc = NULL;
3687 struct constraint_expr lhsc, *c;
3688 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3689 lhsc.var = escaped_id;
3692 for (i = 0; VEC_iterate (ce_s, tmpc, i, c); ++i)
3693 process_constraint (new_constraint (lhsc, *c));
3694 VEC_free(ce_s, heap, tmpc);
3697 /* Regular functions return nonlocal memory. */
3698 rhsc.var = nonlocal_id;
3701 VEC_safe_push (ce_s, heap, *results, &rhsc);
3704 /* For non-IPA mode, generate constraints necessary for a call
3705 that returns a pointer and assigns it to LHS. This simply makes
3706 the LHS point to global and escaped variables. */
3709 handle_lhs_call (tree lhs, int flags, VEC(ce_s, heap) *rhsc, tree fndecl)
3711 VEC(ce_s, heap) *lhsc = NULL;
3713 get_constraint_for (lhs, &lhsc);
3715 if (flags & ECF_MALLOC)
3718 vi = make_constraint_from_heapvar (get_vi_for_tree (lhs), "HEAP");
3719 /* We delay marking allocated storage global until we know if
3721 DECL_EXTERNAL (vi->decl) = 0;
3722 vi->is_global_var = 0;
3723 /* If this is not a real malloc call assume the memory was
3724 initialized and thus may point to global memory. All
3725 builtin functions with the malloc attribute behave in a sane way. */
3727 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
3728 make_constraint_from (vi, nonlocal_id);
3730 else if (VEC_length (ce_s, rhsc) > 0)
3732 /* If the store is to a global decl make sure to
3733 add proper escape constraints. */
3734 lhs = get_base_address (lhs);
3737 && is_global_var (lhs))
3739 struct constraint_expr tmpc;
3740 tmpc.var = escaped_id;
3743 VEC_safe_push (ce_s, heap, lhsc, &tmpc);
3745 process_all_all_constraints (lhsc, rhsc);
3747 VEC_free (ce_s, heap, lhsc);
3750 /* For non-IPA mode, generate constraints necessary for a call of a
3751 const function that returns a pointer in the statement STMT. */
3754 handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
3756 struct constraint_expr rhsc;
3759 /* Treat nested const functions the same as pure functions as far
3760 as the static chain is concerned. */
3761 if (gimple_call_chain (stmt))
3763 varinfo_t uses = get_call_use_vi (stmt);
3764 make_transitive_closure_constraints (uses);
3765 make_constraint_to (uses->id, gimple_call_chain (stmt));
3766 rhsc.var = uses->id;
3769 VEC_safe_push (ce_s, heap, *results, &rhsc);
3772 /* May return arguments. */
3773 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3775 tree arg = gimple_call_arg (stmt, k);
3777 if (could_have_pointers (arg))
3779 VEC(ce_s, heap) *argc = NULL;
3781 struct constraint_expr *argp;
3782 get_constraint_for (arg, &argc);
3783 for (i = 0; VEC_iterate (ce_s, argc, i, argp); ++i)
3784 VEC_safe_push (ce_s, heap, *results, argp);
3785 VEC_free(ce_s, heap, argc);
3789 /* May return addresses of globals. */
3790 rhsc.var = nonlocal_id;
3792 rhsc.type = ADDRESSOF;
3793 VEC_safe_push (ce_s, heap, *results, &rhsc);
3796 /* For non-IPA mode, generate constraints necessary for a call to a
3797 pure function in statement STMT. */
3800 handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
3802 struct constraint_expr rhsc;
3804 varinfo_t uses = NULL;
3806 /* Memory reached from pointer arguments is call-used. */
3807 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3809 tree arg = gimple_call_arg (stmt, i);
3811 if (could_have_pointers (arg))
3815 uses = get_call_use_vi (stmt);
3816 make_transitive_closure_constraints (uses);
3818 make_constraint_to (uses->id, arg);
3822 /* The static chain is used as well. */
3823 if (gimple_call_chain (stmt))
3827 uses = get_call_use_vi (stmt);
3828 make_transitive_closure_constraints (uses);
3830 make_constraint_to (uses->id, gimple_call_chain (stmt));
3833 /* Pure functions may return call-used and nonlocal memory. */
3836 rhsc.var = uses->id;
3839 VEC_safe_push (ce_s, heap, *results, &rhsc);
3841 rhsc.var = nonlocal_id;
3844 VEC_safe_push (ce_s, heap, *results, &rhsc);
3848 /* Return the varinfo for the callee of CALL. */
3851 get_fi_for_callee (gimple call)
3855 /* If we can directly resolve the function being called, do so.
3856 Otherwise, it must be some sort of indirect expression that
3857 we should still be able to handle. */
3858 decl = gimple_call_fndecl (call);
3860 return get_vi_for_tree (decl);
3862 decl = gimple_call_fn (call);
3863 /* The function can be either an SSA name pointer or,
3864 worse, an OBJ_TYPE_REF. In this case we have no
3865 clue and should be getting ANYFN (well, ANYTHING for now). */
3866 if (TREE_CODE (decl) == SSA_NAME)
3868 if (TREE_CODE (decl) == SSA_NAME
3869 && TREE_CODE (SSA_NAME_VAR (decl)) == PARM_DECL
3870 && SSA_NAME_IS_DEFAULT_DEF (decl))
3871 decl = SSA_NAME_VAR (decl);
3872 return get_vi_for_tree (decl);
3874 else if (TREE_CODE (decl) == INTEGER_CST
3875 || TREE_CODE (decl) == OBJ_TYPE_REF)
3876 return get_varinfo (anything_id);
3881 /* Walk statement T setting up aliasing constraints according to the
3882 references found in T. This function is the main part of the
3883 constraint builder. AI points to auxiliary alias information used
3884 when building alias sets and computing alias grouping heuristics. */
3887 find_func_aliases (gimple origt)
3890 VEC(ce_s, heap) *lhsc = NULL;
3891 VEC(ce_s, heap) *rhsc = NULL;
3892 struct constraint_expr *c;
3895 /* Now build constraints expressions. */
3896 if (gimple_code (t) == GIMPLE_PHI)
3898 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
3900 /* Only care about pointers and structures containing
3902 if (could_have_pointers (gimple_phi_result (t)))
3907 /* For a phi node, assign all the arguments to
3909 get_constraint_for (gimple_phi_result (t), &lhsc);
3910 for (i = 0; i < gimple_phi_num_args (t); i++)
3912 tree strippedrhs = PHI_ARG_DEF (t, i);
3914 STRIP_NOPS (strippedrhs);
3915 get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
3917 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3919 struct constraint_expr *c2;
3920 while (VEC_length (ce_s, rhsc) > 0)
3922 c2 = VEC_last (ce_s, rhsc);
3923 process_constraint (new_constraint (*c, *c2));
3924 VEC_pop (ce_s, rhsc);
3930 /* In IPA mode, we need to generate constraints to pass call
3931 arguments through their calls. There are two cases,
3932 either a GIMPLE_CALL returning a value, or just a plain
3933 GIMPLE_CALL when we are not.
3935 In non-ipa mode, we need to generate constraints for each
3936 pointer passed by address. */
3937 else if (is_gimple_call (t))
3939 tree fndecl = gimple_call_fndecl (t);
3940 if (fndecl != NULL_TREE
3941 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3942 /* ??? All builtins that are handled here need to be handled
3943 in the alias-oracle query functions explicitly! */
3944 switch (DECL_FUNCTION_CODE (fndecl))
3946 /* All the following functions return a pointer to the same object
3947 as their first argument points to. The functions do not add
3948 to the ESCAPED solution. The functions make the first argument
3949 pointed to memory point to what the second argument pointed to
3950 memory points to. */
3951 case BUILT_IN_STRCPY:
3952 case BUILT_IN_STRNCPY:
3953 case BUILT_IN_BCOPY:
3954 case BUILT_IN_MEMCPY:
3955 case BUILT_IN_MEMMOVE:
3956 case BUILT_IN_MEMPCPY:
3957 case BUILT_IN_STPCPY:
3958 case BUILT_IN_STPNCPY:
3959 case BUILT_IN_STRCAT:
3960 case BUILT_IN_STRNCAT:
3962 tree res = gimple_call_lhs (t);
3963 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
3964 == BUILT_IN_BCOPY ? 1 : 0));
3965 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
3966 == BUILT_IN_BCOPY ? 0 : 1));
3967 if (res != NULL_TREE)
3969 get_constraint_for (res, &lhsc);
3970 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
3971 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
3972 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
3973 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
3975 get_constraint_for (dest, &rhsc);
3976 process_all_all_constraints (lhsc, rhsc);
3977 VEC_free (ce_s, heap, lhsc);
3978 VEC_free (ce_s, heap, rhsc);
3980 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
3981 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
3984 process_all_all_constraints (lhsc, rhsc);
3985 VEC_free (ce_s, heap, lhsc);
3986 VEC_free (ce_s, heap, rhsc);
3989 case BUILT_IN_MEMSET:
3991 tree res = gimple_call_lhs (t);
3992 tree dest = gimple_call_arg (t, 0);
3995 struct constraint_expr ac;
3996 if (res != NULL_TREE)
3998 get_constraint_for (res, &lhsc);
3999 get_constraint_for (dest, &rhsc);
4000 process_all_all_constraints (lhsc, rhsc);
4001 VEC_free (ce_s, heap, lhsc);
4002 VEC_free (ce_s, heap, rhsc);
4004 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4006 if (flag_delete_null_pointer_checks
4007 && integer_zerop (gimple_call_arg (t, 1)))
4009 ac.type = ADDRESSOF;
4010 ac.var = nothing_id;
4015 ac.var = integer_id;
4018 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
4019 process_constraint (new_constraint (*lhsp, ac));
4020 VEC_free (ce_s, heap, lhsc);
4023 /* All the following functions do not return pointers, do not
4024 modify the points-to sets of memory reachable from their
4025 arguments and do not add to the ESCAPED solution. */
4026 case BUILT_IN_SINCOS:
4027 case BUILT_IN_SINCOSF:
4028 case BUILT_IN_SINCOSL:
4029 case BUILT_IN_FREXP:
4030 case BUILT_IN_FREXPF:
4031 case BUILT_IN_FREXPL:
4032 case BUILT_IN_GAMMA_R:
4033 case BUILT_IN_GAMMAF_R:
4034 case BUILT_IN_GAMMAL_R:
4035 case BUILT_IN_LGAMMA_R:
4036 case BUILT_IN_LGAMMAF_R:
4037 case BUILT_IN_LGAMMAL_R:
4039 case BUILT_IN_MODFF:
4040 case BUILT_IN_MODFL:
4041 case BUILT_IN_REMQUO:
4042 case BUILT_IN_REMQUOF:
4043 case BUILT_IN_REMQUOL:
4046 /* Trampolines are special - they set up passing the static
4048 case BUILT_IN_INIT_TRAMPOLINE:
4050 tree tramp = gimple_call_arg (t, 0);
4051 tree nfunc = gimple_call_arg (t, 1);
4052 tree frame = gimple_call_arg (t, 2);
4054 struct constraint_expr lhs, *rhsp;
4057 varinfo_t nfi = NULL;
4058 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4059 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4062 lhs = get_function_part_constraint (nfi, fi_static_chain);
4063 get_constraint_for (frame, &rhsc);
4064 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
4065 process_constraint (new_constraint (lhs, *rhsp));
4066 VEC_free (ce_s, heap, rhsc);
4068 /* Make the frame point to the function for
4069 the trampoline adjustment call. */
4070 get_constraint_for (tramp, &lhsc);
4072 get_constraint_for (nfunc, &rhsc);
4073 process_all_all_constraints (lhsc, rhsc);
4074 VEC_free (ce_s, heap, rhsc);
4075 VEC_free (ce_s, heap, lhsc);
4080 /* Else fallthru to generic handling which will let
4081 the frame escape. */
4084 case BUILT_IN_ADJUST_TRAMPOLINE:
4086 tree tramp = gimple_call_arg (t, 0);
4087 tree res = gimple_call_lhs (t);
4088 if (in_ipa_mode && res)
4090 get_constraint_for (res, &lhsc);
4091 get_constraint_for (tramp, &rhsc);
4093 process_all_all_constraints (lhsc, rhsc);
4094 VEC_free (ce_s, heap, rhsc);
4095 VEC_free (ce_s, heap, lhsc);
4099 /* Variadic argument handling needs to be handled in IPA
4101 case BUILT_IN_VA_START:
4105 tree valist = gimple_call_arg (t, 0);
4106 struct constraint_expr rhs, *lhsp;
4108 /* The va_list gets access to pointers in variadic
4110 fi = lookup_vi_for_tree (cfun->decl);
4111 gcc_assert (fi != NULL);
4112 get_constraint_for (valist, &lhsc);
4114 rhs = get_function_part_constraint (fi, ~0);
4115 rhs.type = ADDRESSOF;
4116 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
4117 process_constraint (new_constraint (*lhsp, rhs));
4118 VEC_free (ce_s, heap, lhsc);
4119 /* va_list is clobbered. */
4120 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4125 /* va_end doesn't have any effect that matters. */
4126 case BUILT_IN_VA_END:
4128 /* printf-style functions may have hooks to set pointers to
4129 point to somewhere into the generated string. Leave them
4130 for a later excercise... */
4132 /* Fallthru to general call handling. */;
4136 && (!(fi = lookup_vi_for_tree (fndecl))
4137 || !fi->is_fn_info)))
4139 VEC(ce_s, heap) *rhsc = NULL;
4140 int flags = gimple_call_flags (t);
4142 /* Const functions can return their arguments and addresses
4143 of global memory but not of escaped memory. */
4144 if (flags & (ECF_CONST|ECF_NOVOPS))
4146 if (gimple_call_lhs (t)
4147 && could_have_pointers (gimple_call_lhs (t)))
4148 handle_const_call (t, &rhsc);
4150 /* Pure functions can return addresses in and of memory
4151 reachable from their arguments, but they are not an escape
4152 point for reachable memory of their arguments. */
4153 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4154 handle_pure_call (t, &rhsc);
4156 handle_rhs_call (t, &rhsc);
4157 if (gimple_call_lhs (t)
4158 && could_have_pointers (gimple_call_lhs (t)))
4159 handle_lhs_call (gimple_call_lhs (t), flags, rhsc, fndecl);
4160 VEC_free (ce_s, heap, rhsc);
4167 fi = get_fi_for_callee (t);
4169 /* Assign all the passed arguments to the appropriate incoming
4170 parameters of the function. */
4171 for (j = 0; j < gimple_call_num_args (t); j++)
4173 struct constraint_expr lhs ;
4174 struct constraint_expr *rhsp;
4175 tree arg = gimple_call_arg (t, j);
4177 if (!could_have_pointers (arg))
4180 get_constraint_for (arg, &rhsc);
4181 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4182 while (VEC_length (ce_s, rhsc) != 0)
4184 rhsp = VEC_last (ce_s, rhsc);
4185 process_constraint (new_constraint (lhs, *rhsp));
4186 VEC_pop (ce_s, rhsc);
4190 /* If we are returning a value, assign it to the result. */
4191 lhsop = gimple_call_lhs (t);
4193 && could_have_pointers (lhsop))
4195 struct constraint_expr rhs;
4196 struct constraint_expr *lhsp;
4198 get_constraint_for (lhsop, &lhsc);
4199 rhs = get_function_part_constraint (fi, fi_result);
4201 && DECL_RESULT (fndecl)
4202 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4204 VEC(ce_s, heap) *tem = NULL;
4205 VEC_safe_push (ce_s, heap, tem, &rhs);
4207 rhs = *VEC_index (ce_s, tem, 0);
4208 VEC_free(ce_s, heap, tem);
4210 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
4211 process_constraint (new_constraint (*lhsp, rhs));
4214 /* If we pass the result decl by reference, honor that. */
4217 && DECL_RESULT (fndecl)
4218 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4220 struct constraint_expr lhs;
4221 struct constraint_expr *rhsp;
4223 get_constraint_for_address_of (lhsop, &rhsc);
4224 lhs = get_function_part_constraint (fi, fi_result);
4225 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
4226 process_constraint (new_constraint (lhs, *rhsp));
4227 VEC_free (ce_s, heap, rhsc);
4230 /* If we use a static chain, pass it along. */
4231 if (gimple_call_chain (t))
4233 struct constraint_expr lhs;
4234 struct constraint_expr *rhsp;
4236 get_constraint_for (gimple_call_chain (t), &rhsc);
4237 lhs = get_function_part_constraint (fi, fi_static_chain);
4238 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
4239 process_constraint (new_constraint (lhs, *rhsp));
4243 /* Otherwise, just a regular assignment statement. Only care about
4244 operations with pointer result, others are dealt with as escape
4245 points if they have pointer operands. */
4246 else if (is_gimple_assign (t)
4247 && could_have_pointers (gimple_assign_lhs (t)))
4249 /* Otherwise, just a regular assignment statement. */
4250 tree lhsop = gimple_assign_lhs (t);
4251 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4253 if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4254 do_structure_copy (lhsop, rhsop);
4257 struct constraint_expr temp;
4258 get_constraint_for (lhsop, &lhsc);
4260 if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
4261 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4262 gimple_assign_rhs2 (t), &rhsc);
4263 else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
4264 && !(POINTER_TYPE_P (gimple_expr_type (t))
4265 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4266 || gimple_assign_single_p (t))
4267 get_constraint_for (rhsop, &rhsc);
4270 temp.type = ADDRESSOF;
4271 temp.var = anything_id;
4273 VEC_safe_push (ce_s, heap, rhsc, &temp);
4275 process_all_all_constraints (lhsc, rhsc);
4277 /* If there is a store to a global variable the rhs escapes. */
4278 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4280 && is_global_var (lhsop)
4282 || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4283 make_escape_constraint (rhsop);
4284 /* If this is a conversion of a non-restrict pointer to a
4285 restrict pointer track it with a new heapvar. */
4286 else if (gimple_assign_cast_p (t)
4287 && POINTER_TYPE_P (TREE_TYPE (rhsop))
4288 && POINTER_TYPE_P (TREE_TYPE (lhsop))
4289 && !TYPE_RESTRICT (TREE_TYPE (rhsop))
4290 && TYPE_RESTRICT (TREE_TYPE (lhsop)))
4291 make_constraint_from_restrict (get_vi_for_tree (lhsop),
4294 /* For conversions of pointers to non-pointers the pointer escapes. */
4295 else if (gimple_assign_cast_p (t)
4296 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (t)))
4297 && !POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (t))))
4299 make_escape_constraint (gimple_assign_rhs1 (t));
4301 /* Handle escapes through return. */
4302 else if (gimple_code (t) == GIMPLE_RETURN
4303 && gimple_return_retval (t) != NULL_TREE
4304 && could_have_pointers (gimple_return_retval (t)))
4308 || !(fi = get_vi_for_tree (cfun->decl)))
4309 make_escape_constraint (gimple_return_retval (t));
4310 else if (in_ipa_mode
4313 struct constraint_expr lhs ;
4314 struct constraint_expr *rhsp;
4317 lhs = get_function_part_constraint (fi, fi_result);
4318 get_constraint_for (gimple_return_retval (t), &rhsc);
4319 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4320 process_constraint (new_constraint (lhs, *rhsp));
4323 /* Handle asms conservatively by adding escape constraints to everything. */
4324 else if (gimple_code (t) == GIMPLE_ASM)
4326 unsigned i, noutputs;
4327 const char **oconstraints;
4328 const char *constraint;
4329 bool allows_mem, allows_reg, is_inout;
4331 noutputs = gimple_asm_noutputs (t);
4332 oconstraints = XALLOCAVEC (const char *, noutputs);
4334 for (i = 0; i < noutputs; ++i)
4336 tree link = gimple_asm_output_op (t, i);
4337 tree op = TREE_VALUE (link);
4339 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4340 oconstraints[i] = constraint;
4341 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4342 &allows_reg, &is_inout);
4344 /* A memory constraint makes the address of the operand escape. */
4345 if (!allows_reg && allows_mem)
4346 make_escape_constraint (build_fold_addr_expr (op));
4348 /* The asm may read global memory, so outputs may point to
4349 any global memory. */
4350 if (op && could_have_pointers (op))
4352 VEC(ce_s, heap) *lhsc = NULL;
4353 struct constraint_expr rhsc, *lhsp;
4355 get_constraint_for (op, &lhsc);
4356 rhsc.var = nonlocal_id;
4359 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
4360 process_constraint (new_constraint (*lhsp, rhsc));
4361 VEC_free (ce_s, heap, lhsc);
4364 for (i = 0; i < gimple_asm_ninputs (t); ++i)
4366 tree link = gimple_asm_input_op (t, i);
4367 tree op = TREE_VALUE (link);
4369 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4371 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4372 &allows_mem, &allows_reg);
4374 /* A memory constraint makes the address of the operand escape. */
4375 if (!allows_reg && allows_mem)
4376 make_escape_constraint (build_fold_addr_expr (op));
4377 /* Strictly we'd only need the constraint to ESCAPED if
4378 the asm clobbers memory, otherwise using something
4379 along the lines of per-call clobbers/uses would be enough. */
4380 else if (op && could_have_pointers (op))
4381 make_escape_constraint (op);
4385 VEC_free (ce_s, heap, rhsc);
4386 VEC_free (ce_s, heap, lhsc);
4390 /* Create a constraint adding to the clobber set of FI the memory
4391 pointed to by PTR. */
4394 process_ipa_clobber (varinfo_t fi, tree ptr)
4396 VEC(ce_s, heap) *ptrc = NULL;
4397 struct constraint_expr *c, lhs;
4399 get_constraint_for (ptr, &ptrc);
4400 lhs = get_function_part_constraint (fi, fi_clobbers);
4401 for (i = 0; VEC_iterate (ce_s, ptrc, i, c); i++)
4402 process_constraint (new_constraint (lhs, *c));
4403 VEC_free (ce_s, heap, ptrc);
4406 /* Walk statement T setting up clobber and use constraints according to the
4407 references found in T. This function is a main part of the
4408 IPA constraint builder. */
4411 find_func_clobbers (gimple origt)
4414 VEC(ce_s, heap) *lhsc = NULL;
4415 VEC(ce_s, heap) *rhsc = NULL;
4418 /* Add constraints for clobbered/used in IPA mode.
4419 We are not interested in what automatic variables are clobbered
4420 or used as we only use the information in the caller to which
4421 they do not escape. */
4422 gcc_assert (in_ipa_mode);
4424 /* If the stmt refers to memory in any way it better had a VUSE. */
4425 if (gimple_vuse (t) == NULL_TREE)
4428 /* We'd better have function information for the current function. */
4429 fi = lookup_vi_for_tree (cfun->decl);
4430 gcc_assert (fi != NULL);
4432 /* Account for stores in assignments and calls. */
4433 if (gimple_vdef (t) != NULL_TREE
4434 && gimple_has_lhs (t))
4436 tree lhs = gimple_get_lhs (t);
4438 while (handled_component_p (tem))
4439 tem = TREE_OPERAND (tem, 0);
4441 && !auto_var_in_fn_p (tem, cfun->decl))
4442 || INDIRECT_REF_P (tem))
4444 struct constraint_expr lhsc, *rhsp;
4446 lhsc = get_function_part_constraint (fi, fi_clobbers);
4447 get_constraint_for_address_of (lhs, &rhsc);
4448 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4449 process_constraint (new_constraint (lhsc, *rhsp));
4450 VEC_free (ce_s, heap, rhsc);
4454 /* Account for uses in assigments and returns. */
4455 if (gimple_assign_single_p (t)
4456 || (gimple_code (t) == GIMPLE_RETURN
4457 && gimple_return_retval (t) != NULL_TREE))
4459 tree rhs = (gimple_assign_single_p (t)
4460 ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
4462 while (handled_component_p (tem))
4463 tem = TREE_OPERAND (tem, 0);
4465 && !auto_var_in_fn_p (tem, cfun->decl))
4466 || INDIRECT_REF_P (tem))
4468 struct constraint_expr lhs, *rhsp;
4470 lhs = get_function_part_constraint (fi, fi_uses);
4471 get_constraint_for_address_of (rhs, &rhsc);
4472 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4473 process_constraint (new_constraint (lhs, *rhsp));
4474 VEC_free (ce_s, heap, rhsc);
4478 if (is_gimple_call (t))
4480 varinfo_t cfi = NULL;
4481 tree decl = gimple_call_fndecl (t);
4482 struct constraint_expr lhs, rhs;
4485 /* For builtins we do not have separate function info. For those
4486 we do not generate escapes for we have to generate clobbers/uses. */
4488 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
4489 switch (DECL_FUNCTION_CODE (decl))
4491 /* The following functions use and clobber memory pointed to
4492 by their arguments. */
4493 case BUILT_IN_STRCPY:
4494 case BUILT_IN_STRNCPY:
4495 case BUILT_IN_BCOPY:
4496 case BUILT_IN_MEMCPY:
4497 case BUILT_IN_MEMMOVE:
4498 case BUILT_IN_MEMPCPY:
4499 case BUILT_IN_STPCPY:
4500 case BUILT_IN_STPNCPY:
4501 case BUILT_IN_STRCAT:
4502 case BUILT_IN_STRNCAT:
4504 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4505 == BUILT_IN_BCOPY ? 1 : 0));
4506 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4507 == BUILT_IN_BCOPY ? 0 : 1));
4509 struct constraint_expr *rhsp, *lhsp;
4510 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4511 lhs = get_function_part_constraint (fi, fi_clobbers);
4512 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
4513 process_constraint (new_constraint (lhs, *lhsp));
4514 VEC_free (ce_s, heap, lhsc);
4515 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4516 lhs = get_function_part_constraint (fi, fi_uses);
4517 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4518 process_constraint (new_constraint (lhs, *rhsp));
4519 VEC_free (ce_s, heap, rhsc);
4522 /* The following function clobbers memory pointed to by
4524 case BUILT_IN_MEMSET:
4526 tree dest = gimple_call_arg (t, 0);
4529 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4530 lhs = get_function_part_constraint (fi, fi_clobbers);
4531 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
4532 process_constraint (new_constraint (lhs, *lhsp));
4533 VEC_free (ce_s, heap, lhsc);
4536 /* The following functions clobber their second and third
4538 case BUILT_IN_SINCOS:
4539 case BUILT_IN_SINCOSF:
4540 case BUILT_IN_SINCOSL:
4542 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4543 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4546 /* The following functions clobber their second argument. */
4547 case BUILT_IN_FREXP:
4548 case BUILT_IN_FREXPF:
4549 case BUILT_IN_FREXPL:
4550 case BUILT_IN_LGAMMA_R:
4551 case BUILT_IN_LGAMMAF_R:
4552 case BUILT_IN_LGAMMAL_R:
4553 case BUILT_IN_GAMMA_R:
4554 case BUILT_IN_GAMMAF_R:
4555 case BUILT_IN_GAMMAL_R:
4557 case BUILT_IN_MODFF:
4558 case BUILT_IN_MODFL:
4560 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4563 /* The following functions clobber their third argument. */
4564 case BUILT_IN_REMQUO:
4565 case BUILT_IN_REMQUOF:
4566 case BUILT_IN_REMQUOL:
4568 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4571 /* The following functions neither read nor clobber memory. */
4574 /* Trampolines are of no interest to us. */
4575 case BUILT_IN_INIT_TRAMPOLINE:
4576 case BUILT_IN_ADJUST_TRAMPOLINE:
4578 case BUILT_IN_VA_START:
4579 case BUILT_IN_VA_END:
4581 /* printf-style functions may have hooks to set pointers to
4582 point to somewhere into the generated string. Leave them
4583 for a later excercise... */
4585 /* Fallthru to general call handling. */;
4588 /* Parameters passed by value are used. */
4589 lhs = get_function_part_constraint (fi, fi_uses);
4590 for (i = 0; i < gimple_call_num_args (t); i++)
4592 struct constraint_expr *rhsp;
4593 tree arg = gimple_call_arg (t, i);
4595 if (TREE_CODE (arg) == SSA_NAME
4596 || is_gimple_min_invariant (arg))
4599 get_constraint_for_address_of (arg, &rhsc);
4600 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
4601 process_constraint (new_constraint (lhs, *rhsp));
4602 VEC_free (ce_s, heap, rhsc);
4605 /* Build constraints for propagating clobbers/uses along the
4607 cfi = get_fi_for_callee (t);
4608 if (cfi->id == anything_id)
4610 if (gimple_vdef (t))
4611 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4613 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4618 /* For callees without function info (that's external functions),
4619 ESCAPED is clobbered and used. */
4620 if (gimple_call_fndecl (t)
4621 && !cfi->is_fn_info)
4625 if (gimple_vdef (t))
4626 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4628 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
4630 /* Also honor the call statement use/clobber info. */
4631 if ((vi = lookup_call_clobber_vi (t)) != NULL)
4632 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4634 if ((vi = lookup_call_use_vi (t)) != NULL)
4635 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
4640 /* Otherwise the caller clobbers and uses what the callee does.
4641 ??? This should use a new complex constraint that filters
4642 local variables of the callee. */
4643 if (gimple_vdef (t))
4645 lhs = get_function_part_constraint (fi, fi_clobbers);
4646 rhs = get_function_part_constraint (cfi, fi_clobbers);
4647 process_constraint (new_constraint (lhs, rhs));
4649 lhs = get_function_part_constraint (fi, fi_uses);
4650 rhs = get_function_part_constraint (cfi, fi_uses);
4651 process_constraint (new_constraint (lhs, rhs));
4653 else if (gimple_code (t) == GIMPLE_ASM)
4655 /* ??? Ick. We can do better. */
4656 if (gimple_vdef (t))
4657 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4659 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4663 VEC_free (ce_s, heap, rhsc);
4667 /* Find the first varinfo in the same variable as START that overlaps with
4668 OFFSET. Return NULL if we can't find one. */
4671 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
4673 /* If the offset is outside of the variable, bail out. */
4674 if (offset >= start->fullsize)
4677 /* If we cannot reach offset from start, lookup the first field
4678 and start from there. */
4679 if (start->offset > offset)
4680 start = lookup_vi_for_tree (start->decl);
4684 /* We may not find a variable in the field list with the actual
4685 offset when when we have glommed a structure to a variable.
4686 In that case, however, offset should still be within the size
4688 if (offset >= start->offset
4689 && (offset - start->offset) < start->size)
4698 /* Find the first varinfo in the same variable as START that overlaps with
4699 OFFSET. If there is no such varinfo the varinfo directly preceding
4700 OFFSET is returned. */
4703 first_or_preceding_vi_for_offset (varinfo_t start,
4704 unsigned HOST_WIDE_INT offset)
4706 /* If we cannot reach offset from start, lookup the first field
4707 and start from there. */
4708 if (start->offset > offset)
4709 start = lookup_vi_for_tree (start->decl);
4711 /* We may not find a variable in the field list with the actual
4712 offset when when we have glommed a structure to a variable.
4713 In that case, however, offset should still be within the size
4715 If we got beyond the offset we look for return the field
4716 directly preceding offset which may be the last field. */
4718 && offset >= start->offset
4719 && !((offset - start->offset) < start->size))
4720 start = start->next;
4726 /* Insert the varinfo FIELD into the field list for BASE, at the front
4730 insert_into_field_list (varinfo_t base, varinfo_t field)
4732 varinfo_t prev = base;
4733 varinfo_t curr = base->next;
4739 /* This structure is used during pushing fields onto the fieldstack
4740 to track the offset of the field, since bitpos_of_field gives it
4741 relative to its immediate containing type, and we want it relative
4742 to the ultimate containing object. */
4746 /* Offset from the base of the base containing object to this field. */
4747 HOST_WIDE_INT offset;
4749 /* Size, in bits, of the field. */
4750 unsigned HOST_WIDE_INT size;
4752 unsigned has_unknown_size : 1;
4754 unsigned may_have_pointers : 1;
4756 unsigned only_restrict_pointers : 1;
4758 typedef struct fieldoff fieldoff_s;
4760 DEF_VEC_O(fieldoff_s);
4761 DEF_VEC_ALLOC_O(fieldoff_s,heap);
4763 /* qsort comparison function for two fieldoff's PA and PB */
4766 fieldoff_compare (const void *pa, const void *pb)
4768 const fieldoff_s *foa = (const fieldoff_s *)pa;
4769 const fieldoff_s *fob = (const fieldoff_s *)pb;
4770 unsigned HOST_WIDE_INT foasize, fobsize;
4772 if (foa->offset < fob->offset)
4774 else if (foa->offset > fob->offset)
4777 foasize = foa->size;
4778 fobsize = fob->size;
4779 if (foasize < fobsize)
4781 else if (foasize > fobsize)
4786 /* Sort a fieldstack according to the field offset and sizes. */
4788 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4790 qsort (VEC_address (fieldoff_s, fieldstack),
4791 VEC_length (fieldoff_s, fieldstack),
4792 sizeof (fieldoff_s),
4796 /* Return true if V is a tree that we can have subvars for.
4797 Normally, this is any aggregate type. Also complex
4798 types which are not gimple registers can have subvars. */
4801 var_can_have_subvars (const_tree v)
4803 /* Volatile variables should never have subvars. */
4804 if (TREE_THIS_VOLATILE (v))
4807 /* Non decls or memory tags can never have subvars. */
4811 /* Aggregates without overlapping fields can have subvars. */
4812 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4818 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4819 the fields of TYPE onto fieldstack, recording their offsets along
4822 OFFSET is used to keep track of the offset in this entire
4823 structure, rather than just the immediately containing structure.
4824 Returns the number of fields pushed. */
4827 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4828 HOST_WIDE_INT offset)
4833 if (TREE_CODE (type) != RECORD_TYPE)
4836 /* If the vector of fields is growing too big, bail out early.
4837 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
4839 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4842 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4843 if (TREE_CODE (field) == FIELD_DECL)
4847 HOST_WIDE_INT foff = bitpos_of_field (field);
4849 if (!var_can_have_subvars (field)
4850 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4851 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
4853 else if (!(pushed = push_fields_onto_fieldstack
4854 (TREE_TYPE (field), fieldstack, offset + foff))
4855 && (DECL_SIZE (field)
4856 && !integer_zerop (DECL_SIZE (field))))
4857 /* Empty structures may have actual size, like in C++. So
4858 see if we didn't push any subfields and the size is
4859 nonzero, push the field onto the stack. */
4864 fieldoff_s *pair = NULL;
4865 bool has_unknown_size = false;
4867 if (!VEC_empty (fieldoff_s, *fieldstack))
4868 pair = VEC_last (fieldoff_s, *fieldstack);
4870 if (!DECL_SIZE (field)
4871 || !host_integerp (DECL_SIZE (field), 1))
4872 has_unknown_size = true;
4874 /* If adjacent fields do not contain pointers merge them. */
4876 && !pair->may_have_pointers
4877 && !could_have_pointers (field)
4878 && !pair->has_unknown_size
4879 && !has_unknown_size
4880 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
4882 pair = VEC_last (fieldoff_s, *fieldstack);
4883 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
4887 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4888 pair->offset = offset + foff;
4889 pair->has_unknown_size = has_unknown_size;
4890 if (!has_unknown_size)
4891 pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
4894 pair->may_have_pointers = could_have_pointers (field);
4895 pair->only_restrict_pointers
4896 = (!has_unknown_size
4897 && POINTER_TYPE_P (TREE_TYPE (field))
4898 && TYPE_RESTRICT (TREE_TYPE (field)));
4909 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4910 if it is a varargs function. */
4913 count_num_arguments (tree decl, bool *is_varargs)
4915 unsigned int num = 0;
4918 /* Capture named arguments for K&R functions. They do not
4919 have a prototype and thus no TYPE_ARG_TYPES. */
4920 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
4923 /* Check if the function has variadic arguments. */
4924 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
4925 if (TREE_VALUE (t) == void_type_node)
4933 /* Creation function node for DECL, using NAME, and return the index
4934 of the variable we've created for the function. */
4937 create_function_info_for (tree decl, const char *name)
4939 struct function *fn = DECL_STRUCT_FUNCTION (decl);
4940 varinfo_t vi, prev_vi;
4943 bool is_varargs = false;
4944 unsigned int num_args = count_num_arguments (decl, &is_varargs);
4946 /* Create the variable info. */
4948 vi = new_var_info (decl, name);
4951 vi->fullsize = fi_parm_base + num_args;
4953 vi->may_have_pointers = false;
4956 insert_vi_for_tree (vi->decl, vi);
4962 /* Create a variable for things the function clobbers and one for
4963 things the function uses. */
4965 varinfo_t clobbervi, usevi;
4966 const char *newname;
4969 asprintf (&tempname, "%s.clobber", name);
4970 newname = ggc_strdup (tempname);
4973 clobbervi = new_var_info (NULL, newname);
4974 clobbervi->offset = fi_clobbers;
4975 clobbervi->size = 1;
4976 clobbervi->fullsize = vi->fullsize;
4977 clobbervi->is_full_var = true;
4978 clobbervi->is_global_var = false;
4979 gcc_assert (prev_vi->offset < clobbervi->offset);
4980 prev_vi->next = clobbervi;
4981 prev_vi = clobbervi;
4984 asprintf (&tempname, "%s.use", name);
4985 newname = ggc_strdup (tempname);
4988 usevi = new_var_info (NULL, newname);
4989 usevi->offset = fi_uses;
4991 usevi->fullsize = vi->fullsize;
4992 usevi->is_full_var = true;
4993 usevi->is_global_var = false;
4994 gcc_assert (prev_vi->offset < usevi->offset);
4995 prev_vi->next = usevi;
5000 /* And one for the static chain. */
5001 if (fn->static_chain_decl != NULL_TREE)
5004 const char *newname;
5007 asprintf (&tempname, "%s.chain", name);
5008 newname = ggc_strdup (tempname);
5011 chainvi = new_var_info (fn->static_chain_decl, newname);
5012 chainvi->offset = fi_static_chain;
5014 chainvi->fullsize = vi->fullsize;
5015 chainvi->is_full_var = true;
5016 chainvi->is_global_var = false;
5017 gcc_assert (prev_vi->offset < chainvi->offset);
5018 prev_vi->next = chainvi;
5021 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5024 /* Create a variable for the return var. */
5025 if (DECL_RESULT (decl) != NULL
5026 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5029 const char *newname;
5031 tree resultdecl = decl;
5033 if (DECL_RESULT (decl))
5034 resultdecl = DECL_RESULT (decl);
5036 asprintf (&tempname, "%s.result", name);
5037 newname = ggc_strdup (tempname);
5040 resultvi = new_var_info (resultdecl, newname);
5041 resultvi->offset = fi_result;
5043 resultvi->fullsize = vi->fullsize;
5044 resultvi->is_full_var = true;
5045 if (DECL_RESULT (decl))
5046 resultvi->may_have_pointers = could_have_pointers (DECL_RESULT (decl));
5047 gcc_assert (prev_vi->offset < resultvi->offset);
5048 prev_vi->next = resultvi;
5051 if (DECL_RESULT (decl))
5052 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5055 /* Set up variables for each argument. */
5056 arg = DECL_ARGUMENTS (decl);
5057 for (i = 0; i < num_args; i++)
5060 const char *newname;
5062 tree argdecl = decl;
5067 asprintf (&tempname, "%s.arg%d", name, i);
5068 newname = ggc_strdup (tempname);
5071 argvi = new_var_info (argdecl, newname);
5072 argvi->offset = fi_parm_base + i;
5074 argvi->is_full_var = true;
5075 argvi->fullsize = vi->fullsize;
5077 argvi->may_have_pointers = could_have_pointers (arg);
5078 gcc_assert (prev_vi->offset < argvi->offset);
5079 prev_vi->next = argvi;
5084 insert_vi_for_tree (arg, argvi);
5085 arg = TREE_CHAIN (arg);
5089 /* Add one representative for all further args. */
5093 const char *newname;
5097 asprintf (&tempname, "%s.varargs", name);
5098 newname = ggc_strdup (tempname);
5101 /* We need sth that can be pointed to for va_start. */
5102 decl = create_tmp_var_raw (ptr_type_node, name);
5105 argvi = new_var_info (decl, newname);
5106 argvi->offset = fi_parm_base + num_args;
5108 argvi->is_full_var = true;
5109 argvi->is_heap_var = true;
5110 argvi->fullsize = vi->fullsize;
5111 gcc_assert (prev_vi->offset < argvi->offset);
5112 prev_vi->next = argvi;
5121 /* Return true if FIELDSTACK contains fields that overlap.
5122 FIELDSTACK is assumed to be sorted by offset. */
5125 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
5127 fieldoff_s *fo = NULL;
5129 HOST_WIDE_INT lastoffset = -1;
5131 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
5133 if (fo->offset == lastoffset)
5135 lastoffset = fo->offset;
5140 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5141 This will also create any varinfo structures necessary for fields
5145 create_variable_info_for (tree decl, const char *name)
5148 tree decl_type = TREE_TYPE (decl);
5149 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5150 VEC (fieldoff_s,heap) *fieldstack = NULL;
5152 if (var_can_have_subvars (decl) && use_field_sensitive)
5153 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5155 /* If the variable doesn't have subvars, we may end up needing to
5156 sort the field list and create fake variables for all the
5158 vi = new_var_info (decl, name);
5160 vi->may_have_pointers = could_have_pointers (decl);
5162 || !host_integerp (declsize, 1))
5164 vi->is_unknown_size_var = true;
5170 vi->fullsize = TREE_INT_CST_LOW (declsize);
5171 vi->size = vi->fullsize;
5174 insert_vi_for_tree (vi->decl, vi);
5176 /* ??? The setting of vi->may_have_pointers is too conservative here
5177 and may get refined below. Thus we have superfluous constraints
5178 here sometimes which triggers the commented assert in
5179 dump_sa_points_to_info. */
5180 if (vi->is_global_var
5181 && vi->may_have_pointers)
5183 /* Mark global restrict qualified pointers. */
5184 if (POINTER_TYPE_P (TREE_TYPE (decl))
5185 && TYPE_RESTRICT (TREE_TYPE (decl)))
5186 make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
5188 /* For escaped variables initialize them from nonlocal. */
5190 || DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
5191 make_copy_constraint (vi, nonlocal_id);
5193 /* If this is a global variable with an initializer and we are in
5194 IPA mode generate constraints for it. In non-IPA mode
5195 the initializer from nonlocal is all we need. */
5197 && DECL_INITIAL (vi->decl))
5199 VEC (ce_s, heap) *rhsc = NULL;
5200 struct constraint_expr lhs, *rhsp;
5202 get_constraint_for (DECL_INITIAL (vi->decl), &rhsc);
5206 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
5207 process_constraint (new_constraint (lhs, *rhsp));
5208 /* If this is a variable that escapes from the unit
5209 the initializer escapes as well. */
5210 if (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
5212 lhs.var = escaped_id;
5215 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
5216 process_constraint (new_constraint (lhs, *rhsp));
5218 VEC_free (ce_s, heap, rhsc);
5219 /* ??? Force us to not use subfields. Else we'd have to parse
5220 arbitrary initializers. */
5221 VEC_free (fieldoff_s, heap, fieldstack);
5226 if (use_field_sensitive
5227 && !vi->is_unknown_size_var
5228 && var_can_have_subvars (decl)
5229 && VEC_length (fieldoff_s, fieldstack) > 1
5230 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
5232 fieldoff_s *fo = NULL;
5233 bool notokay = false;
5236 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
5238 if (fo->has_unknown_size
5246 /* We can't sort them if we have a field with a variable sized type,
5247 which will make notokay = true. In that case, we are going to return
5248 without creating varinfos for the fields anyway, so sorting them is a
5252 sort_fieldstack (fieldstack);
5253 /* Due to some C++ FE issues, like PR 22488, we might end up
5254 what appear to be overlapping fields even though they,
5255 in reality, do not overlap. Until the C++ FE is fixed,
5256 we will simply disable field-sensitivity for these cases. */
5257 notokay = check_for_overlaps (fieldstack);
5261 if (VEC_length (fieldoff_s, fieldstack) != 0)
5262 fo = VEC_index (fieldoff_s, fieldstack, 0);
5264 if (fo == NULL || notokay)
5266 vi->is_unknown_size_var = 1;
5269 vi->is_full_var = true;
5270 VEC_free (fieldoff_s, heap, fieldstack);
5274 vi->size = fo->size;
5275 vi->offset = fo->offset;
5276 vi->may_have_pointers = fo->may_have_pointers;
5277 if (vi->is_global_var
5278 && vi->may_have_pointers)
5280 if (fo->only_restrict_pointers)
5281 make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
5283 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
5284 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
5288 const char *newname = "NULL";
5293 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5294 "+" HOST_WIDE_INT_PRINT_DEC,
5295 vi->name, fo->offset, fo->size);
5296 newname = ggc_strdup (tempname);
5299 newvi = new_var_info (decl, newname);
5300 newvi->offset = fo->offset;
5301 newvi->size = fo->size;
5302 newvi->fullsize = vi->fullsize;
5303 newvi->may_have_pointers = fo->may_have_pointers;
5304 insert_into_field_list (vi, newvi);
5305 if ((newvi->is_global_var || TREE_CODE (decl) == PARM_DECL)
5306 && newvi->may_have_pointers)
5308 if (fo->only_restrict_pointers)
5309 make_constraint_from_restrict (newvi, "GLOBAL_RESTRICT");
5310 if (newvi->is_global_var && !in_ipa_mode)
5311 make_copy_constraint (newvi, nonlocal_id);
5318 vi->is_full_var = true;
5320 VEC_free (fieldoff_s, heap, fieldstack);
5325 /* Print out the points-to solution for VAR to FILE. */
5328 dump_solution_for_var (FILE *file, unsigned int var)
5330 varinfo_t vi = get_varinfo (var);
5334 /* Dump the solution for unified vars anyway, this avoids difficulties
5335 in scanning dumps in the testsuite. */
5336 fprintf (file, "%s = { ", vi->name);
5337 vi = get_varinfo (find (var));
5338 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5339 fprintf (file, "%s ", get_varinfo (i)->name);
5340 fprintf (file, "}");
5342 /* But note when the variable was unified. */
5344 fprintf (file, " same as %s", vi->name);
5346 fprintf (file, "\n");
5349 /* Print the points-to solution for VAR to stdout. */
5352 debug_solution_for_var (unsigned int var)
5354 dump_solution_for_var (stdout, var);
5357 /* Create varinfo structures for all of the variables in the
5358 function for intraprocedural mode. */
5361 intra_create_variable_infos (void)
5365 /* For each incoming pointer argument arg, create the constraint ARG
5366 = NONLOCAL or a dummy variable if it is a restrict qualified
5367 passed-by-reference argument. */
5368 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
5372 if (!could_have_pointers (t))
5375 /* For restrict qualified pointers to objects passed by
5376 reference build a real representative for the pointed-to object. */
5377 if (DECL_BY_REFERENCE (t)
5378 && POINTER_TYPE_P (TREE_TYPE (t))
5379 && TYPE_RESTRICT (TREE_TYPE (t)))
5381 struct constraint_expr lhsc, rhsc;
5383 tree heapvar = heapvar_lookup (t, 0);
5384 if (heapvar == NULL_TREE)
5387 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
5389 DECL_EXTERNAL (heapvar) = 1;
5390 heapvar_insert (t, 0, heapvar);
5391 ann = get_var_ann (heapvar);
5392 ann->is_heapvar = 1;
5394 if (gimple_referenced_vars (cfun))
5395 add_referenced_var (heapvar);
5396 lhsc.var = get_vi_for_tree (t)->id;
5399 rhsc.var = (vi = get_vi_for_tree (heapvar))->id;
5400 rhsc.type = ADDRESSOF;
5402 process_constraint (new_constraint (lhsc, rhsc));
5403 vi->is_restrict_var = 1;
5407 for (p = get_vi_for_tree (t); p; p = p->next)
5408 if (p->may_have_pointers)
5409 make_constraint_from (p, nonlocal_id);
5410 if (POINTER_TYPE_P (TREE_TYPE (t))
5411 && TYPE_RESTRICT (TREE_TYPE (t)))
5412 make_constraint_from_restrict (get_vi_for_tree (t), "PARM_RESTRICT");
5415 /* Add a constraint for a result decl that is passed by reference. */
5416 if (DECL_RESULT (cfun->decl)
5417 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
5419 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
5421 for (p = result_vi; p; p = p->next)
5422 make_constraint_from (p, nonlocal_id);
5425 /* Add a constraint for the incoming static chain parameter. */
5426 if (cfun->static_chain_decl != NULL_TREE)
5428 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
5430 for (p = chain_vi; p; p = p->next)
5431 make_constraint_from (p, nonlocal_id);
5435 /* Structure used to put solution bitmaps in a hashtable so they can
5436 be shared among variables with the same points-to set. */
5438 typedef struct shared_bitmap_info
5442 } *shared_bitmap_info_t;
5443 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5445 static htab_t shared_bitmap_table;
5447 /* Hash function for a shared_bitmap_info_t */
5450 shared_bitmap_hash (const void *p)
5452 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5453 return bi->hashcode;
5456 /* Equality function for two shared_bitmap_info_t's. */
5459 shared_bitmap_eq (const void *p1, const void *p2)
5461 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5462 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5463 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5466 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5467 existing instance if there is one, NULL otherwise. */
5470 shared_bitmap_lookup (bitmap pt_vars)
5473 struct shared_bitmap_info sbi;
5475 sbi.pt_vars = pt_vars;
5476 sbi.hashcode = bitmap_hash (pt_vars);
5478 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5479 sbi.hashcode, NO_INSERT);
5483 return ((shared_bitmap_info_t) *slot)->pt_vars;
5487 /* Add a bitmap to the shared bitmap hashtable. */
5490 shared_bitmap_add (bitmap pt_vars)
5493 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5495 sbi->pt_vars = pt_vars;
5496 sbi->hashcode = bitmap_hash (pt_vars);
5498 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
5499 sbi->hashcode, INSERT);
5500 gcc_assert (!*slot);
5501 *slot = (void *) sbi;
5505 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5508 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
5513 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
5515 varinfo_t vi = get_varinfo (i);
5517 /* The only artificial variables that are allowed in a may-alias
5518 set are heap variables. */
5519 if (vi->is_artificial_var && !vi->is_heap_var)
5522 if (TREE_CODE (vi->decl) == VAR_DECL
5523 || TREE_CODE (vi->decl) == PARM_DECL
5524 || TREE_CODE (vi->decl) == RESULT_DECL)
5526 /* If we are in IPA mode we will not recompute points-to
5527 sets after inlining so make sure they stay valid. */
5529 && !DECL_PT_UID_SET_P (vi->decl))
5530 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
5532 /* Add the decl to the points-to set. Note that the points-to
5533 set contains global variables. */
5534 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
5535 if (vi->is_global_var)
5536 pt->vars_contains_global = true;
5542 /* Compute the points-to solution *PT for the variable VI. */
5545 find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
5549 bitmap finished_solution;
5553 memset (pt, 0, sizeof (struct pt_solution));
5555 /* This variable may have been collapsed, let's get the real
5557 vi = get_varinfo (find (orig_vi->id));
5559 /* Translate artificial variables into SSA_NAME_PTR_INFO
5561 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5563 varinfo_t vi = get_varinfo (i);
5565 if (vi->is_artificial_var)
5567 if (vi->id == nothing_id)
5569 else if (vi->id == escaped_id)
5572 pt->ipa_escaped = 1;
5576 else if (vi->id == nonlocal_id)
5578 else if (vi->is_heap_var)
5579 /* We represent heapvars in the points-to set properly. */
5581 else if (vi->id == readonly_id)
5584 else if (vi->id == anything_id
5585 || vi->id == integer_id)
5588 if (vi->is_restrict_var)
5589 pt->vars_contains_restrict = true;
5592 /* Instead of doing extra work, simply do not create
5593 elaborate points-to information for pt_anything pointers. */
5595 && (orig_vi->is_artificial_var
5596 || !pt->vars_contains_restrict))
5599 /* Share the final set of variables when possible. */
5600 finished_solution = BITMAP_GGC_ALLOC ();
5601 stats.points_to_sets_created++;
5603 set_uids_in_ptset (finished_solution, vi->solution, pt);
5604 result = shared_bitmap_lookup (finished_solution);
5607 shared_bitmap_add (finished_solution);
5608 pt->vars = finished_solution;
5613 bitmap_clear (finished_solution);
5617 /* Given a pointer variable P, fill in its points-to set. */
5620 find_what_p_points_to (tree p)
5622 struct ptr_info_def *pi;
5626 /* For parameters, get at the points-to set for the actual parm
5628 if (TREE_CODE (p) == SSA_NAME
5629 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
5630 && SSA_NAME_IS_DEFAULT_DEF (p))
5631 lookup_p = SSA_NAME_VAR (p);
5633 vi = lookup_vi_for_tree (lookup_p);
5637 pi = get_ptr_info (p);
5638 find_what_var_points_to (vi, &pi->pt);
5642 /* Query statistics for points-to solutions. */
5645 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
5646 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
5647 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
5648 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
5652 dump_pta_stats (FILE *s)
5654 fprintf (s, "\nPTA query stats:\n");
5655 fprintf (s, " pt_solution_includes: "
5656 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5657 HOST_WIDE_INT_PRINT_DEC" queries\n",
5658 pta_stats.pt_solution_includes_no_alias,
5659 pta_stats.pt_solution_includes_no_alias
5660 + pta_stats.pt_solution_includes_may_alias);
5661 fprintf (s, " pt_solutions_intersect: "
5662 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5663 HOST_WIDE_INT_PRINT_DEC" queries\n",
5664 pta_stats.pt_solutions_intersect_no_alias,
5665 pta_stats.pt_solutions_intersect_no_alias
5666 + pta_stats.pt_solutions_intersect_may_alias);
5670 /* Reset the points-to solution *PT to a conservative default
5671 (point to anything). */
5674 pt_solution_reset (struct pt_solution *pt)
5676 memset (pt, 0, sizeof (struct pt_solution));
5677 pt->anything = true;
5680 /* Set the points-to solution *PT to point only to the variables
5681 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
5682 global variables and VARS_CONTAINS_RESTRICT specifies whether
5683 it contains restrict tag variables. */
5686 pt_solution_set (struct pt_solution *pt, bitmap vars,
5687 bool vars_contains_global, bool vars_contains_restrict)
5689 memset (pt, 0, sizeof (struct pt_solution));
5691 pt->vars_contains_global = vars_contains_global;
5692 pt->vars_contains_restrict = vars_contains_restrict;
5695 /* Computes the union of the points-to solutions *DEST and *SRC and
5696 stores the result in *DEST. This changes the points-to bitmap
5697 of *DEST and thus may not be used if that might be shared.
5698 The points-to bitmap of *SRC and *DEST will not be shared after
5699 this function if they were not before. */
5702 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
5704 dest->anything |= src->anything;
5707 pt_solution_reset (dest);
5711 dest->nonlocal |= src->nonlocal;
5712 dest->escaped |= src->escaped;
5713 dest->ipa_escaped |= src->ipa_escaped;
5714 dest->null |= src->null;
5715 dest->vars_contains_global |= src->vars_contains_global;
5716 dest->vars_contains_restrict |= src->vars_contains_restrict;
5721 dest->vars = BITMAP_GGC_ALLOC ();
5722 bitmap_ior_into (dest->vars, src->vars);
5725 /* Return true if the points-to solution *PT is empty. */
5728 pt_solution_empty_p (struct pt_solution *pt)
5735 && !bitmap_empty_p (pt->vars))
5738 /* If the solution includes ESCAPED, check if that is empty. */
5740 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
5743 /* If the solution includes ESCAPED, check if that is empty. */
5745 && !pt_solution_empty_p (&ipa_escaped_pt))
5751 /* Return true if the points-to solution *PT includes global memory. */
5754 pt_solution_includes_global (struct pt_solution *pt)
5758 || pt->vars_contains_global)
5762 return pt_solution_includes_global (&cfun->gimple_df->escaped);
5764 if (pt->ipa_escaped)
5765 return pt_solution_includes_global (&ipa_escaped_pt);
5767 /* ??? This predicate is not correct for the IPA-PTA solution
5768 as we do not properly distinguish between unit escape points
5769 and global variables. */
5770 if (cfun->gimple_df->ipa_pta)
5776 /* Return true if the points-to solution *PT includes the variable
5777 declaration DECL. */
5780 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
5786 && is_global_var (decl))
5790 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
5793 /* If the solution includes ESCAPED, check it. */
5795 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
5798 /* If the solution includes ESCAPED, check it. */
5800 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
5807 pt_solution_includes (struct pt_solution *pt, const_tree decl)
5809 bool res = pt_solution_includes_1 (pt, decl);
5811 ++pta_stats.pt_solution_includes_may_alias;
5813 ++pta_stats.pt_solution_includes_no_alias;
5817 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
5821 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
5823 if (pt1->anything || pt2->anything)
5826 /* If either points to unknown global memory and the other points to
5827 any global memory they alias. */
5830 || pt2->vars_contains_global))
5832 && pt1->vars_contains_global))
5835 /* Check the escaped solution if required. */
5836 if ((pt1->escaped || pt2->escaped)
5837 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
5839 /* If both point to escaped memory and that solution
5840 is not empty they alias. */
5841 if (pt1->escaped && pt2->escaped)
5844 /* If either points to escaped memory see if the escaped solution
5845 intersects with the other. */
5847 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
5849 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
5853 /* Check the escaped solution if required.
5854 ??? Do we need to check the local against the IPA escaped sets? */
5855 if ((pt1->ipa_escaped || pt2->ipa_escaped)
5856 && !pt_solution_empty_p (&ipa_escaped_pt))
5858 /* If both point to escaped memory and that solution
5859 is not empty they alias. */
5860 if (pt1->ipa_escaped && pt2->ipa_escaped)
5863 /* If either points to escaped memory see if the escaped solution
5864 intersects with the other. */
5865 if ((pt1->ipa_escaped
5866 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
5867 || (pt2->ipa_escaped
5868 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
5872 /* Now both pointers alias if their points-to solution intersects. */
5875 && bitmap_intersect_p (pt1->vars, pt2->vars));
5879 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
5881 bool res = pt_solutions_intersect_1 (pt1, pt2);
5883 ++pta_stats.pt_solutions_intersect_may_alias;
5885 ++pta_stats.pt_solutions_intersect_no_alias;
5889 /* Return true if both points-to solutions PT1 and PT2 for two restrict
5890 qualified pointers are possibly based on the same pointer. */
5893 pt_solutions_same_restrict_base (struct pt_solution *pt1,
5894 struct pt_solution *pt2)
5896 /* If we deal with points-to solutions of two restrict qualified
5897 pointers solely rely on the pointed-to variable bitmap intersection.
5898 For two pointers that are based on each other the bitmaps will
5900 if (pt1->vars_contains_restrict
5901 && pt2->vars_contains_restrict)
5903 gcc_assert (pt1->vars && pt2->vars);
5904 return bitmap_intersect_p (pt1->vars, pt2->vars);
5911 /* Dump points-to information to OUTFILE. */
5914 dump_sa_points_to_info (FILE *outfile)
5918 fprintf (outfile, "\nPoints-to sets\n\n");
5920 if (dump_flags & TDF_STATS)
5922 fprintf (outfile, "Stats:\n");
5923 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
5924 fprintf (outfile, "Non-pointer vars: %d\n",
5925 stats.nonpointer_vars);
5926 fprintf (outfile, "Statically unified vars: %d\n",
5927 stats.unified_vars_static);
5928 fprintf (outfile, "Dynamically unified vars: %d\n",
5929 stats.unified_vars_dynamic);
5930 fprintf (outfile, "Iterations: %d\n", stats.iterations);
5931 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
5932 fprintf (outfile, "Number of implicit edges: %d\n",
5933 stats.num_implicit_edges);
5936 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5938 varinfo_t vi = get_varinfo (i);
5939 if (!vi->may_have_pointers)
5941 dump_solution_for_var (outfile, i);
5946 /* Debug points-to information to stderr. */
5949 debug_sa_points_to_info (void)
5951 dump_sa_points_to_info (stderr);
5955 /* Initialize the always-existing constraint variables for NULL
5956 ANYTHING, READONLY, and INTEGER */
5959 init_base_vars (void)
5961 struct constraint_expr lhs, rhs;
5962 varinfo_t var_anything;
5963 varinfo_t var_nothing;
5964 varinfo_t var_readonly;
5965 varinfo_t var_escaped;
5966 varinfo_t var_nonlocal;
5967 varinfo_t var_storedanything;
5968 varinfo_t var_integer;
5970 /* Create the NULL variable, used to represent that a variable points
5972 var_nothing = new_var_info (NULL_TREE, "NULL");
5973 gcc_assert (var_nothing->id == nothing_id);
5974 var_nothing->is_artificial_var = 1;
5975 var_nothing->offset = 0;
5976 var_nothing->size = ~0;
5977 var_nothing->fullsize = ~0;
5978 var_nothing->is_special_var = 1;
5979 var_nothing->may_have_pointers = 0;
5980 var_nothing->is_global_var = 0;
5982 /* Create the ANYTHING variable, used to represent that a variable
5983 points to some unknown piece of memory. */
5984 var_anything = new_var_info (NULL_TREE, "ANYTHING");
5985 gcc_assert (var_anything->id == anything_id);
5986 var_anything->is_artificial_var = 1;
5987 var_anything->size = ~0;
5988 var_anything->offset = 0;
5989 var_anything->next = NULL;
5990 var_anything->fullsize = ~0;
5991 var_anything->is_special_var = 1;
5993 /* Anything points to anything. This makes deref constraints just
5994 work in the presence of linked list and other p = *p type loops,
5995 by saying that *ANYTHING = ANYTHING. */
5997 lhs.var = anything_id;
5999 rhs.type = ADDRESSOF;
6000 rhs.var = anything_id;
6003 /* This specifically does not use process_constraint because
6004 process_constraint ignores all anything = anything constraints, since all
6005 but this one are redundant. */
6006 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
6008 /* Create the READONLY variable, used to represent that a variable
6009 points to readonly memory. */
6010 var_readonly = new_var_info (NULL_TREE, "READONLY");
6011 gcc_assert (var_readonly->id == readonly_id);
6012 var_readonly->is_artificial_var = 1;
6013 var_readonly->offset = 0;
6014 var_readonly->size = ~0;
6015 var_readonly->fullsize = ~0;
6016 var_readonly->next = NULL;
6017 var_readonly->is_special_var = 1;
6019 /* readonly memory points to anything, in order to make deref
6020 easier. In reality, it points to anything the particular
6021 readonly variable can point to, but we don't track this
6024 lhs.var = readonly_id;
6026 rhs.type = ADDRESSOF;
6027 rhs.var = readonly_id; /* FIXME */
6029 process_constraint (new_constraint (lhs, rhs));
6031 /* Create the ESCAPED variable, used to represent the set of escaped
6033 var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6034 gcc_assert (var_escaped->id == escaped_id);
6035 var_escaped->is_artificial_var = 1;
6036 var_escaped->offset = 0;
6037 var_escaped->size = ~0;
6038 var_escaped->fullsize = ~0;
6039 var_escaped->is_special_var = 0;
6041 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6043 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6044 gcc_assert (var_nonlocal->id == nonlocal_id);
6045 var_nonlocal->is_artificial_var = 1;
6046 var_nonlocal->offset = 0;
6047 var_nonlocal->size = ~0;
6048 var_nonlocal->fullsize = ~0;
6049 var_nonlocal->is_special_var = 1;
6051 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6053 lhs.var = escaped_id;
6056 rhs.var = escaped_id;
6058 process_constraint (new_constraint (lhs, rhs));
6060 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6061 whole variable escapes. */
6063 lhs.var = escaped_id;
6066 rhs.var = escaped_id;
6067 rhs.offset = UNKNOWN_OFFSET;
6068 process_constraint (new_constraint (lhs, rhs));
6070 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6071 everything pointed to by escaped points to what global memory can
6074 lhs.var = escaped_id;
6077 rhs.var = nonlocal_id;
6079 process_constraint (new_constraint (lhs, rhs));
6081 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6082 global memory may point to global memory and escaped memory. */
6084 lhs.var = nonlocal_id;
6086 rhs.type = ADDRESSOF;
6087 rhs.var = nonlocal_id;
6089 process_constraint (new_constraint (lhs, rhs));
6090 rhs.type = ADDRESSOF;
6091 rhs.var = escaped_id;
6093 process_constraint (new_constraint (lhs, rhs));
6095 /* Create the STOREDANYTHING variable, used to represent the set of
6096 variables stored to *ANYTHING. */
6097 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6098 gcc_assert (var_storedanything->id == storedanything_id);
6099 var_storedanything->is_artificial_var = 1;
6100 var_storedanything->offset = 0;
6101 var_storedanything->size = ~0;
6102 var_storedanything->fullsize = ~0;
6103 var_storedanything->is_special_var = 0;
6105 /* Create the INTEGER variable, used to represent that a variable points
6106 to what an INTEGER "points to". */
6107 var_integer = new_var_info (NULL_TREE, "INTEGER");
6108 gcc_assert (var_integer->id == integer_id);
6109 var_integer->is_artificial_var = 1;
6110 var_integer->size = ~0;
6111 var_integer->fullsize = ~0;
6112 var_integer->offset = 0;
6113 var_integer->next = NULL;
6114 var_integer->is_special_var = 1;
6116 /* INTEGER = ANYTHING, because we don't know where a dereference of
6117 a random integer will point to. */
6119 lhs.var = integer_id;
6121 rhs.type = ADDRESSOF;
6122 rhs.var = anything_id;
6124 process_constraint (new_constraint (lhs, rhs));
6127 /* Initialize things necessary to perform PTA */
6130 init_alias_vars (void)
6132 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6134 bitmap_obstack_initialize (&pta_obstack);
6135 bitmap_obstack_initialize (&oldpta_obstack);
6136 bitmap_obstack_initialize (&predbitmap_obstack);
6138 constraint_pool = create_alloc_pool ("Constraint pool",
6139 sizeof (struct constraint), 30);
6140 variable_info_pool = create_alloc_pool ("Variable info pool",
6141 sizeof (struct variable_info), 30);
6142 constraints = VEC_alloc (constraint_t, heap, 8);
6143 varmap = VEC_alloc (varinfo_t, heap, 8);
6144 vi_for_tree = pointer_map_create ();
6145 call_stmt_vars = pointer_map_create ();
6147 memset (&stats, 0, sizeof (stats));
6148 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
6149 shared_bitmap_eq, free);
6153 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6154 predecessor edges. */
6157 remove_preds_and_fake_succs (constraint_graph_t graph)
6161 /* Clear the implicit ref and address nodes from the successor
6163 for (i = 0; i < FIRST_REF_NODE; i++)
6165 if (graph->succs[i])
6166 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6167 FIRST_REF_NODE * 2);
6170 /* Free the successor list for the non-ref nodes. */
6171 for (i = FIRST_REF_NODE; i < graph->size; i++)
6173 if (graph->succs[i])
6174 BITMAP_FREE (graph->succs[i]);
6177 /* Now reallocate the size of the successor list as, and blow away
6178 the predecessor bitmaps. */
6179 graph->size = VEC_length (varinfo_t, varmap);
6180 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6182 free (graph->implicit_preds);
6183 graph->implicit_preds = NULL;
6184 free (graph->preds);
6185 graph->preds = NULL;
6186 bitmap_obstack_release (&predbitmap_obstack);
6189 /* Initialize the heapvar for statement mapping. */
6192 init_alias_heapvars (void)
6194 if (!heapvar_for_stmt)
6195 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, heapvar_map_eq,
6199 /* Delete the heapvar for statement mapping. */
6202 delete_alias_heapvars (void)
6204 if (heapvar_for_stmt)
6205 htab_delete (heapvar_for_stmt);
6206 heapvar_for_stmt = NULL;
6209 /* Solve the constraint set. */
6212 solve_constraints (void)
6214 struct scc_info *si;
6218 "\nCollapsing static cycles and doing variable "
6221 init_graph (VEC_length (varinfo_t, varmap) * 2);
6224 fprintf (dump_file, "Building predecessor graph\n");
6225 build_pred_graph ();
6228 fprintf (dump_file, "Detecting pointer and location "
6230 si = perform_var_substitution (graph);
6233 fprintf (dump_file, "Rewriting constraints and unifying "
6235 rewrite_constraints (graph, si);
6237 build_succ_graph ();
6238 free_var_substitution_info (si);
6240 if (dump_file && (dump_flags & TDF_GRAPH))
6241 dump_constraint_graph (dump_file);
6243 move_complex_constraints (graph);
6246 fprintf (dump_file, "Uniting pointer but not location equivalent "
6248 unite_pointer_equivalences (graph);
6251 fprintf (dump_file, "Finding indirect cycles\n");
6252 find_indirect_cycles (graph);
6254 /* Implicit nodes and predecessors are no longer necessary at this
6256 remove_preds_and_fake_succs (graph);
6259 fprintf (dump_file, "Solving graph\n");
6261 solve_graph (graph);
6264 dump_sa_points_to_info (dump_file);
6267 /* Create points-to sets for the current function. See the comments
6268 at the start of the file for an algorithmic overview. */
6271 compute_points_to_sets (void)
6277 timevar_push (TV_TREE_PTA);
6280 init_alias_heapvars ();
6282 intra_create_variable_infos ();
6284 /* Now walk all statements and build the constraint set. */
6287 gimple_stmt_iterator gsi;
6289 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6291 gimple phi = gsi_stmt (gsi);
6293 if (is_gimple_reg (gimple_phi_result (phi)))
6294 find_func_aliases (phi);
6297 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6299 gimple stmt = gsi_stmt (gsi);
6301 find_func_aliases (stmt);
6307 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6308 dump_constraints (dump_file, 0);
6311 /* From the constraints compute the points-to sets. */
6312 solve_constraints ();
6314 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6315 find_what_var_points_to (get_varinfo (escaped_id),
6316 &cfun->gimple_df->escaped);
6318 /* Make sure the ESCAPED solution (which is used as placeholder in
6319 other solutions) does not reference itself. This simplifies
6320 points-to solution queries. */
6321 cfun->gimple_df->escaped.escaped = 0;
6323 /* Mark escaped HEAP variables as global. */
6324 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); ++i)
6326 && !vi->is_restrict_var
6327 && !vi->is_global_var)
6328 DECL_EXTERNAL (vi->decl) = vi->is_global_var
6329 = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
6331 /* Compute the points-to sets for pointer SSA_NAMEs. */
6332 for (i = 0; i < num_ssa_names; ++i)
6334 tree ptr = ssa_name (i);
6336 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6337 find_what_p_points_to (ptr);
6340 /* Compute the call-used/clobbered sets. */
6343 gimple_stmt_iterator gsi;
6345 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6347 gimple stmt = gsi_stmt (gsi);
6348 struct pt_solution *pt;
6349 if (!is_gimple_call (stmt))
6352 pt = gimple_call_use_set (stmt);
6353 if (gimple_call_flags (stmt) & ECF_CONST)
6354 memset (pt, 0, sizeof (struct pt_solution));
6355 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6357 find_what_var_points_to (vi, pt);
6358 /* Escaped (and thus nonlocal) variables are always
6359 implicitly used by calls. */
6360 /* ??? ESCAPED can be empty even though NONLOCAL
6367 /* If there is nothing special about this call then
6368 we have made everything that is used also escape. */
6369 *pt = cfun->gimple_df->escaped;
6373 pt = gimple_call_clobber_set (stmt);
6374 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6375 memset (pt, 0, sizeof (struct pt_solution));
6376 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6378 find_what_var_points_to (vi, pt);
6379 /* Escaped (and thus nonlocal) variables are always
6380 implicitly clobbered by calls. */
6381 /* ??? ESCAPED can be empty even though NONLOCAL
6388 /* If there is nothing special about this call then
6389 we have made everything that is used also escape. */
6390 *pt = cfun->gimple_df->escaped;
6396 timevar_pop (TV_TREE_PTA);
6400 /* Delete created points-to sets. */
6403 delete_points_to_sets (void)
6407 htab_delete (shared_bitmap_table);
6408 if (dump_file && (dump_flags & TDF_STATS))
6409 fprintf (dump_file, "Points to sets created:%d\n",
6410 stats.points_to_sets_created);
6412 pointer_map_destroy (vi_for_tree);
6413 pointer_map_destroy (call_stmt_vars);
6414 bitmap_obstack_release (&pta_obstack);
6415 VEC_free (constraint_t, heap, constraints);
6417 for (i = 0; i < graph->size; i++)
6418 VEC_free (constraint_t, heap, graph->complex[i]);
6419 free (graph->complex);
6422 free (graph->succs);
6424 free (graph->pe_rep);
6425 free (graph->indirect_cycles);
6428 VEC_free (varinfo_t, heap, varmap);
6429 free_alloc_pool (variable_info_pool);
6430 free_alloc_pool (constraint_pool);
6434 /* Compute points-to information for every SSA_NAME pointer in the
6435 current function and compute the transitive closure of escaped
6436 variables to re-initialize the call-clobber states of local variables. */
6439 compute_may_aliases (void)
6441 if (cfun->gimple_df->ipa_pta)
6445 fprintf (dump_file, "\nNot re-computing points-to information "
6446 "because IPA points-to information is available.\n\n");
6448 /* But still dump what we have remaining it. */
6449 dump_alias_info (dump_file);
6451 if (dump_flags & TDF_DETAILS)
6452 dump_referenced_vars (dump_file);
6458 /* For each pointer P_i, determine the sets of variables that P_i may
6459 point-to. Compute the reachability set of escaped and call-used
6461 compute_points_to_sets ();
6463 /* Debugging dumps. */
6466 dump_alias_info (dump_file);
6468 if (dump_flags & TDF_DETAILS)
6469 dump_referenced_vars (dump_file);
6472 /* Deallocate memory used by aliasing data structures and the internal
6473 points-to solution. */
6474 delete_points_to_sets ();
6476 gcc_assert (!need_ssa_update_p (cfun));
6482 gate_tree_pta (void)
6484 return flag_tree_pta;
6487 /* A dummy pass to cause points-to information to be computed via
6488 TODO_rebuild_alias. */
6490 struct gimple_opt_pass pass_build_alias =
6495 gate_tree_pta, /* gate */
6499 0, /* static_pass_number */
6500 TV_NONE, /* tv_id */
6501 PROP_cfg | PROP_ssa, /* properties_required */
6502 0, /* properties_provided */
6503 0, /* properties_destroyed */
6504 0, /* todo_flags_start */
6505 TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
6509 /* A dummy pass to cause points-to information to be computed via
6510 TODO_rebuild_alias. */
6512 struct gimple_opt_pass pass_build_ealias =
6516 "ealias", /* name */
6517 gate_tree_pta, /* gate */
6521 0, /* static_pass_number */
6522 TV_NONE, /* tv_id */
6523 PROP_cfg | PROP_ssa, /* properties_required */
6524 0, /* properties_provided */
6525 0, /* properties_destroyed */
6526 0, /* todo_flags_start */
6527 TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
6532 /* Return true if we should execute IPA PTA. */
6538 /* Don't bother doing anything if the program has errors. */
6539 && !(errorcount || sorrycount));
6542 /* IPA PTA solutions for ESCAPED. */
6543 struct pt_solution ipa_escaped_pt
6544 = { true, false, false, false, false, false, false, NULL };
6546 /* Execute the driver for IPA PTA. */
6548 ipa_pta_execute (void)
6550 struct cgraph_node *node;
6551 struct varpool_node *var;
6556 init_alias_heapvars ();
6559 /* Build the constraints. */
6560 for (node = cgraph_nodes; node; node = node->next)
6562 /* Nodes without a body are not interesting. Especially do not
6563 visit clones at this point for now - we get duplicate decls
6564 there for inline clones at least. */
6565 if (!gimple_has_body_p (node->decl)
6569 create_function_info_for (node->decl,
6570 cgraph_node_name (node));
6573 /* Create constraints for global variables and their initializers. */
6574 for (var = varpool_nodes; var; var = var->next)
6575 get_vi_for_tree (var->decl);
6580 "Generating constraints for global initializers\n\n");
6581 dump_constraints (dump_file, 0);
6582 fprintf (dump_file, "\n");
6584 from = VEC_length (constraint_t, constraints);
6586 for (node = cgraph_nodes; node; node = node->next)
6588 struct function *func;
6592 /* Nodes without a body are not interesting. */
6593 if (!gimple_has_body_p (node->decl)
6599 "Generating constraints for %s\n",
6600 cgraph_node_name (node));
6602 func = DECL_STRUCT_FUNCTION (node->decl);
6603 old_func_decl = current_function_decl;
6605 current_function_decl = node->decl;
6607 /* For externally visible functions use local constraints for
6608 their arguments. For local functions we see all callers
6609 and thus do not need initial constraints for parameters. */
6610 if (node->local.externally_visible)
6611 intra_create_variable_infos ();
6613 /* Build constriants for the function body. */
6614 FOR_EACH_BB_FN (bb, func)
6616 gimple_stmt_iterator gsi;
6618 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6621 gimple phi = gsi_stmt (gsi);
6623 if (is_gimple_reg (gimple_phi_result (phi)))
6624 find_func_aliases (phi);
6627 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6629 gimple stmt = gsi_stmt (gsi);
6631 find_func_aliases (stmt);
6632 find_func_clobbers (stmt);
6636 current_function_decl = old_func_decl;
6641 fprintf (dump_file, "\n");
6642 dump_constraints (dump_file, from);
6643 fprintf (dump_file, "\n");
6645 from = VEC_length (constraint_t, constraints);
6648 /* From the constraints compute the points-to sets. */
6649 solve_constraints ();
6651 /* Compute the global points-to sets for ESCAPED.
6652 ??? Note that the computed escape set is not correct
6653 for the whole unit as we fail to consider graph edges to
6654 externally visible functions. */
6655 find_what_var_points_to (get_varinfo (escaped_id), &ipa_escaped_pt);
6657 /* Make sure the ESCAPED solution (which is used as placeholder in
6658 other solutions) does not reference itself. This simplifies
6659 points-to solution queries. */
6660 ipa_escaped_pt.ipa_escaped = 0;
6662 /* Assign the points-to sets to the SSA names in the unit. */
6663 for (node = cgraph_nodes; node; node = node->next)
6666 struct function *fn;
6670 struct pt_solution uses, clobbers;
6671 struct cgraph_edge *e;
6673 /* Nodes without a body are not interesting. */
6674 if (!gimple_has_body_p (node->decl)
6678 fn = DECL_STRUCT_FUNCTION (node->decl);
6680 /* Compute the points-to sets for pointer SSA_NAMEs. */
6681 for (i = 0; VEC_iterate (tree, fn->gimple_df->ssa_names, i, ptr); ++i)
6684 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6685 find_what_p_points_to (ptr);
6688 /* Compute the call-use and call-clobber sets for all direct calls. */
6689 fi = lookup_vi_for_tree (node->decl);
6690 gcc_assert (fi->is_fn_info);
6691 find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
6693 find_what_var_points_to (first_vi_for_offset (fi, fi_uses), &uses);
6694 for (e = node->callers; e; e = e->next_caller)
6699 *gimple_call_clobber_set (e->call_stmt) = clobbers;
6700 *gimple_call_use_set (e->call_stmt) = uses;
6703 /* Compute the call-use and call-clobber sets for indirect calls
6704 and calls to external functions. */
6705 FOR_EACH_BB_FN (bb, fn)
6707 gimple_stmt_iterator gsi;
6709 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6711 gimple stmt = gsi_stmt (gsi);
6712 struct pt_solution *pt;
6716 if (!is_gimple_call (stmt))
6719 /* Handle direct calls to external functions. */
6720 decl = gimple_call_fndecl (stmt);
6722 && (!(fi = lookup_vi_for_tree (decl))
6723 || !fi->is_fn_info))
6725 pt = gimple_call_use_set (stmt);
6726 if (gimple_call_flags (stmt) & ECF_CONST)
6727 memset (pt, 0, sizeof (struct pt_solution));
6728 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6730 find_what_var_points_to (vi, pt);
6731 /* Escaped (and thus nonlocal) variables are always
6732 implicitly used by calls. */
6733 /* ??? ESCAPED can be empty even though NONLOCAL
6736 pt->ipa_escaped = 1;
6740 /* If there is nothing special about this call then
6741 we have made everything that is used also escape. */
6742 *pt = ipa_escaped_pt;
6746 pt = gimple_call_clobber_set (stmt);
6747 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6748 memset (pt, 0, sizeof (struct pt_solution));
6749 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6751 find_what_var_points_to (vi, pt);
6752 /* Escaped (and thus nonlocal) variables are always
6753 implicitly clobbered by calls. */
6754 /* ??? ESCAPED can be empty even though NONLOCAL
6757 pt->ipa_escaped = 1;
6761 /* If there is nothing special about this call then
6762 we have made everything that is used also escape. */
6763 *pt = ipa_escaped_pt;
6768 /* Handle indirect calls. */
6770 && (fi = get_fi_for_callee (stmt)))
6772 /* We need to accumulate all clobbers/uses of all possible
6774 fi = get_varinfo (find (fi->id));
6775 /* If we cannot constrain the set of functions we'll end up
6776 calling we end up using/clobbering everything. */
6777 if (bitmap_bit_p (fi->solution, anything_id)
6778 || bitmap_bit_p (fi->solution, nonlocal_id)
6779 || bitmap_bit_p (fi->solution, escaped_id))
6781 pt_solution_reset (gimple_call_clobber_set (stmt));
6782 pt_solution_reset (gimple_call_use_set (stmt));
6788 struct pt_solution *uses, *clobbers;
6790 uses = gimple_call_use_set (stmt);
6791 clobbers = gimple_call_clobber_set (stmt);
6792 memset (uses, 0, sizeof (struct pt_solution));
6793 memset (clobbers, 0, sizeof (struct pt_solution));
6794 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
6796 struct pt_solution sol;
6798 vi = get_varinfo (i);
6799 if (!vi->is_fn_info)
6801 /* ??? We could be more precise here? */
6803 uses->ipa_escaped = 1;
6804 clobbers->nonlocal = 1;
6805 clobbers->ipa_escaped = 1;
6809 if (!uses->anything)
6811 find_what_var_points_to
6812 (first_vi_for_offset (vi, fi_uses), &sol);
6813 pt_solution_ior_into (uses, &sol);
6815 if (!clobbers->anything)
6817 find_what_var_points_to
6818 (first_vi_for_offset (vi, fi_clobbers), &sol);
6819 pt_solution_ior_into (clobbers, &sol);
6827 fn->gimple_df->ipa_pta = true;
6830 delete_points_to_sets ();
6837 struct simple_ipa_opt_pass pass_ipa_pta =
6842 gate_ipa_pta, /* gate */
6843 ipa_pta_execute, /* execute */
6846 0, /* static_pass_number */
6847 TV_IPA_PTA, /* tv_id */
6848 0, /* properties_required */
6849 0, /* properties_provided */
6850 0, /* properties_destroyed */
6851 0, /* todo_flags_start */
6852 TODO_update_ssa /* todo_flags_finish */
6857 #include "gt-tree-ssa-structalias.h"