1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
36 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
43 #include "tree-gimple.h"
47 #include "tree-pass.h"
49 #include "alloc-pool.h"
50 #include "splay-tree.h"
52 #include "tree-ssa-structalias.h"
55 #include "pointer-set.h"
57 /* The idea behind this analyzer is to generate set constraints from the
58 program, then solve the resulting constraints in order to generate the
61 Set constraints are a way of modeling program analysis problems that
62 involve sets. They consist of an inclusion constraint language,
63 describing the variables (each variable is a set) and operations that
64 are involved on the variables, and a set of rules that derive facts
65 from these operations. To solve a system of set constraints, you derive
66 all possible facts under the rules, which gives you the correct sets
69 See "Efficient Field-sensitive pointer analysis for C" by "David
70 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
71 http://citeseer.ist.psu.edu/pearce04efficient.html
73 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
74 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
75 http://citeseer.ist.psu.edu/heintze01ultrafast.html
77 There are three types of real constraint expressions, DEREF,
78 ADDRESSOF, and SCALAR. Each constraint expression consists
79 of a constraint type, a variable, and an offset.
81 SCALAR is a constraint expression type used to represent x, whether
82 it appears on the LHS or the RHS of a statement.
83 DEREF is a constraint expression type used to represent *x, whether
84 it appears on the LHS or the RHS of a statement.
85 ADDRESSOF is a constraint expression used to represent &x, whether
86 it appears on the LHS or the RHS of a statement.
88 Each pointer variable in the program is assigned an integer id, and
89 each field of a structure variable is assigned an integer id as well.
91 Structure variables are linked to their list of fields through a "next
92 field" in each variable that points to the next field in offset
94 Each variable for a structure field has
96 1. "size", that tells the size in bits of that field.
97 2. "fullsize, that tells the size in bits of the entire structure.
98 3. "offset", that tells the offset in bits from the beginning of the
99 structure to this field.
111 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
112 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
113 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
116 In order to solve the system of set constraints, the following is
119 1. Each constraint variable x has a solution set associated with it,
122 2. Constraints are separated into direct, copy, and complex.
123 Direct constraints are ADDRESSOF constraints that require no extra
124 processing, such as P = &Q
125 Copy constraints are those of the form P = Q.
126 Complex constraints are all the constraints involving dereferences
127 and offsets (including offsetted copies).
129 3. All direct constraints of the form P = &Q are processed, such
130 that Q is added to Sol(P)
132 4. All complex constraints for a given constraint variable are stored in a
133 linked list attached to that variable's node.
135 5. A directed graph is built out of the copy constraints. Each
136 constraint variable is a node in the graph, and an edge from
137 Q to P is added for each copy constraint of the form P = Q
139 6. The graph is then walked, and solution sets are
140 propagated along the copy edges, such that an edge from Q to P
141 causes Sol(P) <- Sol(P) union Sol(Q).
143 7. As we visit each node, all complex constraints associated with
144 that node are processed by adding appropriate copy edges to the graph, or the
145 appropriate variables to the solution set.
147 8. The process of walking the graph is iterated until no solution
150 Prior to walking the graph in steps 6 and 7, We perform static
151 cycle elimination on the constraint graph, as well
152 as off-line variable substitution.
154 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
155 on and turned into anything), but isn't. You can just see what offset
156 inside the pointed-to struct it's going to access.
158 TODO: Constant bounded arrays can be handled as if they were structs of the
159 same number of elements.
161 TODO: Modeling heap and incoming pointers becomes much better if we
162 add fields to them as we discover them, which we could do.
164 TODO: We could handle unions, but to be honest, it's probably not
165 worth the pain or slowdown. */
167 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
168 htab_t heapvar_for_stmt;
170 static bool use_field_sensitive = true;
171 static int in_ipa_mode = 0;
173 /* Used for predecessor bitmaps. */
174 static bitmap_obstack predbitmap_obstack;
176 /* Used for points-to sets. */
177 static bitmap_obstack pta_obstack;
179 /* Used for oldsolution members of variables. */
180 static bitmap_obstack oldpta_obstack;
182 /* Used for per-solver-iteration bitmaps. */
183 static bitmap_obstack iteration_obstack;
185 static unsigned int create_variable_info_for (tree, const char *);
186 typedef struct constraint_graph *constraint_graph_t;
187 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
189 DEF_VEC_P(constraint_t);
190 DEF_VEC_ALLOC_P(constraint_t,heap);
192 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
194 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
196 static struct constraint_stats
198 unsigned int total_vars;
199 unsigned int nonpointer_vars;
200 unsigned int unified_vars_static;
201 unsigned int unified_vars_dynamic;
202 unsigned int iterations;
203 unsigned int num_edges;
204 unsigned int num_implicit_edges;
205 unsigned int points_to_sets_created;
210 /* ID of this variable */
213 /* Name of this variable */
216 /* Tree that this variable is associated with. */
219 /* Offset of this variable, in bits, from the base variable */
220 unsigned HOST_WIDE_INT offset;
222 /* Size of the variable, in bits. */
223 unsigned HOST_WIDE_INT size;
225 /* Full size of the base variable, in bits. */
226 unsigned HOST_WIDE_INT fullsize;
228 /* A link to the variable for the next field in this structure. */
229 struct variable_info *next;
231 /* True if the variable is directly the target of a dereference.
232 This is used to track which variables are *actually* dereferenced
233 so we can prune their points to listed. */
234 unsigned int directly_dereferenced:1;
236 /* True if this is a variable created by the constraint analysis, such as
237 heap variables and constraints we had to break up. */
238 unsigned int is_artificial_var:1;
240 /* True if this is a special variable whose solution set should not be
242 unsigned int is_special_var:1;
244 /* True for variables whose size is not known or variable. */
245 unsigned int is_unknown_size_var:1;
247 /* True for variables that have unions somewhere in them. */
248 unsigned int has_union:1;
250 /* True if this is a heap variable. */
251 unsigned int is_heap_var:1;
253 /* Points-to set for this variable. */
256 /* Old points-to set for this variable. */
259 /* Variable ids represented by this node. */
262 /* Variable id this was collapsed to due to type unsafety. This
263 should be unused completely after build_succ_graph, or something
265 struct variable_info *collapsed_to;
267 typedef struct variable_info *varinfo_t;
269 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
271 /* Pool of variable info structures. */
272 static alloc_pool variable_info_pool;
274 DEF_VEC_P(varinfo_t);
276 DEF_VEC_ALLOC_P(varinfo_t, heap);
278 /* Table of variable info structures for constraint variables.
279 Indexed directly by variable info id. */
280 static VEC(varinfo_t,heap) *varmap;
282 /* Return the varmap element N */
284 static inline varinfo_t
285 get_varinfo (unsigned int n)
287 return VEC_index (varinfo_t, varmap, n);
290 /* Return the varmap element N, following the collapsed_to link. */
292 static inline varinfo_t
293 get_varinfo_fc (unsigned int n)
295 varinfo_t v = VEC_index (varinfo_t, varmap, n);
298 return v->collapsed_to;
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
305 static unsigned int anything_id;
307 /* Variable that represents the NULL pointer. */
308 static varinfo_t var_nothing;
309 static tree nothing_tree;
310 static unsigned int nothing_id;
312 /* Variable that represents read only memory. */
313 static varinfo_t var_readonly;
314 static tree readonly_tree;
315 static unsigned int readonly_id;
317 /* Variable that represents integers. This is used for when people do things
319 static varinfo_t var_integer;
320 static tree integer_tree;
321 static unsigned int integer_id;
323 /* Lookup a heap var for FROM, and return it if we find one. */
326 heapvar_lookup (tree from)
328 struct tree_map *h, in;
331 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
332 htab_hash_pointer (from));
338 /* Insert a mapping FROM->TO in the heap var for statement
342 heapvar_insert (tree from, tree to)
347 h = GGC_NEW (struct tree_map);
348 h->hash = htab_hash_pointer (from);
351 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
352 *(struct tree_map **) loc = h;
355 /* Return a new variable info structure consisting for a variable
356 named NAME, and using constraint graph node NODE. */
359 new_var_info (tree t, unsigned int id, const char *name)
361 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
366 ret->directly_dereferenced = false;
367 ret->is_artificial_var = false;
368 ret->is_heap_var = false;
369 ret->is_special_var = false;
370 ret->is_unknown_size_var = false;
371 ret->has_union = false;
372 ret->solution = BITMAP_ALLOC (&pta_obstack);
373 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
375 ret->collapsed_to = NULL;
379 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
381 /* An expression that appears in a constraint. */
383 struct constraint_expr
385 /* Constraint type. */
386 constraint_expr_type type;
388 /* Variable we are referring to in the constraint. */
391 /* Offset, in bits, of this constraint from the beginning of
392 variables it ends up referring to.
394 IOW, in a deref constraint, we would deref, get the result set,
395 then add OFFSET to each member. */
396 unsigned HOST_WIDE_INT offset;
399 typedef struct constraint_expr ce_s;
401 DEF_VEC_ALLOC_O(ce_s, heap);
402 static void get_constraint_for (tree, VEC(ce_s, heap) **);
403 static void do_deref (VEC (ce_s, heap) **);
405 /* Our set constraints are made up of two constraint expressions, one
408 As described in the introduction, our set constraints each represent an
409 operation between set valued variables.
413 struct constraint_expr lhs;
414 struct constraint_expr rhs;
417 /* List of constraints that we use to build the constraint graph from. */
419 static VEC(constraint_t,heap) *constraints;
420 static alloc_pool constraint_pool;
424 DEF_VEC_ALLOC_I(int, heap);
426 /* The constraint graph is represented as an array of bitmaps
427 containing successor nodes. */
429 struct constraint_graph
431 /* Size of this graph, which may be different than the number of
432 nodes in the variable map. */
435 /* Explicit successors of each node. */
438 /* Implicit predecessors of each node (Used for variable
440 bitmap *implicit_preds;
442 /* Explicit predecessors of each node (Used for variable substitution). */
445 /* Indirect cycle representatives, or -1 if the node has no indirect
447 int *indirect_cycles;
449 /* Representative node for a node. rep[a] == a unless the node has
453 /* Equivalence class representative for a node. This is used for
454 variable substitution. */
457 /* Label for each node, used during variable substitution. */
460 /* Bitmap of nodes where the bit is set if the node is a direct
461 node. Used for variable substitution. */
462 sbitmap direct_nodes;
464 /* Vector of complex constraints for each graph node. Complex
465 constraints are those involving dereferences or offsets that are
467 VEC(constraint_t,heap) **complex;
470 static constraint_graph_t graph;
472 /* During variable substitution and the offline version of indirect
473 cycle finding, we create nodes to represent dereferences and
474 address taken constraints. These represent where these start and
476 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
477 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
478 #define FIRST_ADDR_NODE (LAST_REF_NODE + 1)
480 /* Return the representative node for NODE, if NODE has been unioned
482 This function performs path compression along the way to finding
483 the representative. */
486 find (unsigned int node)
488 gcc_assert (node < graph->size);
489 if (graph->rep[node] != node)
490 return graph->rep[node] = find (graph->rep[node]);
494 /* Union the TO and FROM nodes to the TO nodes.
495 Note that at some point in the future, we may want to do
496 union-by-rank, in which case we are going to have to return the
497 node we unified to. */
500 unite (unsigned int to, unsigned int from)
502 gcc_assert (to < graph->size && from < graph->size);
503 if (to != from && graph->rep[from] != to)
505 graph->rep[from] = to;
511 /* Create a new constraint consisting of LHS and RHS expressions. */
514 new_constraint (const struct constraint_expr lhs,
515 const struct constraint_expr rhs)
517 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
523 /* Print out constraint C to FILE. */
526 dump_constraint (FILE *file, constraint_t c)
528 if (c->lhs.type == ADDRESSOF)
530 else if (c->lhs.type == DEREF)
532 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
533 if (c->lhs.offset != 0)
534 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
535 fprintf (file, " = ");
536 if (c->rhs.type == ADDRESSOF)
538 else if (c->rhs.type == DEREF)
540 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
541 if (c->rhs.offset != 0)
542 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
543 fprintf (file, "\n");
546 /* Print out constraint C to stderr. */
549 debug_constraint (constraint_t c)
551 dump_constraint (stderr, c);
554 /* Print out all constraints to FILE */
557 dump_constraints (FILE *file)
561 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
562 dump_constraint (file, c);
565 /* Print out all constraints to stderr. */
568 debug_constraints (void)
570 dump_constraints (stderr);
575 The solver is a simple worklist solver, that works on the following
578 sbitmap changed_nodes = all zeroes;
580 For each node that is not already collapsed:
582 set bit in changed nodes
584 while (changed_count > 0)
586 compute topological ordering for constraint graph
588 find and collapse cycles in the constraint graph (updating
589 changed if necessary)
591 for each node (n) in the graph in topological order:
594 Process each complex constraint associated with the node,
595 updating changed if necessary.
597 For each outgoing edge from n, propagate the solution from n to
598 the destination of the edge, updating changed as necessary.
602 /* Return true if two constraint expressions A and B are equal. */
605 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
607 return a.type == b.type && a.var == b.var && a.offset == b.offset;
610 /* Return true if constraint expression A is less than constraint expression
611 B. This is just arbitrary, but consistent, in order to give them an
615 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
617 if (a.type == b.type)
620 return a.offset < b.offset;
622 return a.var < b.var;
625 return a.type < b.type;
628 /* Return true if constraint A is less than constraint B. This is just
629 arbitrary, but consistent, in order to give them an ordering. */
632 constraint_less (const constraint_t a, const constraint_t b)
634 if (constraint_expr_less (a->lhs, b->lhs))
636 else if (constraint_expr_less (b->lhs, a->lhs))
639 return constraint_expr_less (a->rhs, b->rhs);
642 /* Return true if two constraints A and B are equal. */
645 constraint_equal (struct constraint a, struct constraint b)
647 return constraint_expr_equal (a.lhs, b.lhs)
648 && constraint_expr_equal (a.rhs, b.rhs);
652 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
655 constraint_vec_find (VEC(constraint_t,heap) *vec,
656 struct constraint lookfor)
664 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
665 if (place >= VEC_length (constraint_t, vec))
667 found = VEC_index (constraint_t, vec, place);
668 if (!constraint_equal (*found, lookfor))
673 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
676 constraint_set_union (VEC(constraint_t,heap) **to,
677 VEC(constraint_t,heap) **from)
682 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
684 if (constraint_vec_find (*to, *c) == NULL)
686 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
688 VEC_safe_insert (constraint_t, heap, *to, place, c);
693 /* Take a solution set SET, add OFFSET to each member of the set, and
694 overwrite SET with the result when done. */
697 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
699 bitmap result = BITMAP_ALLOC (&iteration_obstack);
703 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
705 /* If this is a properly sized variable, only add offset if it's
706 less than end. Otherwise, it is globbed to a single
709 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
711 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
712 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
715 bitmap_set_bit (result, v->id);
717 else if (get_varinfo (i)->is_artificial_var
718 || get_varinfo (i)->has_union
719 || get_varinfo (i)->is_unknown_size_var)
721 bitmap_set_bit (result, i);
725 bitmap_copy (set, result);
726 BITMAP_FREE (result);
729 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
733 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
736 return bitmap_ior_into (to, from);
742 tmp = BITMAP_ALLOC (&iteration_obstack);
743 bitmap_copy (tmp, from);
744 solution_set_add (tmp, inc);
745 res = bitmap_ior_into (to, tmp);
751 /* Insert constraint C into the list of complex constraints for graph
755 insert_into_complex (constraint_graph_t graph,
756 unsigned int var, constraint_t c)
758 VEC (constraint_t, heap) *complex = graph->complex[var];
759 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
762 /* Only insert constraints that do not already exist. */
763 if (place >= VEC_length (constraint_t, complex)
764 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
765 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
769 /* Condense two variable nodes into a single variable node, by moving
770 all associated info from SRC to TO. */
773 merge_node_constraints (constraint_graph_t graph, unsigned int to,
779 gcc_assert (find (from) == to);
781 /* Move all complex constraints from src node into to node */
782 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
784 /* In complex constraints for node src, we may have either
785 a = *src, and *src = a, or an offseted constraint which are
786 always added to the rhs node's constraints. */
788 if (c->rhs.type == DEREF)
790 else if (c->lhs.type == DEREF)
795 constraint_set_union (&graph->complex[to], &graph->complex[from]);
796 VEC_free (constraint_t, heap, graph->complex[from]);
797 graph->complex[from] = NULL;
801 /* Remove edges involving NODE from GRAPH. */
804 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
806 if (graph->succs[node])
807 BITMAP_FREE (graph->succs[node]);
810 /* Merge GRAPH nodes FROM and TO into node TO. */
813 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
816 if (graph->indirect_cycles[from] != -1)
818 /* If we have indirect cycles with the from node, and we have
819 none on the to node, the to node has indirect cycles from the
820 from node now that they are unified.
821 If indirect cycles exist on both, unify the nodes that they
822 are in a cycle with, since we know they are in a cycle with
824 if (graph->indirect_cycles[to] == -1)
826 graph->indirect_cycles[to] = graph->indirect_cycles[from];
830 unsigned int tonode = find (graph->indirect_cycles[to]);
831 unsigned int fromnode = find (graph->indirect_cycles[from]);
833 if (unite (tonode, fromnode))
834 unify_nodes (graph, tonode, fromnode, true);
838 /* Merge all the successor edges. */
839 if (graph->succs[from])
841 if (!graph->succs[to])
842 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
843 bitmap_ior_into (graph->succs[to],
847 clear_edges_for_node (graph, from);
851 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
852 it doesn't exist in the graph already. */
855 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
861 if (!graph->implicit_preds[to])
862 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
864 if (!bitmap_bit_p (graph->implicit_preds[to], from))
866 stats.num_implicit_edges++;
867 bitmap_set_bit (graph->implicit_preds[to], from);
871 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
872 it doesn't exist in the graph already.
873 Return false if the edge already existed, true otherwise. */
876 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
879 if (!graph->preds[to])
880 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
881 if (!bitmap_bit_p (graph->preds[to], from))
882 bitmap_set_bit (graph->preds[to], from);
885 /* Add a graph edge to GRAPH, going from FROM to TO if
886 it doesn't exist in the graph already.
887 Return false if the edge already existed, true otherwise. */
890 add_graph_edge (constraint_graph_t graph, unsigned int to,
901 if (!graph->succs[from])
902 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
903 if (!bitmap_bit_p (graph->succs[from], to))
906 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
908 bitmap_set_bit (graph->succs[from], to);
915 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
918 valid_graph_edge (constraint_graph_t graph, unsigned int src,
921 return (graph->succs[dest]
922 && bitmap_bit_p (graph->succs[dest], src));
925 /* Build the constraint graph, adding only predecessor edges right now. */
928 build_pred_graph (void)
934 graph = XNEW (struct constraint_graph);
935 graph->size = (VEC_length (varinfo_t, varmap)) * 3;
936 graph->succs = XCNEWVEC (bitmap, graph->size);
937 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
938 graph->preds = XCNEWVEC (bitmap, graph->size);
939 graph->indirect_cycles = XNEWVEC (int, VEC_length (varinfo_t, varmap));
940 graph->label = XCNEWVEC (unsigned int, graph->size);
941 graph->rep = XNEWVEC (unsigned int, graph->size);
942 graph->eq_rep = XNEWVEC (int, graph->size);
943 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *,
944 VEC_length (varinfo_t, varmap));
945 graph->direct_nodes = sbitmap_alloc (graph->size);
946 sbitmap_zero (graph->direct_nodes);
948 for (j = 0; j < FIRST_REF_NODE; j++)
950 if (!get_varinfo (j)->is_special_var)
951 SET_BIT (graph->direct_nodes, j);
954 for (j = 0; j < graph->size; j++)
957 graph->eq_rep[j] = -1;
960 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
961 graph->indirect_cycles[j] = -1;
963 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
965 struct constraint_expr lhs = c->lhs;
966 struct constraint_expr rhs = c->rhs;
967 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
968 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
970 if (lhs.type == DEREF)
973 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
974 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
975 if (rhs.type == ADDRESSOF)
976 RESET_BIT (graph->direct_nodes, rhsvar);
978 else if (rhs.type == DEREF)
981 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
982 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
984 RESET_BIT (graph->direct_nodes, lhsvar);
986 else if (rhs.type == ADDRESSOF)
989 add_pred_graph_edge (graph, lhsvar, FIRST_ADDR_NODE + rhsvar);
990 /* Implicitly, *x = y */
991 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
993 RESET_BIT (graph->direct_nodes, rhsvar);
995 else if (lhsvar > anything_id
996 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
999 add_pred_graph_edge (graph, lhsvar, rhsvar);
1000 /* Implicitly, *x = *y */
1001 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1002 FIRST_REF_NODE + rhsvar);
1004 else if (lhs.offset != 0 || rhs.offset != 0)
1006 if (rhs.offset != 0)
1007 RESET_BIT (graph->direct_nodes, lhs.var);
1008 if (lhs.offset != 0)
1009 RESET_BIT (graph->direct_nodes, rhs.var);
1014 /* Build the constraint graph, adding successor edges. */
1017 build_succ_graph (void)
1022 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1024 struct constraint_expr lhs;
1025 struct constraint_expr rhs;
1026 unsigned int lhsvar;
1027 unsigned int rhsvar;
1034 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1035 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1037 if (lhs.type == DEREF)
1039 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1040 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1042 else if (rhs.type == DEREF)
1044 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1045 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1047 else if (rhs.type == ADDRESSOF)
1050 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1051 == get_varinfo_fc (rhs.var)->id);
1052 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1054 else if (lhsvar > anything_id
1055 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1057 add_graph_edge (graph, lhsvar, rhsvar);
1063 /* Changed variables on the last iteration. */
1064 static unsigned int changed_count;
1065 static sbitmap changed;
1067 DEF_VEC_I(unsigned);
1068 DEF_VEC_ALLOC_I(unsigned,heap);
1071 /* Strongly Connected Component visitation info. */
1078 unsigned int *node_mapping;
1080 VEC(unsigned,heap) *scc_stack;
1084 /* Recursive routine to find strongly connected components in GRAPH.
1085 SI is the SCC info to store the information in, and N is the id of current
1086 graph node we are processing.
1088 This is Tarjan's strongly connected component finding algorithm, as
1089 modified by Nuutila to keep only non-root nodes on the stack.
1090 The algorithm can be found in "On finding the strongly connected
1091 connected components in a directed graph" by Esko Nuutila and Eljas
1092 Soisalon-Soininen, in Information Processing Letters volume 49,
1093 number 1, pages 9-14. */
1096 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1100 unsigned int my_dfs;
1102 SET_BIT (si->visited, n);
1103 si->dfs[n] = si->current_index ++;
1104 my_dfs = si->dfs[n];
1106 /* Visit all the successors. */
1107 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1111 if (i > LAST_REF_NODE)
1115 if (TEST_BIT (si->roots, w))
1118 if (!TEST_BIT (si->visited, w))
1119 scc_visit (graph, si, w);
1121 unsigned int t = find (w);
1122 unsigned int nnode = find (n);
1123 gcc_assert (nnode == n);
1125 if (si->dfs[t] < si->dfs[nnode])
1126 si->dfs[n] = si->dfs[t];
1130 /* See if any components have been identified. */
1131 if (si->dfs[n] == my_dfs)
1133 if (VEC_length (unsigned, si->scc_stack) > 0
1134 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1136 bitmap scc = BITMAP_ALLOC (NULL);
1137 bool have_ref_node = n >= FIRST_REF_NODE;
1138 unsigned int lowest_node;
1141 bitmap_set_bit (scc, n);
1143 while (VEC_length (unsigned, si->scc_stack) != 0
1144 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1146 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1148 bitmap_set_bit (scc, w);
1149 if (w >= FIRST_REF_NODE)
1150 have_ref_node = true;
1153 lowest_node = bitmap_first_set_bit (scc);
1154 gcc_assert (lowest_node < FIRST_REF_NODE);
1155 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1157 if (i < FIRST_REF_NODE)
1159 /* Mark this node for collapsing. */
1160 if (unite (lowest_node, i))
1161 unify_nodes (graph, lowest_node, i, false);
1165 unite (lowest_node, i);
1166 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1170 SET_BIT (si->roots, n);
1173 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1176 /* Unify node FROM into node TO, updating the changed count if
1177 necessary when UPDATE_CHANGED is true. */
1180 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1181 bool update_changed)
1184 gcc_assert (to != from && find (to) == to);
1185 if (dump_file && (dump_flags & TDF_DETAILS))
1186 fprintf (dump_file, "Unifying %s to %s\n",
1187 get_varinfo (from)->name,
1188 get_varinfo (to)->name);
1191 stats.unified_vars_dynamic++;
1193 stats.unified_vars_static++;
1195 merge_graph_nodes (graph, to, from);
1196 merge_node_constraints (graph, to, from);
1198 if (update_changed && TEST_BIT (changed, from))
1200 RESET_BIT (changed, from);
1201 if (!TEST_BIT (changed, to))
1202 SET_BIT (changed, to);
1205 gcc_assert (changed_count > 0);
1210 /* If the solution changes because of the merging, we need to mark
1211 the variable as changed. */
1212 if (bitmap_ior_into (get_varinfo (to)->solution,
1213 get_varinfo (from)->solution))
1215 if (update_changed && !TEST_BIT (changed, to))
1217 SET_BIT (changed, to);
1222 BITMAP_FREE (get_varinfo (from)->solution);
1223 BITMAP_FREE (get_varinfo (from)->oldsolution);
1225 if (stats.iterations > 0)
1227 BITMAP_FREE (get_varinfo (to)->oldsolution);
1228 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1231 if (valid_graph_edge (graph, to, to))
1233 if (graph->succs[to])
1234 bitmap_clear_bit (graph->succs[to], to);
1238 /* Information needed to compute the topological ordering of a graph. */
1242 /* sbitmap of visited nodes. */
1244 /* Array that stores the topological order of the graph, *in
1246 VEC(unsigned,heap) *topo_order;
1250 /* Initialize and return a topological info structure. */
1252 static struct topo_info *
1253 init_topo_info (void)
1255 size_t size = VEC_length (varinfo_t, varmap);
1256 struct topo_info *ti = XNEW (struct topo_info);
1257 ti->visited = sbitmap_alloc (size);
1258 sbitmap_zero (ti->visited);
1259 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1264 /* Free the topological sort info pointed to by TI. */
1267 free_topo_info (struct topo_info *ti)
1269 sbitmap_free (ti->visited);
1270 VEC_free (unsigned, heap, ti->topo_order);
1274 /* Visit the graph in topological order, and store the order in the
1275 topo_info structure. */
1278 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1284 SET_BIT (ti->visited, n);
1286 if (graph->succs[n])
1287 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1289 if (!TEST_BIT (ti->visited, j))
1290 topo_visit (graph, ti, j);
1293 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1296 /* Return true if variable N + OFFSET is a legal field of N. */
1299 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1301 varinfo_t ninfo = get_varinfo (n);
1303 /* For things we've globbed to single variables, any offset into the
1304 variable acts like the entire variable, so that it becomes offset
1306 if (ninfo->is_special_var
1307 || ninfo->is_artificial_var
1308 || ninfo->is_unknown_size_var)
1313 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1316 /* Process a constraint C that represents *x = &y. */
1319 do_da_constraint (constraint_graph_t graph ATTRIBUTE_UNUSED,
1320 constraint_t c, bitmap delta)
1322 unsigned int rhs = c->rhs.var;
1326 /* For each member j of Delta (Sol(x)), add x to Sol(j) */
1327 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1329 unsigned HOST_WIDE_INT offset = c->lhs.offset;
1330 if (type_safe (j, &offset) && !(get_varinfo (j)->is_special_var))
1332 /* *x != NULL && *x != ANYTHING*/
1336 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + offset;
1338 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1342 sol = get_varinfo (t)->solution;
1343 if (!bitmap_bit_p (sol, rhs))
1345 bitmap_set_bit (sol, rhs);
1346 if (!TEST_BIT (changed, t))
1348 SET_BIT (changed, t);
1353 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1354 fprintf (dump_file, "Untypesafe usage in do_da_constraint.\n");
1359 /* Process a constraint C that represents x = *y, using DELTA as the
1360 starting solution. */
1363 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1366 unsigned int lhs = find (c->lhs.var);
1368 bitmap sol = get_varinfo (lhs)->solution;
1372 if (bitmap_bit_p (delta, anything_id))
1374 flag = !bitmap_bit_p (sol, anything_id);
1376 bitmap_set_bit (sol, anything_id);
1379 /* For each variable j in delta (Sol(y)), add
1380 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1381 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1383 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1384 if (type_safe (j, &roffset))
1387 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1390 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1395 /* Adding edges from the special vars is pointless.
1396 They don't have sets that can change. */
1397 if (get_varinfo (t) ->is_special_var)
1398 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1399 else if (add_graph_edge (graph, lhs, t))
1400 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1402 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1403 fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");
1408 /* If the LHS solution changed, mark the var as changed. */
1411 get_varinfo (lhs)->solution = sol;
1412 if (!TEST_BIT (changed, lhs))
1414 SET_BIT (changed, lhs);
1420 /* Process a constraint C that represents *x = y. */
1423 do_ds_constraint (constraint_t c, bitmap delta)
1425 unsigned int rhs = find (c->rhs.var);
1426 unsigned HOST_WIDE_INT roff = c->rhs.offset;
1427 bitmap sol = get_varinfo (rhs)->solution;
1431 if (bitmap_bit_p (sol, anything_id))
1433 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1435 varinfo_t jvi = get_varinfo (j);
1437 unsigned int loff = c->lhs.offset;
1438 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1441 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1446 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1448 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1449 if (!TEST_BIT (changed, t))
1451 SET_BIT (changed, t);
1459 /* For each member j of delta (Sol(x)), add an edge from y to j and
1460 union Sol(y) into Sol(j) */
1461 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1463 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1464 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1468 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1471 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1475 tmp = get_varinfo (t)->solution;
1477 if (set_union_with_increment (tmp, sol, roff))
1479 get_varinfo (t)->solution = tmp;
1481 sol = get_varinfo (rhs)->solution;
1482 if (!TEST_BIT (changed, t))
1484 SET_BIT (changed, t);
1489 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1490 fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
1494 /* Handle a non-simple (simple meaning requires no iteration),
1495 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1498 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1500 if (c->lhs.type == DEREF)
1502 if (c->rhs.type == ADDRESSOF)
1505 do_da_constraint (graph, c, delta);
1510 do_ds_constraint (c, delta);
1513 else if (c->rhs.type == DEREF)
1516 if (!(get_varinfo (c->lhs.var)->is_special_var))
1517 do_sd_constraint (graph, c, delta);
1526 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1527 t = find (c->rhs.var);
1528 solution = get_varinfo (t)->solution;
1529 t = find (c->lhs.var);
1530 tmp = get_varinfo (t)->solution;
1532 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1536 get_varinfo (t)->solution = tmp;
1537 if (!TEST_BIT (changed, t))
1539 SET_BIT (changed, t);
1546 /* Initialize and return a new SCC info structure. */
1548 static struct scc_info *
1549 init_scc_info (size_t size)
1551 struct scc_info *si = XNEW (struct scc_info);
1554 si->current_index = 0;
1555 si->visited = sbitmap_alloc (size);
1556 sbitmap_zero (si->visited);
1557 si->roots = sbitmap_alloc (size);
1558 sbitmap_zero (si->roots);
1559 si->node_mapping = XNEWVEC (unsigned int, size);
1560 si->dfs = XCNEWVEC (unsigned int, size);
1562 for (i = 0; i < size; i++)
1563 si->node_mapping[i] = i;
1565 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1569 /* Free an SCC info structure pointed to by SI */
1572 free_scc_info (struct scc_info *si)
1574 sbitmap_free (si->visited);
1575 sbitmap_free (si->roots);
1576 free (si->node_mapping);
1578 VEC_free (unsigned, heap, si->scc_stack);
1583 /* Find indirect cycles in GRAPH that occur, using strongly connected
1584 components, and note them in the indirect cycles map.
1586 This technique comes from Ben Hardekopf and Calvin Lin,
1587 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1588 Lines of Code", submitted to PLDI 2007. */
1591 find_indirect_cycles (constraint_graph_t graph)
1594 unsigned int size = graph->size;
1595 struct scc_info *si = init_scc_info (size);
1597 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1598 if (!TEST_BIT (si->visited, i) && find (i) == i)
1599 scc_visit (graph, si, i);
1604 /* Compute a topological ordering for GRAPH, and store the result in the
1605 topo_info structure TI. */
1608 compute_topo_order (constraint_graph_t graph,
1609 struct topo_info *ti)
1612 unsigned int size = VEC_length (varinfo_t, varmap);
1614 for (i = 0; i != size; ++i)
1615 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1616 topo_visit (graph, ti, i);
1619 /* Perform offline variable substitution.
1621 This is a linear time way of identifying variables that must have
1622 equivalent points-to sets, including those caused by static cycles,
1623 and single entry subgraphs, in the constraint graph.
1625 The technique is described in "Off-line variable substitution for
1626 scaling points-to analysis" by Atanas Rountev and Satish Chandra,
1627 in "ACM SIGPLAN Notices" volume 35, number 5, pages 47-56.
1629 There is an optimal way to do this involving hash based value
1630 numbering, once the technique is published i will implement it
1633 The general method of finding equivalence classes is as follows:
1634 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1635 Add fake nodes (ADDRESS nodes) and edges for a = &b constraints.
1636 Initialize all non-REF/ADDRESS nodes to be direct nodes
1637 For each SCC in the predecessor graph:
1638 for each member (x) of the SCC
1639 if x is not a direct node:
1640 set rootnode(SCC) to be not a direct node
1641 collapse node x into rootnode(SCC).
1642 if rootnode(SCC) is not a direct node:
1643 label rootnode(SCC) with a new equivalence class
1645 if all labeled predecessors of rootnode(SCC) have the same
1647 label rootnode(SCC) with this label
1649 label rootnode(SCC) with a new equivalence class
1651 All direct nodes with the same equivalence class can be replaced
1652 with a single representative node.
1653 All unlabeled nodes (label == 0) are not pointers and all edges
1654 involving them can be eliminated.
1655 We perform these optimizations during move_complex_constraints.
1658 static int equivalence_class;
1660 /* Recursive routine to find strongly connected components in GRAPH,
1661 and label it's nodes with equivalence classes.
1662 This is used during variable substitution to find cycles involving
1663 the regular or implicit predecessors, and label them as equivalent.
1664 The SCC finding algorithm used is the same as that for scc_visit. */
1667 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1671 unsigned int my_dfs;
1673 gcc_assert (si->node_mapping[n] == n);
1674 SET_BIT (si->visited, n);
1675 si->dfs[n] = si->current_index ++;
1676 my_dfs = si->dfs[n];
1678 /* Visit all the successors. */
1679 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1681 unsigned int w = si->node_mapping[i];
1683 if (TEST_BIT (si->roots, w))
1686 if (!TEST_BIT (si->visited, w))
1687 label_visit (graph, si, w);
1689 unsigned int t = si->node_mapping[w];
1690 unsigned int nnode = si->node_mapping[n];
1691 gcc_assert (nnode == n);
1693 if (si->dfs[t] < si->dfs[nnode])
1694 si->dfs[n] = si->dfs[t];
1698 /* Visit all the implicit predecessors. */
1699 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1701 unsigned int w = si->node_mapping[i];
1703 if (TEST_BIT (si->roots, w))
1706 if (!TEST_BIT (si->visited, w))
1707 label_visit (graph, si, w);
1709 unsigned int t = si->node_mapping[w];
1710 unsigned int nnode = si->node_mapping[n];
1711 gcc_assert (nnode == n);
1713 if (si->dfs[t] < si->dfs[nnode])
1714 si->dfs[n] = si->dfs[t];
1718 /* See if any components have been identified. */
1719 if (si->dfs[n] == my_dfs)
1721 while (VEC_length (unsigned, si->scc_stack) != 0
1722 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1724 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1725 si->node_mapping[w] = n;
1727 if (!TEST_BIT (graph->direct_nodes, w))
1728 RESET_BIT (graph->direct_nodes, n);
1730 SET_BIT (si->roots, n);
1732 if (!TEST_BIT (graph->direct_nodes, n))
1734 graph->label[n] = equivalence_class++;
1738 unsigned int size = 0;
1739 unsigned int firstlabel = ~0;
1741 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1743 unsigned int j = si->node_mapping[i];
1745 if (j == n || graph->label[j] == 0)
1748 if (firstlabel == (unsigned int)~0)
1750 firstlabel = graph->label[j];
1753 else if (graph->label[j] != firstlabel)
1758 graph->label[n] = 0;
1760 graph->label[n] = firstlabel;
1762 graph->label[n] = equivalence_class++;
1766 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1769 /* Perform offline variable substitution, discovering equivalence
1770 classes, and eliminating non-pointer variables. */
1772 static struct scc_info *
1773 perform_var_substitution (constraint_graph_t graph)
1776 unsigned int size = graph->size;
1777 struct scc_info *si = init_scc_info (size);
1779 bitmap_obstack_initialize (&iteration_obstack);
1780 equivalence_class = 0;
1782 /* We only need to visit the non-address nodes for labeling
1783 purposes, as the address nodes will never have any predecessors,
1784 because &x never appears on the LHS of a constraint. */
1785 for (i = 0; i < LAST_REF_NODE; i++)
1786 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1787 label_visit (graph, si, si->node_mapping[i]);
1789 if (dump_file && (dump_flags & TDF_DETAILS))
1790 for (i = 0; i < FIRST_REF_NODE; i++)
1792 bool direct_node = TEST_BIT (graph->direct_nodes, i);
1794 "Equivalence class for %s node id %d:%s is %d\n",
1795 direct_node ? "Direct node" : "Indirect node", i,
1796 get_varinfo (i)->name,
1797 graph->label[si->node_mapping[i]]);
1800 /* Quickly eliminate our non-pointer variables. */
1802 for (i = 0; i < FIRST_REF_NODE; i++)
1804 unsigned int node = si->node_mapping[i];
1806 if (graph->label[node] == 0 && TEST_BIT (graph->direct_nodes, node))
1808 if (dump_file && (dump_flags & TDF_DETAILS))
1810 "%s is a non-pointer variable, eliminating edges.\n",
1811 get_varinfo (node)->name);
1812 stats.nonpointer_vars++;
1813 clear_edges_for_node (graph, node);
1819 /* Free information that was only necessary for variable
1823 free_var_substitution_info (struct scc_info *si)
1826 free (graph->label);
1827 free (graph->eq_rep);
1828 sbitmap_free (graph->direct_nodes);
1829 bitmap_obstack_release (&iteration_obstack);
1832 /* Return an existing node that is equivalent to NODE, which has
1833 equivalence class LABEL, if one exists. Return NODE otherwise. */
1836 find_equivalent_node (constraint_graph_t graph,
1837 unsigned int node, unsigned int label)
1839 /* If the address version of this variable is unused, we can
1840 substitute it for anything else with the same label.
1841 Otherwise, we know the pointers are equivalent, but not the
1844 if (graph->label[FIRST_ADDR_NODE + node] == 0)
1846 gcc_assert (label < graph->size);
1848 if (graph->eq_rep[label] != -1)
1850 /* Unify the two variables since we know they are equivalent. */
1851 if (unite (graph->eq_rep[label], node))
1852 unify_nodes (graph, graph->eq_rep[label], node, false);
1853 return graph->eq_rep[label];
1857 graph->eq_rep[label] = node;
1863 /* Move complex constraints to the appropriate nodes, and collapse
1864 variables we've discovered are equivalent during variable
1865 substitution. SI is the SCC_INFO that is the result of
1866 perform_variable_substitution. */
1869 move_complex_constraints (constraint_graph_t graph,
1870 struct scc_info *si)
1876 for (j = 0; j < graph->size; j++)
1877 gcc_assert (find (j) == j);
1879 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1881 struct constraint_expr lhs = c->lhs;
1882 struct constraint_expr rhs = c->rhs;
1883 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
1884 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
1885 unsigned int lhsnode, rhsnode;
1886 unsigned int lhslabel, rhslabel;
1888 lhsnode = si->node_mapping[lhsvar];
1889 rhsnode = si->node_mapping[rhsvar];
1890 lhslabel = graph->label[lhsnode];
1891 rhslabel = graph->label[rhsnode];
1893 /* See if it is really a non-pointer variable, and if so, ignore
1897 if (!TEST_BIT (graph->direct_nodes, lhsnode))
1898 lhslabel = graph->label[lhsnode] = equivalence_class++;
1901 if (dump_file && (dump_flags & TDF_DETAILS))
1904 fprintf (dump_file, "%s is a non-pointer variable,"
1905 "ignoring constraint:",
1906 get_varinfo (lhs.var)->name);
1907 dump_constraint (dump_file, c);
1909 VEC_replace (constraint_t, constraints, i, NULL);
1916 if (!TEST_BIT (graph->direct_nodes, rhsnode))
1917 rhslabel = graph->label[rhsnode] = equivalence_class++;
1920 if (dump_file && (dump_flags & TDF_DETAILS))
1923 fprintf (dump_file, "%s is a non-pointer variable,"
1924 "ignoring constraint:",
1925 get_varinfo (rhs.var)->name);
1926 dump_constraint (dump_file, c);
1928 VEC_replace (constraint_t, constraints, i, NULL);
1933 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
1934 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
1935 c->lhs.var = lhsvar;
1936 c->rhs.var = rhsvar;
1938 if (lhs.type == DEREF)
1940 if (rhs.type == ADDRESSOF || rhsvar > anything_id)
1941 insert_into_complex (graph, lhsvar, c);
1943 else if (rhs.type == DEREF)
1945 if (!(get_varinfo (lhsvar)->is_special_var))
1946 insert_into_complex (graph, rhsvar, c);
1948 else if (rhs.type != ADDRESSOF && lhsvar > anything_id
1949 && (lhs.offset != 0 || rhs.offset != 0))
1951 insert_into_complex (graph, rhsvar, c);
1957 /* Eliminate indirect cycles involving NODE. Return true if NODE was
1958 part of an SCC, false otherwise. */
1961 eliminate_indirect_cycles (unsigned int node)
1963 if (graph->indirect_cycles[node] != -1
1964 && !bitmap_empty_p (get_varinfo (node)->solution))
1967 VEC(unsigned,heap) *queue = NULL;
1969 unsigned int to = find (graph->indirect_cycles[node]);
1972 /* We can't touch the solution set and call unify_nodes
1973 at the same time, because unify_nodes is going to do
1974 bitmap unions into it. */
1976 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
1978 if (find (i) == i && i != to)
1981 VEC_safe_push (unsigned, heap, queue, i);
1986 VEC_iterate (unsigned, queue, queuepos, i);
1989 unify_nodes (graph, to, i, true);
1991 VEC_free (unsigned, heap, queue);
1997 /* Solve the constraint graph GRAPH using our worklist solver.
1998 This is based on the PW* family of solvers from the "Efficient Field
1999 Sensitive Pointer Analysis for C" paper.
2000 It works by iterating over all the graph nodes, processing the complex
2001 constraints and propagating the copy constraints, until everything stops
2002 changed. This corresponds to steps 6-8 in the solving list given above. */
2005 solve_graph (constraint_graph_t graph)
2007 unsigned int size = VEC_length (varinfo_t, varmap);
2012 changed = sbitmap_alloc (size);
2013 sbitmap_zero (changed);
2015 /* Mark all initial non-collapsed nodes as changed. */
2016 for (i = 0; i < size; i++)
2018 varinfo_t ivi = get_varinfo (i);
2019 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2020 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2021 || VEC_length (constraint_t, graph->complex[i]) > 0))
2023 SET_BIT (changed, i);
2028 /* Allocate a bitmap to be used to store the changed bits. */
2029 pts = BITMAP_ALLOC (&pta_obstack);
2031 while (changed_count > 0)
2034 struct topo_info *ti = init_topo_info ();
2037 bitmap_obstack_initialize (&iteration_obstack);
2039 compute_topo_order (graph, ti);
2041 while (VEC_length (unsigned, ti->topo_order) != 0)
2044 i = VEC_pop (unsigned, ti->topo_order);
2046 /* If this variable is not a representative, skip it. */
2050 /* In certain indirect cycle cases, we may merge this
2051 variable to another. */
2052 if (eliminate_indirect_cycles (i) && find (i) != i)
2055 /* If the node has changed, we need to process the
2056 complex constraints and outgoing edges again. */
2057 if (TEST_BIT (changed, i))
2062 VEC(constraint_t,heap) *complex = graph->complex[i];
2063 bool solution_empty;
2065 RESET_BIT (changed, i);
2068 /* Compute the changed set of solution bits. */
2069 bitmap_and_compl (pts, get_varinfo (i)->solution,
2070 get_varinfo (i)->oldsolution);
2072 if (bitmap_empty_p (pts))
2075 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2077 solution = get_varinfo (i)->solution;
2078 solution_empty = bitmap_empty_p (solution);
2080 /* Process the complex constraints */
2081 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2083 /* The only complex constraint that can change our
2084 solution to non-empty, given an empty solution,
2085 is a constraint where the lhs side is receiving
2086 some set from elsewhere. */
2087 if (!solution_empty || c->lhs.type != DEREF)
2088 do_complex_constraint (graph, c, pts);
2091 solution_empty = bitmap_empty_p (solution);
2093 if (!solution_empty)
2097 /* Propagate solution to all successors. */
2098 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2104 unsigned int to = find (j);
2105 tmp = get_varinfo (to)->solution;
2108 /* Don't try to propagate to ourselves. */
2112 flag = set_union_with_increment (tmp, pts, 0);
2116 get_varinfo (to)->solution = tmp;
2117 if (!TEST_BIT (changed, to))
2119 SET_BIT (changed, to);
2127 free_topo_info (ti);
2128 bitmap_obstack_release (&iteration_obstack);
2132 sbitmap_free (changed);
2133 bitmap_obstack_release (&oldpta_obstack);
2136 /* Map from trees to variable infos. */
2137 static struct pointer_map_t *vi_for_tree;
2140 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2143 insert_vi_for_tree (tree t, varinfo_t vi)
2145 void **slot = pointer_map_insert (vi_for_tree, t);
2147 gcc_assert (*slot == NULL);
2151 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2152 exist in the map, return NULL, otherwise, return the varinfo we found. */
2155 lookup_vi_for_tree (tree t)
2157 void **slot = pointer_map_contains (vi_for_tree, t);
2161 return (varinfo_t) *slot;
2164 /* Return a printable name for DECL */
2167 alias_get_name (tree decl)
2169 const char *res = get_name (decl);
2171 int num_printed = 0;
2180 if (TREE_CODE (decl) == SSA_NAME)
2182 num_printed = asprintf (&temp, "%s_%u",
2183 alias_get_name (SSA_NAME_VAR (decl)),
2184 SSA_NAME_VERSION (decl));
2186 else if (DECL_P (decl))
2188 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2190 if (num_printed > 0)
2192 res = ggc_strdup (temp);
2198 /* Find the variable id for tree T in the map.
2199 If T doesn't exist in the map, create an entry for it and return it. */
2202 get_vi_for_tree (tree t)
2204 void **slot = pointer_map_contains (vi_for_tree, t);
2206 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2208 return (varinfo_t) *slot;
2211 /* Get a constraint expression from an SSA_VAR_P node. */
2213 static struct constraint_expr
2214 get_constraint_exp_from_ssa_var (tree t)
2216 struct constraint_expr cexpr;
2218 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2220 /* For parameters, get at the points-to set for the actual parm
2222 if (TREE_CODE (t) == SSA_NAME
2223 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2224 && SSA_NAME_IS_DEFAULT_DEF (t))
2225 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2227 cexpr.type = SCALAR;
2229 cexpr.var = get_vi_for_tree (t)->id;
2230 /* If we determine the result is "anything", and we know this is readonly,
2231 say it points to readonly memory instead. */
2232 if (cexpr.var == anything_id && TREE_READONLY (t))
2234 cexpr.type = ADDRESSOF;
2235 cexpr.var = readonly_id;
2242 /* Process a completed constraint T, and add it to the constraint
2246 process_constraint (constraint_t t)
2248 struct constraint_expr rhs = t->rhs;
2249 struct constraint_expr lhs = t->lhs;
2251 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2252 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2254 if (lhs.type == DEREF)
2255 get_varinfo (lhs.var)->directly_dereferenced = true;
2256 if (rhs.type == DEREF)
2257 get_varinfo (rhs.var)->directly_dereferenced = true;
2259 if (!use_field_sensitive)
2265 /* ANYTHING == ANYTHING is pointless. */
2266 if (lhs.var == anything_id && rhs.var == anything_id)
2269 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
2270 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2275 process_constraint (t);
2277 /* This can happen in our IR with things like n->a = *p */
2278 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2280 /* Split into tmp = *rhs, *lhs = tmp */
2281 tree rhsdecl = get_varinfo (rhs.var)->decl;
2282 tree pointertype = TREE_TYPE (rhsdecl);
2283 tree pointedtotype = TREE_TYPE (pointertype);
2284 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2285 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2287 /* If this is an aggregate of known size, we should have passed
2288 this off to do_structure_copy, and it should have broken it
2290 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2291 || get_varinfo (rhs.var)->is_unknown_size_var);
2293 process_constraint (new_constraint (tmplhs, rhs));
2294 process_constraint (new_constraint (lhs, tmplhs));
2298 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2299 VEC_safe_push (constraint_t, heap, constraints, t);
2303 /* Return true if T is a variable of a type that could contain
2307 could_have_pointers (tree t)
2309 tree type = TREE_TYPE (t);
2311 if (POINTER_TYPE_P (type)
2312 || AGGREGATE_TYPE_P (type)
2313 || TREE_CODE (type) == COMPLEX_TYPE)
2319 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2322 static unsigned HOST_WIDE_INT
2323 bitpos_of_field (const tree fdecl)
2326 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2327 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2330 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2331 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2335 /* Return true if an access to [ACCESSPOS, ACCESSSIZE]
2336 overlaps with a field at [FIELDPOS, FIELDSIZE] */
2339 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2340 const unsigned HOST_WIDE_INT fieldsize,
2341 const unsigned HOST_WIDE_INT accesspos,
2342 const unsigned HOST_WIDE_INT accesssize)
2344 if (fieldpos == accesspos && fieldsize == accesssize)
2346 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2348 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2354 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
2357 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2360 HOST_WIDE_INT bitsize = -1;
2361 HOST_WIDE_INT bitmaxsize = -1;
2362 HOST_WIDE_INT bitpos;
2364 struct constraint_expr *result;
2365 unsigned int beforelength = VEC_length (ce_s, *results);
2367 /* Some people like to do cute things like take the address of
2370 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2371 forzero = TREE_OPERAND (forzero, 0);
2373 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2375 struct constraint_expr temp;
2378 temp.var = integer_id;
2380 VEC_safe_push (ce_s, heap, *results, &temp);
2384 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2386 /* String constants are readonly, so there is nothing to really do
2388 if (TREE_CODE (t) == STRING_CST)
2391 get_constraint_for (t, results);
2392 result = VEC_last (ce_s, *results);
2393 result->offset = bitpos;
2395 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2397 /* This can also happen due to weird offsetof type macros. */
2398 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2399 result->type = SCALAR;
2401 if (result->type == SCALAR)
2403 /* In languages like C, you can access one past the end of an
2404 array. You aren't allowed to dereference it, so we can
2405 ignore this constraint. When we handle pointer subtraction,
2406 we may have to do something cute here. */
2408 if (result->offset < get_varinfo (result->var)->fullsize
2411 /* It's also not true that the constraint will actually start at the
2412 right offset, it may start in some padding. We only care about
2413 setting the constraint to the first actual field it touches, so
2416 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2418 if (offset_overlaps_with_access (curr->offset, curr->size,
2419 result->offset, bitmaxsize))
2421 result->var = curr->id;
2425 /* assert that we found *some* field there. The user couldn't be
2426 accessing *only* padding. */
2427 /* Still the user could access one past the end of an array
2428 embedded in a struct resulting in accessing *only* padding. */
2429 gcc_assert (curr || ref_contains_array_ref (orig_t));
2431 else if (bitmaxsize == 0)
2433 if (dump_file && (dump_flags & TDF_DETAILS))
2434 fprintf (dump_file, "Access to zero-sized part of variable,"
2438 if (dump_file && (dump_flags & TDF_DETAILS))
2439 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2446 /* Dereference the constraint expression CONS, and return the result.
2447 DEREF (ADDRESSOF) = SCALAR
2448 DEREF (SCALAR) = DEREF
2449 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2450 This is needed so that we can handle dereferencing DEREF constraints. */
2453 do_deref (VEC (ce_s, heap) **constraints)
2455 struct constraint_expr *c;
2458 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2460 if (c->type == SCALAR)
2462 else if (c->type == ADDRESSOF)
2464 else if (c->type == DEREF)
2466 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2467 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2468 process_constraint (new_constraint (tmplhs, *c));
2469 c->var = tmplhs.var;
2476 /* Given a tree T, return the constraint expression for it. */
2479 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2481 struct constraint_expr temp;
2483 /* x = integer is all glommed to a single variable, which doesn't
2484 point to anything by itself. That is, of course, unless it is an
2485 integer constant being treated as a pointer, in which case, we
2486 will return that this is really the addressof anything. This
2487 happens below, since it will fall into the default case. The only
2488 case we know something about an integer treated like a pointer is
2489 when it is the NULL pointer, and then we just say it points to
2491 if (TREE_CODE (t) == INTEGER_CST
2492 && !POINTER_TYPE_P (TREE_TYPE (t)))
2494 temp.var = integer_id;
2497 VEC_safe_push (ce_s, heap, *results, &temp);
2500 else if (TREE_CODE (t) == INTEGER_CST
2501 && integer_zerop (t))
2503 temp.var = nothing_id;
2504 temp.type = ADDRESSOF;
2506 VEC_safe_push (ce_s, heap, *results, &temp);
2510 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2512 case tcc_expression:
2515 switch (TREE_CODE (t))
2519 struct constraint_expr *c;
2521 tree exp = TREE_OPERAND (t, 0);
2522 tree pttype = TREE_TYPE (TREE_TYPE (t));
2524 get_constraint_for (exp, results);
2526 /* Make sure we capture constraints to all elements
2528 if ((handled_component_p (exp)
2529 && ref_contains_array_ref (exp))
2530 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE)
2532 struct constraint_expr *origrhs;
2534 struct constraint_expr tmp;
2536 if (VEC_length (ce_s, *results) == 0)
2539 gcc_assert (VEC_length (ce_s, *results) == 1);
2540 origrhs = VEC_last (ce_s, *results);
2542 VEC_pop (ce_s, *results);
2543 origvar = get_varinfo (origrhs->var);
2544 for (; origvar; origvar = origvar->next)
2546 tmp.var = origvar->id;
2547 VEC_safe_push (ce_s, heap, *results, &tmp);
2550 else if (VEC_length (ce_s, *results) == 1
2551 && (AGGREGATE_TYPE_P (pttype)
2552 || TREE_CODE (pttype) == COMPLEX_TYPE))
2554 struct constraint_expr *origrhs;
2556 struct constraint_expr tmp;
2558 gcc_assert (VEC_length (ce_s, *results) == 1);
2559 origrhs = VEC_last (ce_s, *results);
2561 VEC_pop (ce_s, *results);
2562 origvar = get_varinfo (origrhs->var);
2563 for (; origvar; origvar = origvar->next)
2565 tmp.var = origvar->id;
2566 VEC_safe_push (ce_s, heap, *results, &tmp);
2570 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2572 if (c->type == DEREF)
2575 c->type = ADDRESSOF;
2581 /* XXX: In interprocedural mode, if we didn't have the
2582 body, we would need to do *each pointer argument =
2584 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2587 tree heapvar = heapvar_lookup (t);
2589 if (heapvar == NULL)
2591 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2592 DECL_EXTERNAL (heapvar) = 1;
2593 get_var_ann (heapvar)->is_heapvar = 1;
2594 if (gimple_referenced_vars (cfun))
2595 add_referenced_var (heapvar);
2596 heapvar_insert (t, heapvar);
2599 temp.var = create_variable_info_for (heapvar,
2600 alias_get_name (heapvar));
2602 vi = get_varinfo (temp.var);
2603 vi->is_artificial_var = 1;
2604 vi->is_heap_var = 1;
2605 temp.type = ADDRESSOF;
2607 VEC_safe_push (ce_s, heap, *results, &temp);
2612 temp.var = anything_id;
2615 VEC_safe_push (ce_s, heap, *results, &temp);
2621 temp.type = ADDRESSOF;
2622 temp.var = anything_id;
2624 VEC_safe_push (ce_s, heap, *results, &temp);
2631 switch (TREE_CODE (t))
2635 get_constraint_for (TREE_OPERAND (t, 0), results);
2640 case ARRAY_RANGE_REF:
2642 get_constraint_for_component_ref (t, results);
2646 temp.type = ADDRESSOF;
2647 temp.var = anything_id;
2649 VEC_safe_push (ce_s, heap, *results, &temp);
2656 switch (TREE_CODE (t))
2660 case NON_LVALUE_EXPR:
2662 tree op = TREE_OPERAND (t, 0);
2664 /* Cast from non-pointer to pointers are bad news for us.
2665 Anything else, we see through */
2666 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2667 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2669 get_constraint_for (op, results);
2677 temp.type = ADDRESSOF;
2678 temp.var = anything_id;
2680 VEC_safe_push (ce_s, heap, *results, &temp);
2685 case tcc_exceptional:
2687 switch (TREE_CODE (t))
2691 get_constraint_for (PHI_RESULT (t), results);
2697 struct constraint_expr temp;
2698 temp = get_constraint_exp_from_ssa_var (t);
2699 VEC_safe_push (ce_s, heap, *results, &temp);
2705 temp.type = ADDRESSOF;
2706 temp.var = anything_id;
2708 VEC_safe_push (ce_s, heap, *results, &temp);
2713 case tcc_declaration:
2715 struct constraint_expr temp;
2716 temp = get_constraint_exp_from_ssa_var (t);
2717 VEC_safe_push (ce_s, heap, *results, &temp);
2722 temp.type = ADDRESSOF;
2723 temp.var = anything_id;
2725 VEC_safe_push (ce_s, heap, *results, &temp);
2732 /* Handle the structure copy case where we have a simple structure copy
2733 between LHS and RHS that is of SIZE (in bits)
2735 For each field of the lhs variable (lhsfield)
2736 For each field of the rhs variable at lhsfield.offset (rhsfield)
2737 add the constraint lhsfield = rhsfield
2739 If we fail due to some kind of type unsafety or other thing we
2740 can't handle, return false. We expect the caller to collapse the
2741 variable in that case. */
2744 do_simple_structure_copy (const struct constraint_expr lhs,
2745 const struct constraint_expr rhs,
2746 const unsigned HOST_WIDE_INT size)
2748 varinfo_t p = get_varinfo (lhs.var);
2749 unsigned HOST_WIDE_INT pstart, last;
2751 last = p->offset + size;
2752 for (; p && p->offset < last; p = p->next)
2755 struct constraint_expr templhs = lhs;
2756 struct constraint_expr temprhs = rhs;
2757 unsigned HOST_WIDE_INT fieldoffset;
2759 templhs.var = p->id;
2760 q = get_varinfo (temprhs.var);
2761 fieldoffset = p->offset - pstart;
2762 q = first_vi_for_offset (q, q->offset + fieldoffset);
2765 temprhs.var = q->id;
2766 process_constraint (new_constraint (templhs, temprhs));
2772 /* Handle the structure copy case where we have a structure copy between a
2773 aggregate on the LHS and a dereference of a pointer on the RHS
2774 that is of SIZE (in bits)
2776 For each field of the lhs variable (lhsfield)
2777 rhs.offset = lhsfield->offset
2778 add the constraint lhsfield = rhs
2782 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
2783 const struct constraint_expr rhs,
2784 const unsigned HOST_WIDE_INT size)
2786 varinfo_t p = get_varinfo (lhs.var);
2787 unsigned HOST_WIDE_INT pstart,last;
2789 last = p->offset + size;
2791 for (; p && p->offset < last; p = p->next)
2794 struct constraint_expr templhs = lhs;
2795 struct constraint_expr temprhs = rhs;
2796 unsigned HOST_WIDE_INT fieldoffset;
2799 if (templhs.type == SCALAR)
2800 templhs.var = p->id;
2802 templhs.offset = p->offset;
2804 q = get_varinfo (temprhs.var);
2805 fieldoffset = p->offset - pstart;
2806 temprhs.offset += fieldoffset;
2807 process_constraint (new_constraint (templhs, temprhs));
2811 /* Handle the structure copy case where we have a structure copy
2812 between an aggregate on the RHS and a dereference of a pointer on
2813 the LHS that is of SIZE (in bits)
2815 For each field of the rhs variable (rhsfield)
2816 lhs.offset = rhsfield->offset
2817 add the constraint lhs = rhsfield
2821 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
2822 const struct constraint_expr rhs,
2823 const unsigned HOST_WIDE_INT size)
2825 varinfo_t p = get_varinfo (rhs.var);
2826 unsigned HOST_WIDE_INT pstart,last;
2828 last = p->offset + size;
2830 for (; p && p->offset < last; p = p->next)
2833 struct constraint_expr templhs = lhs;
2834 struct constraint_expr temprhs = rhs;
2835 unsigned HOST_WIDE_INT fieldoffset;
2838 if (temprhs.type == SCALAR)
2839 temprhs.var = p->id;
2841 temprhs.offset = p->offset;
2843 q = get_varinfo (templhs.var);
2844 fieldoffset = p->offset - pstart;
2845 templhs.offset += fieldoffset;
2846 process_constraint (new_constraint (templhs, temprhs));
2850 /* Sometimes, frontends like to give us bad type information. This
2851 function will collapse all the fields from VAR to the end of VAR,
2852 into VAR, so that we treat those fields as a single variable.
2853 We return the variable they were collapsed into. */
2856 collapse_rest_of_var (unsigned int var)
2858 varinfo_t currvar = get_varinfo (var);
2861 for (field = currvar->next; field; field = field->next)
2864 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
2865 field->name, currvar->name);
2867 gcc_assert (!field->collapsed_to);
2868 field->collapsed_to = currvar;
2871 currvar->next = NULL;
2872 currvar->size = currvar->fullsize - currvar->offset;
2877 /* Handle aggregate copies by expanding into copies of the respective
2878 fields of the structures. */
2881 do_structure_copy (tree lhsop, tree rhsop)
2883 struct constraint_expr lhs, rhs, tmp;
2884 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
2886 unsigned HOST_WIDE_INT lhssize;
2887 unsigned HOST_WIDE_INT rhssize;
2889 get_constraint_for (lhsop, &lhsc);
2890 get_constraint_for (rhsop, &rhsc);
2891 gcc_assert (VEC_length (ce_s, lhsc) == 1);
2892 gcc_assert (VEC_length (ce_s, rhsc) == 1);
2893 lhs = *(VEC_last (ce_s, lhsc));
2894 rhs = *(VEC_last (ce_s, rhsc));
2896 VEC_free (ce_s, heap, lhsc);
2897 VEC_free (ce_s, heap, rhsc);
2899 /* If we have special var = x, swap it around. */
2900 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
2907 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
2908 possible it's something we could handle. However, most cases falling
2909 into this are dealing with transparent unions, which are slightly
2911 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
2913 rhs.type = ADDRESSOF;
2914 rhs.var = anything_id;
2917 /* If the RHS is a special var, or an addressof, set all the LHS fields to
2918 that special var. */
2919 if (rhs.var <= integer_id)
2921 for (p = get_varinfo (lhs.var); p; p = p->next)
2923 struct constraint_expr templhs = lhs;
2924 struct constraint_expr temprhs = rhs;
2926 if (templhs.type == SCALAR )
2927 templhs.var = p->id;
2929 templhs.offset += p->offset;
2930 process_constraint (new_constraint (templhs, temprhs));
2935 tree rhstype = TREE_TYPE (rhsop);
2936 tree lhstype = TREE_TYPE (lhsop);
2940 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
2941 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
2943 /* If we have a variably sized types on the rhs or lhs, and a deref
2944 constraint, add the constraint, lhsconstraint = &ANYTHING.
2945 This is conservatively correct because either the lhs is an unknown
2946 sized var (if the constraint is SCALAR), or the lhs is a DEREF
2947 constraint, and every variable it can point to must be unknown sized
2948 anyway, so we don't need to worry about fields at all. */
2949 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
2950 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
2952 rhs.var = anything_id;
2953 rhs.type = ADDRESSOF;
2955 process_constraint (new_constraint (lhs, rhs));
2959 /* The size only really matters insofar as we don't set more or less of
2960 the variable. If we hit an unknown size var, the size should be the
2961 whole darn thing. */
2962 if (get_varinfo (rhs.var)->is_unknown_size_var)
2965 rhssize = TREE_INT_CST_LOW (rhstypesize);
2967 if (get_varinfo (lhs.var)->is_unknown_size_var)
2970 lhssize = TREE_INT_CST_LOW (lhstypesize);
2973 if (rhs.type == SCALAR && lhs.type == SCALAR)
2975 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
2977 lhs.var = collapse_rest_of_var (lhs.var);
2978 rhs.var = collapse_rest_of_var (rhs.var);
2983 process_constraint (new_constraint (lhs, rhs));
2986 else if (lhs.type != DEREF && rhs.type == DEREF)
2987 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2988 else if (lhs.type == DEREF && rhs.type != DEREF)
2989 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2992 tree pointedtotype = lhstype;
2995 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
2996 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
2997 do_structure_copy (tmpvar, rhsop);
2998 do_structure_copy (lhsop, tmpvar);
3004 /* Update related alias information kept in AI. This is used when
3005 building name tags, alias sets and deciding grouping heuristics.
3006 STMT is the statement to process. This function also updates
3007 ADDRESSABLE_VARS. */
3010 update_alias_info (tree stmt, struct alias_info *ai)
3013 use_operand_p use_p;
3015 bool stmt_dereferences_ptr_p;
3016 enum escape_type stmt_escape_type = is_escape_site (stmt);
3017 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
3019 stmt_dereferences_ptr_p = false;
3021 if (stmt_escape_type == ESCAPE_TO_CALL
3022 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
3024 mem_ref_stats->num_call_sites++;
3025 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
3026 mem_ref_stats->num_pure_const_call_sites++;
3028 else if (stmt_escape_type == ESCAPE_TO_ASM)
3029 mem_ref_stats->num_asm_sites++;
3031 /* Mark all the variables whose address are taken by the statement. */
3032 addr_taken = addresses_taken (stmt);
3035 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
3037 /* If STMT is an escape point, all the addresses taken by it are
3039 if (stmt_escape_type != NO_ESCAPE)
3044 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
3046 tree rvar = referenced_var (i);
3047 if (!unmodifiable_var_p (rvar))
3048 mark_call_clobbered (rvar, stmt_escape_type);
3053 /* Process each operand use. For pointers, determine whether they
3054 are dereferenced by the statement, or whether their value
3056 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
3060 struct ptr_info_def *pi;
3061 unsigned num_uses, num_loads, num_stores;
3063 op = USE_FROM_PTR (use_p);
3065 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3066 to the set of addressable variables. */
3067 if (TREE_CODE (op) == ADDR_EXPR)
3069 bitmap addressable_vars = gimple_addressable_vars (cfun);
3071 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
3072 gcc_assert (addressable_vars);
3074 /* PHI nodes don't have annotations for pinning the set
3075 of addresses taken, so we collect them here.
3077 FIXME, should we allow PHI nodes to have annotations
3078 so that they can be treated like regular statements?
3079 Currently, they are treated as second-class
3081 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
3085 /* Ignore constants (they may occur in PHI node arguments). */
3086 if (TREE_CODE (op) != SSA_NAME)
3089 var = SSA_NAME_VAR (op);
3090 v_ann = var_ann (var);
3092 /* The base variable of an SSA name must be a GIMPLE register, and thus
3093 it cannot be aliased. */
3094 gcc_assert (!may_be_aliased (var));
3096 /* We are only interested in pointers. */
3097 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3100 pi = get_ptr_info (op);
3102 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3103 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3105 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3106 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
3109 /* If STMT is a PHI node, then it will not have pointer
3110 dereferences and it will not be an escape point. */
3111 if (TREE_CODE (stmt) == PHI_NODE)
3114 /* Determine whether OP is a dereferenced pointer, and if STMT
3115 is an escape point, whether OP escapes. */
3116 count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
3118 /* Handle a corner case involving address expressions of the
3119 form '&PTR->FLD'. The problem with these expressions is that
3120 they do not represent a dereference of PTR. However, if some
3121 other transformation propagates them into an INDIRECT_REF
3122 expression, we end up with '*(&PTR->FLD)' which is folded
3125 So, if the original code had no other dereferences of PTR,
3126 the aliaser will not create memory tags for it, and when
3127 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3128 memory operations will receive no VDEF/VUSE operands.
3130 One solution would be to have count_uses_and_derefs consider
3131 &PTR->FLD a dereference of PTR. But that is wrong, since it
3132 is not really a dereference but an offset calculation.
3134 What we do here is to recognize these special ADDR_EXPR
3135 nodes. Since these expressions are never GIMPLE values (they
3136 are not GIMPLE invariants), they can only appear on the RHS
3137 of an assignment and their base address is always an
3138 INDIRECT_REF expression. */
3139 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3140 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
3141 && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
3143 /* If the RHS if of the form &PTR->FLD and PTR == OP, then
3144 this represents a potential dereference of PTR. */
3145 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3146 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3147 if (TREE_CODE (base) == INDIRECT_REF
3148 && TREE_OPERAND (base, 0) == op)
3152 if (num_loads + num_stores > 0)
3154 /* Mark OP as dereferenced. In a subsequent pass,
3155 dereferenced pointers that point to a set of
3156 variables will be assigned a name tag to alias
3157 all the variables OP points to. */
3158 pi->is_dereferenced = 1;
3160 /* If this is a store operation, mark OP as being
3161 dereferenced to store, otherwise mark it as being
3162 dereferenced to load. */
3164 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3166 pointer_set_insert (ai->dereferenced_ptrs_load, var);
3168 /* Update the frequency estimate for all the dereferences of
3170 update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);
3172 /* Indicate that STMT contains pointer dereferences. */
3173 stmt_dereferences_ptr_p = true;
3176 if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
3178 /* If STMT is an escape point and STMT contains at
3179 least one direct use of OP, then the value of OP
3180 escapes and so the pointed-to variables need to
3181 be marked call-clobbered. */
3182 pi->value_escapes_p = 1;
3183 pi->escape_mask |= stmt_escape_type;
3185 /* If the statement makes a function call, assume
3186 that pointer OP will be dereferenced in a store
3187 operation inside the called function. */
3188 if (get_call_expr_in (stmt)
3189 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3191 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3192 pi->is_dereferenced = 1;
3197 if (TREE_CODE (stmt) == PHI_NODE)
3200 /* Mark stored variables in STMT as being written to and update the
3201 memory reference stats for all memory symbols referenced by STMT. */
3202 if (stmt_references_memory_p (stmt))
3207 mem_ref_stats->num_mem_stmts++;
3209 /* Notice that we only update memory reference stats for symbols
3210 loaded and stored by the statement if the statement does not
3211 contain pointer dereferences and it is not a call/asm site.
3212 This is to avoid double accounting problems when creating
3213 memory partitions. After computing points-to information,
3214 pointer dereference statistics are used to update the
3215 reference stats of the pointed-to variables, so here we
3216 should only update direct references to symbols.
3218 Indirect references are not updated here for two reasons: (1)
3219 The first time we compute alias information, the sets
3220 LOADED/STORED are empty for pointer dereferences, (2) After
3221 partitioning, LOADED/STORED may have references to
3222 partitions, not the original pointed-to variables. So, if we
3223 always counted LOADED/STORED here and during partitioning, we
3224 would count many symbols more than once.
3226 This does cause some imprecision when a statement has a
3227 combination of direct symbol references and pointer
3228 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
3229 memory symbols in its argument list, but these cases do not
3230 occur so frequently as to constitute a serious problem. */
3231 if (STORED_SYMS (stmt))
3232 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3234 tree sym = referenced_var (i);
3235 pointer_set_insert (ai->written_vars, sym);
3236 if (!stmt_dereferences_ptr_p
3237 && stmt_escape_type != ESCAPE_TO_CALL
3238 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3239 && stmt_escape_type != ESCAPE_TO_ASM)
3240 update_mem_sym_stats_from_stmt (sym, stmt, 0, 1);
3243 if (!stmt_dereferences_ptr_p
3244 && LOADED_SYMS (stmt)
3245 && stmt_escape_type != ESCAPE_TO_CALL
3246 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3247 && stmt_escape_type != ESCAPE_TO_ASM)
3248 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
3249 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
3254 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3255 Expressions of the type PTR + CST can be handled in two ways:
3257 1- If the constraint for PTR is ADDRESSOF for a non-structure
3258 variable, then we can use it directly because adding or
3259 subtracting a constant may not alter the original ADDRESSOF
3260 constraint (i.e., pointer arithmetic may not legally go outside
3261 an object's boundaries).
3263 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3264 then if CST is a compile-time constant that can be used as an
3265 offset, we can determine which sub-variable will be pointed-to
3268 Return true if the expression is handled. For any other kind of
3269 expression, return false so that each operand can be added as a
3270 separate constraint by the caller. */
3273 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3276 struct constraint_expr *c, *c2;
3279 VEC (ce_s, heap) *temp = NULL;
3280 unsigned int rhsoffset = 0;
3282 if (TREE_CODE (expr) != PLUS_EXPR
3283 && TREE_CODE (expr) != MINUS_EXPR)
3286 op0 = TREE_OPERAND (expr, 0);
3287 op1 = TREE_OPERAND (expr, 1);
3289 get_constraint_for (op0, &temp);
3290 if (POINTER_TYPE_P (TREE_TYPE (op0))
3291 && TREE_CODE (op1) == INTEGER_CST
3292 && TREE_CODE (expr) == PLUS_EXPR)
3294 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3300 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3301 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3303 if (c2->type == ADDRESSOF && rhsoffset != 0)
3305 varinfo_t temp = get_varinfo (c2->var);
3307 /* An access one after the end of an array is valid,
3308 so simply punt on accesses we cannot resolve. */
3309 temp = first_vi_for_offset (temp, rhsoffset);
3316 c2->offset = rhsoffset;
3317 process_constraint (new_constraint (*c, *c2));
3320 VEC_free (ce_s, heap, temp);
3326 /* Walk statement T setting up aliasing constraints according to the
3327 references found in T. This function is the main part of the
3328 constraint builder. AI points to auxiliary alias information used
3329 when building alias sets and computing alias grouping heuristics. */
3332 find_func_aliases (tree origt)
3335 VEC(ce_s, heap) *lhsc = NULL;
3336 VEC(ce_s, heap) *rhsc = NULL;
3337 struct constraint_expr *c;
3339 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3340 t = TREE_OPERAND (t, 0);
3342 /* Now build constraints expressions. */
3343 if (TREE_CODE (t) == PHI_NODE)
3345 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3347 /* Only care about pointers and structures containing
3349 if (could_have_pointers (PHI_RESULT (t)))
3354 /* For a phi node, assign all the arguments to
3356 get_constraint_for (PHI_RESULT (t), &lhsc);
3357 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3360 tree strippedrhs = PHI_ARG_DEF (t, i);
3362 STRIP_NOPS (strippedrhs);
3363 rhstype = TREE_TYPE (strippedrhs);
3364 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3366 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3368 struct constraint_expr *c2;
3369 while (VEC_length (ce_s, rhsc) > 0)
3371 c2 = VEC_last (ce_s, rhsc);
3372 process_constraint (new_constraint (*c, *c2));
3373 VEC_pop (ce_s, rhsc);
3379 /* In IPA mode, we need to generate constraints to pass call
3380 arguments through their calls. There are two cases, either a
3381 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3382 CALL_EXPR when we are not. */
3383 else if (in_ipa_mode
3384 && ((TREE_CODE (t) == GIMPLE_MODIFY_STMT
3385 && TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
3386 && !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
3387 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3388 || (TREE_CODE (t) == CALL_EXPR
3389 && !(call_expr_flags (t)
3390 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
3395 call_expr_arg_iterator iter;
3399 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3401 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3402 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3409 decl = get_callee_fndecl (rhsop);
3411 /* If we can directly resolve the function being called, do so.
3412 Otherwise, it must be some sort of indirect expression that
3413 we should still be able to handle. */
3416 fi = get_vi_for_tree (decl);
3420 decl = CALL_EXPR_FN (rhsop);
3421 fi = get_vi_for_tree (decl);
3424 /* Assign all the passed arguments to the appropriate incoming
3425 parameters of the function. */
3427 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3429 struct constraint_expr lhs ;
3430 struct constraint_expr *rhsp;
3432 get_constraint_for (arg, &rhsc);
3433 if (TREE_CODE (decl) != FUNCTION_DECL)
3442 lhs.var = first_vi_for_offset (fi, i)->id;
3445 while (VEC_length (ce_s, rhsc) != 0)
3447 rhsp = VEC_last (ce_s, rhsc);
3448 process_constraint (new_constraint (lhs, *rhsp));
3449 VEC_pop (ce_s, rhsc);
3454 /* If we are returning a value, assign it to the result. */
3457 struct constraint_expr rhs;
3458 struct constraint_expr *lhsp;
3461 get_constraint_for (lhsop, &lhsc);
3462 if (TREE_CODE (decl) != FUNCTION_DECL)
3471 rhs.var = first_vi_for_offset (fi, i)->id;
3474 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3475 process_constraint (new_constraint (*lhsp, rhs));
3478 /* Otherwise, just a regular assignment statement. */
3479 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3481 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3482 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
3485 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3486 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
3487 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
3488 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
3490 do_structure_copy (lhsop, rhsop);
3494 /* Only care about operations with pointers, structures
3495 containing pointers, dereferences, and call expressions. */
3496 if (could_have_pointers (lhsop)
3497 || TREE_CODE (rhsop) == CALL_EXPR)
3499 get_constraint_for (lhsop, &lhsc);
3500 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3502 /* RHS that consist of unary operations,
3503 exceptional types, or bare decls/constants, get
3504 handled directly by get_constraint_for. */
3506 case tcc_declaration:
3508 case tcc_exceptional:
3509 case tcc_expression:
3515 get_constraint_for (rhsop, &rhsc);
3516 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3518 struct constraint_expr *c2;
3521 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3522 process_constraint (new_constraint (*c, *c2));
3530 /* For pointer arithmetic of the form
3531 PTR + CST, we can simply use PTR's
3532 constraint because pointer arithmetic is
3533 not allowed to go out of bounds. */
3534 if (handle_ptr_arith (lhsc, rhsop))
3539 /* Otherwise, walk each operand. Notice that we
3540 can't use the operand interface because we need
3541 to process expressions other than simple operands
3542 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3544 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
3546 tree op = TREE_OPERAND (rhsop, i);
3549 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3550 get_constraint_for (op, &rhsc);
3551 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3553 struct constraint_expr *c2;
3554 while (VEC_length (ce_s, rhsc) > 0)
3556 c2 = VEC_last (ce_s, rhsc);
3557 process_constraint (new_constraint (*c, *c2));
3558 VEC_pop (ce_s, rhsc);
3567 /* After promoting variables and computing aliasing we will
3568 need to re-scan most statements. FIXME: Try to minimize the
3569 number of statements re-scanned. It's not really necessary to
3570 re-scan *all* statements. */
3571 mark_stmt_modified (origt);
3572 VEC_free (ce_s, heap, rhsc);
3573 VEC_free (ce_s, heap, lhsc);
3577 /* Find the first varinfo in the same variable as START that overlaps with
3579 Effectively, walk the chain of fields for the variable START to find the
3580 first field that overlaps with OFFSET.
3581 Return NULL if we can't find one. */
3584 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3586 varinfo_t curr = start;
3589 /* We may not find a variable in the field list with the actual
3590 offset when when we have glommed a structure to a variable.
3591 In that case, however, offset should still be within the size
3593 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3601 /* Insert the varinfo FIELD into the field list for BASE, at the front
3605 insert_into_field_list (varinfo_t base, varinfo_t field)
3607 varinfo_t prev = base;
3608 varinfo_t curr = base->next;
3614 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3618 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3620 varinfo_t prev = base;
3621 varinfo_t curr = base->next;
3632 if (field->offset <= curr->offset)
3637 field->next = prev->next;
3642 /* qsort comparison function for two fieldoff's PA and PB */
3645 fieldoff_compare (const void *pa, const void *pb)
3647 const fieldoff_s *foa = (const fieldoff_s *)pa;
3648 const fieldoff_s *fob = (const fieldoff_s *)pb;
3649 HOST_WIDE_INT foasize, fobsize;
3651 if (foa->offset != fob->offset)
3652 return foa->offset - fob->offset;
3654 foasize = TREE_INT_CST_LOW (foa->size);
3655 fobsize = TREE_INT_CST_LOW (fob->size);
3656 return foasize - fobsize;
3659 /* Sort a fieldstack according to the field offset and sizes. */
3661 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
3663 qsort (VEC_address (fieldoff_s, fieldstack),
3664 VEC_length (fieldoff_s, fieldstack),
3665 sizeof (fieldoff_s),
3669 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
3670 of TYPE onto fieldstack, recording their offsets along the way.
3671 OFFSET is used to keep track of the offset in this entire structure, rather
3672 than just the immediately containing structure. Returns the number
3674 HAS_UNION is set to true if we find a union type as a field of
3678 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
3679 HOST_WIDE_INT offset, bool *has_union)
3684 if (TREE_CODE (type) == COMPLEX_TYPE)
3686 fieldoff_s *real_part, *img_part;
3687 real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3688 real_part->type = TREE_TYPE (type);
3689 real_part->size = TYPE_SIZE (TREE_TYPE (type));
3690 real_part->offset = offset;
3691 real_part->decl = NULL_TREE;
3693 img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3694 img_part->type = TREE_TYPE (type);
3695 img_part->size = TYPE_SIZE (TREE_TYPE (type));
3696 img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
3697 img_part->decl = NULL_TREE;
3702 if (TREE_CODE (type) == ARRAY_TYPE)
3704 tree sz = TYPE_SIZE (type);
3705 tree elsz = TYPE_SIZE (TREE_TYPE (type));
3710 || ! host_integerp (sz, 1)
3711 || TREE_INT_CST_LOW (sz) == 0
3713 || ! host_integerp (elsz, 1)
3714 || TREE_INT_CST_LOW (elsz) == 0)
3717 nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
3718 if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
3721 for (i = 0; i < nr; ++i)
3727 && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
3728 || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
3731 if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
3733 else if (!(pushed = push_fields_onto_fieldstack
3734 (TREE_TYPE (type), fieldstack,
3735 offset + i * TREE_INT_CST_LOW (elsz), has_union)))
3736 /* Empty structures may have actual size, like in C++. So
3737 see if we didn't push any subfields and the size is
3738 nonzero, push the field onto the stack */
3745 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3746 pair->type = TREE_TYPE (type);
3748 pair->decl = NULL_TREE;
3749 pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
3759 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
3760 if (TREE_CODE (field) == FIELD_DECL)
3766 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
3767 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
3770 if (!var_can_have_subvars (field))
3772 else if (!(pushed = push_fields_onto_fieldstack
3773 (TREE_TYPE (field), fieldstack,
3774 offset + bitpos_of_field (field), has_union))
3775 && DECL_SIZE (field)
3776 && !integer_zerop (DECL_SIZE (field)))
3777 /* Empty structures may have actual size, like in C++. So
3778 see if we didn't push any subfields and the size is
3779 nonzero, push the field onto the stack */
3786 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3787 pair->type = TREE_TYPE (field);
3788 pair->size = DECL_SIZE (field);
3790 pair->offset = offset + bitpos_of_field (field);
3800 /* Create a constraint from ANYTHING variable to VI. */
3802 make_constraint_from_anything (varinfo_t vi)
3804 struct constraint_expr lhs, rhs;
3810 rhs.var = anything_id;
3812 rhs.type = ADDRESSOF;
3813 process_constraint (new_constraint (lhs, rhs));
3816 /* Count the number of arguments DECL has, and set IS_VARARGS to true
3817 if it is a varargs function. */
3820 count_num_arguments (tree decl, bool *is_varargs)
3825 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
3829 if (TREE_VALUE (t) == void_type_node)
3839 /* Creation function node for DECL, using NAME, and return the index
3840 of the variable we've created for the function. */
3843 create_function_info_for (tree decl, const char *name)
3845 unsigned int index = VEC_length (varinfo_t, varmap);
3849 bool is_varargs = false;
3851 /* Create the variable info. */
3853 vi = new_var_info (decl, index, name);
3858 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
3859 insert_vi_for_tree (vi->decl, vi);
3860 VEC_safe_push (varinfo_t, heap, varmap, vi);
3864 /* If it's varargs, we don't know how many arguments it has, so we
3871 vi->is_unknown_size_var = true;
3876 arg = DECL_ARGUMENTS (decl);
3878 /* Set up variables for each argument. */
3879 for (i = 1; i < vi->fullsize; i++)
3882 const char *newname;
3884 unsigned int newindex;
3885 tree argdecl = decl;
3890 newindex = VEC_length (varinfo_t, varmap);
3891 asprintf (&tempname, "%s.arg%d", name, i-1);
3892 newname = ggc_strdup (tempname);
3895 argvi = new_var_info (argdecl, newindex, newname);
3896 argvi->decl = argdecl;
3897 VEC_safe_push (varinfo_t, heap, varmap, argvi);
3900 argvi->fullsize = vi->fullsize;
3901 argvi->has_union = false;
3902 insert_into_field_list_sorted (vi, argvi);
3903 stats.total_vars ++;
3906 insert_vi_for_tree (arg, argvi);
3907 arg = TREE_CHAIN (arg);
3911 /* Create a variable for the return var. */
3912 if (DECL_RESULT (decl) != NULL
3913 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
3916 const char *newname;
3918 unsigned int newindex;
3919 tree resultdecl = decl;
3923 if (DECL_RESULT (decl))
3924 resultdecl = DECL_RESULT (decl);
3926 newindex = VEC_length (varinfo_t, varmap);
3927 asprintf (&tempname, "%s.result", name);
3928 newname = ggc_strdup (tempname);
3931 resultvi = new_var_info (resultdecl, newindex, newname);
3932 resultvi->decl = resultdecl;
3933 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
3934 resultvi->offset = i;
3936 resultvi->fullsize = vi->fullsize;
3937 resultvi->has_union = false;
3938 insert_into_field_list_sorted (vi, resultvi);
3939 stats.total_vars ++;
3940 if (DECL_RESULT (decl))
3941 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
3947 /* Return true if FIELDSTACK contains fields that overlap.
3948 FIELDSTACK is assumed to be sorted by offset. */
3951 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
3953 fieldoff_s *fo = NULL;
3955 HOST_WIDE_INT lastoffset = -1;
3957 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3959 if (fo->offset == lastoffset)
3961 lastoffset = fo->offset;
3966 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
3967 This will also create any varinfo structures necessary for fields
3971 create_variable_info_for (tree decl, const char *name)
3973 unsigned int index = VEC_length (varinfo_t, varmap);
3975 tree decltype = TREE_TYPE (decl);
3976 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
3977 bool notokay = false;
3979 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
3980 VEC (fieldoff_s,heap) *fieldstack = NULL;
3982 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
3983 return create_function_info_for (decl, name);
3985 hasunion = TREE_CODE (decltype) == UNION_TYPE
3986 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
3987 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
3989 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
3992 VEC_free (fieldoff_s, heap, fieldstack);
3998 /* If the variable doesn't have subvars, we may end up needing to
3999 sort the field list and create fake variables for all the
4001 vi = new_var_info (decl, index, name);
4004 vi->has_union = hasunion;
4006 || TREE_CODE (declsize) != INTEGER_CST
4007 || TREE_CODE (decltype) == UNION_TYPE
4008 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
4010 vi->is_unknown_size_var = true;
4016 vi->fullsize = TREE_INT_CST_LOW (declsize);
4017 vi->size = vi->fullsize;
4020 insert_vi_for_tree (vi->decl, vi);
4021 VEC_safe_push (varinfo_t, heap, varmap, vi);
4022 if (is_global && (!flag_whole_program || !in_ipa_mode))
4023 make_constraint_from_anything (vi);
4026 if (use_field_sensitive
4028 && !vi->is_unknown_size_var
4029 && var_can_have_subvars (decl)
4030 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4032 unsigned int newindex = VEC_length (varinfo_t, varmap);
4033 fieldoff_s *fo = NULL;
4036 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4039 || TREE_CODE (fo->size) != INTEGER_CST
4047 /* We can't sort them if we have a field with a variable sized type,
4048 which will make notokay = true. In that case, we are going to return
4049 without creating varinfos for the fields anyway, so sorting them is a
4053 sort_fieldstack (fieldstack);
4054 /* Due to some C++ FE issues, like PR 22488, we might end up
4055 what appear to be overlapping fields even though they,
4056 in reality, do not overlap. Until the C++ FE is fixed,
4057 we will simply disable field-sensitivity for these cases. */
4058 notokay = check_for_overlaps (fieldstack);
4062 if (VEC_length (fieldoff_s, fieldstack) != 0)
4063 fo = VEC_index (fieldoff_s, fieldstack, 0);
4065 if (fo == NULL || notokay)
4067 vi->is_unknown_size_var = 1;
4070 VEC_free (fieldoff_s, heap, fieldstack);
4074 vi->size = TREE_INT_CST_LOW (fo->size);
4075 vi->offset = fo->offset;
4076 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4077 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4081 const char *newname = "NULL";
4084 newindex = VEC_length (varinfo_t, varmap);
4088 asprintf (&tempname, "%s.%s",
4089 vi->name, alias_get_name (fo->decl));
4091 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4092 vi->name, fo->offset);
4093 newname = ggc_strdup (tempname);
4096 newvi = new_var_info (decl, newindex, newname);
4097 newvi->offset = fo->offset;
4098 newvi->size = TREE_INT_CST_LOW (fo->size);
4099 newvi->fullsize = vi->fullsize;
4100 insert_into_field_list (vi, newvi);
4101 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4102 if (is_global && (!flag_whole_program || !in_ipa_mode))
4103 make_constraint_from_anything (newvi);
4107 VEC_free (fieldoff_s, heap, fieldstack);
4112 /* Print out the points-to solution for VAR to FILE. */
4115 dump_solution_for_var (FILE *file, unsigned int var)
4117 varinfo_t vi = get_varinfo (var);
4121 if (find (var) != var)
4123 varinfo_t vipt = get_varinfo (find (var));
4124 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4128 fprintf (file, "%s = { ", vi->name);
4129 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4131 fprintf (file, "%s ", get_varinfo (i)->name);
4133 fprintf (file, "}\n");
4137 /* Print the points-to solution for VAR to stdout. */
4140 debug_solution_for_var (unsigned int var)
4142 dump_solution_for_var (stdout, var);
4145 /* Create varinfo structures for all of the variables in the
4146 function for intraprocedural mode. */
4149 intra_create_variable_infos (void)
4152 struct constraint_expr lhs, rhs;
4154 /* For each incoming pointer argument arg, create the constraint ARG
4155 = ANYTHING or a dummy variable if flag_argument_noalias is set. */
4156 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4160 if (!could_have_pointers (t))
4163 /* If flag_argument_noalias is set, then function pointer
4164 arguments are guaranteed not to point to each other. In that
4165 case, create an artificial variable PARM_NOALIAS and the
4166 constraint ARG = &PARM_NOALIAS. */
4167 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4170 tree heapvar = heapvar_lookup (t);
4174 lhs.var = get_vi_for_tree (t)->id;
4176 if (heapvar == NULL_TREE)
4179 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4181 DECL_EXTERNAL (heapvar) = 1;
4182 if (gimple_referenced_vars (cfun))
4183 add_referenced_var (heapvar);
4185 heapvar_insert (t, heapvar);
4187 ann = get_var_ann (heapvar);
4188 if (flag_argument_noalias == 1)
4189 ann->noalias_state = NO_ALIAS;
4190 else if (flag_argument_noalias == 2)
4191 ann->noalias_state = NO_ALIAS_GLOBAL;
4192 else if (flag_argument_noalias == 3)
4193 ann->noalias_state = NO_ALIAS_ANYTHING;
4198 vi = get_vi_for_tree (heapvar);
4199 vi->is_artificial_var = 1;
4200 vi->is_heap_var = 1;
4202 rhs.type = ADDRESSOF;
4204 for (p = get_varinfo (lhs.var); p; p = p->next)
4206 struct constraint_expr temp = lhs;
4208 process_constraint (new_constraint (temp, rhs));
4213 varinfo_t arg_vi = get_vi_for_tree (t);
4215 for (p = arg_vi; p; p = p->next)
4216 make_constraint_from_anything (p);
4221 /* Structure used to put solution bitmaps in a hashtable so they can
4222 be shared among variables with the same points-to set. */
4224 typedef struct shared_bitmap_info
4228 } *shared_bitmap_info_t;
4230 static htab_t shared_bitmap_table;
4232 /* Hash function for a shared_bitmap_info_t */
4235 shared_bitmap_hash (const void *p)
4237 const shared_bitmap_info_t bi = (shared_bitmap_info_t) p;
4238 return bi->hashcode;
4241 /* Equality function for two shared_bitmap_info_t's. */
4244 shared_bitmap_eq (const void *p1, const void *p2)
4246 const shared_bitmap_info_t sbi1 = (shared_bitmap_info_t) p1;
4247 const shared_bitmap_info_t sbi2 = (shared_bitmap_info_t) p2;
4248 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4251 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4252 existing instance if there is one, NULL otherwise. */
4255 shared_bitmap_lookup (bitmap pt_vars)
4258 struct shared_bitmap_info sbi;
4260 sbi.pt_vars = pt_vars;
4261 sbi.hashcode = bitmap_hash (pt_vars);
4263 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4264 sbi.hashcode, NO_INSERT);
4268 return ((shared_bitmap_info_t) *slot)->pt_vars;
4272 /* Add a bitmap to the shared bitmap hashtable. */
4275 shared_bitmap_add (bitmap pt_vars)
4278 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4280 sbi->pt_vars = pt_vars;
4281 sbi->hashcode = bitmap_hash (pt_vars);
4283 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4284 sbi->hashcode, INSERT);
4285 gcc_assert (!*slot);
4286 *slot = (void *) sbi;
4290 /* Set bits in INTO corresponding to the variable uids in solution set
4291 FROM, which came from variable PTR.
4292 For variables that are actually dereferenced, we also use type
4293 based alias analysis to prune the points-to sets.
4294 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4295 help determine whether we are we are allowed to prune using TBAA. */
4298 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed)
4303 HOST_WIDE_INT ptr_alias_set = get_alias_set (TREE_TYPE (ptr));
4305 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4307 varinfo_t vi = get_varinfo (i);
4308 unsigned HOST_WIDE_INT var_alias_set;
4310 /* The only artificial variables that are allowed in a may-alias
4311 set are heap variables. */
4312 if (vi->is_artificial_var && !vi->is_heap_var)
4315 if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
4317 /* Variables containing unions may need to be converted to
4318 their SFT's, because SFT's can have unions and we cannot. */
4319 for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
4320 bitmap_set_bit (into, DECL_UID (sv->var));
4322 else if (TREE_CODE (vi->decl) == VAR_DECL
4323 || TREE_CODE (vi->decl) == PARM_DECL)
4325 if (var_can_have_subvars (vi->decl)
4326 && get_subvars_for_var (vi->decl))
4328 /* If VI->DECL is an aggregate for which we created
4329 SFTs, add the SFT corresponding to VI->OFFSET. */
4330 tree sft = get_subvar_at (vi->decl, vi->offset);
4333 var_alias_set = get_alias_set (sft);
4334 if ((!is_derefed && !vi->directly_dereferenced)
4335 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4336 bitmap_set_bit (into, DECL_UID (sft));
4341 /* Otherwise, just add VI->DECL to the alias set.
4342 Don't type prune artificial vars. */
4343 if (vi->is_artificial_var)
4344 bitmap_set_bit (into, DECL_UID (vi->decl));
4347 var_alias_set = get_alias_set (vi->decl);
4348 if ((!is_derefed && !vi->directly_dereferenced)
4349 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4350 bitmap_set_bit (into, DECL_UID (vi->decl));
4358 static bool have_alias_info = false;
4360 /* The list of SMT's that are in use by our pointer variables. This
4361 is the set of SMT's for all pointers that can point to anything. */
4362 static bitmap used_smts;
4364 /* Due to the ordering of points-to set calculation and SMT
4365 calculation being a bit co-dependent, we can't just calculate SMT
4366 used info whenever we want, we have to calculate it around the time
4367 that find_what_p_points_to is called. */
4369 /* Mark which SMT's are in use by points-to anything variables. */
4372 set_used_smts (void)
4376 used_smts = BITMAP_ALLOC (&pta_obstack);
4378 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4380 tree var = vi->decl;
4383 struct ptr_info_def *pi = NULL;
4385 /* For parm decls, the pointer info may be under the default
4387 if (TREE_CODE (vi->decl) == PARM_DECL
4388 && gimple_default_def (cfun, var))
4389 pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4390 else if (TREE_CODE (var) == SSA_NAME)
4391 pi = SSA_NAME_PTR_INFO (var);
4393 /* Skip the special variables and those without their own
4395 if (vi->is_special_var || find (vi->id) != vi->id
4397 || (pi && !pi->is_dereferenced)
4398 || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4399 || !POINTER_TYPE_P (TREE_TYPE (var)))
4402 if (TREE_CODE (var) == SSA_NAME)
4403 var = SSA_NAME_VAR (var);
4409 smt = va->symbol_mem_tag;
4410 if (smt && bitmap_bit_p (vi->solution, anything_id))
4411 bitmap_set_bit (used_smts, DECL_UID (smt));
4415 /* Merge the necessary SMT's into the bitmap INTO, which is
4416 P's varinfo. This involves merging all SMT's that are a subset of
4417 the SMT necessary for P. */
4420 merge_smts_into (tree p, bitmap solution)
4428 if (TREE_CODE (p) == SSA_NAME)
4429 var = SSA_NAME_VAR (p);
4431 smt = var_ann (var)->symbol_mem_tag;
4434 HOST_WIDE_INT smtset = get_alias_set (TREE_TYPE (smt));
4436 /* Need to set the SMT subsets first before this
4437 will work properly. */
4438 bitmap_set_bit (solution, DECL_UID (smt));
4439 EXECUTE_IF_SET_IN_BITMAP (used_smts, 0, i, bi)
4441 tree newsmt = referenced_var (i);
4442 tree newsmttype = TREE_TYPE (newsmt);
4444 if (alias_set_subset_of (get_alias_set (newsmttype),
4446 bitmap_set_bit (solution, i);
4449 aliases = MTAG_ALIASES (smt);
4451 bitmap_ior_into (solution, aliases);
4455 /* Given a pointer variable P, fill in its points-to set, or return
4457 Rather than return false for variables that point-to anything, we
4458 instead find the corresponding SMT, and merge in it's aliases. In
4459 addition to these aliases, we also set the bits for the SMT's
4460 themselves and their subsets, as SMT's are still in use by
4461 non-SSA_NAME's, and pruning may eliminate every one of their
4462 aliases. In such a case, if we did not include the right set of
4463 SMT's in the points-to set of the variable, we'd end up with
4464 statements that do not conflict but should. */
4467 find_what_p_points_to (tree p)
4472 if (!have_alias_info)
4475 /* For parameters, get at the points-to set for the actual parm
4477 if (TREE_CODE (p) == SSA_NAME
4478 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4479 && SSA_NAME_IS_DEFAULT_DEF (p))
4480 lookup_p = SSA_NAME_VAR (p);
4482 vi = lookup_vi_for_tree (lookup_p);
4485 if (vi->is_artificial_var)
4488 /* See if this is a field or a structure. */
4489 if (vi->size != vi->fullsize)
4491 /* Nothing currently asks about structure fields directly,
4492 but when they do, we need code here to hand back the
4494 if (!var_can_have_subvars (vi->decl)
4495 || get_subvars_for_var (vi->decl) == NULL)
4500 struct ptr_info_def *pi = get_ptr_info (p);
4503 bool was_pt_anything = false;
4504 bitmap finished_solution;
4507 if (!pi->is_dereferenced)
4510 /* This variable may have been collapsed, let's get the real
4512 vi = get_varinfo (find (vi->id));
4514 /* Translate artificial variables into SSA_NAME_PTR_INFO
4516 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4518 varinfo_t vi = get_varinfo (i);
4520 if (vi->is_artificial_var)
4522 /* FIXME. READONLY should be handled better so that
4523 flow insensitive aliasing can disregard writable
4525 if (vi->id == nothing_id)
4527 else if (vi->id == anything_id)
4528 was_pt_anything = 1;
4529 else if (vi->id == readonly_id)
4530 was_pt_anything = 1;
4531 else if (vi->id == integer_id)
4532 was_pt_anything = 1;
4533 else if (vi->is_heap_var)
4534 pi->pt_global_mem = 1;
4538 /* Share the final set of variables when possible. */
4540 finished_solution = BITMAP_GGC_ALLOC ();
4541 stats.points_to_sets_created++;
4543 /* Instead of using pt_anything, we merge in the SMT aliases
4544 for the underlying SMT. In addition, if they could have
4545 pointed to anything, they could point to global memory.
4546 But we cannot do that for ref-all pointers because these
4547 aliases have not been computed yet. */
4548 if (was_pt_anything)
4550 if (PTR_IS_REF_ALL (p))
4552 pi->pt_anything = 1;
4556 merge_smts_into (p, finished_solution);
4557 pi->pt_global_mem = 1;
4560 set_uids_in_ptset (vi->decl, finished_solution, vi->solution,
4561 vi->directly_dereferenced);
4562 result = shared_bitmap_lookup (finished_solution);
4566 shared_bitmap_add (finished_solution);
4567 pi->pt_vars = finished_solution;
4571 pi->pt_vars = result;
4572 bitmap_clear (finished_solution);
4575 if (bitmap_empty_p (pi->pt_vars))
4587 /* Dump points-to information to OUTFILE. */
4590 dump_sa_points_to_info (FILE *outfile)
4594 fprintf (outfile, "\nPoints-to sets\n\n");
4596 if (dump_flags & TDF_STATS)
4598 fprintf (outfile, "Stats:\n");
4599 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4600 fprintf (outfile, "Non-pointer vars: %d\n",
4601 stats.nonpointer_vars);
4602 fprintf (outfile, "Statically unified vars: %d\n",
4603 stats.unified_vars_static);
4604 fprintf (outfile, "Dynamically unified vars: %d\n",
4605 stats.unified_vars_dynamic);
4606 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4607 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4608 fprintf (outfile, "Number of implicit edges: %d\n",
4609 stats.num_implicit_edges);
4612 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4613 dump_solution_for_var (outfile, i);
4617 /* Debug points-to information to stderr. */
4620 debug_sa_points_to_info (void)
4622 dump_sa_points_to_info (stderr);
4626 /* Initialize the always-existing constraint variables for NULL
4627 ANYTHING, READONLY, and INTEGER */
4630 init_base_vars (void)
4632 struct constraint_expr lhs, rhs;
4634 /* Create the NULL variable, used to represent that a variable points
4636 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
4637 var_nothing = new_var_info (nothing_tree, 0, "NULL");
4638 insert_vi_for_tree (nothing_tree, var_nothing);
4639 var_nothing->is_artificial_var = 1;
4640 var_nothing->offset = 0;
4641 var_nothing->size = ~0;
4642 var_nothing->fullsize = ~0;
4643 var_nothing->is_special_var = 1;
4645 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
4647 /* Create the ANYTHING variable, used to represent that a variable
4648 points to some unknown piece of memory. */
4649 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
4650 var_anything = new_var_info (anything_tree, 1, "ANYTHING");
4651 insert_vi_for_tree (anything_tree, var_anything);
4652 var_anything->is_artificial_var = 1;
4653 var_anything->size = ~0;
4654 var_anything->offset = 0;
4655 var_anything->next = NULL;
4656 var_anything->fullsize = ~0;
4657 var_anything->is_special_var = 1;
4660 /* Anything points to anything. This makes deref constraints just
4661 work in the presence of linked list and other p = *p type loops,
4662 by saying that *ANYTHING = ANYTHING. */
4663 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
4665 lhs.var = anything_id;
4667 rhs.type = ADDRESSOF;
4668 rhs.var = anything_id;
4671 /* This specifically does not use process_constraint because
4672 process_constraint ignores all anything = anything constraints, since all
4673 but this one are redundant. */
4674 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
4676 /* Create the READONLY variable, used to represent that a variable
4677 points to readonly memory. */
4678 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
4679 var_readonly = new_var_info (readonly_tree, 2, "READONLY");
4680 var_readonly->is_artificial_var = 1;
4681 var_readonly->offset = 0;
4682 var_readonly->size = ~0;
4683 var_readonly->fullsize = ~0;
4684 var_readonly->next = NULL;
4685 var_readonly->is_special_var = 1;
4686 insert_vi_for_tree (readonly_tree, var_readonly);
4688 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
4690 /* readonly memory points to anything, in order to make deref
4691 easier. In reality, it points to anything the particular
4692 readonly variable can point to, but we don't track this
4695 lhs.var = readonly_id;
4697 rhs.type = ADDRESSOF;
4698 rhs.var = anything_id;
4701 process_constraint (new_constraint (lhs, rhs));
4703 /* Create the INTEGER variable, used to represent that a variable points
4705 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
4706 var_integer = new_var_info (integer_tree, 3, "INTEGER");
4707 insert_vi_for_tree (integer_tree, var_integer);
4708 var_integer->is_artificial_var = 1;
4709 var_integer->size = ~0;
4710 var_integer->fullsize = ~0;
4711 var_integer->offset = 0;
4712 var_integer->next = NULL;
4713 var_integer->is_special_var = 1;
4715 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
4717 /* INTEGER = ANYTHING, because we don't know where a dereference of
4718 a random integer will point to. */
4720 lhs.var = integer_id;
4722 rhs.type = ADDRESSOF;
4723 rhs.var = anything_id;
4725 process_constraint (new_constraint (lhs, rhs));
4728 /* Initialize things necessary to perform PTA */
4731 init_alias_vars (void)
4733 bitmap_obstack_initialize (&pta_obstack);
4734 bitmap_obstack_initialize (&oldpta_obstack);
4735 bitmap_obstack_initialize (&predbitmap_obstack);
4737 constraint_pool = create_alloc_pool ("Constraint pool",
4738 sizeof (struct constraint), 30);
4739 variable_info_pool = create_alloc_pool ("Variable info pool",
4740 sizeof (struct variable_info), 30);
4741 constraints = VEC_alloc (constraint_t, heap, 8);
4742 varmap = VEC_alloc (varinfo_t, heap, 8);
4743 vi_for_tree = pointer_map_create ();
4745 memset (&stats, 0, sizeof (stats));
4746 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
4747 shared_bitmap_eq, free);
4751 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
4752 predecessor edges. */
4755 remove_preds_and_fake_succs (constraint_graph_t graph)
4759 /* Clear the implicit ref and address nodes from the successor
4761 for (i = 0; i < FIRST_REF_NODE; i++)
4763 if (graph->succs[i])
4764 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
4765 FIRST_REF_NODE * 2);
4768 /* Free the successor list for the non-ref nodes. */
4769 for (i = FIRST_REF_NODE; i < graph->size; i++)
4771 if (graph->succs[i])
4772 BITMAP_FREE (graph->succs[i]);
4775 /* Now reallocate the size of the successor list as, and blow away
4776 the predecessor bitmaps. */
4777 graph->size = VEC_length (varinfo_t, varmap);
4778 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
4780 free (graph->implicit_preds);
4781 graph->implicit_preds = NULL;
4782 free (graph->preds);
4783 graph->preds = NULL;
4784 bitmap_obstack_release (&predbitmap_obstack);
4787 /* Create points-to sets for the current function. See the comments
4788 at the start of the file for an algorithmic overview. */
4791 compute_points_to_sets (struct alias_info *ai)
4793 struct scc_info *si;
4796 timevar_push (TV_TREE_PTA);
4799 init_alias_heapvars ();
4801 intra_create_variable_infos ();
4803 /* Now walk all statements and derive aliases. */
4806 block_stmt_iterator bsi;
4809 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4811 if (is_gimple_reg (PHI_RESULT (phi)))
4813 find_func_aliases (phi);
4815 /* Update various related attributes like escaped
4816 addresses, pointer dereferences for loads and stores.
4817 This is used when creating name tags and alias
4819 update_alias_info (phi, ai);
4823 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4825 tree stmt = bsi_stmt (bsi);
4827 find_func_aliases (stmt);
4829 /* Update various related attributes like escaped
4830 addresses, pointer dereferences for loads and stores.
4831 This is used when creating name tags and alias
4833 update_alias_info (stmt, ai);
4840 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
4841 dump_constraints (dump_file);
4846 "\nCollapsing static cycles and doing variable "
4848 build_pred_graph ();
4849 si = perform_var_substitution (graph);
4850 move_complex_constraints (graph, si);
4851 free_var_substitution_info (si);
4853 build_succ_graph ();
4854 find_indirect_cycles (graph);
4856 /* Implicit nodes and predecessors are no longer necessary at this
4858 remove_preds_and_fake_succs (graph);
4861 fprintf (dump_file, "\nSolving graph:\n");
4863 solve_graph (graph);
4866 dump_sa_points_to_info (dump_file);
4868 have_alias_info = true;
4870 timevar_pop (TV_TREE_PTA);
4874 /* Delete created points-to sets. */
4877 delete_points_to_sets (void)
4882 htab_delete (shared_bitmap_table);
4883 if (dump_file && (dump_flags & TDF_STATS))
4884 fprintf (dump_file, "Points to sets created:%d\n",
4885 stats.points_to_sets_created);
4887 pointer_map_destroy (vi_for_tree);
4888 bitmap_obstack_release (&pta_obstack);
4889 VEC_free (constraint_t, heap, constraints);
4891 for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
4892 VEC_free (constraint_t, heap, graph->complex[i]);
4893 free (graph->complex);
4896 free (graph->succs);
4897 free (graph->indirect_cycles);
4900 VEC_free (varinfo_t, heap, varmap);
4901 free_alloc_pool (variable_info_pool);
4902 free_alloc_pool (constraint_pool);
4903 have_alias_info = false;
4906 /* Return true if we should execute IPA PTA. */
4910 return (flag_unit_at_a_time != 0
4912 /* Don't bother doing anything if the program has errors. */
4913 && !(errorcount || sorrycount));
4916 /* Execute the driver for IPA PTA. */
4918 ipa_pta_execute (void)
4920 struct cgraph_node *node;
4921 struct scc_info *si;
4924 init_alias_heapvars ();
4927 for (node = cgraph_nodes; node; node = node->next)
4929 if (!node->analyzed || cgraph_is_master_clone (node))
4933 varid = create_function_info_for (node->decl,
4934 cgraph_node_name (node));
4935 if (node->local.externally_visible)
4937 varinfo_t fi = get_varinfo (varid);
4938 for (; fi; fi = fi->next)
4939 make_constraint_from_anything (fi);
4943 for (node = cgraph_nodes; node; node = node->next)
4945 if (node->analyzed && cgraph_is_master_clone (node))
4947 struct function *cfun = DECL_STRUCT_FUNCTION (node->decl);
4949 tree old_func_decl = current_function_decl;
4952 "Generating constraints for %s\n",
4953 cgraph_node_name (node));
4955 current_function_decl = node->decl;
4957 FOR_EACH_BB_FN (bb, cfun)
4959 block_stmt_iterator bsi;
4962 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4964 if (is_gimple_reg (PHI_RESULT (phi)))
4966 find_func_aliases (phi);
4970 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4972 tree stmt = bsi_stmt (bsi);
4973 find_func_aliases (stmt);
4976 current_function_decl = old_func_decl;
4981 /* Make point to anything. */
4989 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
4990 dump_constraints (dump_file);
4995 "\nCollapsing static cycles and doing variable "
4998 build_pred_graph ();
4999 si = perform_var_substitution (graph);
5000 move_complex_constraints (graph, si);
5001 free_var_substitution_info (si);
5003 build_succ_graph ();
5004 find_indirect_cycles (graph);
5006 /* Implicit nodes and predecessors are no longer necessary at this
5008 remove_preds_and_fake_succs (graph);
5011 fprintf (dump_file, "\nSolving graph:\n");
5013 solve_graph (graph);
5016 dump_sa_points_to_info (dump_file);
5019 delete_alias_heapvars ();
5020 delete_points_to_sets ();
5024 struct tree_opt_pass pass_ipa_pta =
5027 gate_ipa_pta, /* gate */
5028 ipa_pta_execute, /* execute */
5031 0, /* static_pass_number */
5032 TV_IPA_PTA, /* tv_id */
5033 0, /* properties_required */
5034 0, /* properties_provided */
5035 0, /* properties_destroyed */
5036 0, /* todo_flags_start */
5037 0, /* todo_flags_finish */
5041 /* Initialize the heapvar for statement mapping. */
5043 init_alias_heapvars (void)
5045 if (!heapvar_for_stmt)
5046 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5051 delete_alias_heapvars (void)
5053 htab_delete (heapvar_for_stmt);
5054 heapvar_for_stmt = NULL;
5058 #include "gt-tree-ssa-structalias.h"