1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
36 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
43 #include "tree-gimple.h"
47 #include "tree-pass.h"
49 #include "alloc-pool.h"
50 #include "splay-tree.h"
52 #include "tree-ssa-structalias.h"
55 #include "pointer-set.h"
57 /* The idea behind this analyzer is to generate set constraints from the
58 program, then solve the resulting constraints in order to generate the
61 Set constraints are a way of modeling program analysis problems that
62 involve sets. They consist of an inclusion constraint language,
63 describing the variables (each variable is a set) and operations that
64 are involved on the variables, and a set of rules that derive facts
65 from these operations. To solve a system of set constraints, you derive
66 all possible facts under the rules, which gives you the correct sets
69 See "Efficient Field-sensitive pointer analysis for C" by "David
70 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
71 http://citeseer.ist.psu.edu/pearce04efficient.html
73 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
74 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
75 http://citeseer.ist.psu.edu/heintze01ultrafast.html
77 There are three types of real constraint expressions, DEREF,
78 ADDRESSOF, and SCALAR. Each constraint expression consists
79 of a constraint type, a variable, and an offset.
81 SCALAR is a constraint expression type used to represent x, whether
82 it appears on the LHS or the RHS of a statement.
83 DEREF is a constraint expression type used to represent *x, whether
84 it appears on the LHS or the RHS of a statement.
85 ADDRESSOF is a constraint expression used to represent &x, whether
86 it appears on the LHS or the RHS of a statement.
88 Each pointer variable in the program is assigned an integer id, and
89 each field of a structure variable is assigned an integer id as well.
91 Structure variables are linked to their list of fields through a "next
92 field" in each variable that points to the next field in offset
94 Each variable for a structure field has
96 1. "size", that tells the size in bits of that field.
97 2. "fullsize, that tells the size in bits of the entire structure.
98 3. "offset", that tells the offset in bits from the beginning of the
99 structure to this field.
111 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
112 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
113 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
116 In order to solve the system of set constraints, the following is
119 1. Each constraint variable x has a solution set associated with it,
122 2. Constraints are separated into direct, copy, and complex.
123 Direct constraints are ADDRESSOF constraints that require no extra
124 processing, such as P = &Q
125 Copy constraints are those of the form P = Q.
126 Complex constraints are all the constraints involving dereferences
127 and offsets (including offsetted copies).
129 3. All direct constraints of the form P = &Q are processed, such
130 that Q is added to Sol(P)
132 4. All complex constraints for a given constraint variable are stored in a
133 linked list attached to that variable's node.
135 5. A directed graph is built out of the copy constraints. Each
136 constraint variable is a node in the graph, and an edge from
137 Q to P is added for each copy constraint of the form P = Q
139 6. The graph is then walked, and solution sets are
140 propagated along the copy edges, such that an edge from Q to P
141 causes Sol(P) <- Sol(P) union Sol(Q).
143 7. As we visit each node, all complex constraints associated with
144 that node are processed by adding appropriate copy edges to the graph, or the
145 appropriate variables to the solution set.
147 8. The process of walking the graph is iterated until no solution
150 Prior to walking the graph in steps 6 and 7, We perform static
151 cycle elimination on the constraint graph, as well
152 as off-line variable substitution.
154 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
155 on and turned into anything), but isn't. You can just see what offset
156 inside the pointed-to struct it's going to access.
158 TODO: Constant bounded arrays can be handled as if they were structs of the
159 same number of elements.
161 TODO: Modeling heap and incoming pointers becomes much better if we
162 add fields to them as we discover them, which we could do.
164 TODO: We could handle unions, but to be honest, it's probably not
165 worth the pain or slowdown. */
167 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
168 htab_t heapvar_for_stmt;
170 static bool use_field_sensitive = true;
171 static int in_ipa_mode = 0;
173 /* Used for predecessor bitmaps. */
174 static bitmap_obstack predbitmap_obstack;
176 /* Used for points-to sets. */
177 static bitmap_obstack pta_obstack;
179 /* Used for oldsolution members of variables. */
180 static bitmap_obstack oldpta_obstack;
182 /* Used for per-solver-iteration bitmaps. */
183 static bitmap_obstack iteration_obstack;
185 static unsigned int create_variable_info_for (tree, const char *);
186 typedef struct constraint_graph *constraint_graph_t;
187 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
189 DEF_VEC_P(constraint_t);
190 DEF_VEC_ALLOC_P(constraint_t,heap);
192 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
194 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
196 static struct constraint_stats
198 unsigned int total_vars;
199 unsigned int nonpointer_vars;
200 unsigned int unified_vars_static;
201 unsigned int unified_vars_dynamic;
202 unsigned int iterations;
203 unsigned int num_edges;
204 unsigned int num_implicit_edges;
205 unsigned int points_to_sets_created;
210 /* ID of this variable */
213 /* Name of this variable */
216 /* Tree that this variable is associated with. */
219 /* Offset of this variable, in bits, from the base variable */
220 unsigned HOST_WIDE_INT offset;
222 /* Size of the variable, in bits. */
223 unsigned HOST_WIDE_INT size;
225 /* Full size of the base variable, in bits. */
226 unsigned HOST_WIDE_INT fullsize;
228 /* A link to the variable for the next field in this structure. */
229 struct variable_info *next;
231 /* True if the variable is directly the target of a dereference.
232 This is used to track which variables are *actually* dereferenced
233 so we can prune their points to listed. */
234 unsigned int directly_dereferenced:1;
236 /* True if this is a variable created by the constraint analysis, such as
237 heap variables and constraints we had to break up. */
238 unsigned int is_artificial_var:1;
240 /* True if this is a special variable whose solution set should not be
242 unsigned int is_special_var:1;
244 /* True for variables whose size is not known or variable. */
245 unsigned int is_unknown_size_var:1;
247 /* True for variables that have unions somewhere in them. */
248 unsigned int has_union:1;
250 /* True if this is a heap variable. */
251 unsigned int is_heap_var:1;
253 /* Points-to set for this variable. */
256 /* Old points-to set for this variable. */
259 /* Variable ids represented by this node. */
262 /* Variable id this was collapsed to due to type unsafety. This
263 should be unused completely after build_succ_graph, or something
265 struct variable_info *collapsed_to;
267 typedef struct variable_info *varinfo_t;
269 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
271 /* Pool of variable info structures. */
272 static alloc_pool variable_info_pool;
274 DEF_VEC_P(varinfo_t);
276 DEF_VEC_ALLOC_P(varinfo_t, heap);
278 /* Table of variable info structures for constraint variables.
279 Indexed directly by variable info id. */
280 static VEC(varinfo_t,heap) *varmap;
282 /* Return the varmap element N */
284 static inline varinfo_t
285 get_varinfo (unsigned int n)
287 return VEC_index (varinfo_t, varmap, n);
290 /* Return the varmap element N, following the collapsed_to link. */
292 static inline varinfo_t
293 get_varinfo_fc (unsigned int n)
295 varinfo_t v = VEC_index (varinfo_t, varmap, n);
298 return v->collapsed_to;
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
305 static unsigned int anything_id;
307 /* Variable that represents the NULL pointer. */
308 static varinfo_t var_nothing;
309 static tree nothing_tree;
310 static unsigned int nothing_id;
312 /* Variable that represents read only memory. */
313 static varinfo_t var_readonly;
314 static tree readonly_tree;
315 static unsigned int readonly_id;
317 /* Variable that represents integers. This is used for when people do things
319 static varinfo_t var_integer;
320 static tree integer_tree;
321 static unsigned int integer_id;
323 /* Lookup a heap var for FROM, and return it if we find one. */
326 heapvar_lookup (tree from)
328 struct tree_map *h, in;
331 h = htab_find_with_hash (heapvar_for_stmt, &in, htab_hash_pointer (from));
337 /* Insert a mapping FROM->TO in the heap var for statement
341 heapvar_insert (tree from, tree to)
346 h = ggc_alloc (sizeof (struct tree_map));
347 h->hash = htab_hash_pointer (from);
350 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
351 *(struct tree_map **) loc = h;
354 /* Return a new variable info structure consisting for a variable
355 named NAME, and using constraint graph node NODE. */
358 new_var_info (tree t, unsigned int id, const char *name)
360 varinfo_t ret = pool_alloc (variable_info_pool);
365 ret->directly_dereferenced = false;
366 ret->is_artificial_var = false;
367 ret->is_heap_var = false;
368 ret->is_special_var = false;
369 ret->is_unknown_size_var = false;
370 ret->has_union = false;
371 ret->solution = BITMAP_ALLOC (&pta_obstack);
372 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
374 ret->collapsed_to = NULL;
378 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
380 /* An expression that appears in a constraint. */
382 struct constraint_expr
384 /* Constraint type. */
385 constraint_expr_type type;
387 /* Variable we are referring to in the constraint. */
390 /* Offset, in bits, of this constraint from the beginning of
391 variables it ends up referring to.
393 IOW, in a deref constraint, we would deref, get the result set,
394 then add OFFSET to each member. */
395 unsigned HOST_WIDE_INT offset;
398 typedef struct constraint_expr ce_s;
400 DEF_VEC_ALLOC_O(ce_s, heap);
401 static void get_constraint_for (tree, VEC(ce_s, heap) **);
402 static void do_deref (VEC (ce_s, heap) **);
404 /* Our set constraints are made up of two constraint expressions, one
407 As described in the introduction, our set constraints each represent an
408 operation between set valued variables.
412 struct constraint_expr lhs;
413 struct constraint_expr rhs;
416 /* List of constraints that we use to build the constraint graph from. */
418 static VEC(constraint_t,heap) *constraints;
419 static alloc_pool constraint_pool;
423 DEF_VEC_ALLOC_I(int, heap);
425 /* The constraint graph is represented as an array of bitmaps
426 containing successor nodes. */
428 struct constraint_graph
430 /* Size of this graph, which may be different than the number of
431 nodes in the variable map. */
434 /* Explicit successors of each node. */
437 /* Implicit predecessors of each node (Used for variable
439 bitmap *implicit_preds;
441 /* Explicit predecessors of each node (Used for variable substitution). */
444 /* Indirect cycle representatives, or -1 if the node has no indirect
446 int *indirect_cycles;
448 /* Representative node for a node. rep[a] == a unless the node has
452 /* Equivalence class representative for a node. This is used for
453 variable substitution. */
456 /* Label for each node, used during variable substitution. */
459 /* Bitmap of nodes where the bit is set if the node is a direct
460 node. Used for variable substitution. */
461 sbitmap direct_nodes;
463 /* Vector of complex constraints for each graph node. Complex
464 constraints are those involving dereferences or offsets that are
466 VEC(constraint_t,heap) **complex;
469 static constraint_graph_t graph;
471 /* During variable substitution and the offline version of indirect
472 cycle finding, we create nodes to represent dereferences and
473 address taken constraints. These represent where these start and
475 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
476 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
477 #define FIRST_ADDR_NODE (LAST_REF_NODE + 1)
479 /* Return the representative node for NODE, if NODE has been unioned
481 This function performs path compression along the way to finding
482 the representative. */
485 find (unsigned int node)
487 gcc_assert (node < graph->size);
488 if (graph->rep[node] != node)
489 return graph->rep[node] = find (graph->rep[node]);
493 /* Union the TO and FROM nodes to the TO nodes.
494 Note that at some point in the future, we may want to do
495 union-by-rank, in which case we are going to have to return the
496 node we unified to. */
499 unite (unsigned int to, unsigned int from)
501 gcc_assert (to < graph->size && from < graph->size);
502 if (to != from && graph->rep[from] != to)
504 graph->rep[from] = to;
510 /* Create a new constraint consisting of LHS and RHS expressions. */
513 new_constraint (const struct constraint_expr lhs,
514 const struct constraint_expr rhs)
516 constraint_t ret = pool_alloc (constraint_pool);
522 /* Print out constraint C to FILE. */
525 dump_constraint (FILE *file, constraint_t c)
527 if (c->lhs.type == ADDRESSOF)
529 else if (c->lhs.type == DEREF)
531 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
532 if (c->lhs.offset != 0)
533 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
534 fprintf (file, " = ");
535 if (c->rhs.type == ADDRESSOF)
537 else if (c->rhs.type == DEREF)
539 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
540 if (c->rhs.offset != 0)
541 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
542 fprintf (file, "\n");
545 /* Print out constraint C to stderr. */
548 debug_constraint (constraint_t c)
550 dump_constraint (stderr, c);
553 /* Print out all constraints to FILE */
556 dump_constraints (FILE *file)
560 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
561 dump_constraint (file, c);
564 /* Print out all constraints to stderr. */
567 debug_constraints (void)
569 dump_constraints (stderr);
574 The solver is a simple worklist solver, that works on the following
577 sbitmap changed_nodes = all zeroes;
579 For each node that is not already collapsed:
581 set bit in changed nodes
583 while (changed_count > 0)
585 compute topological ordering for constraint graph
587 find and collapse cycles in the constraint graph (updating
588 changed if necessary)
590 for each node (n) in the graph in topological order:
593 Process each complex constraint associated with the node,
594 updating changed if necessary.
596 For each outgoing edge from n, propagate the solution from n to
597 the destination of the edge, updating changed as necessary.
601 /* Return true if two constraint expressions A and B are equal. */
604 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
606 return a.type == b.type && a.var == b.var && a.offset == b.offset;
609 /* Return true if constraint expression A is less than constraint expression
610 B. This is just arbitrary, but consistent, in order to give them an
614 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
616 if (a.type == b.type)
619 return a.offset < b.offset;
621 return a.var < b.var;
624 return a.type < b.type;
627 /* Return true if constraint A is less than constraint B. This is just
628 arbitrary, but consistent, in order to give them an ordering. */
631 constraint_less (const constraint_t a, const constraint_t b)
633 if (constraint_expr_less (a->lhs, b->lhs))
635 else if (constraint_expr_less (b->lhs, a->lhs))
638 return constraint_expr_less (a->rhs, b->rhs);
641 /* Return true if two constraints A and B are equal. */
644 constraint_equal (struct constraint a, struct constraint b)
646 return constraint_expr_equal (a.lhs, b.lhs)
647 && constraint_expr_equal (a.rhs, b.rhs);
651 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
654 constraint_vec_find (VEC(constraint_t,heap) *vec,
655 struct constraint lookfor)
663 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
664 if (place >= VEC_length (constraint_t, vec))
666 found = VEC_index (constraint_t, vec, place);
667 if (!constraint_equal (*found, lookfor))
672 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
675 constraint_set_union (VEC(constraint_t,heap) **to,
676 VEC(constraint_t,heap) **from)
681 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
683 if (constraint_vec_find (*to, *c) == NULL)
685 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
687 VEC_safe_insert (constraint_t, heap, *to, place, c);
692 /* Take a solution set SET, add OFFSET to each member of the set, and
693 overwrite SET with the result when done. */
696 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
698 bitmap result = BITMAP_ALLOC (&iteration_obstack);
702 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
704 /* If this is a properly sized variable, only add offset if it's
705 less than end. Otherwise, it is globbed to a single
708 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
710 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
711 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
714 bitmap_set_bit (result, v->id);
716 else if (get_varinfo (i)->is_artificial_var
717 || get_varinfo (i)->has_union
718 || get_varinfo (i)->is_unknown_size_var)
720 bitmap_set_bit (result, i);
724 bitmap_copy (set, result);
725 BITMAP_FREE (result);
728 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
732 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
735 return bitmap_ior_into (to, from);
741 tmp = BITMAP_ALLOC (&iteration_obstack);
742 bitmap_copy (tmp, from);
743 solution_set_add (tmp, inc);
744 res = bitmap_ior_into (to, tmp);
750 /* Insert constraint C into the list of complex constraints for graph
754 insert_into_complex (constraint_graph_t graph,
755 unsigned int var, constraint_t c)
757 VEC (constraint_t, heap) *complex = graph->complex[var];
758 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
761 /* Only insert constraints that do not already exist. */
762 if (place >= VEC_length (constraint_t, complex)
763 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
764 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
768 /* Condense two variable nodes into a single variable node, by moving
769 all associated info from SRC to TO. */
772 merge_node_constraints (constraint_graph_t graph, unsigned int to,
778 gcc_assert (find (from) == to);
780 /* Move all complex constraints from src node into to node */
781 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
783 /* In complex constraints for node src, we may have either
784 a = *src, and *src = a, or an offseted constraint which are
785 always added to the rhs node's constraints. */
787 if (c->rhs.type == DEREF)
789 else if (c->lhs.type == DEREF)
794 constraint_set_union (&graph->complex[to], &graph->complex[from]);
795 VEC_free (constraint_t, heap, graph->complex[from]);
796 graph->complex[from] = NULL;
800 /* Remove edges involving NODE from GRAPH. */
803 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
805 if (graph->succs[node])
806 BITMAP_FREE (graph->succs[node]);
809 /* Merge GRAPH nodes FROM and TO into node TO. */
812 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
815 if (graph->indirect_cycles[from] != -1)
817 /* If we have indirect cycles with the from node, and we have
818 none on the to node, the to node has indirect cycles from the
819 from node now that they are unified.
820 If indirect cycles exist on both, unify the nodes that they
821 are in a cycle with, since we know they are in a cycle with
823 if (graph->indirect_cycles[to] == -1)
825 graph->indirect_cycles[to] = graph->indirect_cycles[from];
829 unsigned int tonode = find (graph->indirect_cycles[to]);
830 unsigned int fromnode = find (graph->indirect_cycles[from]);
832 if (unite (tonode, fromnode))
833 unify_nodes (graph, tonode, fromnode, true);
837 /* Merge all the successor edges. */
838 if (graph->succs[from])
840 if (!graph->succs[to])
841 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
842 bitmap_ior_into (graph->succs[to],
846 clear_edges_for_node (graph, from);
850 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
851 it doesn't exist in the graph already. */
854 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
860 if (!graph->implicit_preds[to])
861 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
863 if (!bitmap_bit_p (graph->implicit_preds[to], from))
865 stats.num_implicit_edges++;
866 bitmap_set_bit (graph->implicit_preds[to], from);
870 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
871 it doesn't exist in the graph already.
872 Return false if the edge already existed, true otherwise. */
875 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
878 if (!graph->preds[to])
879 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
880 if (!bitmap_bit_p (graph->preds[to], from))
881 bitmap_set_bit (graph->preds[to], from);
884 /* Add a graph edge to GRAPH, going from FROM to TO if
885 it doesn't exist in the graph already.
886 Return false if the edge already existed, true otherwise. */
889 add_graph_edge (constraint_graph_t graph, unsigned int to,
900 if (!graph->succs[from])
901 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
902 if (!bitmap_bit_p (graph->succs[from], to))
905 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
907 bitmap_set_bit (graph->succs[from], to);
914 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
917 valid_graph_edge (constraint_graph_t graph, unsigned int src,
920 return (graph->succs[dest]
921 && bitmap_bit_p (graph->succs[dest], src));
924 /* Build the constraint graph, adding only predecessor edges right now. */
927 build_pred_graph (void)
933 graph = XNEW (struct constraint_graph);
934 graph->size = (VEC_length (varinfo_t, varmap)) * 3;
935 graph->succs = XCNEWVEC (bitmap, graph->size);
936 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
937 graph->preds = XCNEWVEC (bitmap, graph->size);
938 graph->indirect_cycles = XNEWVEC (int, VEC_length (varinfo_t, varmap));
939 graph->label = XCNEWVEC (unsigned int, graph->size);
940 graph->rep = XNEWVEC (unsigned int, graph->size);
941 graph->eq_rep = XNEWVEC (int, graph->size);
942 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *,
943 VEC_length (varinfo_t, varmap));
944 graph->direct_nodes = sbitmap_alloc (graph->size);
945 sbitmap_zero (graph->direct_nodes);
947 for (j = 0; j < FIRST_REF_NODE; j++)
949 if (!get_varinfo (j)->is_special_var)
950 SET_BIT (graph->direct_nodes, j);
953 for (j = 0; j < graph->size; j++)
956 graph->eq_rep[j] = -1;
959 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
960 graph->indirect_cycles[j] = -1;
962 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
964 struct constraint_expr lhs = c->lhs;
965 struct constraint_expr rhs = c->rhs;
966 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
967 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
969 if (lhs.type == DEREF)
972 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
973 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
974 if (rhs.type == ADDRESSOF)
975 RESET_BIT (graph->direct_nodes, rhsvar);
977 else if (rhs.type == DEREF)
980 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
981 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
983 RESET_BIT (graph->direct_nodes, lhsvar);
985 else if (rhs.type == ADDRESSOF)
988 add_pred_graph_edge (graph, lhsvar, FIRST_ADDR_NODE + rhsvar);
989 /* Implicitly, *x = y */
990 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
992 RESET_BIT (graph->direct_nodes, rhsvar);
994 else if (lhsvar > anything_id
995 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
998 add_pred_graph_edge (graph, lhsvar, rhsvar);
999 /* Implicitly, *x = *y */
1000 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1001 FIRST_REF_NODE + rhsvar);
1003 else if (lhs.offset != 0 || rhs.offset != 0)
1005 if (rhs.offset != 0)
1006 RESET_BIT (graph->direct_nodes, lhs.var);
1007 if (lhs.offset != 0)
1008 RESET_BIT (graph->direct_nodes, rhs.var);
1013 /* Build the constraint graph, adding successor edges. */
1016 build_succ_graph (void)
1021 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1023 struct constraint_expr lhs;
1024 struct constraint_expr rhs;
1025 unsigned int lhsvar;
1026 unsigned int rhsvar;
1033 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1034 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1036 if (lhs.type == DEREF)
1038 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1039 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1041 else if (rhs.type == DEREF)
1043 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1044 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1046 else if (rhs.type == ADDRESSOF)
1049 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1050 == get_varinfo_fc (rhs.var)->id);
1051 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1053 else if (lhsvar > anything_id
1054 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1056 add_graph_edge (graph, lhsvar, rhsvar);
1062 /* Changed variables on the last iteration. */
1063 static unsigned int changed_count;
1064 static sbitmap changed;
1066 DEF_VEC_I(unsigned);
1067 DEF_VEC_ALLOC_I(unsigned,heap);
1070 /* Strongly Connected Component visitation info. */
1077 unsigned int *node_mapping;
1079 VEC(unsigned,heap) *scc_stack;
1083 /* Recursive routine to find strongly connected components in GRAPH.
1084 SI is the SCC info to store the information in, and N is the id of current
1085 graph node we are processing.
1087 This is Tarjan's strongly connected component finding algorithm, as
1088 modified by Nuutila to keep only non-root nodes on the stack.
1089 The algorithm can be found in "On finding the strongly connected
1090 connected components in a directed graph" by Esko Nuutila and Eljas
1091 Soisalon-Soininen, in Information Processing Letters volume 49,
1092 number 1, pages 9-14. */
1095 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1099 unsigned int my_dfs;
1101 SET_BIT (si->visited, n);
1102 si->dfs[n] = si->current_index ++;
1103 my_dfs = si->dfs[n];
1105 /* Visit all the successors. */
1106 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1110 if (i > LAST_REF_NODE)
1114 if (TEST_BIT (si->roots, w))
1117 if (!TEST_BIT (si->visited, w))
1118 scc_visit (graph, si, w);
1120 unsigned int t = find (w);
1121 unsigned int nnode = find (n);
1122 gcc_assert (nnode == n);
1124 if (si->dfs[t] < si->dfs[nnode])
1125 si->dfs[n] = si->dfs[t];
1129 /* See if any components have been identified. */
1130 if (si->dfs[n] == my_dfs)
1132 if (VEC_length (unsigned, si->scc_stack) > 0
1133 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1135 bitmap scc = BITMAP_ALLOC (NULL);
1136 bool have_ref_node = n >= FIRST_REF_NODE;
1137 unsigned int lowest_node;
1140 bitmap_set_bit (scc, n);
1142 while (VEC_length (unsigned, si->scc_stack) != 0
1143 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1145 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1147 bitmap_set_bit (scc, w);
1148 if (w >= FIRST_REF_NODE)
1149 have_ref_node = true;
1152 lowest_node = bitmap_first_set_bit (scc);
1153 gcc_assert (lowest_node < FIRST_REF_NODE);
1154 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1156 if (i < FIRST_REF_NODE)
1158 /* Mark this node for collapsing. */
1159 if (unite (lowest_node, i))
1160 unify_nodes (graph, lowest_node, i, false);
1164 unite (lowest_node, i);
1165 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1169 SET_BIT (si->roots, n);
1172 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1175 /* Unify node FROM into node TO, updating the changed count if
1176 necessary when UPDATE_CHANGED is true. */
1179 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1180 bool update_changed)
1183 gcc_assert (to != from && find (to) == to);
1184 if (dump_file && (dump_flags & TDF_DETAILS))
1185 fprintf (dump_file, "Unifying %s to %s\n",
1186 get_varinfo (from)->name,
1187 get_varinfo (to)->name);
1190 stats.unified_vars_dynamic++;
1192 stats.unified_vars_static++;
1194 merge_graph_nodes (graph, to, from);
1195 merge_node_constraints (graph, to, from);
1197 if (update_changed && TEST_BIT (changed, from))
1199 RESET_BIT (changed, from);
1200 if (!TEST_BIT (changed, to))
1201 SET_BIT (changed, to);
1204 gcc_assert (changed_count > 0);
1209 /* If the solution changes because of the merging, we need to mark
1210 the variable as changed. */
1211 if (bitmap_ior_into (get_varinfo (to)->solution,
1212 get_varinfo (from)->solution))
1214 if (update_changed && !TEST_BIT (changed, to))
1216 SET_BIT (changed, to);
1221 BITMAP_FREE (get_varinfo (from)->solution);
1222 BITMAP_FREE (get_varinfo (from)->oldsolution);
1224 if (stats.iterations > 0)
1226 BITMAP_FREE (get_varinfo (to)->oldsolution);
1227 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1230 if (valid_graph_edge (graph, to, to))
1232 if (graph->succs[to])
1233 bitmap_clear_bit (graph->succs[to], to);
1237 /* Information needed to compute the topological ordering of a graph. */
1241 /* sbitmap of visited nodes. */
1243 /* Array that stores the topological order of the graph, *in
1245 VEC(unsigned,heap) *topo_order;
1249 /* Initialize and return a topological info structure. */
1251 static struct topo_info *
1252 init_topo_info (void)
1254 size_t size = VEC_length (varinfo_t, varmap);
1255 struct topo_info *ti = XNEW (struct topo_info);
1256 ti->visited = sbitmap_alloc (size);
1257 sbitmap_zero (ti->visited);
1258 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1263 /* Free the topological sort info pointed to by TI. */
1266 free_topo_info (struct topo_info *ti)
1268 sbitmap_free (ti->visited);
1269 VEC_free (unsigned, heap, ti->topo_order);
1273 /* Visit the graph in topological order, and store the order in the
1274 topo_info structure. */
1277 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1283 SET_BIT (ti->visited, n);
1285 if (graph->succs[n])
1286 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1288 if (!TEST_BIT (ti->visited, j))
1289 topo_visit (graph, ti, j);
1292 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1295 /* Return true if variable N + OFFSET is a legal field of N. */
1298 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1300 varinfo_t ninfo = get_varinfo (n);
1302 /* For things we've globbed to single variables, any offset into the
1303 variable acts like the entire variable, so that it becomes offset
1305 if (ninfo->is_special_var
1306 || ninfo->is_artificial_var
1307 || ninfo->is_unknown_size_var)
1312 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1315 /* Process a constraint C that represents *x = &y. */
1318 do_da_constraint (constraint_graph_t graph ATTRIBUTE_UNUSED,
1319 constraint_t c, bitmap delta)
1321 unsigned int rhs = c->rhs.var;
1325 /* For each member j of Delta (Sol(x)), add x to Sol(j) */
1326 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1328 unsigned HOST_WIDE_INT offset = c->lhs.offset;
1329 if (type_safe (j, &offset) && !(get_varinfo (j)->is_special_var))
1331 /* *x != NULL && *x != ANYTHING*/
1335 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + offset;
1337 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1341 sol = get_varinfo (t)->solution;
1342 if (!bitmap_bit_p (sol, rhs))
1344 bitmap_set_bit (sol, rhs);
1345 if (!TEST_BIT (changed, t))
1347 SET_BIT (changed, t);
1352 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1353 fprintf (dump_file, "Untypesafe usage in do_da_constraint.\n");
1358 /* Process a constraint C that represents x = *y, using DELTA as the
1359 starting solution. */
1362 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1365 unsigned int lhs = find (c->lhs.var);
1367 bitmap sol = get_varinfo (lhs)->solution;
1371 if (bitmap_bit_p (delta, anything_id))
1373 flag = !bitmap_bit_p (sol, anything_id);
1375 bitmap_set_bit (sol, anything_id);
1378 /* For each variable j in delta (Sol(y)), add
1379 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1380 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1382 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1383 if (type_safe (j, &roffset))
1386 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1389 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1394 /* Adding edges from the special vars is pointless.
1395 They don't have sets that can change. */
1396 if (get_varinfo (t) ->is_special_var)
1397 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1398 else if (add_graph_edge (graph, lhs, t))
1399 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1401 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1402 fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");
1407 /* If the LHS solution changed, mark the var as changed. */
1410 get_varinfo (lhs)->solution = sol;
1411 if (!TEST_BIT (changed, lhs))
1413 SET_BIT (changed, lhs);
1419 /* Process a constraint C that represents *x = y. */
1422 do_ds_constraint (constraint_t c, bitmap delta)
1424 unsigned int rhs = find (c->rhs.var);
1425 unsigned HOST_WIDE_INT roff = c->rhs.offset;
1426 bitmap sol = get_varinfo (rhs)->solution;
1430 if (bitmap_bit_p (sol, anything_id))
1432 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1434 varinfo_t jvi = get_varinfo (j);
1436 unsigned int loff = c->lhs.offset;
1437 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1440 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1445 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1447 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1448 if (!TEST_BIT (changed, t))
1450 SET_BIT (changed, t);
1458 /* For each member j of delta (Sol(x)), add an edge from y to j and
1459 union Sol(y) into Sol(j) */
1460 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1462 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1463 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1467 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1470 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1474 tmp = get_varinfo (t)->solution;
1476 if (set_union_with_increment (tmp, sol, roff))
1478 get_varinfo (t)->solution = tmp;
1480 sol = get_varinfo (rhs)->solution;
1481 if (!TEST_BIT (changed, t))
1483 SET_BIT (changed, t);
1488 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1489 fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
1493 /* Handle a non-simple (simple meaning requires no iteration),
1494 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1497 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1499 if (c->lhs.type == DEREF)
1501 if (c->rhs.type == ADDRESSOF)
1504 do_da_constraint (graph, c, delta);
1509 do_ds_constraint (c, delta);
1512 else if (c->rhs.type == DEREF)
1515 if (!(get_varinfo (c->lhs.var)->is_special_var))
1516 do_sd_constraint (graph, c, delta);
1525 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1526 t = find (c->rhs.var);
1527 solution = get_varinfo (t)->solution;
1528 t = find (c->lhs.var);
1529 tmp = get_varinfo (t)->solution;
1531 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1535 get_varinfo (t)->solution = tmp;
1536 if (!TEST_BIT (changed, t))
1538 SET_BIT (changed, t);
1545 /* Initialize and return a new SCC info structure. */
1547 static struct scc_info *
1548 init_scc_info (size_t size)
1550 struct scc_info *si = XNEW (struct scc_info);
1553 si->current_index = 0;
1554 si->visited = sbitmap_alloc (size);
1555 sbitmap_zero (si->visited);
1556 si->roots = sbitmap_alloc (size);
1557 sbitmap_zero (si->roots);
1558 si->node_mapping = XNEWVEC (unsigned int, size);
1559 si->dfs = XCNEWVEC (unsigned int, size);
1561 for (i = 0; i < size; i++)
1562 si->node_mapping[i] = i;
1564 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1568 /* Free an SCC info structure pointed to by SI */
1571 free_scc_info (struct scc_info *si)
1573 sbitmap_free (si->visited);
1574 sbitmap_free (si->roots);
1575 free (si->node_mapping);
1577 VEC_free (unsigned, heap, si->scc_stack);
1582 /* Find indirect cycles in GRAPH that occur, using strongly connected
1583 components, and note them in the indirect cycles map.
1585 This technique comes from Ben Hardekopf and Calvin Lin,
1586 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1587 Lines of Code", submitted to PLDI 2007. */
1590 find_indirect_cycles (constraint_graph_t graph)
1593 unsigned int size = graph->size;
1594 struct scc_info *si = init_scc_info (size);
1596 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1597 if (!TEST_BIT (si->visited, i) && find (i) == i)
1598 scc_visit (graph, si, i);
1603 /* Compute a topological ordering for GRAPH, and store the result in the
1604 topo_info structure TI. */
1607 compute_topo_order (constraint_graph_t graph,
1608 struct topo_info *ti)
1611 unsigned int size = VEC_length (varinfo_t, varmap);
1613 for (i = 0; i != size; ++i)
1614 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1615 topo_visit (graph, ti, i);
1618 /* Perform offline variable substitution.
1620 This is a linear time way of identifying variables that must have
1621 equivalent points-to sets, including those caused by static cycles,
1622 and single entry subgraphs, in the constraint graph.
1624 The technique is described in "Off-line variable substitution for
1625 scaling points-to analysis" by Atanas Rountev and Satish Chandra,
1626 in "ACM SIGPLAN Notices" volume 35, number 5, pages 47-56.
1628 There is an optimal way to do this involving hash based value
1629 numbering, once the technique is published i will implement it
1632 The general method of finding equivalence classes is as follows:
1633 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1634 Add fake nodes (ADDRESS nodes) and edges for a = &b constraints.
1635 Initialize all non-REF/ADDRESS nodes to be direct nodes
1636 For each SCC in the predecessor graph:
1637 for each member (x) of the SCC
1638 if x is not a direct node:
1639 set rootnode(SCC) to be not a direct node
1640 collapse node x into rootnode(SCC).
1641 if rootnode(SCC) is not a direct node:
1642 label rootnode(SCC) with a new equivalence class
1644 if all labeled predecessors of rootnode(SCC) have the same
1646 label rootnode(SCC) with this label
1648 label rootnode(SCC) with a new equivalence class
1650 All direct nodes with the same equivalence class can be replaced
1651 with a single representative node.
1652 All unlabeled nodes (label == 0) are not pointers and all edges
1653 involving them can be eliminated.
1654 We perform these optimizations during move_complex_constraints.
1657 static int equivalence_class;
1659 /* Recursive routine to find strongly connected components in GRAPH,
1660 and label it's nodes with equivalence classes.
1661 This is used during variable substitution to find cycles involving
1662 the regular or implicit predecessors, and label them as equivalent.
1663 The SCC finding algorithm used is the same as that for scc_visit. */
1666 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1670 unsigned int my_dfs;
1672 gcc_assert (si->node_mapping[n] == n);
1673 SET_BIT (si->visited, n);
1674 si->dfs[n] = si->current_index ++;
1675 my_dfs = si->dfs[n];
1677 /* Visit all the successors. */
1678 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1680 unsigned int w = si->node_mapping[i];
1682 if (TEST_BIT (si->roots, w))
1685 if (!TEST_BIT (si->visited, w))
1686 label_visit (graph, si, w);
1688 unsigned int t = si->node_mapping[w];
1689 unsigned int nnode = si->node_mapping[n];
1690 gcc_assert (nnode == n);
1692 if (si->dfs[t] < si->dfs[nnode])
1693 si->dfs[n] = si->dfs[t];
1697 /* Visit all the implicit predecessors. */
1698 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1700 unsigned int w = si->node_mapping[i];
1702 if (TEST_BIT (si->roots, w))
1705 if (!TEST_BIT (si->visited, w))
1706 label_visit (graph, si, w);
1708 unsigned int t = si->node_mapping[w];
1709 unsigned int nnode = si->node_mapping[n];
1710 gcc_assert (nnode == n);
1712 if (si->dfs[t] < si->dfs[nnode])
1713 si->dfs[n] = si->dfs[t];
1717 /* See if any components have been identified. */
1718 if (si->dfs[n] == my_dfs)
1720 while (VEC_length (unsigned, si->scc_stack) != 0
1721 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1723 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1724 si->node_mapping[w] = n;
1726 if (!TEST_BIT (graph->direct_nodes, w))
1727 RESET_BIT (graph->direct_nodes, n);
1729 SET_BIT (si->roots, n);
1731 if (!TEST_BIT (graph->direct_nodes, n))
1733 graph->label[n] = equivalence_class++;
1737 unsigned int size = 0;
1738 unsigned int firstlabel = ~0;
1740 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1742 unsigned int j = si->node_mapping[i];
1744 if (j == n || graph->label[j] == 0)
1747 if (firstlabel == (unsigned int)~0)
1749 firstlabel = graph->label[j];
1752 else if (graph->label[j] != firstlabel)
1757 graph->label[n] = 0;
1759 graph->label[n] = firstlabel;
1761 graph->label[n] = equivalence_class++;
1765 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1768 /* Perform offline variable substitution, discovering equivalence
1769 classes, and eliminating non-pointer variables. */
1771 static struct scc_info *
1772 perform_var_substitution (constraint_graph_t graph)
1775 unsigned int size = graph->size;
1776 struct scc_info *si = init_scc_info (size);
1778 bitmap_obstack_initialize (&iteration_obstack);
1779 equivalence_class = 0;
1781 /* We only need to visit the non-address nodes for labeling
1782 purposes, as the address nodes will never have any predecessors,
1783 because &x never appears on the LHS of a constraint. */
1784 for (i = 0; i < LAST_REF_NODE; i++)
1785 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1786 label_visit (graph, si, si->node_mapping[i]);
1788 if (dump_file && (dump_flags & TDF_DETAILS))
1789 for (i = 0; i < FIRST_REF_NODE; i++)
1791 bool direct_node = TEST_BIT (graph->direct_nodes, i);
1793 "Equivalence class for %s node id %d:%s is %d\n",
1794 direct_node ? "Direct node" : "Indirect node", i,
1795 get_varinfo (i)->name,
1796 graph->label[si->node_mapping[i]]);
1799 /* Quickly eliminate our non-pointer variables. */
1801 for (i = 0; i < FIRST_REF_NODE; i++)
1803 unsigned int node = si->node_mapping[i];
1805 if (graph->label[node] == 0 && TEST_BIT (graph->direct_nodes, node))
1807 if (dump_file && (dump_flags & TDF_DETAILS))
1809 "%s is a non-pointer variable, eliminating edges.\n",
1810 get_varinfo (node)->name);
1811 stats.nonpointer_vars++;
1812 clear_edges_for_node (graph, node);
1818 /* Free information that was only necessary for variable
1822 free_var_substitution_info (struct scc_info *si)
1825 free (graph->label);
1826 free (graph->eq_rep);
1827 sbitmap_free (graph->direct_nodes);
1828 bitmap_obstack_release (&iteration_obstack);
1831 /* Return an existing node that is equivalent to NODE, which has
1832 equivalence class LABEL, if one exists. Return NODE otherwise. */
1835 find_equivalent_node (constraint_graph_t graph,
1836 unsigned int node, unsigned int label)
1838 /* If the address version of this variable is unused, we can
1839 substitute it for anything else with the same label.
1840 Otherwise, we know the pointers are equivalent, but not the
1843 if (graph->label[FIRST_ADDR_NODE + node] == 0)
1845 gcc_assert (label < graph->size);
1847 if (graph->eq_rep[label] != -1)
1849 /* Unify the two variables since we know they are equivalent. */
1850 if (unite (graph->eq_rep[label], node))
1851 unify_nodes (graph, graph->eq_rep[label], node, false);
1852 return graph->eq_rep[label];
1856 graph->eq_rep[label] = node;
1862 /* Move complex constraints to the appropriate nodes, and collapse
1863 variables we've discovered are equivalent during variable
1864 substitution. SI is the SCC_INFO that is the result of
1865 perform_variable_substitution. */
1868 move_complex_constraints (constraint_graph_t graph,
1869 struct scc_info *si)
1875 for (j = 0; j < graph->size; j++)
1876 gcc_assert (find (j) == j);
1878 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1880 struct constraint_expr lhs = c->lhs;
1881 struct constraint_expr rhs = c->rhs;
1882 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
1883 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
1884 unsigned int lhsnode, rhsnode;
1885 unsigned int lhslabel, rhslabel;
1887 lhsnode = si->node_mapping[lhsvar];
1888 rhsnode = si->node_mapping[rhsvar];
1889 lhslabel = graph->label[lhsnode];
1890 rhslabel = graph->label[rhsnode];
1892 /* See if it is really a non-pointer variable, and if so, ignore
1896 if (!TEST_BIT (graph->direct_nodes, lhsnode))
1897 lhslabel = graph->label[lhsnode] = equivalence_class++;
1900 if (dump_file && (dump_flags & TDF_DETAILS))
1903 fprintf (dump_file, "%s is a non-pointer variable,"
1904 "ignoring constraint:",
1905 get_varinfo (lhs.var)->name);
1906 dump_constraint (dump_file, c);
1908 VEC_replace (constraint_t, constraints, i, NULL);
1915 if (!TEST_BIT (graph->direct_nodes, rhsnode))
1916 rhslabel = graph->label[rhsnode] = equivalence_class++;
1919 if (dump_file && (dump_flags & TDF_DETAILS))
1922 fprintf (dump_file, "%s is a non-pointer variable,"
1923 "ignoring constraint:",
1924 get_varinfo (rhs.var)->name);
1925 dump_constraint (dump_file, c);
1927 VEC_replace (constraint_t, constraints, i, NULL);
1932 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
1933 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
1934 c->lhs.var = lhsvar;
1935 c->rhs.var = rhsvar;
1937 if (lhs.type == DEREF)
1939 if (rhs.type == ADDRESSOF || rhsvar > anything_id)
1940 insert_into_complex (graph, lhsvar, c);
1942 else if (rhs.type == DEREF)
1944 if (!(get_varinfo (lhsvar)->is_special_var))
1945 insert_into_complex (graph, rhsvar, c);
1947 else if (rhs.type != ADDRESSOF && lhsvar > anything_id
1948 && (lhs.offset != 0 || rhs.offset != 0))
1950 insert_into_complex (graph, rhsvar, c);
1956 /* Eliminate indirect cycles involving NODE. Return true if NODE was
1957 part of an SCC, false otherwise. */
1960 eliminate_indirect_cycles (unsigned int node)
1962 if (graph->indirect_cycles[node] != -1
1963 && !bitmap_empty_p (get_varinfo (node)->solution))
1966 VEC(unsigned,heap) *queue = NULL;
1968 unsigned int to = find (graph->indirect_cycles[node]);
1971 /* We can't touch the solution set and call unify_nodes
1972 at the same time, because unify_nodes is going to do
1973 bitmap unions into it. */
1975 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
1977 if (find (i) == i && i != to)
1980 VEC_safe_push (unsigned, heap, queue, i);
1985 VEC_iterate (unsigned, queue, queuepos, i);
1988 unify_nodes (graph, to, i, true);
1990 VEC_free (unsigned, heap, queue);
1996 /* Solve the constraint graph GRAPH using our worklist solver.
1997 This is based on the PW* family of solvers from the "Efficient Field
1998 Sensitive Pointer Analysis for C" paper.
1999 It works by iterating over all the graph nodes, processing the complex
2000 constraints and propagating the copy constraints, until everything stops
2001 changed. This corresponds to steps 6-8 in the solving list given above. */
2004 solve_graph (constraint_graph_t graph)
2006 unsigned int size = VEC_length (varinfo_t, varmap);
2011 changed = sbitmap_alloc (size);
2012 sbitmap_zero (changed);
2014 /* Mark all initial non-collapsed nodes as changed. */
2015 for (i = 0; i < size; i++)
2017 varinfo_t ivi = get_varinfo (i);
2018 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2019 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2020 || VEC_length (constraint_t, graph->complex[i]) > 0))
2022 SET_BIT (changed, i);
2027 /* Allocate a bitmap to be used to store the changed bits. */
2028 pts = BITMAP_ALLOC (&pta_obstack);
2030 while (changed_count > 0)
2033 struct topo_info *ti = init_topo_info ();
2036 bitmap_obstack_initialize (&iteration_obstack);
2038 compute_topo_order (graph, ti);
2040 while (VEC_length (unsigned, ti->topo_order) != 0)
2043 i = VEC_pop (unsigned, ti->topo_order);
2045 /* If this variable is not a representative, skip it. */
2049 /* In certain indirect cycle cases, we may merge this
2050 variable to another. */
2051 if (eliminate_indirect_cycles (i) && find (i) != i)
2054 /* If the node has changed, we need to process the
2055 complex constraints and outgoing edges again. */
2056 if (TEST_BIT (changed, i))
2061 VEC(constraint_t,heap) *complex = graph->complex[i];
2062 bool solution_empty;
2064 RESET_BIT (changed, i);
2067 /* Compute the changed set of solution bits. */
2068 bitmap_and_compl (pts, get_varinfo (i)->solution,
2069 get_varinfo (i)->oldsolution);
2071 if (bitmap_empty_p (pts))
2074 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2076 solution = get_varinfo (i)->solution;
2077 solution_empty = bitmap_empty_p (solution);
2079 /* Process the complex constraints */
2080 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2082 /* The only complex constraint that can change our
2083 solution to non-empty, given an empty solution,
2084 is a constraint where the lhs side is receiving
2085 some set from elsewhere. */
2086 if (!solution_empty || c->lhs.type != DEREF)
2087 do_complex_constraint (graph, c, pts);
2090 solution_empty = bitmap_empty_p (solution);
2092 if (!solution_empty)
2096 /* Propagate solution to all successors. */
2097 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2103 unsigned int to = find (j);
2104 tmp = get_varinfo (to)->solution;
2107 /* Don't try to propagate to ourselves. */
2111 flag = set_union_with_increment (tmp, pts, 0);
2115 get_varinfo (to)->solution = tmp;
2116 if (!TEST_BIT (changed, to))
2118 SET_BIT (changed, to);
2126 free_topo_info (ti);
2127 bitmap_obstack_release (&iteration_obstack);
2131 sbitmap_free (changed);
2132 bitmap_obstack_release (&oldpta_obstack);
2135 /* Map from trees to variable infos. */
2136 static struct pointer_map_t *vi_for_tree;
2139 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2142 insert_vi_for_tree (tree t, varinfo_t vi)
2144 void **slot = pointer_map_insert (vi_for_tree, t);
2146 gcc_assert (*slot == NULL);
2150 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2151 exist in the map, return NULL, otherwise, return the varinfo we found. */
2154 lookup_vi_for_tree (tree t)
2156 void **slot = pointer_map_contains (vi_for_tree, t);
2160 return (varinfo_t) *slot;
2163 /* Return a printable name for DECL */
2166 alias_get_name (tree decl)
2168 const char *res = get_name (decl);
2170 int num_printed = 0;
2179 if (TREE_CODE (decl) == SSA_NAME)
2181 num_printed = asprintf (&temp, "%s_%u",
2182 alias_get_name (SSA_NAME_VAR (decl)),
2183 SSA_NAME_VERSION (decl));
2185 else if (DECL_P (decl))
2187 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2189 if (num_printed > 0)
2191 res = ggc_strdup (temp);
2197 /* Find the variable id for tree T in the map.
2198 If T doesn't exist in the map, create an entry for it and return it. */
2201 get_vi_for_tree (tree t)
2203 void **slot = pointer_map_contains (vi_for_tree, t);
2205 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2207 return (varinfo_t) *slot;
2210 /* Get a constraint expression from an SSA_VAR_P node. */
2212 static struct constraint_expr
2213 get_constraint_exp_from_ssa_var (tree t)
2215 struct constraint_expr cexpr;
2217 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2219 /* For parameters, get at the points-to set for the actual parm
2221 if (TREE_CODE (t) == SSA_NAME
2222 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2223 && SSA_NAME_IS_DEFAULT_DEF (t))
2224 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2226 cexpr.type = SCALAR;
2228 cexpr.var = get_vi_for_tree (t)->id;
2229 /* If we determine the result is "anything", and we know this is readonly,
2230 say it points to readonly memory instead. */
2231 if (cexpr.var == anything_id && TREE_READONLY (t))
2233 cexpr.type = ADDRESSOF;
2234 cexpr.var = readonly_id;
2241 /* Process a completed constraint T, and add it to the constraint
2245 process_constraint (constraint_t t)
2247 struct constraint_expr rhs = t->rhs;
2248 struct constraint_expr lhs = t->lhs;
2250 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2251 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2253 if (lhs.type == DEREF)
2254 get_varinfo (lhs.var)->directly_dereferenced = true;
2255 if (rhs.type == DEREF)
2256 get_varinfo (rhs.var)->directly_dereferenced = true;
2258 if (!use_field_sensitive)
2264 /* ANYTHING == ANYTHING is pointless. */
2265 if (lhs.var == anything_id && rhs.var == anything_id)
2268 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
2269 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2274 process_constraint (t);
2276 /* This can happen in our IR with things like n->a = *p */
2277 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2279 /* Split into tmp = *rhs, *lhs = tmp */
2280 tree rhsdecl = get_varinfo (rhs.var)->decl;
2281 tree pointertype = TREE_TYPE (rhsdecl);
2282 tree pointedtotype = TREE_TYPE (pointertype);
2283 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2284 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2286 /* If this is an aggregate of known size, we should have passed
2287 this off to do_structure_copy, and it should have broken it
2289 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2290 || get_varinfo (rhs.var)->is_unknown_size_var);
2292 process_constraint (new_constraint (tmplhs, rhs));
2293 process_constraint (new_constraint (lhs, tmplhs));
2297 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2298 VEC_safe_push (constraint_t, heap, constraints, t);
2302 /* Return true if T is a variable of a type that could contain
2306 could_have_pointers (tree t)
2308 tree type = TREE_TYPE (t);
2310 if (POINTER_TYPE_P (type)
2311 || AGGREGATE_TYPE_P (type)
2312 || TREE_CODE (type) == COMPLEX_TYPE)
2318 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2321 static unsigned HOST_WIDE_INT
2322 bitpos_of_field (const tree fdecl)
2325 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2326 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2329 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2330 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2334 /* Return true if an access to [ACCESSPOS, ACCESSSIZE]
2335 overlaps with a field at [FIELDPOS, FIELDSIZE] */
2338 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2339 const unsigned HOST_WIDE_INT fieldsize,
2340 const unsigned HOST_WIDE_INT accesspos,
2341 const unsigned HOST_WIDE_INT accesssize)
2343 if (fieldpos == accesspos && fieldsize == accesssize)
2345 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2347 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2353 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
2356 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2359 HOST_WIDE_INT bitsize = -1;
2360 HOST_WIDE_INT bitmaxsize = -1;
2361 HOST_WIDE_INT bitpos;
2363 struct constraint_expr *result;
2364 unsigned int beforelength = VEC_length (ce_s, *results);
2366 /* Some people like to do cute things like take the address of
2369 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2370 forzero = TREE_OPERAND (forzero, 0);
2372 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2374 struct constraint_expr temp;
2377 temp.var = integer_id;
2379 VEC_safe_push (ce_s, heap, *results, &temp);
2383 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2385 /* String constants are readonly, so there is nothing to really do
2387 if (TREE_CODE (t) == STRING_CST)
2390 get_constraint_for (t, results);
2391 result = VEC_last (ce_s, *results);
2392 result->offset = bitpos;
2394 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2396 /* This can also happen due to weird offsetof type macros. */
2397 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2398 result->type = SCALAR;
2400 if (result->type == SCALAR)
2402 /* In languages like C, you can access one past the end of an
2403 array. You aren't allowed to dereference it, so we can
2404 ignore this constraint. When we handle pointer subtraction,
2405 we may have to do something cute here. */
2407 if (result->offset < get_varinfo (result->var)->fullsize
2410 /* It's also not true that the constraint will actually start at the
2411 right offset, it may start in some padding. We only care about
2412 setting the constraint to the first actual field it touches, so
2415 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2417 if (offset_overlaps_with_access (curr->offset, curr->size,
2418 result->offset, bitmaxsize))
2420 result->var = curr->id;
2424 /* assert that we found *some* field there. The user couldn't be
2425 accessing *only* padding. */
2426 /* Still the user could access one past the end of an array
2427 embedded in a struct resulting in accessing *only* padding. */
2428 gcc_assert (curr || ref_contains_array_ref (orig_t));
2430 else if (bitmaxsize == 0)
2432 if (dump_file && (dump_flags & TDF_DETAILS))
2433 fprintf (dump_file, "Access to zero-sized part of variable,"
2437 if (dump_file && (dump_flags & TDF_DETAILS))
2438 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2445 /* Dereference the constraint expression CONS, and return the result.
2446 DEREF (ADDRESSOF) = SCALAR
2447 DEREF (SCALAR) = DEREF
2448 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2449 This is needed so that we can handle dereferencing DEREF constraints. */
2452 do_deref (VEC (ce_s, heap) **constraints)
2454 struct constraint_expr *c;
2457 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2459 if (c->type == SCALAR)
2461 else if (c->type == ADDRESSOF)
2463 else if (c->type == DEREF)
2465 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2466 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2467 process_constraint (new_constraint (tmplhs, *c));
2468 c->var = tmplhs.var;
2475 /* Given a tree T, return the constraint expression for it. */
2478 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2480 struct constraint_expr temp;
2482 /* x = integer is all glommed to a single variable, which doesn't
2483 point to anything by itself. That is, of course, unless it is an
2484 integer constant being treated as a pointer, in which case, we
2485 will return that this is really the addressof anything. This
2486 happens below, since it will fall into the default case. The only
2487 case we know something about an integer treated like a pointer is
2488 when it is the NULL pointer, and then we just say it points to
2490 if (TREE_CODE (t) == INTEGER_CST
2491 && !POINTER_TYPE_P (TREE_TYPE (t)))
2493 temp.var = integer_id;
2496 VEC_safe_push (ce_s, heap, *results, &temp);
2499 else if (TREE_CODE (t) == INTEGER_CST
2500 && integer_zerop (t))
2502 temp.var = nothing_id;
2503 temp.type = ADDRESSOF;
2505 VEC_safe_push (ce_s, heap, *results, &temp);
2509 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2511 case tcc_expression:
2514 switch (TREE_CODE (t))
2518 struct constraint_expr *c;
2520 tree exp = TREE_OPERAND (t, 0);
2521 tree pttype = TREE_TYPE (TREE_TYPE (t));
2523 get_constraint_for (exp, results);
2525 /* Make sure we capture constraints to all elements
2527 if ((handled_component_p (exp)
2528 && ref_contains_array_ref (exp))
2529 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE)
2531 struct constraint_expr *origrhs;
2533 struct constraint_expr tmp;
2535 if (VEC_length (ce_s, *results) == 0)
2538 gcc_assert (VEC_length (ce_s, *results) == 1);
2539 origrhs = VEC_last (ce_s, *results);
2541 VEC_pop (ce_s, *results);
2542 origvar = get_varinfo (origrhs->var);
2543 for (; origvar; origvar = origvar->next)
2545 tmp.var = origvar->id;
2546 VEC_safe_push (ce_s, heap, *results, &tmp);
2549 else if (VEC_length (ce_s, *results) == 1
2550 && (AGGREGATE_TYPE_P (pttype)
2551 || TREE_CODE (pttype) == COMPLEX_TYPE))
2553 struct constraint_expr *origrhs;
2555 struct constraint_expr tmp;
2557 gcc_assert (VEC_length (ce_s, *results) == 1);
2558 origrhs = VEC_last (ce_s, *results);
2560 VEC_pop (ce_s, *results);
2561 origvar = get_varinfo (origrhs->var);
2562 for (; origvar; origvar = origvar->next)
2564 tmp.var = origvar->id;
2565 VEC_safe_push (ce_s, heap, *results, &tmp);
2569 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2571 if (c->type == DEREF)
2574 c->type = ADDRESSOF;
2580 /* XXX: In interprocedural mode, if we didn't have the
2581 body, we would need to do *each pointer argument =
2583 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2586 tree heapvar = heapvar_lookup (t);
2588 if (heapvar == NULL)
2590 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2591 DECL_EXTERNAL (heapvar) = 1;
2592 get_var_ann (heapvar)->is_heapvar = 1;
2593 if (gimple_referenced_vars (cfun))
2594 add_referenced_var (heapvar);
2595 heapvar_insert (t, heapvar);
2598 temp.var = create_variable_info_for (heapvar,
2599 alias_get_name (heapvar));
2601 vi = get_varinfo (temp.var);
2602 vi->is_artificial_var = 1;
2603 vi->is_heap_var = 1;
2604 temp.type = ADDRESSOF;
2606 VEC_safe_push (ce_s, heap, *results, &temp);
2611 temp.var = anything_id;
2614 VEC_safe_push (ce_s, heap, *results, &temp);
2620 temp.type = ADDRESSOF;
2621 temp.var = anything_id;
2623 VEC_safe_push (ce_s, heap, *results, &temp);
2630 switch (TREE_CODE (t))
2634 get_constraint_for (TREE_OPERAND (t, 0), results);
2639 case ARRAY_RANGE_REF:
2641 get_constraint_for_component_ref (t, results);
2645 temp.type = ADDRESSOF;
2646 temp.var = anything_id;
2648 VEC_safe_push (ce_s, heap, *results, &temp);
2655 switch (TREE_CODE (t))
2659 case NON_LVALUE_EXPR:
2661 tree op = TREE_OPERAND (t, 0);
2663 /* Cast from non-pointer to pointers are bad news for us.
2664 Anything else, we see through */
2665 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2666 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2668 get_constraint_for (op, results);
2676 temp.type = ADDRESSOF;
2677 temp.var = anything_id;
2679 VEC_safe_push (ce_s, heap, *results, &temp);
2684 case tcc_exceptional:
2686 switch (TREE_CODE (t))
2690 get_constraint_for (PHI_RESULT (t), results);
2696 struct constraint_expr temp;
2697 temp = get_constraint_exp_from_ssa_var (t);
2698 VEC_safe_push (ce_s, heap, *results, &temp);
2704 temp.type = ADDRESSOF;
2705 temp.var = anything_id;
2707 VEC_safe_push (ce_s, heap, *results, &temp);
2712 case tcc_declaration:
2714 struct constraint_expr temp;
2715 temp = get_constraint_exp_from_ssa_var (t);
2716 VEC_safe_push (ce_s, heap, *results, &temp);
2721 temp.type = ADDRESSOF;
2722 temp.var = anything_id;
2724 VEC_safe_push (ce_s, heap, *results, &temp);
2731 /* Handle the structure copy case where we have a simple structure copy
2732 between LHS and RHS that is of SIZE (in bits)
2734 For each field of the lhs variable (lhsfield)
2735 For each field of the rhs variable at lhsfield.offset (rhsfield)
2736 add the constraint lhsfield = rhsfield
2738 If we fail due to some kind of type unsafety or other thing we
2739 can't handle, return false. We expect the caller to collapse the
2740 variable in that case. */
2743 do_simple_structure_copy (const struct constraint_expr lhs,
2744 const struct constraint_expr rhs,
2745 const unsigned HOST_WIDE_INT size)
2747 varinfo_t p = get_varinfo (lhs.var);
2748 unsigned HOST_WIDE_INT pstart, last;
2750 last = p->offset + size;
2751 for (; p && p->offset < last; p = p->next)
2754 struct constraint_expr templhs = lhs;
2755 struct constraint_expr temprhs = rhs;
2756 unsigned HOST_WIDE_INT fieldoffset;
2758 templhs.var = p->id;
2759 q = get_varinfo (temprhs.var);
2760 fieldoffset = p->offset - pstart;
2761 q = first_vi_for_offset (q, q->offset + fieldoffset);
2764 temprhs.var = q->id;
2765 process_constraint (new_constraint (templhs, temprhs));
2771 /* Handle the structure copy case where we have a structure copy between a
2772 aggregate on the LHS and a dereference of a pointer on the RHS
2773 that is of SIZE (in bits)
2775 For each field of the lhs variable (lhsfield)
2776 rhs.offset = lhsfield->offset
2777 add the constraint lhsfield = rhs
2781 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
2782 const struct constraint_expr rhs,
2783 const unsigned HOST_WIDE_INT size)
2785 varinfo_t p = get_varinfo (lhs.var);
2786 unsigned HOST_WIDE_INT pstart,last;
2788 last = p->offset + size;
2790 for (; p && p->offset < last; p = p->next)
2793 struct constraint_expr templhs = lhs;
2794 struct constraint_expr temprhs = rhs;
2795 unsigned HOST_WIDE_INT fieldoffset;
2798 if (templhs.type == SCALAR)
2799 templhs.var = p->id;
2801 templhs.offset = p->offset;
2803 q = get_varinfo (temprhs.var);
2804 fieldoffset = p->offset - pstart;
2805 temprhs.offset += fieldoffset;
2806 process_constraint (new_constraint (templhs, temprhs));
2810 /* Handle the structure copy case where we have a structure copy
2811 between a aggregate on the RHS and a dereference of a pointer on
2812 the LHS that is of SIZE (in bits)
2814 For each field of the rhs variable (rhsfield)
2815 lhs.offset = rhsfield->offset
2816 add the constraint lhs = rhsfield
2820 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
2821 const struct constraint_expr rhs,
2822 const unsigned HOST_WIDE_INT size)
2824 varinfo_t p = get_varinfo (rhs.var);
2825 unsigned HOST_WIDE_INT pstart,last;
2827 last = p->offset + size;
2829 for (; p && p->offset < last; p = p->next)
2832 struct constraint_expr templhs = lhs;
2833 struct constraint_expr temprhs = rhs;
2834 unsigned HOST_WIDE_INT fieldoffset;
2837 if (temprhs.type == SCALAR)
2838 temprhs.var = p->id;
2840 temprhs.offset = p->offset;
2842 q = get_varinfo (templhs.var);
2843 fieldoffset = p->offset - pstart;
2844 templhs.offset += fieldoffset;
2845 process_constraint (new_constraint (templhs, temprhs));
2849 /* Sometimes, frontends like to give us bad type information. This
2850 function will collapse all the fields from VAR to the end of VAR,
2851 into VAR, so that we treat those fields as a single variable.
2852 We return the variable they were collapsed into. */
2855 collapse_rest_of_var (unsigned int var)
2857 varinfo_t currvar = get_varinfo (var);
2860 for (field = currvar->next; field; field = field->next)
2863 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
2864 field->name, currvar->name);
2866 gcc_assert (!field->collapsed_to);
2867 field->collapsed_to = currvar;
2870 currvar->next = NULL;
2871 currvar->size = currvar->fullsize - currvar->offset;
2876 /* Handle aggregate copies by expanding into copies of the respective
2877 fields of the structures. */
2880 do_structure_copy (tree lhsop, tree rhsop)
2882 struct constraint_expr lhs, rhs, tmp;
2883 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
2885 unsigned HOST_WIDE_INT lhssize;
2886 unsigned HOST_WIDE_INT rhssize;
2888 get_constraint_for (lhsop, &lhsc);
2889 get_constraint_for (rhsop, &rhsc);
2890 gcc_assert (VEC_length (ce_s, lhsc) == 1);
2891 gcc_assert (VEC_length (ce_s, rhsc) == 1);
2892 lhs = *(VEC_last (ce_s, lhsc));
2893 rhs = *(VEC_last (ce_s, rhsc));
2895 VEC_free (ce_s, heap, lhsc);
2896 VEC_free (ce_s, heap, rhsc);
2898 /* If we have special var = x, swap it around. */
2899 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
2906 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
2907 possible it's something we could handle. However, most cases falling
2908 into this are dealing with transparent unions, which are slightly
2910 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
2912 rhs.type = ADDRESSOF;
2913 rhs.var = anything_id;
2916 /* If the RHS is a special var, or an addressof, set all the LHS fields to
2917 that special var. */
2918 if (rhs.var <= integer_id)
2920 for (p = get_varinfo (lhs.var); p; p = p->next)
2922 struct constraint_expr templhs = lhs;
2923 struct constraint_expr temprhs = rhs;
2925 if (templhs.type == SCALAR )
2926 templhs.var = p->id;
2928 templhs.offset += p->offset;
2929 process_constraint (new_constraint (templhs, temprhs));
2934 tree rhstype = TREE_TYPE (rhsop);
2935 tree lhstype = TREE_TYPE (lhsop);
2939 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
2940 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
2942 /* If we have a variably sized types on the rhs or lhs, and a deref
2943 constraint, add the constraint, lhsconstraint = &ANYTHING.
2944 This is conservatively correct because either the lhs is an unknown
2945 sized var (if the constraint is SCALAR), or the lhs is a DEREF
2946 constraint, and every variable it can point to must be unknown sized
2947 anyway, so we don't need to worry about fields at all. */
2948 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
2949 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
2951 rhs.var = anything_id;
2952 rhs.type = ADDRESSOF;
2954 process_constraint (new_constraint (lhs, rhs));
2958 /* The size only really matters insofar as we don't set more or less of
2959 the variable. If we hit an unknown size var, the size should be the
2960 whole darn thing. */
2961 if (get_varinfo (rhs.var)->is_unknown_size_var)
2964 rhssize = TREE_INT_CST_LOW (rhstypesize);
2966 if (get_varinfo (lhs.var)->is_unknown_size_var)
2969 lhssize = TREE_INT_CST_LOW (lhstypesize);
2972 if (rhs.type == SCALAR && lhs.type == SCALAR)
2974 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
2976 lhs.var = collapse_rest_of_var (lhs.var);
2977 rhs.var = collapse_rest_of_var (rhs.var);
2982 process_constraint (new_constraint (lhs, rhs));
2985 else if (lhs.type != DEREF && rhs.type == DEREF)
2986 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2987 else if (lhs.type == DEREF && rhs.type != DEREF)
2988 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2991 tree pointedtotype = lhstype;
2994 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
2995 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
2996 do_structure_copy (tmpvar, rhsop);
2997 do_structure_copy (lhsop, tmpvar);
3003 /* Update related alias information kept in AI. This is used when
3004 building name tags, alias sets and deciding grouping heuristics.
3005 STMT is the statement to process. This function also updates
3006 ADDRESSABLE_VARS. */
3009 update_alias_info (tree stmt, struct alias_info *ai)
3012 use_operand_p use_p;
3014 enum escape_type stmt_escape_type = is_escape_site (stmt);
3016 if (stmt_escape_type == ESCAPE_TO_CALL
3017 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
3019 ai->num_calls_found++;
3020 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
3021 ai->num_pure_const_calls_found++;
3024 /* Mark all the variables whose address are taken by the statement. */
3025 addr_taken = addresses_taken (stmt);
3028 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
3030 /* If STMT is an escape point, all the addresses taken by it are
3032 if (stmt_escape_type != NO_ESCAPE)
3037 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
3039 tree rvar = referenced_var (i);
3040 if (!unmodifiable_var_p (rvar))
3041 mark_call_clobbered (rvar, stmt_escape_type);
3046 /* Process each operand use. If an operand may be aliased, keep
3047 track of how many times it's being used. For pointers, determine
3048 whether they are dereferenced by the statement, or whether their
3049 value escapes, etc. */
3050 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
3054 struct ptr_info_def *pi;
3055 bool is_store, is_potential_deref;
3056 unsigned num_uses, num_derefs;
3058 op = USE_FROM_PTR (use_p);
3060 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3061 to the set of addressable variables. */
3062 if (TREE_CODE (op) == ADDR_EXPR)
3064 bitmap addressable_vars = gimple_addressable_vars (cfun);
3066 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
3067 gcc_assert (addressable_vars);
3069 /* PHI nodes don't have annotations for pinning the set
3070 of addresses taken, so we collect them here.
3072 FIXME, should we allow PHI nodes to have annotations
3073 so that they can be treated like regular statements?
3074 Currently, they are treated as second-class
3076 add_to_addressable_set (TREE_OPERAND (op, 0),
3081 /* Ignore constants. */
3082 if (TREE_CODE (op) != SSA_NAME)
3085 var = SSA_NAME_VAR (op);
3086 v_ann = var_ann (var);
3088 /* The base variable of an SSA name must be a GIMPLE register, and thus
3089 it cannot be aliased. */
3090 gcc_assert (!may_be_aliased (var));
3092 /* We are only interested in pointers. */
3093 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3096 pi = get_ptr_info (op);
3098 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3099 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3101 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3102 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
3105 /* If STMT is a PHI node, then it will not have pointer
3106 dereferences and it will not be an escape point. */
3107 if (TREE_CODE (stmt) == PHI_NODE)
3110 /* Determine whether OP is a dereferenced pointer, and if STMT
3111 is an escape point, whether OP escapes. */
3112 count_uses_and_derefs (op, stmt, &num_uses, &num_derefs, &is_store);
3114 /* Handle a corner case involving address expressions of the
3115 form '&PTR->FLD'. The problem with these expressions is that
3116 they do not represent a dereference of PTR. However, if some
3117 other transformation propagates them into an INDIRECT_REF
3118 expression, we end up with '*(&PTR->FLD)' which is folded
3121 So, if the original code had no other dereferences of PTR,
3122 the aliaser will not create memory tags for it, and when
3123 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3124 memory operations will receive no VDEF/VUSE operands.
3126 One solution would be to have count_uses_and_derefs consider
3127 &PTR->FLD a dereference of PTR. But that is wrong, since it
3128 is not really a dereference but an offset calculation.
3130 What we do here is to recognize these special ADDR_EXPR
3131 nodes. Since these expressions are never GIMPLE values (they
3132 are not GIMPLE invariants), they can only appear on the RHS
3133 of an assignment and their base address is always an
3134 INDIRECT_REF expression. */
3135 is_potential_deref = false;
3136 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3137 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
3138 && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
3140 /* If the RHS if of the form &PTR->FLD and PTR == OP, then
3141 this represents a potential dereference of PTR. */
3142 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3143 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3144 if (TREE_CODE (base) == INDIRECT_REF
3145 && TREE_OPERAND (base, 0) == op)
3146 is_potential_deref = true;
3149 if (num_derefs > 0 || is_potential_deref)
3151 /* Mark OP as dereferenced. In a subsequent pass,
3152 dereferenced pointers that point to a set of
3153 variables will be assigned a name tag to alias
3154 all the variables OP points to. */
3155 pi->is_dereferenced = 1;
3157 /* If this is a store operation, mark OP as being
3158 dereferenced to store, otherwise mark it as being
3159 dereferenced to load. */
3161 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3163 pointer_set_insert (ai->dereferenced_ptrs_load, var);
3166 if (stmt_escape_type != NO_ESCAPE && num_derefs < num_uses)
3168 /* If STMT is an escape point and STMT contains at
3169 least one direct use of OP, then the value of OP
3170 escapes and so the pointed-to variables need to
3171 be marked call-clobbered. */
3172 pi->value_escapes_p = 1;
3173 pi->escape_mask |= stmt_escape_type;
3175 /* If the statement makes a function call, assume
3176 that pointer OP will be dereferenced in a store
3177 operation inside the called function. */
3178 if (get_call_expr_in (stmt)
3179 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3181 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3182 pi->is_dereferenced = 1;
3187 if (TREE_CODE (stmt) == PHI_NODE)
3190 /* Mark stored variables in STMT as being written to and update the
3191 reference counter for potentially aliased symbols in STMT. */
3192 if (stmt_references_memory_p (stmt) && STORED_SYMS (stmt))
3196 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3197 pointer_set_insert (ai->written_vars, referenced_var (i));
3202 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3203 Expressions of the type PTR + CST can be handled in two ways:
3205 1- If the constraint for PTR is ADDRESSOF for a non-structure
3206 variable, then we can use it directly because adding or
3207 subtracting a constant may not alter the original ADDRESSOF
3208 constraint (i.e., pointer arithmetic may not legally go outside
3209 an object's boundaries).
3211 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3212 then if CST is a compile-time constant that can be used as an
3213 offset, we can determine which sub-variable will be pointed-to
3216 Return true if the expression is handled. For any other kind of
3217 expression, return false so that each operand can be added as a
3218 separate constraint by the caller. */
3221 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3224 struct constraint_expr *c, *c2;
3227 VEC (ce_s, heap) *temp = NULL;
3228 unsigned int rhsoffset = 0;
3230 if (TREE_CODE (expr) != PLUS_EXPR
3231 && TREE_CODE (expr) != MINUS_EXPR)
3234 op0 = TREE_OPERAND (expr, 0);
3235 op1 = TREE_OPERAND (expr, 1);
3237 get_constraint_for (op0, &temp);
3238 if (POINTER_TYPE_P (TREE_TYPE (op0))
3239 && TREE_CODE (op1) == INTEGER_CST
3240 && TREE_CODE (expr) == PLUS_EXPR)
3242 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3248 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3249 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3251 if (c2->type == ADDRESSOF && rhsoffset != 0)
3253 varinfo_t temp = get_varinfo (c2->var);
3255 /* An access one after the end of an array is valid,
3256 so simply punt on accesses we cannot resolve. */
3257 temp = first_vi_for_offset (temp, rhsoffset);
3264 c2->offset = rhsoffset;
3265 process_constraint (new_constraint (*c, *c2));
3268 VEC_free (ce_s, heap, temp);
3274 /* Walk statement T setting up aliasing constraints according to the
3275 references found in T. This function is the main part of the
3276 constraint builder. AI points to auxiliary alias information used
3277 when building alias sets and computing alias grouping heuristics. */
3280 find_func_aliases (tree origt)
3283 VEC(ce_s, heap) *lhsc = NULL;
3284 VEC(ce_s, heap) *rhsc = NULL;
3285 struct constraint_expr *c;
3287 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3288 t = TREE_OPERAND (t, 0);
3290 /* Now build constraints expressions. */
3291 if (TREE_CODE (t) == PHI_NODE)
3293 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3295 /* Only care about pointers and structures containing
3297 if (could_have_pointers (PHI_RESULT (t)))
3302 /* For a phi node, assign all the arguments to
3304 get_constraint_for (PHI_RESULT (t), &lhsc);
3305 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3308 tree strippedrhs = PHI_ARG_DEF (t, i);
3310 STRIP_NOPS (strippedrhs);
3311 rhstype = TREE_TYPE (strippedrhs);
3312 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3314 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3316 struct constraint_expr *c2;
3317 while (VEC_length (ce_s, rhsc) > 0)
3319 c2 = VEC_last (ce_s, rhsc);
3320 process_constraint (new_constraint (*c, *c2));
3321 VEC_pop (ce_s, rhsc);
3327 /* In IPA mode, we need to generate constraints to pass call
3328 arguments through their calls. There are two cases, either a
3329 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3330 CALL_EXPR when we are not. */
3331 else if (in_ipa_mode
3332 && ((TREE_CODE (t) == GIMPLE_MODIFY_STMT
3333 && TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
3334 && !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
3335 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3336 || (TREE_CODE (t) == CALL_EXPR
3337 && !(call_expr_flags (t)
3338 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
3343 call_expr_arg_iterator iter;
3347 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3349 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3350 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3357 decl = get_callee_fndecl (rhsop);
3359 /* If we can directly resolve the function being called, do so.
3360 Otherwise, it must be some sort of indirect expression that
3361 we should still be able to handle. */
3364 fi = get_vi_for_tree (decl);
3368 decl = CALL_EXPR_FN (rhsop);
3369 fi = get_vi_for_tree (decl);
3372 /* Assign all the passed arguments to the appropriate incoming
3373 parameters of the function. */
3375 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3377 struct constraint_expr lhs ;
3378 struct constraint_expr *rhsp;
3380 get_constraint_for (arg, &rhsc);
3381 if (TREE_CODE (decl) != FUNCTION_DECL)
3390 lhs.var = first_vi_for_offset (fi, i)->id;
3393 while (VEC_length (ce_s, rhsc) != 0)
3395 rhsp = VEC_last (ce_s, rhsc);
3396 process_constraint (new_constraint (lhs, *rhsp));
3397 VEC_pop (ce_s, rhsc);
3402 /* If we are returning a value, assign it to the result. */
3405 struct constraint_expr rhs;
3406 struct constraint_expr *lhsp;
3409 get_constraint_for (lhsop, &lhsc);
3410 if (TREE_CODE (decl) != FUNCTION_DECL)
3419 rhs.var = first_vi_for_offset (fi, i)->id;
3422 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3423 process_constraint (new_constraint (*lhsp, rhs));
3426 /* Otherwise, just a regular assignment statement. */
3427 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3429 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3430 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
3433 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3434 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
3435 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
3436 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
3438 do_structure_copy (lhsop, rhsop);
3442 /* Only care about operations with pointers, structures
3443 containing pointers, dereferences, and call expressions. */
3444 if (could_have_pointers (lhsop)
3445 || TREE_CODE (rhsop) == CALL_EXPR)
3447 get_constraint_for (lhsop, &lhsc);
3448 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3450 /* RHS that consist of unary operations,
3451 exceptional types, or bare decls/constants, get
3452 handled directly by get_constraint_for. */
3454 case tcc_declaration:
3456 case tcc_exceptional:
3457 case tcc_expression:
3463 get_constraint_for (rhsop, &rhsc);
3464 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3466 struct constraint_expr *c2;
3469 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3470 process_constraint (new_constraint (*c, *c2));
3478 /* For pointer arithmetic of the form
3479 PTR + CST, we can simply use PTR's
3480 constraint because pointer arithmetic is
3481 not allowed to go out of bounds. */
3482 if (handle_ptr_arith (lhsc, rhsop))
3487 /* Otherwise, walk each operand. Notice that we
3488 can't use the operand interface because we need
3489 to process expressions other than simple operands
3490 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3492 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
3494 tree op = TREE_OPERAND (rhsop, i);
3497 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3498 get_constraint_for (op, &rhsc);
3499 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3501 struct constraint_expr *c2;
3502 while (VEC_length (ce_s, rhsc) > 0)
3504 c2 = VEC_last (ce_s, rhsc);
3505 process_constraint (new_constraint (*c, *c2));
3506 VEC_pop (ce_s, rhsc);
3515 /* After promoting variables and computing aliasing we will
3516 need to re-scan most statements. FIXME: Try to minimize the
3517 number of statements re-scanned. It's not really necessary to
3518 re-scan *all* statements. */
3519 mark_stmt_modified (origt);
3520 VEC_free (ce_s, heap, rhsc);
3521 VEC_free (ce_s, heap, lhsc);
3525 /* Find the first varinfo in the same variable as START that overlaps with
3527 Effectively, walk the chain of fields for the variable START to find the
3528 first field that overlaps with OFFSET.
3529 Return NULL if we can't find one. */
3532 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3534 varinfo_t curr = start;
3537 /* We may not find a variable in the field list with the actual
3538 offset when when we have glommed a structure to a variable.
3539 In that case, however, offset should still be within the size
3541 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3549 /* Insert the varinfo FIELD into the field list for BASE, at the front
3553 insert_into_field_list (varinfo_t base, varinfo_t field)
3555 varinfo_t prev = base;
3556 varinfo_t curr = base->next;
3562 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3566 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3568 varinfo_t prev = base;
3569 varinfo_t curr = base->next;
3580 if (field->offset <= curr->offset)
3585 field->next = prev->next;
3590 /* qsort comparison function for two fieldoff's PA and PB */
3593 fieldoff_compare (const void *pa, const void *pb)
3595 const fieldoff_s *foa = (const fieldoff_s *)pa;
3596 const fieldoff_s *fob = (const fieldoff_s *)pb;
3597 HOST_WIDE_INT foasize, fobsize;
3599 if (foa->offset != fob->offset)
3600 return foa->offset - fob->offset;
3602 foasize = TREE_INT_CST_LOW (foa->size);
3603 fobsize = TREE_INT_CST_LOW (fob->size);
3604 return foasize - fobsize;
3607 /* Sort a fieldstack according to the field offset and sizes. */
3609 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
3611 qsort (VEC_address (fieldoff_s, fieldstack),
3612 VEC_length (fieldoff_s, fieldstack),
3613 sizeof (fieldoff_s),
3617 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
3618 of TYPE onto fieldstack, recording their offsets along the way.
3619 OFFSET is used to keep track of the offset in this entire structure, rather
3620 than just the immediately containing structure. Returns the number
3622 HAS_UNION is set to true if we find a union type as a field of
3626 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
3627 HOST_WIDE_INT offset, bool *has_union)
3632 if (TREE_CODE (type) == COMPLEX_TYPE)
3634 fieldoff_s *real_part, *img_part;
3635 real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3636 real_part->type = TREE_TYPE (type);
3637 real_part->size = TYPE_SIZE (TREE_TYPE (type));
3638 real_part->offset = offset;
3639 real_part->decl = NULL_TREE;
3641 img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3642 img_part->type = TREE_TYPE (type);
3643 img_part->size = TYPE_SIZE (TREE_TYPE (type));
3644 img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
3645 img_part->decl = NULL_TREE;
3650 if (TREE_CODE (type) == ARRAY_TYPE)
3652 tree sz = TYPE_SIZE (type);
3653 tree elsz = TYPE_SIZE (TREE_TYPE (type));
3658 || ! host_integerp (sz, 1)
3659 || TREE_INT_CST_LOW (sz) == 0
3661 || ! host_integerp (elsz, 1)
3662 || TREE_INT_CST_LOW (elsz) == 0)
3665 nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
3666 if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
3669 for (i = 0; i < nr; ++i)
3675 && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
3676 || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
3679 if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
3681 else if (!(pushed = push_fields_onto_fieldstack
3682 (TREE_TYPE (type), fieldstack,
3683 offset + i * TREE_INT_CST_LOW (elsz), has_union)))
3684 /* Empty structures may have actual size, like in C++. So
3685 see if we didn't push any subfields and the size is
3686 nonzero, push the field onto the stack */
3693 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3694 pair->type = TREE_TYPE (type);
3696 pair->decl = NULL_TREE;
3697 pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
3707 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
3708 if (TREE_CODE (field) == FIELD_DECL)
3714 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
3715 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
3718 if (!var_can_have_subvars (field))
3720 else if (!(pushed = push_fields_onto_fieldstack
3721 (TREE_TYPE (field), fieldstack,
3722 offset + bitpos_of_field (field), has_union))
3723 && DECL_SIZE (field)
3724 && !integer_zerop (DECL_SIZE (field)))
3725 /* Empty structures may have actual size, like in C++. So
3726 see if we didn't push any subfields and the size is
3727 nonzero, push the field onto the stack */
3734 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3735 pair->type = TREE_TYPE (field);
3736 pair->size = DECL_SIZE (field);
3738 pair->offset = offset + bitpos_of_field (field);
3748 /* Create a constraint from ANYTHING variable to VI. */
3750 make_constraint_from_anything (varinfo_t vi)
3752 struct constraint_expr lhs, rhs;
3758 rhs.var = anything_id;
3760 rhs.type = ADDRESSOF;
3761 process_constraint (new_constraint (lhs, rhs));
3764 /* Count the number of arguments DECL has, and set IS_VARARGS to true
3765 if it is a varargs function. */
3768 count_num_arguments (tree decl, bool *is_varargs)
3773 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
3777 if (TREE_VALUE (t) == void_type_node)
3787 /* Creation function node for DECL, using NAME, and return the index
3788 of the variable we've created for the function. */
3791 create_function_info_for (tree decl, const char *name)
3793 unsigned int index = VEC_length (varinfo_t, varmap);
3797 bool is_varargs = false;
3799 /* Create the variable info. */
3801 vi = new_var_info (decl, index, name);
3806 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
3807 insert_vi_for_tree (vi->decl, vi);
3808 VEC_safe_push (varinfo_t, heap, varmap, vi);
3812 /* If it's varargs, we don't know how many arguments it has, so we
3819 vi->is_unknown_size_var = true;
3824 arg = DECL_ARGUMENTS (decl);
3826 /* Set up variables for each argument. */
3827 for (i = 1; i < vi->fullsize; i++)
3830 const char *newname;
3832 unsigned int newindex;
3833 tree argdecl = decl;
3838 newindex = VEC_length (varinfo_t, varmap);
3839 asprintf (&tempname, "%s.arg%d", name, i-1);
3840 newname = ggc_strdup (tempname);
3843 argvi = new_var_info (argdecl, newindex, newname);
3844 argvi->decl = argdecl;
3845 VEC_safe_push (varinfo_t, heap, varmap, argvi);
3848 argvi->fullsize = vi->fullsize;
3849 argvi->has_union = false;
3850 insert_into_field_list_sorted (vi, argvi);
3851 stats.total_vars ++;
3854 insert_vi_for_tree (arg, argvi);
3855 arg = TREE_CHAIN (arg);
3859 /* Create a variable for the return var. */
3860 if (DECL_RESULT (decl) != NULL
3861 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
3864 const char *newname;
3866 unsigned int newindex;
3867 tree resultdecl = decl;
3871 if (DECL_RESULT (decl))
3872 resultdecl = DECL_RESULT (decl);
3874 newindex = VEC_length (varinfo_t, varmap);
3875 asprintf (&tempname, "%s.result", name);
3876 newname = ggc_strdup (tempname);
3879 resultvi = new_var_info (resultdecl, newindex, newname);
3880 resultvi->decl = resultdecl;
3881 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
3882 resultvi->offset = i;
3884 resultvi->fullsize = vi->fullsize;
3885 resultvi->has_union = false;
3886 insert_into_field_list_sorted (vi, resultvi);
3887 stats.total_vars ++;
3888 if (DECL_RESULT (decl))
3889 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
3895 /* Return true if FIELDSTACK contains fields that overlap.
3896 FIELDSTACK is assumed to be sorted by offset. */
3899 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
3901 fieldoff_s *fo = NULL;
3903 HOST_WIDE_INT lastoffset = -1;
3905 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3907 if (fo->offset == lastoffset)
3909 lastoffset = fo->offset;
3914 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
3915 This will also create any varinfo structures necessary for fields
3919 create_variable_info_for (tree decl, const char *name)
3921 unsigned int index = VEC_length (varinfo_t, varmap);
3923 tree decltype = TREE_TYPE (decl);
3924 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
3925 bool notokay = false;
3927 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
3928 VEC (fieldoff_s,heap) *fieldstack = NULL;
3930 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
3931 return create_function_info_for (decl, name);
3933 hasunion = TREE_CODE (decltype) == UNION_TYPE
3934 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
3935 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
3937 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
3940 VEC_free (fieldoff_s, heap, fieldstack);
3946 /* If the variable doesn't have subvars, we may end up needing to
3947 sort the field list and create fake variables for all the
3949 vi = new_var_info (decl, index, name);
3952 vi->has_union = hasunion;
3954 || TREE_CODE (declsize) != INTEGER_CST
3955 || TREE_CODE (decltype) == UNION_TYPE
3956 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
3958 vi->is_unknown_size_var = true;
3964 vi->fullsize = TREE_INT_CST_LOW (declsize);
3965 vi->size = vi->fullsize;
3968 insert_vi_for_tree (vi->decl, vi);
3969 VEC_safe_push (varinfo_t, heap, varmap, vi);
3970 if (is_global && (!flag_whole_program || !in_ipa_mode))
3971 make_constraint_from_anything (vi);
3974 if (use_field_sensitive
3976 && !vi->is_unknown_size_var
3977 && var_can_have_subvars (decl)
3978 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
3980 unsigned int newindex = VEC_length (varinfo_t, varmap);
3981 fieldoff_s *fo = NULL;
3984 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3987 || TREE_CODE (fo->size) != INTEGER_CST
3995 /* We can't sort them if we have a field with a variable sized type,
3996 which will make notokay = true. In that case, we are going to return
3997 without creating varinfos for the fields anyway, so sorting them is a
4001 sort_fieldstack (fieldstack);
4002 /* Due to some C++ FE issues, like PR 22488, we might end up
4003 what appear to be overlapping fields even though they,
4004 in reality, do not overlap. Until the C++ FE is fixed,
4005 we will simply disable field-sensitivity for these cases. */
4006 notokay = check_for_overlaps (fieldstack);
4010 if (VEC_length (fieldoff_s, fieldstack) != 0)
4011 fo = VEC_index (fieldoff_s, fieldstack, 0);
4013 if (fo == NULL || notokay)
4015 vi->is_unknown_size_var = 1;
4018 VEC_free (fieldoff_s, heap, fieldstack);
4022 vi->size = TREE_INT_CST_LOW (fo->size);
4023 vi->offset = fo->offset;
4024 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4025 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4029 const char *newname = "NULL";
4032 newindex = VEC_length (varinfo_t, varmap);
4036 asprintf (&tempname, "%s.%s",
4037 vi->name, alias_get_name (fo->decl));
4039 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4040 vi->name, fo->offset);
4041 newname = ggc_strdup (tempname);
4044 newvi = new_var_info (decl, newindex, newname);
4045 newvi->offset = fo->offset;
4046 newvi->size = TREE_INT_CST_LOW (fo->size);
4047 newvi->fullsize = vi->fullsize;
4048 insert_into_field_list (vi, newvi);
4049 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4050 if (is_global && (!flag_whole_program || !in_ipa_mode))
4051 make_constraint_from_anything (newvi);
4055 VEC_free (fieldoff_s, heap, fieldstack);
4060 /* Print out the points-to solution for VAR to FILE. */
4063 dump_solution_for_var (FILE *file, unsigned int var)
4065 varinfo_t vi = get_varinfo (var);
4069 if (find (var) != var)
4071 varinfo_t vipt = get_varinfo (find (var));
4072 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4076 fprintf (file, "%s = { ", vi->name);
4077 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4079 fprintf (file, "%s ", get_varinfo (i)->name);
4081 fprintf (file, "}\n");
4085 /* Print the points-to solution for VAR to stdout. */
4088 debug_solution_for_var (unsigned int var)
4090 dump_solution_for_var (stdout, var);
4093 /* Create varinfo structures for all of the variables in the
4094 function for intraprocedural mode. */
4097 intra_create_variable_infos (void)
4100 struct constraint_expr lhs, rhs;
4102 /* For each incoming pointer argument arg, create the constraint ARG
4103 = ANYTHING or a dummy variable if flag_argument_noalias is set. */
4104 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4108 if (!could_have_pointers (t))
4111 /* With flag_argument_noalias greater than two means that the incoming
4112 argument cannot alias anything except for itself so create a HEAP
4114 if (POINTER_TYPE_P (TREE_TYPE (t))
4115 && flag_argument_noalias > 2)
4118 tree heapvar = heapvar_lookup (t);
4122 lhs.var = get_vi_for_tree (t)->id;
4124 if (heapvar == NULL_TREE)
4126 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4128 get_var_ann (heapvar)->is_heapvar = 1;
4129 DECL_EXTERNAL (heapvar) = 1;
4130 if (gimple_referenced_vars (cfun))
4131 add_referenced_var (heapvar);
4132 heapvar_insert (t, heapvar);
4134 vi = get_vi_for_tree (heapvar);
4135 vi->is_artificial_var = 1;
4136 vi->is_heap_var = 1;
4138 rhs.type = ADDRESSOF;
4140 for (p = get_varinfo (lhs.var); p; p = p->next)
4142 struct constraint_expr temp = lhs;
4144 process_constraint (new_constraint (temp, rhs));
4149 varinfo_t arg_vi = get_vi_for_tree (t);
4151 for (p = arg_vi; p; p = p->next)
4152 make_constraint_from_anything (p);
4157 /* Structure used to put solution bitmaps in a hashtable so they can
4158 be shared among variables with the same points-to set. */
4160 typedef struct shared_bitmap_info
4164 } *shared_bitmap_info_t;
4166 static htab_t shared_bitmap_table;
4168 /* Hash function for a shared_bitmap_info_t */
4171 shared_bitmap_hash (const void *p)
4173 const shared_bitmap_info_t bi = (shared_bitmap_info_t) p;
4174 return bi->hashcode;
4177 /* Equality function for two shared_bitmap_info_t's. */
4180 shared_bitmap_eq (const void *p1, const void *p2)
4182 const shared_bitmap_info_t sbi1 = (shared_bitmap_info_t) p1;
4183 const shared_bitmap_info_t sbi2 = (shared_bitmap_info_t) p2;
4184 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4187 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4188 existing instance if there is one, NULL otherwise. */
4191 shared_bitmap_lookup (bitmap pt_vars)
4194 struct shared_bitmap_info sbi;
4196 sbi.pt_vars = pt_vars;
4197 sbi.hashcode = bitmap_hash (pt_vars);
4199 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4200 sbi.hashcode, NO_INSERT);
4204 return ((shared_bitmap_info_t) *slot)->pt_vars;
4208 /* Add a bitmap to the shared bitmap hashtable. */
4211 shared_bitmap_add (bitmap pt_vars)
4214 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4216 sbi->pt_vars = pt_vars;
4217 sbi->hashcode = bitmap_hash (pt_vars);
4219 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4220 sbi->hashcode, INSERT);
4221 gcc_assert (!*slot);
4222 *slot = (void *) sbi;
4226 /* Set bits in INTO corresponding to the variable uids in solution set
4227 FROM, which came from variable PTR.
4228 For variables that are actually dereferenced, we also use type
4229 based alias analysis to prune the points-to sets. */
4232 set_uids_in_ptset (tree ptr, bitmap into, bitmap from)
4237 HOST_WIDE_INT ptr_alias_set = get_alias_set (TREE_TYPE (ptr));
4239 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4241 varinfo_t vi = get_varinfo (i);
4242 unsigned HOST_WIDE_INT var_alias_set;
4244 /* The only artificial variables that are allowed in a may-alias
4245 set are heap variables. */
4246 if (vi->is_artificial_var && !vi->is_heap_var)
4249 if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
4251 /* Variables containing unions may need to be converted to
4252 their SFT's, because SFT's can have unions and we cannot. */
4253 for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
4254 bitmap_set_bit (into, DECL_UID (sv->var));
4256 else if (TREE_CODE (vi->decl) == VAR_DECL
4257 || TREE_CODE (vi->decl) == PARM_DECL)
4259 if (var_can_have_subvars (vi->decl)
4260 && get_subvars_for_var (vi->decl))
4262 /* If VI->DECL is an aggregate for which we created
4263 SFTs, add the SFT corresponding to VI->OFFSET. */
4264 tree sft = get_subvar_at (vi->decl, vi->offset);
4267 var_alias_set = get_alias_set (sft);
4268 if (!vi->directly_dereferenced
4269 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4270 bitmap_set_bit (into, DECL_UID (sft));
4275 /* Otherwise, just add VI->DECL to the alias set.
4276 Don't type prune artificial vars. */
4277 if (vi->is_artificial_var)
4278 bitmap_set_bit (into, DECL_UID (vi->decl));
4281 var_alias_set = get_alias_set (vi->decl);
4282 if (!vi->directly_dereferenced
4283 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4284 bitmap_set_bit (into, DECL_UID (vi->decl));
4292 static bool have_alias_info = false;
4294 /* The list of SMT's that are in use by our pointer variables. This
4295 is the set of SMT's for all pointers that can point to anything. */
4296 static bitmap used_smts;
4298 /* Due to the ordering of points-to set calculation and SMT
4299 calculation being a bit co-dependent, we can't just calculate SMT
4300 used info whenever we want, we have to calculate it around the time
4301 that find_what_p_points_to is called. */
4303 /* Mark which SMT's are in use by points-to anything variables. */
4306 set_used_smts (void)
4310 used_smts = BITMAP_ALLOC (&pta_obstack);
4312 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4314 tree var = vi->decl;
4317 struct ptr_info_def *pi = NULL;
4319 /* For parm decls, the pointer info may be under the default
4321 if (TREE_CODE (vi->decl) == PARM_DECL
4322 && gimple_default_def (cfun, var))
4323 pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4324 else if (TREE_CODE (var) == SSA_NAME)
4325 pi = SSA_NAME_PTR_INFO (var);
4327 /* Skip the special variables and those without their own
4329 if (vi->is_special_var || find (vi->id) != vi->id
4331 || (pi && !pi->is_dereferenced)
4332 || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4333 || !POINTER_TYPE_P (TREE_TYPE (var)))
4336 if (TREE_CODE (var) == SSA_NAME)
4337 var = SSA_NAME_VAR (var);
4343 smt = va->symbol_mem_tag;
4344 if (smt && bitmap_bit_p (vi->solution, anything_id))
4345 bitmap_set_bit (used_smts, DECL_UID (smt));
4349 /* Merge the necessary SMT's into the bitmap INTO, which is
4350 P's varinfo. This involves merging all SMT's that are a subset of
4351 the SMT necessary for P. */
4354 merge_smts_into (tree p, bitmap solution)
4362 if (TREE_CODE (p) == SSA_NAME)
4363 var = SSA_NAME_VAR (p);
4365 smt = var_ann (var)->symbol_mem_tag;
4368 HOST_WIDE_INT smtset = get_alias_set (TREE_TYPE (smt));
4370 /* Need to set the SMT subsets first before this
4371 will work properly. */
4372 bitmap_set_bit (solution, DECL_UID (smt));
4373 EXECUTE_IF_SET_IN_BITMAP (used_smts, 0, i, bi)
4375 tree newsmt = referenced_var (i);
4376 tree newsmttype = TREE_TYPE (newsmt);
4378 if (alias_set_subset_of (get_alias_set (newsmttype),
4380 bitmap_set_bit (solution, i);
4383 aliases = MTAG_ALIASES (smt);
4385 bitmap_ior_into (solution, aliases);
4389 /* Given a pointer variable P, fill in its points-to set, or return
4391 Rather than return false for variables that point-to anything, we
4392 instead find the corresponding SMT, and merge in it's aliases. In
4393 addition to these aliases, we also set the bits for the SMT's
4394 themselves and their subsets, as SMT's are still in use by
4395 non-SSA_NAME's, and pruning may eliminate every one of their
4396 aliases. In such a case, if we did not include the right set of
4397 SMT's in the points-to set of the variable, we'd end up with
4398 statements that do not conflict but should. */
4401 find_what_p_points_to (tree p)
4406 if (!have_alias_info)
4409 /* For parameters, get at the points-to set for the actual parm
4411 if (TREE_CODE (p) == SSA_NAME
4412 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4413 && SSA_NAME_IS_DEFAULT_DEF (p))
4414 lookup_p = SSA_NAME_VAR (p);
4416 vi = lookup_vi_for_tree (lookup_p);
4419 if (vi->is_artificial_var)
4422 /* See if this is a field or a structure. */
4423 if (vi->size != vi->fullsize)
4425 /* Nothing currently asks about structure fields directly,
4426 but when they do, we need code here to hand back the
4428 if (!var_can_have_subvars (vi->decl)
4429 || get_subvars_for_var (vi->decl) == NULL)
4434 struct ptr_info_def *pi = get_ptr_info (p);
4437 bool was_pt_anything = false;
4438 bitmap finished_solution;
4441 if (!pi->is_dereferenced)
4444 /* This variable may have been collapsed, let's get the real
4446 vi = get_varinfo (find (vi->id));
4448 /* Translate artificial variables into SSA_NAME_PTR_INFO
4450 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4452 varinfo_t vi = get_varinfo (i);
4454 if (vi->is_artificial_var)
4456 /* FIXME. READONLY should be handled better so that
4457 flow insensitive aliasing can disregard writable
4459 if (vi->id == nothing_id)
4461 else if (vi->id == anything_id)
4462 was_pt_anything = 1;
4463 else if (vi->id == readonly_id)
4464 was_pt_anything = 1;
4465 else if (vi->id == integer_id)
4466 was_pt_anything = 1;
4467 else if (vi->is_heap_var)
4468 pi->pt_global_mem = 1;
4472 /* Share the final set of variables when possible. */
4474 finished_solution = BITMAP_GGC_ALLOC ();
4475 stats.points_to_sets_created++;
4477 /* Instead of using pt_anything, we instead merge in the SMT
4478 aliases for the underlying SMT. */
4479 if (was_pt_anything)
4481 merge_smts_into (p, finished_solution);
4482 pi->pt_global_mem = 1;
4485 set_uids_in_ptset (vi->decl, finished_solution, vi->solution);
4486 result = shared_bitmap_lookup (finished_solution);
4490 shared_bitmap_add (finished_solution);
4491 pi->pt_vars = finished_solution;
4495 pi->pt_vars = result;
4496 bitmap_clear (finished_solution);
4499 if (bitmap_empty_p (pi->pt_vars))
4511 /* Dump points-to information to OUTFILE. */
4514 dump_sa_points_to_info (FILE *outfile)
4518 fprintf (outfile, "\nPoints-to sets\n\n");
4520 if (dump_flags & TDF_STATS)
4522 fprintf (outfile, "Stats:\n");
4523 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4524 fprintf (outfile, "Non-pointer vars: %d\n",
4525 stats.nonpointer_vars);
4526 fprintf (outfile, "Statically unified vars: %d\n",
4527 stats.unified_vars_static);
4528 fprintf (outfile, "Dynamically unified vars: %d\n",
4529 stats.unified_vars_dynamic);
4530 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4531 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4532 fprintf (outfile, "Number of implicit edges: %d\n",
4533 stats.num_implicit_edges);
4536 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4537 dump_solution_for_var (outfile, i);
4541 /* Debug points-to information to stderr. */
4544 debug_sa_points_to_info (void)
4546 dump_sa_points_to_info (stderr);
4550 /* Initialize the always-existing constraint variables for NULL
4551 ANYTHING, READONLY, and INTEGER */
4554 init_base_vars (void)
4556 struct constraint_expr lhs, rhs;
4558 /* Create the NULL variable, used to represent that a variable points
4560 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
4561 var_nothing = new_var_info (nothing_tree, 0, "NULL");
4562 insert_vi_for_tree (nothing_tree, var_nothing);
4563 var_nothing->is_artificial_var = 1;
4564 var_nothing->offset = 0;
4565 var_nothing->size = ~0;
4566 var_nothing->fullsize = ~0;
4567 var_nothing->is_special_var = 1;
4569 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
4571 /* Create the ANYTHING variable, used to represent that a variable
4572 points to some unknown piece of memory. */
4573 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
4574 var_anything = new_var_info (anything_tree, 1, "ANYTHING");
4575 insert_vi_for_tree (anything_tree, var_anything);
4576 var_anything->is_artificial_var = 1;
4577 var_anything->size = ~0;
4578 var_anything->offset = 0;
4579 var_anything->next = NULL;
4580 var_anything->fullsize = ~0;
4581 var_anything->is_special_var = 1;
4584 /* Anything points to anything. This makes deref constraints just
4585 work in the presence of linked list and other p = *p type loops,
4586 by saying that *ANYTHING = ANYTHING. */
4587 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
4589 lhs.var = anything_id;
4591 rhs.type = ADDRESSOF;
4592 rhs.var = anything_id;
4595 /* This specifically does not use process_constraint because
4596 process_constraint ignores all anything = anything constraints, since all
4597 but this one are redundant. */
4598 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
4600 /* Create the READONLY variable, used to represent that a variable
4601 points to readonly memory. */
4602 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
4603 var_readonly = new_var_info (readonly_tree, 2, "READONLY");
4604 var_readonly->is_artificial_var = 1;
4605 var_readonly->offset = 0;
4606 var_readonly->size = ~0;
4607 var_readonly->fullsize = ~0;
4608 var_readonly->next = NULL;
4609 var_readonly->is_special_var = 1;
4610 insert_vi_for_tree (readonly_tree, var_readonly);
4612 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
4614 /* readonly memory points to anything, in order to make deref
4615 easier. In reality, it points to anything the particular
4616 readonly variable can point to, but we don't track this
4619 lhs.var = readonly_id;
4621 rhs.type = ADDRESSOF;
4622 rhs.var = anything_id;
4625 process_constraint (new_constraint (lhs, rhs));
4627 /* Create the INTEGER variable, used to represent that a variable points
4629 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
4630 var_integer = new_var_info (integer_tree, 3, "INTEGER");
4631 insert_vi_for_tree (integer_tree, var_integer);
4632 var_integer->is_artificial_var = 1;
4633 var_integer->size = ~0;
4634 var_integer->fullsize = ~0;
4635 var_integer->offset = 0;
4636 var_integer->next = NULL;
4637 var_integer->is_special_var = 1;
4639 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
4641 /* INTEGER = ANYTHING, because we don't know where a dereference of
4642 a random integer will point to. */
4644 lhs.var = integer_id;
4646 rhs.type = ADDRESSOF;
4647 rhs.var = anything_id;
4649 process_constraint (new_constraint (lhs, rhs));
4652 /* Initialize things necessary to perform PTA */
4655 init_alias_vars (void)
4657 bitmap_obstack_initialize (&pta_obstack);
4658 bitmap_obstack_initialize (&oldpta_obstack);
4659 bitmap_obstack_initialize (&predbitmap_obstack);
4661 constraint_pool = create_alloc_pool ("Constraint pool",
4662 sizeof (struct constraint), 30);
4663 variable_info_pool = create_alloc_pool ("Variable info pool",
4664 sizeof (struct variable_info), 30);
4665 constraints = VEC_alloc (constraint_t, heap, 8);
4666 varmap = VEC_alloc (varinfo_t, heap, 8);
4667 vi_for_tree = pointer_map_create ();
4669 memset (&stats, 0, sizeof (stats));
4670 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
4671 shared_bitmap_eq, free);
4675 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
4676 predecessor edges. */
4679 remove_preds_and_fake_succs (constraint_graph_t graph)
4683 /* Clear the implicit ref and address nodes from the successor
4685 for (i = 0; i < FIRST_REF_NODE; i++)
4687 if (graph->succs[i])
4688 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
4689 FIRST_REF_NODE * 2);
4692 /* Free the successor list for the non-ref nodes. */
4693 for (i = FIRST_REF_NODE; i < graph->size; i++)
4695 if (graph->succs[i])
4696 BITMAP_FREE (graph->succs[i]);
4699 /* Now reallocate the size of the successor list as, and blow away
4700 the predecessor bitmaps. */
4701 graph->size = VEC_length (varinfo_t, varmap);
4702 graph->succs = xrealloc (graph->succs, graph->size * sizeof (bitmap));
4704 free (graph->implicit_preds);
4705 graph->implicit_preds = NULL;
4706 free (graph->preds);
4707 graph->preds = NULL;
4708 bitmap_obstack_release (&predbitmap_obstack);
4711 /* Create points-to sets for the current function. See the comments
4712 at the start of the file for an algorithmic overview. */
4715 compute_points_to_sets (struct alias_info *ai)
4717 struct scc_info *si;
4720 timevar_push (TV_TREE_PTA);
4723 init_alias_heapvars ();
4725 intra_create_variable_infos ();
4727 /* Now walk all statements and derive aliases. */
4730 block_stmt_iterator bsi;
4733 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
4735 if (is_gimple_reg (PHI_RESULT (phi)))
4737 find_func_aliases (phi);
4739 /* Update various related attributes like escaped
4740 addresses, pointer dereferences for loads and stores.
4741 This is used when creating name tags and alias
4743 update_alias_info (phi, ai);
4747 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4749 tree stmt = bsi_stmt (bsi);
4751 find_func_aliases (stmt);
4753 /* Update various related attributes like escaped
4754 addresses, pointer dereferences for loads and stores.
4755 This is used when creating name tags and alias
4757 update_alias_info (stmt, ai);
4764 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
4765 dump_constraints (dump_file);
4770 "\nCollapsing static cycles and doing variable "
4772 build_pred_graph ();
4773 si = perform_var_substitution (graph);
4774 move_complex_constraints (graph, si);
4775 free_var_substitution_info (si);
4777 build_succ_graph ();
4778 find_indirect_cycles (graph);
4780 /* Implicit nodes and predecessors are no longer necessary at this
4782 remove_preds_and_fake_succs (graph);
4785 fprintf (dump_file, "\nSolving graph:\n");
4787 solve_graph (graph);
4790 dump_sa_points_to_info (dump_file);
4792 have_alias_info = true;
4794 timevar_pop (TV_TREE_PTA);
4798 /* Delete created points-to sets. */
4801 delete_points_to_sets (void)
4806 htab_delete (shared_bitmap_table);
4807 if (dump_file && (dump_flags & TDF_STATS))
4808 fprintf (dump_file, "Points to sets created:%d\n",
4809 stats.points_to_sets_created);
4811 pointer_map_destroy (vi_for_tree);
4812 bitmap_obstack_release (&pta_obstack);
4813 VEC_free (constraint_t, heap, constraints);
4815 for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
4816 VEC_free (constraint_t, heap, graph->complex[i]);
4819 free (graph->succs);
4820 free (graph->indirect_cycles);
4823 VEC_free (varinfo_t, heap, varmap);
4824 free_alloc_pool (variable_info_pool);
4825 free_alloc_pool (constraint_pool);
4826 have_alias_info = false;
4829 /* Return true if we should execute IPA PTA. */
4833 return (flag_unit_at_a_time != 0
4835 /* Don't bother doing anything if the program has errors. */
4836 && !(errorcount || sorrycount));
4839 /* Execute the driver for IPA PTA. */
4841 ipa_pta_execute (void)
4843 struct cgraph_node *node;
4844 struct scc_info *si;
4847 init_alias_heapvars ();
4850 for (node = cgraph_nodes; node; node = node->next)
4852 if (!node->analyzed || cgraph_is_master_clone (node))
4856 varid = create_function_info_for (node->decl,
4857 cgraph_node_name (node));
4858 if (node->local.externally_visible)
4860 varinfo_t fi = get_varinfo (varid);
4861 for (; fi; fi = fi->next)
4862 make_constraint_from_anything (fi);
4866 for (node = cgraph_nodes; node; node = node->next)
4868 if (node->analyzed && cgraph_is_master_clone (node))
4870 struct function *cfun = DECL_STRUCT_FUNCTION (node->decl);
4872 tree old_func_decl = current_function_decl;
4875 "Generating constraints for %s\n",
4876 cgraph_node_name (node));
4878 current_function_decl = node->decl;
4880 FOR_EACH_BB_FN (bb, cfun)
4882 block_stmt_iterator bsi;
4885 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
4887 if (is_gimple_reg (PHI_RESULT (phi)))
4889 find_func_aliases (phi);
4893 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4895 tree stmt = bsi_stmt (bsi);
4896 find_func_aliases (stmt);
4899 current_function_decl = old_func_decl;
4904 /* Make point to anything. */
4912 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
4913 dump_constraints (dump_file);
4918 "\nCollapsing static cycles and doing variable "
4921 build_pred_graph ();
4922 si = perform_var_substitution (graph);
4923 move_complex_constraints (graph, si);
4924 free_var_substitution_info (si);
4926 build_succ_graph ();
4927 find_indirect_cycles (graph);
4929 /* Implicit nodes and predecessors are no longer necessary at this
4931 remove_preds_and_fake_succs (graph);
4934 fprintf (dump_file, "\nSolving graph:\n");
4936 solve_graph (graph);
4939 dump_sa_points_to_info (dump_file);
4942 delete_alias_heapvars ();
4943 delete_points_to_sets ();
4947 struct tree_opt_pass pass_ipa_pta =
4950 gate_ipa_pta, /* gate */
4951 ipa_pta_execute, /* execute */
4954 0, /* static_pass_number */
4955 TV_IPA_PTA, /* tv_id */
4956 0, /* properties_required */
4957 0, /* properties_provided */
4958 0, /* properties_destroyed */
4959 0, /* todo_flags_start */
4960 0, /* todo_flags_finish */
4964 /* Initialize the heapvar for statement mapping. */
4966 init_alias_heapvars (void)
4968 if (!heapvar_for_stmt)
4969 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
4974 delete_alias_heapvars (void)
4976 htab_delete (heapvar_for_stmt);
4977 heapvar_for_stmt = NULL;
4981 #include "gt-tree-ssa-structalias.h"