/* Tree based points-to analysis
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
35 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "tree-inline.h"
42 #include "tree-gimple.h"
46 #include "tree-pass.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
51 #include "tree-ssa-structalias.h"
54 #include "pointer-set.h"
/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
   points-to sets.

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and operations that
   are involved on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets
   as a consequence.

   See "Efficient Field-sensitive pointer analysis for C" by "David
   J. Pearce and Paul H. J. Kelly and Chris Hankin", at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of real constraint expressions, DEREF,
   ADDRESSOF, and SCALAR.  Each constraint expression consists
   of a constraint type, a variable, and an offset.

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
   it appears on the LHS or the RHS of a statement.
   ADDRESSOF is a constraint expression used to represent &x, whether
   it appears on the LHS or the RHS of a statement.

   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.

   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset
   order.
   Each variable for a structure field has

   1. "size", that tells the size in bits of that field.
   2. "fullsize", that tells the size in bits of the entire structure.
   3. "offset", that tells the offset in bits from the beginning of the
   structure to this field.

   Thus, for example, a declaration like

   struct f { int a; int b; } foo;
   int *bar;

   looks like

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL

   In order to solve the system of set constraints, the following is
   done:

   1. Each constraint variable x has a solution set associated with it,
   Sol(x).

   2. Constraints are separated into direct, copy, and complex.
   Direct constraints are ADDRESSOF constraints that require no extra
   processing, such as P = &Q
   Copy constraints are those of the form P = Q.
   Complex constraints are all the constraints involving dereferences
   and offsets (including offsetted copies).

   3. All direct constraints of the form P = &Q are processed, such
   that Q is added to Sol(P)

   4. All complex constraints for a given constraint variable are stored in a
   linked list attached to that variable's node.

   5. A directed graph is built out of the copy constraints.  Each
   constraint variable is a node in the graph, and an edge from
   Q to P is added for each copy constraint of the form P = Q

   6. The graph is then walked, and solution sets are
   propagated along the copy edges, such that an edge from Q to P
   causes Sol(P) <- Sol(P) union Sol(Q).

   7. As we visit each node, all complex constraints associated with
   that node are processed by adding appropriate copy edges to the graph, or the
   appropriate variables to the solution set.

   8. The process of walking the graph is iterated until no solution
   sets change.

   Prior to walking the graph in steps 6 and 7, we perform static
   cycle elimination on the constraint graph, as well
   as off-line variable substitution.

   TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
   on and turned into anything), but isn't.  You can just see what offset
   inside the pointed-to struct it's going to access.

   TODO: Constant bounded arrays can be handled as if they were structs of the
   same number of elements.

   TODO: Modeling heap and incoming pointers becomes much better if we
   add fields to them as we discover them, which we could do.

   TODO: We could handle unions, but to be honest, it's probably not
   worth the pain or slowdown.  */
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
/* Statistics gathered by the solver, dumped with the rest of the pass
   output.  */
static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
209 /* ID of this variable */
212 /* Name of this variable */
215 /* Tree that this variable is associated with. */
218 /* Offset of this variable, in bits, from the base variable */
219 unsigned HOST_WIDE_INT offset;
221 /* Size of the variable, in bits. */
222 unsigned HOST_WIDE_INT size;
224 /* Full size of the base variable, in bits. */
225 unsigned HOST_WIDE_INT fullsize;
227 /* A link to the variable for the next field in this structure. */
228 struct variable_info *next;
230 /* True if this is a variable created by the constraint analysis, such as
231 heap variables and constraints we had to break up. */
232 unsigned int is_artificial_var:1;
234 /* True if this is a special variable whose solution set should not be
236 unsigned int is_special_var:1;
238 /* True for variables whose size is not known or variable. */
239 unsigned int is_unknown_size_var:1;
241 /* True for variables that have unions somewhere in them. */
242 unsigned int has_union:1;
244 /* True if this is a heap variable. */
245 unsigned int is_heap_var:1;
247 /* True if we may not use TBAA to prune references to this
248 variable. This is used for C++ placement new. */
249 unsigned int no_tbaa_pruning : 1;
251 /* Points-to set for this variable. */
254 /* Old points-to set for this variable. */
257 /* Variable id this was collapsed to due to type unsafety. This
258 should be unused completely after build_succ_graph, or something
260 struct variable_info *collapsed_to;
262 typedef struct variable_info *varinfo_t;
264 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
266 /* Pool of variable info structures. */
267 static alloc_pool variable_info_pool;
269 DEF_VEC_P(varinfo_t);
271 DEF_VEC_ALLOC_P(varinfo_t, heap);
273 /* Table of variable info structures for constraint variables.
274 Indexed directly by variable info id. */
275 static VEC(varinfo_t,heap) *varmap;
277 /* Return the varmap element N */
279 static inline varinfo_t
280 get_varinfo (unsigned int n)
282 return VEC_index (varinfo_t, varmap, n);
285 /* Return the varmap element N, following the collapsed_to link. */
287 static inline varinfo_t
288 get_varinfo_fc (unsigned int n)
290 varinfo_t v = VEC_index (varinfo_t, varmap, n);
293 return v->collapsed_to;
297 /* Variable that represents the unknown pointer. */
298 static varinfo_t var_anything;
299 static tree anything_tree;
300 static unsigned int anything_id;
302 /* Variable that represents the NULL pointer. */
303 static varinfo_t var_nothing;
304 static tree nothing_tree;
305 static unsigned int nothing_id;
307 /* Variable that represents read only memory. */
308 static varinfo_t var_readonly;
309 static tree readonly_tree;
310 static unsigned int readonly_id;
312 /* Variable that represents integers. This is used for when people do things
314 static varinfo_t var_integer;
315 static tree integer_tree;
316 static unsigned int integer_id;
318 /* Lookup a heap var for FROM, and return it if we find one. */
321 heapvar_lookup (tree from)
323 struct tree_map *h, in;
326 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
327 htab_hash_pointer (from));
333 /* Insert a mapping FROM->TO in the heap var for statement
337 heapvar_insert (tree from, tree to)
342 h = GGC_NEW (struct tree_map);
343 h->hash = htab_hash_pointer (from);
346 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
347 *(struct tree_map **) loc = h;
350 /* Return a new variable info structure consisting for a variable
351 named NAME, and using constraint graph node NODE. */
354 new_var_info (tree t, unsigned int id, const char *name)
356 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
362 ret->is_artificial_var = false;
363 ret->is_heap_var = false;
364 ret->is_special_var = false;
365 ret->is_unknown_size_var = false;
366 ret->has_union = false;
368 if (TREE_CODE (var) == SSA_NAME)
369 var = SSA_NAME_VAR (var);
370 ret->no_tbaa_pruning = (DECL_P (var)
371 && POINTER_TYPE_P (TREE_TYPE (var))
372 && DECL_NO_TBAA_P (var));
373 ret->solution = BITMAP_ALLOC (&pta_obstack);
374 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
376 ret->collapsed_to = NULL;
380 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
382 /* An expression that appears in a constraint. */
384 struct constraint_expr
386 /* Constraint type. */
387 constraint_expr_type type;
389 /* Variable we are referring to in the constraint. */
392 /* Offset, in bits, of this constraint from the beginning of
393 variables it ends up referring to.
395 IOW, in a deref constraint, we would deref, get the result set,
396 then add OFFSET to each member. */
397 unsigned HOST_WIDE_INT offset;
400 typedef struct constraint_expr ce_s;
402 DEF_VEC_ALLOC_O(ce_s, heap);
403 static void get_constraint_for (tree, VEC(ce_s, heap) **);
404 static void do_deref (VEC (ce_s, heap) **);
406 /* Our set constraints are made up of two constraint expressions, one
409 As described in the introduction, our set constraints each represent an
410 operation between set valued variables.
414 struct constraint_expr lhs;
415 struct constraint_expr rhs;
418 /* List of constraints that we use to build the constraint graph from. */
420 static VEC(constraint_t,heap) *constraints;
421 static alloc_pool constraint_pool;
425 DEF_VEC_ALLOC_I(int, heap);
427 /* The constraint graph is represented as an array of bitmaps
428 containing successor nodes. */
430 struct constraint_graph
432 /* Size of this graph, which may be different than the number of
433 nodes in the variable map. */
436 /* Explicit successors of each node. */
439 /* Implicit predecessors of each node (Used for variable
441 bitmap *implicit_preds;
443 /* Explicit predecessors of each node (Used for variable substitution). */
446 /* Indirect cycle representatives, or -1 if the node has no indirect
448 int *indirect_cycles;
450 /* Representative node for a node. rep[a] == a unless the node has
454 /* Equivalence class representative for a label. This is used for
455 variable substitution. */
458 /* Pointer equivalence label for a node. All nodes with the same
459 pointer equivalence label can be unified together at some point
460 (either during constraint optimization or after the constraint
464 /* Pointer equivalence representative for a label. This is used to
465 handle nodes that are pointer equivalent but not location
466 equivalent. We can unite these once the addressof constraints
467 are transformed into initial points-to sets. */
470 /* Pointer equivalence label for each node, used during variable
472 unsigned int *pointer_label;
474 /* Location equivalence label for each node, used during location
475 equivalence finding. */
476 unsigned int *loc_label;
478 /* Pointed-by set for each node, used during location equivalence
479 finding. This is pointed-by rather than pointed-to, because it
480 is constructed using the predecessor graph. */
483 /* Points to sets for pointer equivalence. This is *not* the actual
484 points-to sets for nodes. */
487 /* Bitmap of nodes where the bit is set if the node is a direct
488 node. Used for variable substitution. */
489 sbitmap direct_nodes;
491 /* Bitmap of nodes where the bit is set if the node is address
492 taken. Used for variable substitution. */
493 bitmap address_taken;
495 /* True if points_to bitmap for this node is stored in the hash
499 /* Number of incoming edges remaining to be processed by pointer
501 Used for variable substitution. */
502 unsigned int *number_incoming;
505 /* Vector of complex constraints for each graph node. Complex
506 constraints are those involving dereferences or offsets that are
508 VEC(constraint_t,heap) **complex;
511 static constraint_graph_t graph;
513 /* During variable substitution and the offline version of indirect
514 cycle finding, we create nodes to represent dereferences and
515 address taken constraints. These represent where these start and
517 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
518 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
520 /* Return the representative node for NODE, if NODE has been unioned
522 This function performs path compression along the way to finding
523 the representative. */
526 find (unsigned int node)
528 gcc_assert (node < graph->size);
529 if (graph->rep[node] != node)
530 return graph->rep[node] = find (graph->rep[node]);
534 /* Union the TO and FROM nodes to the TO nodes.
535 Note that at some point in the future, we may want to do
536 union-by-rank, in which case we are going to have to return the
537 node we unified to. */
540 unite (unsigned int to, unsigned int from)
542 gcc_assert (to < graph->size && from < graph->size);
543 if (to != from && graph->rep[from] != to)
545 graph->rep[from] = to;
551 /* Create a new constraint consisting of LHS and RHS expressions. */
554 new_constraint (const struct constraint_expr lhs,
555 const struct constraint_expr rhs)
557 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
563 /* Print out constraint C to FILE. */
566 dump_constraint (FILE *file, constraint_t c)
568 if (c->lhs.type == ADDRESSOF)
570 else if (c->lhs.type == DEREF)
572 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
573 if (c->lhs.offset != 0)
574 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
575 fprintf (file, " = ");
576 if (c->rhs.type == ADDRESSOF)
578 else if (c->rhs.type == DEREF)
580 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
581 if (c->rhs.offset != 0)
582 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
583 fprintf (file, "\n");
586 /* Print out constraint C to stderr. */
589 debug_constraint (constraint_t c)
591 dump_constraint (stderr, c);
594 /* Print out all constraints to FILE */
597 dump_constraints (FILE *file)
601 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
602 dump_constraint (file, c);
/* Print out all constraints to stderr.  */

void
debug_constraints (void)
{
  dump_constraints (stderr);
}
/* SOLVER FUNCTIONS

   The solver is a simple worklist solver, that works on the following
   algorithm:

   sbitmap changed_nodes = all zeroes;
   changed_count = 0;
   For each node that is not already collapsed:
       changed_count++;
       set bit in changed nodes

   while (changed_count > 0)
   {
     compute topological ordering for constraint graph

     find and collapse cycles in the constraint graph (updating
     changed if necessary)

     for each node (n) in the graph in topological order:
       changed_count--;

       Process each complex constraint associated with the node,
       updating changed if necessary.

       For each outgoing edge from n, propagate the solution from n to
       the destination of the edge, updating changed as necessary.

   }  */
642 /* Return true if two constraint expressions A and B are equal. */
645 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
647 return a.type == b.type && a.var == b.var && a.offset == b.offset;
650 /* Return true if constraint expression A is less than constraint expression
651 B. This is just arbitrary, but consistent, in order to give them an
655 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
657 if (a.type == b.type)
660 return a.offset < b.offset;
662 return a.var < b.var;
665 return a.type < b.type;
668 /* Return true if constraint A is less than constraint B. This is just
669 arbitrary, but consistent, in order to give them an ordering. */
672 constraint_less (const constraint_t a, const constraint_t b)
674 if (constraint_expr_less (a->lhs, b->lhs))
676 else if (constraint_expr_less (b->lhs, a->lhs))
679 return constraint_expr_less (a->rhs, b->rhs);
682 /* Return true if two constraints A and B are equal. */
685 constraint_equal (struct constraint a, struct constraint b)
687 return constraint_expr_equal (a.lhs, b.lhs)
688 && constraint_expr_equal (a.rhs, b.rhs);
692 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
695 constraint_vec_find (VEC(constraint_t,heap) *vec,
696 struct constraint lookfor)
704 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
705 if (place >= VEC_length (constraint_t, vec))
707 found = VEC_index (constraint_t, vec, place);
708 if (!constraint_equal (*found, lookfor))
713 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
716 constraint_set_union (VEC(constraint_t,heap) **to,
717 VEC(constraint_t,heap) **from)
722 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
724 if (constraint_vec_find (*to, *c) == NULL)
726 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
728 VEC_safe_insert (constraint_t, heap, *to, place, c);
733 /* Take a solution set SET, add OFFSET to each member of the set, and
734 overwrite SET with the result when done. */
737 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
739 bitmap result = BITMAP_ALLOC (&iteration_obstack);
743 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
745 /* If this is a properly sized variable, only add offset if it's
746 less than end. Otherwise, it is globbed to a single
749 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
751 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
752 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
755 bitmap_set_bit (result, v->id);
757 else if (get_varinfo (i)->is_artificial_var
758 || get_varinfo (i)->has_union
759 || get_varinfo (i)->is_unknown_size_var)
761 bitmap_set_bit (result, i);
765 bitmap_copy (set, result);
766 BITMAP_FREE (result);
769 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
773 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
776 return bitmap_ior_into (to, from);
782 tmp = BITMAP_ALLOC (&iteration_obstack);
783 bitmap_copy (tmp, from);
784 solution_set_add (tmp, inc);
785 res = bitmap_ior_into (to, tmp);
791 /* Insert constraint C into the list of complex constraints for graph
795 insert_into_complex (constraint_graph_t graph,
796 unsigned int var, constraint_t c)
798 VEC (constraint_t, heap) *complex = graph->complex[var];
799 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
802 /* Only insert constraints that do not already exist. */
803 if (place >= VEC_length (constraint_t, complex)
804 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
805 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
809 /* Condense two variable nodes into a single variable node, by moving
810 all associated info from SRC to TO. */
813 merge_node_constraints (constraint_graph_t graph, unsigned int to,
819 gcc_assert (find (from) == to);
821 /* Move all complex constraints from src node into to node */
822 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
824 /* In complex constraints for node src, we may have either
825 a = *src, and *src = a, or an offseted constraint which are
826 always added to the rhs node's constraints. */
828 if (c->rhs.type == DEREF)
830 else if (c->lhs.type == DEREF)
835 constraint_set_union (&graph->complex[to], &graph->complex[from]);
836 VEC_free (constraint_t, heap, graph->complex[from]);
837 graph->complex[from] = NULL;
841 /* Remove edges involving NODE from GRAPH. */
844 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
846 if (graph->succs[node])
847 BITMAP_FREE (graph->succs[node]);
850 /* Merge GRAPH nodes FROM and TO into node TO. */
853 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
856 if (graph->indirect_cycles[from] != -1)
858 /* If we have indirect cycles with the from node, and we have
859 none on the to node, the to node has indirect cycles from the
860 from node now that they are unified.
861 If indirect cycles exist on both, unify the nodes that they
862 are in a cycle with, since we know they are in a cycle with
864 if (graph->indirect_cycles[to] == -1)
865 graph->indirect_cycles[to] = graph->indirect_cycles[from];
868 /* Merge all the successor edges. */
869 if (graph->succs[from])
871 if (!graph->succs[to])
872 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
873 bitmap_ior_into (graph->succs[to],
877 clear_edges_for_node (graph, from);
881 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
882 it doesn't exist in the graph already. */
885 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
891 if (!graph->implicit_preds[to])
892 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
894 if (!bitmap_bit_p (graph->implicit_preds[to], from))
896 stats.num_implicit_edges++;
897 bitmap_set_bit (graph->implicit_preds[to], from);
901 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
902 it doesn't exist in the graph already.
903 Return false if the edge already existed, true otherwise. */
906 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
909 if (!graph->preds[to])
910 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
911 if (!bitmap_bit_p (graph->preds[to], from))
912 bitmap_set_bit (graph->preds[to], from);
915 /* Add a graph edge to GRAPH, going from FROM to TO if
916 it doesn't exist in the graph already.
917 Return false if the edge already existed, true otherwise. */
920 add_graph_edge (constraint_graph_t graph, unsigned int to,
931 if (!graph->succs[from])
932 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
933 if (!bitmap_bit_p (graph->succs[from], to))
936 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
938 bitmap_set_bit (graph->succs[from], to);
945 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
948 valid_graph_edge (constraint_graph_t graph, unsigned int src,
951 return (graph->succs[dest]
952 && bitmap_bit_p (graph->succs[dest], src));
955 /* Initialize the constraint graph structure to contain SIZE nodes. */
958 init_graph (unsigned int size)
962 graph = XCNEW (struct constraint_graph);
964 graph->succs = XCNEWVEC (bitmap, graph->size);
965 graph->indirect_cycles = XNEWVEC (int, graph->size);
966 graph->rep = XNEWVEC (unsigned int, graph->size);
967 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
968 graph->pe = XCNEWVEC (unsigned int, graph->size);
969 graph->pe_rep = XNEWVEC (int, graph->size);
971 for (j = 0; j < graph->size; j++)
974 graph->pe_rep[j] = -1;
975 graph->indirect_cycles[j] = -1;
979 /* Build the constraint graph, adding only predecessor edges right now. */
982 build_pred_graph (void)
988 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
989 graph->preds = XCNEWVEC (bitmap, graph->size);
990 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
991 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
992 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
993 graph->points_to = XCNEWVEC (bitmap, graph->size);
994 graph->eq_rep = XNEWVEC (int, graph->size);
995 graph->direct_nodes = sbitmap_alloc (graph->size);
996 graph->pt_used = sbitmap_alloc (graph->size);
997 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
998 graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
999 sbitmap_zero (graph->direct_nodes);
1000 sbitmap_zero (graph->pt_used);
1002 for (j = 0; j < FIRST_REF_NODE; j++)
1004 if (!get_varinfo (j)->is_special_var)
1005 SET_BIT (graph->direct_nodes, j);
1008 for (j = 0; j < graph->size; j++)
1009 graph->eq_rep[j] = -1;
1011 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1012 graph->indirect_cycles[j] = -1;
1014 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1016 struct constraint_expr lhs = c->lhs;
1017 struct constraint_expr rhs = c->rhs;
1018 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1019 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1021 if (lhs.type == DEREF)
1024 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1025 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1027 else if (rhs.type == DEREF)
1030 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1031 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1033 RESET_BIT (graph->direct_nodes, lhsvar);
1035 else if (rhs.type == ADDRESSOF)
1038 if (graph->points_to[lhsvar] == NULL)
1039 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1040 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1042 if (graph->pointed_by[rhsvar] == NULL)
1043 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1044 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1046 /* Implicitly, *x = y */
1047 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1049 RESET_BIT (graph->direct_nodes, rhsvar);
1050 bitmap_set_bit (graph->address_taken, rhsvar);
1052 else if (lhsvar > anything_id
1053 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1056 add_pred_graph_edge (graph, lhsvar, rhsvar);
1057 /* Implicitly, *x = *y */
1058 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1059 FIRST_REF_NODE + rhsvar);
1061 else if (lhs.offset != 0 || rhs.offset != 0)
1063 if (rhs.offset != 0)
1064 RESET_BIT (graph->direct_nodes, lhs.var);
1065 else if (lhs.offset != 0)
1066 RESET_BIT (graph->direct_nodes, rhs.var);
1071 /* Build the constraint graph, adding successor edges. */
1074 build_succ_graph (void)
1079 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1081 struct constraint_expr lhs;
1082 struct constraint_expr rhs;
1083 unsigned int lhsvar;
1084 unsigned int rhsvar;
1091 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1092 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1094 if (lhs.type == DEREF)
1096 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1097 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1099 else if (rhs.type == DEREF)
1101 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1102 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1104 else if (rhs.type == ADDRESSOF)
1107 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1108 == get_varinfo_fc (rhs.var)->id);
1109 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1111 else if (lhsvar > anything_id
1112 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1114 add_graph_edge (graph, lhsvar, rhsvar);
1120 /* Changed variables on the last iteration. */
1121 static unsigned int changed_count;
1122 static sbitmap changed;
1124 DEF_VEC_I(unsigned);
1125 DEF_VEC_ALLOC_I(unsigned,heap);
1128 /* Strongly Connected Component visitation info. */
1135 unsigned int *node_mapping;
1137 VEC(unsigned,heap) *scc_stack;
1141 /* Recursive routine to find strongly connected components in GRAPH.
1142 SI is the SCC info to store the information in, and N is the id of current
1143 graph node we are processing.
1145 This is Tarjan's strongly connected component finding algorithm, as
1146 modified by Nuutila to keep only non-root nodes on the stack.
1147 The algorithm can be found in "On finding the strongly connected
1148 connected components in a directed graph" by Esko Nuutila and Eljas
1149 Soisalon-Soininen, in Information Processing Letters volume 49,
1150 number 1, pages 9-14. */
1153 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1157 unsigned int my_dfs;
1159 SET_BIT (si->visited, n);
1160 si->dfs[n] = si->current_index ++;
1161 my_dfs = si->dfs[n];
1163 /* Visit all the successors. */
1164 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1168 if (i > LAST_REF_NODE)
1172 if (TEST_BIT (si->deleted, w))
1175 if (!TEST_BIT (si->visited, w))
1176 scc_visit (graph, si, w);
1178 unsigned int t = find (w);
1179 unsigned int nnode = find (n);
1180 gcc_assert (nnode == n);
1182 if (si->dfs[t] < si->dfs[nnode])
1183 si->dfs[n] = si->dfs[t];
1187 /* See if any components have been identified. */
1188 if (si->dfs[n] == my_dfs)
1190 if (VEC_length (unsigned, si->scc_stack) > 0
1191 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1193 bitmap scc = BITMAP_ALLOC (NULL);
1194 bool have_ref_node = n >= FIRST_REF_NODE;
1195 unsigned int lowest_node;
1198 bitmap_set_bit (scc, n);
1200 while (VEC_length (unsigned, si->scc_stack) != 0
1201 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1203 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1205 bitmap_set_bit (scc, w);
1206 if (w >= FIRST_REF_NODE)
1207 have_ref_node = true;
1210 lowest_node = bitmap_first_set_bit (scc);
1211 gcc_assert (lowest_node < FIRST_REF_NODE);
1213 /* Collapse the SCC nodes into a single node, and mark the
1215 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1217 if (i < FIRST_REF_NODE)
1219 if (unite (lowest_node, i))
1220 unify_nodes (graph, lowest_node, i, false);
1224 unite (lowest_node, i);
1225 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1229 SET_BIT (si->deleted, n);
1232 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1235 /* Unify node FROM into node TO, updating the changed count if
1236 necessary when UPDATE_CHANGED is true. */
/* NOTE(review): fragment -- braces and some statements from the original
   file are elided in this extract; all visible tokens kept verbatim.  */
1239 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1240 bool update_changed)
/* TO must already be its own representative and distinct from FROM.  */
1243 gcc_assert (to != from && find (to) == to);
1244 if (dump_file && (dump_flags & TDF_DETAILS))
1245 fprintf (dump_file, "Unifying %s to %s\n",
1246 get_varinfo (from)->name,
1247 get_varinfo (to)->name);
/* Statistics: dynamic vs. static unification counters.  */
1250 stats.unified_vars_dynamic++;
1252 stats.unified_vars_static++;
/* Fold FROM's graph edges and per-node constraints into TO.  */
1254 merge_graph_nodes (graph, to, from);
1255 merge_node_constraints (graph, to, from);
/* The no-TBAA-pruning property is sticky across unification.  */
1257 if (get_varinfo (from)->no_tbaa_pruning)
1258 get_varinfo (to)->no_tbaa_pruning = true;
1260 /* Mark TO as changed if FROM was changed. If TO was already marked
1261 as changed, decrease the changed count. */
1263 if (update_changed && TEST_BIT (changed, from))
1265 RESET_BIT (changed, from);
1266 if (!TEST_BIT (changed, to))
1267 SET_BIT (changed, to);
1270 gcc_assert (changed_count > 0);
/* Union FROM's points-to solution into TO's; a growing solution means
   TO has to be reprocessed by the solver.  */
1274 if (get_varinfo (from)->solution)
1276 /* If the solution changes because of the merging, we need to mark
1277 the variable as changed. */
1278 if (bitmap_ior_into (get_varinfo (to)->solution,
1279 get_varinfo (from)->solution))
1281 if (update_changed && !TEST_BIT (changed, to))
1283 SET_BIT (changed, to);
/* FROM's solution bitmaps are dead after the merge.  */
1288 BITMAP_FREE (get_varinfo (from)->solution);
1289 BITMAP_FREE (get_varinfo (from)->oldsolution);
1291 if (stats.iterations > 0)
1293 BITMAP_FREE (get_varinfo (to)->oldsolution);
1294 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
/* Drop any self-edge on TO that the merge may have created.  */
1297 if (valid_graph_edge (graph, to, to))
1299 if (graph->succs[to])
1300 bitmap_clear_bit (graph->succs[to], to);
1304 /* Information needed to compute the topological ordering of a graph. */
/* NOTE(review): struct fragment -- the struct header/braces and the
   `visited' member's declaration line are missing from this extract.  */
1308 /* sbitmap of visited nodes. */
1310 /* Array that stores the topological order of the graph, *in
1312 VEC(unsigned,heap) *topo_order;
1316 /* Initialize and return a topological info structure. */
1318 static struct topo_info *
1319 init_topo_info (void)
/* Sizes come from the file-global constraint graph.  The trailing
   `return ti;' line is not visible in this extract.  */
1321 size_t size = graph->size;
1322 struct topo_info *ti = XNEW (struct topo_info);
/* Nothing visited yet; the order vector grows on demand.  */
1323 ti->visited = sbitmap_alloc (size);
1324 sbitmap_zero (ti->visited);
1325 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1330 /* Free the topological sort info pointed to by TI. */
1333 free_topo_info (struct topo_info *ti)
1335 sbitmap_free (ti->visited);
1336 VEC_free (unsigned, heap, ti->topo_order);
/* NOTE(review): the free of TI itself is not visible in this extract.  */
1340 /* Visit the graph in topological order, and store the order in the
1341 topo_info structure. */
1344 topo_visit (constraint_graph_t graph, struct topo_info *ti,
/* DFS postorder: mark N visited, recurse into unvisited successors,
   then push N onto ti->topo_order.  */
1350 SET_BIT (ti->visited, n);
1352 if (graph->succs[n])
1353 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1355 if (!TEST_BIT (ti->visited, j))
1356 topo_visit (graph, ti, j);
/* N is finished only after all of its successors are.  */
1359 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1362 /* Return true if variable N + OFFSET is a legal field of N. */
1365 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1367 varinfo_t ninfo = get_varinfo (n);
1369 /* For things we've globbed to single variables, any offset into the
1370 variable acts like the entire variable, so that it becomes offset
1372 if (ninfo->is_special_var
1373 || ninfo->is_artificial_var
1374 || ninfo->is_unknown_size_var)
/* Otherwise the access is legal only if it stays inside the variable's
   full size.  */
1379 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1382 /* Process a constraint C that represents x = *y, using DELTA as the
1383 starting solution. */
/* NOTE(review): fragment -- several lines (braces, `flag' declaration,
   the early-out for ANYTHING) are elided in this extract.  */
1386 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1389 unsigned int lhs = c->lhs.var;
1391 bitmap sol = get_varinfo (lhs)->solution;
/* If y may point to ANYTHING, x may point to ANYTHING as well.  */
1395 if (bitmap_bit_p (delta, anything_id))
1397 flag = !bitmap_bit_p (sol, anything_id);
1399 bitmap_set_bit (sol, anything_id);
1402 /* For each variable j in delta (Sol(y)), add
1403 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1404 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1406 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
/* Skip field accesses that fall outside of J.  */
1407 if (type_safe (j, &roffset))
1410 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1413 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1418 /* Adding edges from the special vars is pointless.
1419 They don't have sets that can change. */
1420 if (get_varinfo (t) ->is_special_var)
1421 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1422 else if (add_graph_edge (graph, lhs, t))
1423 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1428 /* If the LHS solution changed, mark the var as changed. */
1431 get_varinfo (lhs)->solution = sol;
1432 if (!TEST_BIT (changed, lhs))
1434 SET_BIT (changed, lhs);
1440 /* Process a constraint C that represents *x = y. */
1443 do_ds_constraint (constraint_t c, bitmap delta)
1445 unsigned int rhs = c->rhs.var;
1446 bitmap sol = get_varinfo (rhs)->solution;
/* If y points to ANYTHING, everything x may point to now points to
   ANYTHING too -- propagate the bit to each target.  */
1450 if (bitmap_bit_p (sol, anything_id))
1452 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1454 varinfo_t jvi = get_varinfo (j);
1456 unsigned int loff = c->lhs.offset;
1457 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1460 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1465 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1467 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1468 if (!TEST_BIT (changed, t))
1470 SET_BIT (changed, t);
1478 /* For each member j of delta (Sol(x)), add an edge from y to j and
1479 union Sol(y) into Sol(j) */
1480 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1482 unsigned HOST_WIDE_INT loff = c->lhs.offset;
/* Special vars never change, so storing into them is a no-op.  */
1483 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1487 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1490 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1494 tmp = get_varinfo (t)->solution;
1496 if (set_union_with_increment (tmp, sol, 0))
1498 get_varinfo (t)->solution = tmp;
/* Re-read SOL: the union above may have rewritten RHS's solution
   bitmap (e.g. through unification).  */
1500 sol = get_varinfo (rhs)->solution;
1501 if (!TEST_BIT (changed, t))
1503 SET_BIT (changed, t);
1511 /* Handle a non-simple (simple meaning requires no iteration),
1512 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1515 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
/* Dispatch on the shape of the constraint: *x = ... vs x = *y vs
   x = y + offset.  */
1517 if (c->lhs.type == DEREF)
1519 if (c->rhs.type == ADDRESSOF)
1526 do_ds_constraint (c, delta);
1529 else if (c->rhs.type == DEREF)
/* x = *y; loads into special vars are pointless, skip them.  */
1532 if (!(get_varinfo (c->lhs.var)->is_special_var))
1533 do_sd_constraint (graph, c, delta);
/* Remaining case: scalar copy with a field offset.  */
1541 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1542 solution = get_varinfo (c->rhs.var)->solution;
1543 tmp = get_varinfo (c->lhs.var)->solution;
1545 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1549 get_varinfo (c->lhs.var)->solution = tmp;
1550 if (!TEST_BIT (changed, c->lhs.var))
1552 SET_BIT (changed, c->lhs.var);
1559 /* Initialize and return a new SCC info structure. */
1561 static struct scc_info *
1562 init_scc_info (size_t size)
1564 struct scc_info *si = XNEW (struct scc_info);
1567 si->current_index = 0;
1568 si->visited = sbitmap_alloc (size);
1569 sbitmap_zero (si->visited);
1570 si->deleted = sbitmap_alloc (size);
1571 sbitmap_zero (si->deleted);
/* dfs[] is zero-initialized (XCNEWVEC); node_mapping starts as the
   identity mapping.  */
1572 si->node_mapping = XNEWVEC (unsigned int, size);
1573 si->dfs = XCNEWVEC (unsigned int, size);
1575 for (i = 0; i < size; i++)
1576 si->node_mapping[i] = i;
1578 si->scc_stack = VEC_alloc (unsigned, heap, 1);
/* NOTE(review): the `return si;' line is not visible in this extract.  */
1582 /* Free an SCC info structure pointed to by SI */
1585 free_scc_info (struct scc_info *si)
1587 sbitmap_free (si->visited);
1588 sbitmap_free (si->deleted);
1589 free (si->node_mapping);
/* NOTE(review): freeing si->dfs and SI itself is not visible here.  */
1591 VEC_free (unsigned, heap, si->scc_stack);
1596 /* Find indirect cycles in GRAPH that occur, using strongly connected
1597 components, and note them in the indirect cycles map.
1599 This technique comes from Ben Hardekopf and Calvin Lin,
1600 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1601 Lines of Code", submitted to PLDI 2007. */
1604 find_indirect_cycles (constraint_graph_t graph)
1607 unsigned int size = graph->size;
1608 struct scc_info *si = init_scc_info (size);
/* Only visit unvisited representative nodes below the REF-node range.  */
1610 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1611 if (!TEST_BIT (si->visited, i) && find (i) == i)
1612 scc_visit (graph, si, i);
/* NOTE(review): the free_scc_info call is not visible in this extract.  */
1617 /* Compute a topological ordering for GRAPH, and store the result in the
1618 topo_info structure TI. */
1621 compute_topo_order (constraint_graph_t graph,
1622 struct topo_info *ti)
1625 unsigned int size = graph->size;
/* Start a DFS from every unvisited representative node.  */
1627 for (i = 0; i != size; ++i)
1628 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1629 topo_visit (graph, ti, i);
1632 /* Structure used to for hash value numbering of pointer equivalence
1635 typedef struct equiv_class_label
/* Maps a bitmap of labels to its assigned equivalence class id;
   the cached hashcode and labels members are elided in this extract.  */
1637 unsigned int equivalence_class;
1640 } *equiv_class_label_t;
1641 typedef const struct equiv_class_label *const_equiv_class_label_t;
1643 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1645 static htab_t pointer_equiv_class_table;
1647 /* A hashtable for mapping a bitmap of labels->location equivalence
1649 static htab_t location_equiv_class_table;
1651 /* Hash function for a equiv_class_label_t */
1654 equiv_class_label_hash (const void *p)
1656 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
/* The hash was precomputed from the label bitmap when the entry was made.  */
1657 return ecl->hashcode;
1660 /* Equality function for two equiv_class_label_t's. */
1663 equiv_class_label_eq (const void *p1, const void *p2)
1665 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1666 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
/* Two entries are equal iff their label bitmaps are identical.  */
1667 return bitmap_equal_p (eql1->labels, eql2->labels);
1670 /* Lookup a equivalence class in TABLE by the bitmap of LABELS it
1674 equiv_class_lookup (htab_t table, bitmap labels)
1677 struct equiv_class_label ecl;
1679 ecl.labels = labels;
1680 ecl.hashcode = bitmap_hash (labels);
/* NO_INSERT: probe only, never create a slot.  The not-found return
   path (0) is elided in this extract.  */
1682 slot = htab_find_slot_with_hash (table, &ecl,
1683 ecl.hashcode, NO_INSERT);
1687 return ((equiv_class_label_t) *slot)->equivalence_class;
1691 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1695 equiv_class_add (htab_t table, unsigned int equivalence_class,
/* Allocates a fresh entry; the table owns it (freed by the table's
   del function on htab_delete).  */
1699 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1701 ecl->labels = labels;
1702 ecl->equivalence_class = equivalence_class;
1703 ecl->hashcode = bitmap_hash (labels);
1705 slot = htab_find_slot_with_hash (table, ecl,
1706 ecl->hashcode, INSERT);
/* The class must not already be present.  */
1707 gcc_assert (!*slot);
1708 *slot = (void *) ecl;
1711 /* Perform offline variable substitution.
1713 This is a worst case quadratic time way of identifying variables
1714 that must have equivalent points-to sets, including those caused by
1715 static cycles, and single entry subgraphs, in the constraint graph.
1717 The technique is described in "Exploiting Pointer and Location
1718 Equivalence to Optimize Pointer Analysis. In the 14th International
1719 Static Analysis Symposium (SAS), August 2007." It is known as the
1720 "HU" algorithm, and is equivalent to value numbering the collapsed
1721 constraint graph including evaluating unions.
1723 The general method of finding equivalence classes is as follows:
1724 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1725 Initialize all non-REF nodes to be direct nodes.
1726 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1728 For each constraint containing the dereference, we also do the same
1731 We then compute SCC's in the graph and unify nodes in the same SCC,
1734 For each non-collapsed node x:
1735 Visit all unvisited explicit incoming edges.
1736 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1738 Lookup the equivalence class for pts(x).
1739 If we found one, equivalence_class(x) = found class.
1740 Otherwise, equivalence_class(x) = new class, and new_class is
1741 added to the lookup table.
1743 All direct nodes with the same equivalence class can be replaced
1744 with a single representative node.
1745 All unlabeled nodes (label == 0) are not pointers and all edges
1746 involving them can be eliminated.
1747 We perform these optimizations during rewrite_constraints
1749 In addition to pointer equivalence class finding, we also perform
1750 location equivalence class finding. This is the set of variables
1751 that always appear together in points-to sets. We use this to
1752 compress the size of the points-to sets. */
1754 /* Current maximum pointer equivalence class id. */
/* Reset to 1 at the start of each perform_var_substitution pass;
   label 0 is reserved to mean "non-pointer".  */
1755 static int pointer_equiv_class;
1757 /* Current maximum location equivalence class id. */
1758 static int location_equiv_class;
1760 /* Recursive routine to find strongly connected components in GRAPH,
1761 and label it's nodes with DFS numbers. */
/* NOTE(review): fragment -- braces and some statements are elided; all
   visible tokens kept verbatim.  Tarjan-style SCC over both explicit
   and implicit predecessor edges, collapsing each SCC via node_mapping.  */
1764 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1768 unsigned int my_dfs;
1770 gcc_assert (si->node_mapping[n] == n);
1771 SET_BIT (si->visited, n);
1772 si->dfs[n] = si->current_index ++;
1773 my_dfs = si->dfs[n];
1775 /* Visit all the successors. */
1776 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1778 unsigned int w = si->node_mapping[i];
1780 if (TEST_BIT (si->deleted, w))
1783 if (!TEST_BIT (si->visited, w))
1784 condense_visit (graph, si, w);
1786 unsigned int t = si->node_mapping[w];
1787 unsigned int nnode = si->node_mapping[n];
1788 gcc_assert (nnode == n);
/* Propagate the lowest reachable DFS number back to N.  */
1790 if (si->dfs[t] < si->dfs[nnode])
1791 si->dfs[n] = si->dfs[t];
1795 /* Visit all the implicit predecessors. */
1796 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1798 unsigned int w = si->node_mapping[i];
1800 if (TEST_BIT (si->deleted, w))
1803 if (!TEST_BIT (si->visited, w))
1804 condense_visit (graph, si, w);
1806 unsigned int t = si->node_mapping[w];
1807 unsigned int nnode = si->node_mapping[n];
1808 gcc_assert (nnode == n);
1810 if (si->dfs[t] < si->dfs[nnode])
1811 si->dfs[n] = si->dfs[t];
1815 /* See if any components have been identified. */
1816 if (si->dfs[n] == my_dfs)
/* N is the root of an SCC: pop members and merge them into N.  */
1818 while (VEC_length (unsigned, si->scc_stack) != 0
1819 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1821 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1822 si->node_mapping[w] = n;
/* The merged node is direct only if every member was direct.  */
1824 if (!TEST_BIT (graph->direct_nodes, w))
1825 RESET_BIT (graph->direct_nodes, n);
1827 /* Unify our nodes. */
1828 if (graph->preds[w])
1830 if (!graph->preds[n])
1831 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1832 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1834 if (graph->implicit_preds[w])
1836 if (!graph->implicit_preds[n])
1837 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1838 bitmap_ior_into (graph->implicit_preds[n],
1839 graph->implicit_preds[w]);
1841 if (graph->points_to[w])
1843 if (!graph->points_to[n])
1844 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1845 bitmap_ior_into (graph->points_to[n],
1846 graph->points_to[w]);
/* Count incoming edges per representative for later freeing of
   points_to sets in label_visit.  */
1848 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1850 unsigned int rep = si->node_mapping[i];
1851 graph->number_incoming[rep]++;
1854 SET_BIT (si->deleted, n);
/* Not an SCC root yet -- keep N on the stack.  */
1857 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1860 /* Label pointer equivalences. */
/* NOTE(review): fragment -- braces and some statements are elided.
   Assigns each node a pointer-equivalence label by value-numbering the
   union of its predecessors' points-to sets.  */
1863 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1867 SET_BIT (si->visited, n);
1869 if (!graph->points_to[n])
1870 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1872 /* Label and union our incoming edges's points to sets. */
1873 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1875 unsigned int w = si->node_mapping[i];
1876 if (!TEST_BIT (si->visited, w))
1877 label_visit (graph, si, w);
1879 /* Skip unused edges */
1880 if (w == n || graph->pointer_label[w] == 0)
/* Still account for the skipped edge in the incoming-edge count.  */
1882 graph->number_incoming[w]--;
1885 if (graph->points_to[w])
1886 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
1888 /* If all incoming edges to w have been processed and
1889 graph->points_to[w] was not stored in the hash table, we can
1891 graph->number_incoming[w]--;
1892 if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
1894 BITMAP_FREE (graph->points_to[w]);
1897 /* Indirect nodes get fresh variables. */
1898 if (!TEST_BIT (graph->direct_nodes, n))
1899 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
/* An empty points-to set keeps label 0, i.e. "non-pointer".  */
1901 if (!bitmap_empty_p (graph->points_to[n]))
1903 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
1904 graph->points_to[n]);
/* No existing class: mint a fresh label and register the set.  */
1907 SET_BIT (graph->pt_used, n);
1908 label = pointer_equiv_class++;
1909 equiv_class_add (pointer_equiv_class_table,
1910 label, graph->points_to[n]);
1912 graph->pointer_label[n] = label;
1916 /* Perform offline variable substitution, discovering equivalence
1917 classes, and eliminating non-pointer variables. */
/* NOTE(review): fragment -- braces and some statements are elided; the
   `return si;' line is not visible here.  */
1919 static struct scc_info *
1920 perform_var_substitution (constraint_graph_t graph)
1923 unsigned int size = graph->size;
1924 struct scc_info *si = init_scc_info (size);
/* The equivalence-class tables and labels are rebuilt from scratch
   on every pass; label 0 is reserved for non-pointers.  */
1926 bitmap_obstack_initialize (&iteration_obstack);
1927 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
1928 equiv_class_label_eq, free);
1929 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
1930 equiv_class_label_eq, free);
1931 pointer_equiv_class = 1;
1932 location_equiv_class = 1;
1934 /* Condense the nodes, which means to find SCC's, count incoming
1935 predecessors, and unite nodes in SCC's. */
1936 for (i = 0; i < FIRST_REF_NODE; i++)
1937 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1938 condense_visit (graph, si, si->node_mapping[i]);
/* Reuse the visited bitmap for the labeling walk.  */
1940 sbitmap_zero (si->visited);
1941 /* Actually the label the nodes for pointer equivalences */
1942 for (i = 0; i < FIRST_REF_NODE; i++)
1943 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1944 label_visit (graph, si, si->node_mapping[i]);
1946 /* Calculate location equivalence labels. */
1947 for (i = 0; i < FIRST_REF_NODE; i++)
1954 if (!graph->pointed_by[i])
1956 pointed_by = BITMAP_ALLOC (&iteration_obstack);
1958 /* Translate the pointed-by mapping for pointer equivalence
1960 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
1962 bitmap_set_bit (pointed_by,
1963 graph->pointer_label[si->node_mapping[j]]);
1965 /* The original pointed_by is now dead. */
1966 BITMAP_FREE (graph->pointed_by[i]);
1968 /* Look up the location equivalence label if one exists, or make
1970 label = equiv_class_lookup (location_equiv_class_table,
1974 label = location_equiv_class++;
1975 equiv_class_add (location_equiv_class_table,
1980 if (dump_file && (dump_flags & TDF_DETAILS))
1981 fprintf (dump_file, "Found location equivalence for node %s\n",
1982 get_varinfo (i)->name);
/* A found class means the translated set is no longer needed.  */
1983 BITMAP_FREE (pointed_by);
1985 graph->loc_label[i] = label;
1989 if (dump_file && (dump_flags & TDF_DETAILS))
1990 for (i = 0; i < FIRST_REF_NODE; i++)
1992 bool direct_node = TEST_BIT (graph->direct_nodes, i);
1994 "Equivalence classes for %s node id %d:%s are pointer: %d"
1996 direct_node ? "Direct node" : "Indirect node", i,
1997 get_varinfo (i)->name,
1998 graph->pointer_label[si->node_mapping[i]],
1999 graph->loc_label[si->node_mapping[i]]);
2002 /* Quickly eliminate our non-pointer variables. */
2004 for (i = 0; i < FIRST_REF_NODE; i++)
2006 unsigned int node = si->node_mapping[i];
/* Label 0 means the node can never contain a pointer.  */
2008 if (graph->pointer_label[node] == 0)
2010 if (dump_file && (dump_flags & TDF_DETAILS))
2012 "%s is a non-pointer variable, eliminating edges.\n",
2013 get_varinfo (node)->name);
2014 stats.nonpointer_vars++;
2015 clear_edges_for_node (graph, node);
2022 /* Free information that was only necessary for variable
/* Releases all the scratch arrays, bitmaps, and hash tables used by
   perform_var_substitution.  */
2026 free_var_substitution_info (struct scc_info *si)
/* NOTE(review): the free_scc_info (si) call is not visible in this
   extract.  */
2029 free (graph->pointer_label);
2030 free (graph->loc_label);
2031 free (graph->pointed_by);
2032 free (graph->points_to);
2033 free (graph->number_incoming);
2034 free (graph->eq_rep);
2035 sbitmap_free (graph->direct_nodes);
2036 sbitmap_free (graph->pt_used);
2037 htab_delete (pointer_equiv_class_table);
2038 htab_delete (location_equiv_class_table);
2039 bitmap_obstack_release (&iteration_obstack);
2042 /* Return an existing node that is equivalent to NODE, which has
2043 equivalence class LABEL, if one exists. Return NODE otherwise. */
2046 find_equivalent_node (constraint_graph_t graph,
2047 unsigned int node, unsigned int label)
2049 /* If the address version of this variable is unused, we can
2050 substitute it for anything else with the same label.
2051 Otherwise, we know the pointers are equivalent, but not the
2052 locations, and we can unite them later. */
2054 if (!bitmap_bit_p (graph->address_taken, node))
2056 gcc_assert (label < graph->size);
2058 if (graph->eq_rep[label] != -1)
2060 /* Unify the two variables since we know they are equivalent. */
2061 if (unite (graph->eq_rep[label], node))
2062 unify_nodes (graph, graph->eq_rep[label], node, false);
2063 return graph->eq_rep[label];
/* First node seen with this label becomes its representative.  */
2067 graph->eq_rep[label] = node;
2068 graph->pe_rep[label] = node;
/* Address-taken: record the pointer-equivalence label only; the
   actual unification happens later in unite_pointer_equivalences.  */
2073 gcc_assert (label < graph->size);
2074 graph->pe[node] = label;
2075 if (graph->pe_rep[label] == -1)
2076 graph->pe_rep[label] = node;
2082 /* Unite pointer equivalent but not location equivalent nodes in
2083 GRAPH. This may only be performed once variable substitution is
2087 unite_pointer_equivalences (constraint_graph_t graph)
2091 /* Go through the pointer equivalences and unite them to their
2092 representative, if they aren't already. */
2093 for (i = 0; i < FIRST_REF_NODE; i++)
2095 unsigned int label = graph->pe[i];
2098 int label_rep = graph->pe_rep[label];
/* -1 means no representative was recorded for this label.  */
2100 if (label_rep == -1)
/* Map the representative through the union-find before uniting.  */
2103 label_rep = find (label_rep);
2104 if (label_rep >= 0 && unite (label_rep, find (i)))
2105 unify_nodes (graph, label_rep, i, false);
2110 /* Move complex constraints to the GRAPH nodes they belong to. */
2113 move_complex_constraints (constraint_graph_t graph)
2118 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2122 struct constraint_expr lhs = c->lhs;
2123 struct constraint_expr rhs = c->rhs;
/* *x = y constraints live on the lhs variable's node ...  */
2125 if (lhs.type == DEREF)
2127 insert_into_complex (graph, lhs.var, c);
/* ... x = *y constraints on the rhs variable's node.  */
2129 else if (rhs.type == DEREF)
2131 if (!(get_varinfo (lhs.var)->is_special_var))
2132 insert_into_complex (graph, rhs.var, c);
/* Offsetted scalar copies are also complex and attach to the rhs.  */
2134 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2135 && (lhs.offset != 0 || rhs.offset != 0))
2137 insert_into_complex (graph, rhs.var, c);
2144 /* Optimize and rewrite complex constraints while performing
2145 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2146 result of perform_variable_substitution. */
2149 rewrite_constraints (constraint_graph_t graph,
2150 struct scc_info *si)
/* Sanity check: at this point nothing may have been unified yet.  */
2156 for (j = 0; j < graph->size; j++)
2157 gcc_assert (find (j) == j);
2159 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2161 struct constraint_expr lhs = c->lhs;
2162 struct constraint_expr rhs = c->rhs;
2163 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2164 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2165 unsigned int lhsnode, rhsnode;
2166 unsigned int lhslabel, rhslabel;
2168 lhsnode = si->node_mapping[lhsvar];
2169 rhsnode = si->node_mapping[rhsvar];
2170 lhslabel = graph->pointer_label[lhsnode];
2171 rhslabel = graph->pointer_label[rhsnode];
2173 /* See if it is really a non-pointer variable, and if so, ignore
/* A label of 0 marks a proven non-pointer; its constraints are
   dropped (slot replaced by NULL in the constraints vector).  */
2177 if (dump_file && (dump_flags & TDF_DETAILS))
2180 fprintf (dump_file, "%s is a non-pointer variable,"
2181 "ignoring constraint:",
2182 get_varinfo (lhs.var)->name);
2183 dump_constraint (dump_file, c);
2185 VEC_replace (constraint_t, constraints, i, NULL);
2191 if (dump_file && (dump_flags & TDF_DETAILS))
2194 fprintf (dump_file, "%s is a non-pointer variable,"
2195 "ignoring constraint:",
2196 get_varinfo (rhs.var)->name);
2197 dump_constraint (dump_file, c);
2199 VEC_replace (constraint_t, constraints, i, NULL);
/* Rewrite both sides to their pointer-equivalence representatives.  */
2203 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2204 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2205 c->lhs.var = lhsvar;
2206 c->rhs.var = rhsvar;
2211 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2212 part of an SCC, false otherwise. */
2215 eliminate_indirect_cycles (unsigned int node)
2217 if (graph->indirect_cycles[node] != -1
2218 && !bitmap_empty_p (get_varinfo (node)->solution))
2221 VEC(unsigned,heap) *queue = NULL;
2223 unsigned int to = find (graph->indirect_cycles[node]);
2226 /* We can't touch the solution set and call unify_nodes
2227 at the same time, because unify_nodes is going to do
2228 bitmap unions into it. */
/* So: first collect the representatives to merge ...  */
2230 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2232 if (find (i) == i && i != to)
2235 VEC_safe_push (unsigned, heap, queue, i);
/* ... then unify them into TO in a second pass.  */
2240 VEC_iterate (unsigned, queue, queuepos, i);
2243 unify_nodes (graph, to, i, true);
2245 VEC_free (unsigned, heap, queue);
/* NOTE(review): the return statements are not visible in this extract.  */
2251 /* Solve the constraint graph GRAPH using our worklist solver.
2252 This is based on the PW* family of solvers from the "Efficient Field
2253 Sensitive Pointer Analysis for C" paper.
2254 It works by iterating over all the graph nodes, processing the complex
2255 constraints and propagating the copy constraints, until everything stops
2256 changed. This corresponds to steps 6-8 in the solving list given above. */
/* NOTE(review): fragment -- braces and some statements are elided; all
   visible tokens kept verbatim.  */
2259 solve_graph (constraint_graph_t graph)
2261 unsigned int size = graph->size;
2266 changed = sbitmap_alloc (size);
2267 sbitmap_zero (changed);
2269 /* Mark all initial non-collapsed nodes as changed. */
2270 for (i = 0; i < size; i++)
2272 varinfo_t ivi = get_varinfo (i);
/* Only representatives with a non-empty solution and outgoing work
   (successors or complex constraints) need processing.  */
2273 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2274 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2275 || VEC_length (constraint_t, graph->complex[i]) > 0))
2277 SET_BIT (changed, i);
2282 /* Allocate a bitmap to be used to store the changed bits. */
2283 pts = BITMAP_ALLOC (&pta_obstack);
/* Outer fixed-point loop: iterate until no node is marked changed.  */
2285 while (changed_count > 0)
2288 struct topo_info *ti = init_topo_info ();
2291 bitmap_obstack_initialize (&iteration_obstack);
2293 compute_topo_order (graph, ti);
2295 while (VEC_length (unsigned, ti->topo_order) != 0)
2298 i = VEC_pop (unsigned, ti->topo_order);
2300 /* If this variable is not a representative, skip it. */
2304 /* In certain indirect cycle cases, we may merge this
2305 variable to another. */
2306 if (eliminate_indirect_cycles (i) && find (i) != i)
2309 /* If the node has changed, we need to process the
2310 complex constraints and outgoing edges again. */
2311 if (TEST_BIT (changed, i))
2316 VEC(constraint_t,heap) *complex = graph->complex[i];
2317 bool solution_empty;
2319 RESET_BIT (changed, i);
2322 /* Compute the changed set of solution bits. */
/* PTS = solution \ oldsolution: only the delta is propagated.  */
2323 bitmap_and_compl (pts, get_varinfo (i)->solution,
2324 get_varinfo (i)->oldsolution);
2326 if (bitmap_empty_p (pts))
2329 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2331 solution = get_varinfo (i)->solution;
2332 solution_empty = bitmap_empty_p (solution);
2334 /* Process the complex constraints */
2335 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2337 /* XXX: This is going to unsort the constraints in
2338 some cases, which will occasionally add duplicate
2339 constraints during unification. This does not
2340 affect correctness. */
2341 c->lhs.var = find (c->lhs.var);
2342 c->rhs.var = find (c->rhs.var);
2344 /* The only complex constraint that can change our
2345 solution to non-empty, given an empty solution,
2346 is a constraint where the lhs side is receiving
2347 some set from elsewhere. */
2348 if (!solution_empty || c->lhs.type != DEREF)
2349 do_complex_constraint (graph, c, pts);
2352 solution_empty = bitmap_empty_p (solution);
2354 if (!solution_empty)
2358 /* Propagate solution to all successors. */
2359 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2365 unsigned int to = find (j);
2366 tmp = get_varinfo (to)->solution;
2369 /* Don't try to propagate to ourselves. */
/* Union only the delta (PTS) into the successor's solution.  */
2373 flag = set_union_with_increment (tmp, pts, 0);
2377 get_varinfo (to)->solution = tmp;
2378 if (!TEST_BIT (changed, to))
2380 SET_BIT (changed, to);
/* Per-iteration scratch memory is released each round.  */
2388 free_topo_info (ti);
2389 bitmap_obstack_release (&iteration_obstack);
2393 sbitmap_free (changed);
2394 bitmap_obstack_release (&oldpta_obstack);
2397 /* Map from trees to variable infos. */
/* Keyed by tree node pointer; values are varinfo_t.  */
2398 static struct pointer_map_t *vi_for_tree;
2401 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2404 insert_vi_for_tree (tree t, varinfo_t vi)
2406 void **slot = pointer_map_insert (vi_for_tree, t);
/* T must not already have a varinfo.  The assignment of VI into the
   slot is not visible in this extract.  */
2408 gcc_assert (*slot == NULL);
2412 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2413 exist in the map, return NULL, otherwise, return the varinfo we found. */
2416 lookup_vi_for_tree (tree t)
/* pointer_map_contains does a pure lookup -- no slot is created.  */
2418 void **slot = pointer_map_contains (vi_for_tree, t);
2422 return (varinfo_t) *slot;
2425 /* Return a printable name for DECL */
2428 alias_get_name (tree decl)
2430 const char *res = get_name (decl);
2432 int num_printed = 0;
/* SSA names are printed as "<base>_<version>", other decls without a
   usable name as "D.<uid>".  */
2441 if (TREE_CODE (decl) == SSA_NAME)
2443 num_printed = asprintf (&temp, "%s_%u",
2444 alias_get_name (SSA_NAME_VAR (decl)),
2445 SSA_NAME_VERSION (decl));
2447 else if (DECL_P (decl))
2449 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
/* asprintf returns a negative count on failure; only then do we keep
   the get_name result.  The free of temp is not visible here.  */
2451 if (num_printed > 0)
2453 res = ggc_strdup (temp);
2459 /* Find the variable id for tree T in the map.
2460 If T doesn't exist in the map, create an entry for it and return it. */
2463 get_vi_for_tree (tree t)
2465 void **slot = pointer_map_contains (vi_for_tree, t);
/* Miss: lazily create the variable info (which registers it).  */
2467 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2469 return (varinfo_t) *slot;
2472 /* Get a constraint expression from an SSA_VAR_P node. */
2474 static struct constraint_expr
2475 get_constraint_exp_from_ssa_var (tree t)
2477 struct constraint_expr cexpr;
2479 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2481 /* For parameters, get at the points-to set for the actual parm
/* Default-def SSA names of PARM_DECLs alias the parameter itself.  */
2483 if (TREE_CODE (t) == SSA_NAME
2484 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2485 && SSA_NAME_IS_DEFAULT_DEF (t))
2486 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2488 cexpr.type = SCALAR;
2490 cexpr.var = get_vi_for_tree (t)->id;
2491 /* If we determine the result is "anything", and we know this is readonly,
2492 say it points to readonly memory instead. */
2493 if (cexpr.var == anything_id && TREE_READONLY (t))
2495 cexpr.type = ADDRESSOF;
2496 cexpr.var = readonly_id;
/* NOTE(review): the trailing offset assignment / return of cexpr is
   not visible in this extract.  */
2503 /* Process a completed constraint T, and add it to the constraint
2504 list. FROM_CALL is true if this is a constraint coming from a
2505 call, which means any DEREFs we see are "may-deref's", not
/* NOTE(review): fragment -- braces and some statements are elided;
   tokens below kept verbatim.  */
2509 process_constraint_1 (constraint_t t, bool from_call)
2511 struct constraint_expr rhs = t->rhs;
2512 struct constraint_expr lhs = t->lhs;
2514 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2515 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
/* Field-insensitive mode drops all offsets (elided here).  */
2517 if (!use_field_sensitive)
2523 /* ANYTHING == ANYTHING is pointless. */
2524 if (lhs.var == anything_id && rhs.var == anything_id)
2527 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
2528 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
/* Re-process the swapped constraint.  */
2533 process_constraint_1 (t, from_call);
2535 /* This can happen in our IR with things like n->a = *p */
2536 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2538 /* Split into tmp = *rhs, *lhs = tmp */
2539 tree rhsdecl = get_varinfo (rhs.var)->decl;
2540 tree pointertype = TREE_TYPE (rhsdecl);
2541 tree pointedtotype = TREE_TYPE (pointertype);
2542 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2543 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2545 /* If this is an aggregate of known size, we should have passed
2546 this off to do_structure_copy, and it should have broken it
2548 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2549 || get_varinfo (rhs.var)->is_unknown_size_var);
2551 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2552 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2554 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2556 /* Split into tmp = &rhs, *lhs = tmp */
2557 tree rhsdecl = get_varinfo (rhs.var)->decl;
2558 tree pointertype = TREE_TYPE (rhsdecl);
2559 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2560 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2562 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2563 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
/* Base case: a simple constraint, appended to the global list.
   ADDRESSOF constraints never carry an offset.  */
2567 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2568 VEC_safe_push (constraint_t, heap, constraints, t);
2573 /* Process constraint T, performing various simplifications and then
2574 adding it to our list of overall constraints. */
/* Thin wrapper: non-call constraints go through process_constraint_1.  */
2577 process_constraint (constraint_t t)
2579 process_constraint_1 (t, false);
2582 /* Return true if T is a variable of a type that could contain
/* Pointers, aggregates (which may embed pointers), and complex types
   all qualify.  The return statements are elided in this extract.  */
2586 could_have_pointers (tree t)
2588 tree type = TREE_TYPE (t);
2590 if (POINTER_TYPE_P (type)
2591 || AGGREGATE_TYPE_P (type)
2592 || TREE_CODE (type) == COMPLEX_TYPE)
2598 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2601 static unsigned HOST_WIDE_INT
2602 bitpos_of_field (const tree fdecl)
/* Variable-position fields cannot be expressed as a constant bitpos;
   the failure return value for that case is elided in this extract.  */
2605 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2606 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
/* Byte offset * 8 plus the residual bit offset.  */
2609 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2610 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2614 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
2617 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2620 HOST_WIDE_INT bitsize = -1;
2621 HOST_WIDE_INT bitmaxsize = -1;
2622 HOST_WIDE_INT bitpos;
2624 struct constraint_expr *result;
2625 unsigned int beforelength = VEC_length (ce_s, *results);
2627 /* Some people like to do cute things like take the address of
2630 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2631 forzero = TREE_OPERAND (forzero, 0);
/* If the innermost base is the literal constant zero, this is a
   dereference of a null-ish constant; model it with integer_id.  */
2633 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2635 struct constraint_expr temp;
2638 temp.var = integer_id;
2640 VEC_safe_push (ce_s, heap, *results, &temp);
/* Strip the reference down to its base object and record the accessed
   bit range: [bitpos, bitpos + bitmaxsize).  */
2644 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2646 get_constraint_for (t, results);
2647 result = VEC_last (ce_s, *results);
2648 result->offset = bitpos;
/* The base must have produced exactly one constraint for the offset
   adjustment below to make sense.  */
2650 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2652 /* This can also happen due to weird offsetof type macros. */
2653 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2654 result->type = SCALAR;
2656 if (result->type == SCALAR)
2658 /* In languages like C, you can access one past the end of an
2659 array. You aren't allowed to dereference it, so we can
2660 ignore this constraint. When we handle pointer subtraction,
2661 we may have to do something cute here. */
2663 if (result->offset < get_varinfo (result->var)->fullsize
2666 /* It's also not true that the constraint will actually start at the
2667 right offset, it may start in some padding. We only care about
2668 setting the constraint to the first actual field it touches, so
/* Walk the field chain of the base variable and retarget the
   constraint at the first field whose extent overlaps the access.  */
2671 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2673 if (ranges_overlap_p (curr->offset, curr->size,
2674 result->offset, bitmaxsize))
2676 result->var = curr->id;
2680 /* assert that we found *some* field there. The user couldn't be
2681 accessing *only* padding. */
2682 /* Still the user could access one past the end of an array
2683 embedded in a struct resulting in accessing *only* padding. */
2684 gcc_assert (curr || ref_contains_array_ref (orig_t));
2686 else if (bitmaxsize == 0)
2688 if (dump_file && (dump_flags & TDF_DETAILS))
2689 fprintf (dump_file, "Access to zero-sized part of variable,"
2693 if (dump_file && (dump_flags & TDF_DETAILS))
2694 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
/* bitmaxsize == -1 means get_ref_base_and_extent could not bound the
   access (variable-sized reference).  */
2698 else if (bitmaxsize == -1)
2700 /* We can't handle DEREF constraints with unknown size, we'll
2701 get the wrong answer. Punt and return anything. */
2702 result->var = anything_id;
2708 /* Dereference the constraint expression CONS, and return the result.
2709 DEREF (ADDRESSOF) = SCALAR
2710 DEREF (SCALAR) = DEREF
2711 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2712 This is needed so that we can handle dereferencing DEREF constraints. */
2715 do_deref (VEC (ce_s, heap) **constraints)
2717 struct constraint_expr *c;
/* Apply one level of dereference to every constraint in the vector,
   in place, per the table in the header comment above.  */
2720 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2722 if (c->type == SCALAR)
2724 else if (c->type == ADDRESSOF)
2726 else if (c->type == DEREF)
/* Double dereference: introduce a temporary, constrain it to the
   current DEREF, and dereference the temporary instead.  */
2728 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2729 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2730 process_constraint (new_constraint (tmplhs, *c));
2731 c->var = tmplhs.var;
2738 /* Given a tree T, return the constraint expression for it. */
2741 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2743 struct constraint_expr temp;
2745 /* x = integer is all glommed to a single variable, which doesn't
2746 point to anything by itself. That is, of course, unless it is an
2747 integer constant being treated as a pointer, in which case, we
2748 will return that this is really the addressof anything. This
2749 happens below, since it will fall into the default case. The only
2750 case we know something about an integer treated like a pointer is
2751 when it is the NULL pointer, and then we just say it points to
2753 if (TREE_CODE (t) == INTEGER_CST
2754 && integer_zerop (t))
2756 temp.var = nothing_id;
2757 temp.type = ADDRESSOF;
2759 VEC_safe_push (ce_s, heap, *results, &temp);
2763 /* String constants are read-only. */
2764 if (TREE_CODE (t) == STRING_CST)
2766 temp.var = readonly_id;
2769 VEC_safe_push (ce_s, heap, *results, &temp);
/* Dispatch on the tree code class of T.  */
2773 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2775 case tcc_expression:
2778 switch (TREE_CODE (t))
/* ADDR_EXPR: compute the constraint for the operand, then turn
   DEREFs into SCALARs and SCALARs into ADDRESSOFs.  */
2782 struct constraint_expr *c;
2784 tree exp = TREE_OPERAND (t, 0);
2785 tree pttype = TREE_TYPE (TREE_TYPE (t));
2787 get_constraint_for (exp, results);
2790 /* Complex types are special. Taking the address of one
2791 allows you to access either part of it through that
2793 if (VEC_length (ce_s, *results) == 1 &&
2794 TREE_CODE (pttype) == COMPLEX_TYPE)
2796 struct constraint_expr *origrhs;
2798 struct constraint_expr tmp;
2800 gcc_assert (VEC_length (ce_s, *results) == 1);
2801 origrhs = VEC_last (ce_s, *results);
2803 VEC_pop (ce_s, *results);
2804 origvar = get_varinfo (origrhs->var);
/* Push one constraint per sub-field of the complex variable so
   both parts are reachable through the address.  */
2805 for (; origvar; origvar = origvar->next)
2807 tmp.var = origvar->id;
2808 VEC_safe_push (ce_s, heap, *results, &tmp);
2812 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2814 if (c->type == DEREF)
2817 c->type = ADDRESSOF;
2823 /* XXX: In interprocedural mode, if we didn't have the
2824 body, we would need to do *each pointer argument =
/* Calls to malloc-like/alloca-like functions produce a fresh
   heap variable; the result points to it.  */
2826 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2829 tree heapvar = heapvar_lookup (t);
2831 if (heapvar == NULL)
2833 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2834 DECL_EXTERNAL (heapvar) = 1;
2835 get_var_ann (heapvar)->is_heapvar = 1;
2836 if (gimple_referenced_vars (cfun))
2837 add_referenced_var (heapvar);
2838 heapvar_insert (t, heapvar);
2841 temp.var = create_variable_info_for (heapvar,
2842 alias_get_name (heapvar));
2844 vi = get_varinfo (temp.var);
2845 vi->is_artificial_var = 1;
2846 vi->is_heap_var = 1;
2847 temp.type = ADDRESSOF;
2849 VEC_safe_push (ce_s, heap, *results, &temp);
/* Other calls: we know nothing; the result may point anywhere.  */
2854 temp.var = anything_id;
2857 VEC_safe_push (ce_s, heap, *results, &temp);
2863 temp.type = ADDRESSOF;
2864 temp.var = anything_id;
2866 VEC_safe_push (ce_s, heap, *results, &temp);
/* tcc_reference class: dereferences and component accesses.  */
2873 switch (TREE_CODE (t))
2877 get_constraint_for (TREE_OPERAND (t, 0), results);
2882 case ARRAY_RANGE_REF:
2884 get_constraint_for_component_ref (t, results);
/* Unhandled reference: conservatively say &ANYTHING.  */
2888 temp.type = ADDRESSOF;
2889 temp.var = anything_id;
2891 VEC_safe_push (ce_s, heap, *results, &temp);
/* tcc_unary class: mostly conversions.  */
2898 switch (TREE_CODE (t))
2902 tree op = TREE_OPERAND (t, 0);
2904 /* Cast from non-pointer to pointers are bad news for us.
2905 Anything else, we see through */
2906 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2907 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2909 get_constraint_for (op, results);
2917 temp.type = ADDRESSOF;
2918 temp.var = anything_id;
2920 VEC_safe_push (ce_s, heap, *results, &temp);
2925 case tcc_exceptional:
2927 switch (TREE_CODE (t))
/* PHI nodes: use the constraint of the PHI result.  */
2931 get_constraint_for (PHI_RESULT (t), results);
/* SSA names map directly to their variable's constraint.  */
2937 struct constraint_expr temp;
2938 temp = get_constraint_exp_from_ssa_var (t);
2939 VEC_safe_push (ce_s, heap, *results, &temp);
2945 temp.type = ADDRESSOF;
2946 temp.var = anything_id;
2948 VEC_safe_push (ce_s, heap, *results, &temp);
/* Plain declarations (VAR_DECL, PARM_DECL, ...): direct mapping.  */
2953 case tcc_declaration:
2955 struct constraint_expr temp;
2956 temp = get_constraint_exp_from_ssa_var (t);
2957 VEC_safe_push (ce_s, heap, *results, &temp);
/* Fallback for anything we don't understand: &ANYTHING.  */
2962 temp.type = ADDRESSOF;
2963 temp.var = anything_id;
2965 VEC_safe_push (ce_s, heap, *results, &temp);
2972 /* Handle the structure copy case where we have a simple structure copy
2973 between LHS and RHS that is of SIZE (in bits)
2975 For each field of the lhs variable (lhsfield)
2976 For each field of the rhs variable at lhsfield.offset (rhsfield)
2977 add the constraint lhsfield = rhsfield
2979 If we fail due to some kind of type unsafety or other thing we
2980 can't handle, return false. We expect the caller to collapse the
2981 variable in that case. */
2984 do_simple_structure_copy (const struct constraint_expr lhs,
2985 const struct constraint_expr rhs,
2986 const unsigned HOST_WIDE_INT size)
2988 varinfo_t p = get_varinfo (lhs.var);
2989 unsigned HOST_WIDE_INT pstart, last;
/* NOTE(review): pstart is read below but its initialization is not
   visible in this listing (presumably pstart = p->offset, as it is the
   base from which per-field offsets are computed) — confirm against the
   full file.  */
2991 last = p->offset + size;
/* Walk the lhs field chain covering [p->offset, last) and pair each
   lhs field with the rhs field at the same relative offset.  */
2992 for (; p && p->offset < last; p = p->next)
2995 struct constraint_expr templhs = lhs;
2996 struct constraint_expr temprhs = rhs;
2997 unsigned HOST_WIDE_INT fieldoffset;
2999 templhs.var = p->id;
3000 q = get_varinfo (temprhs.var);
3001 fieldoffset = p->offset - pstart;
3002 q = first_vi_for_offset (q, q->offset + fieldoffset);
3005 temprhs.var = q->id;
3006 process_constraint (new_constraint (templhs, temprhs));
3012 /* Handle the structure copy case where we have a structure copy between a
3013 aggregate on the LHS and a dereference of a pointer on the RHS
3014 that is of SIZE (in bits)
3016 For each field of the lhs variable (lhsfield)
3017 rhs.offset = lhsfield->offset
3018 add the constraint lhsfield = rhs
3022 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3023 const struct constraint_expr rhs,
3024 const unsigned HOST_WIDE_INT size)
3026 varinfo_t p = get_varinfo (lhs.var);
3027 unsigned HOST_WIDE_INT pstart,last;
/* NOTE(review): as in do_simple_structure_copy, pstart's initialization
   is not visible in this listing — presumably the starting offset of the
   lhs variable; confirm against the full file.  */
3029 last = p->offset + size;
/* For each lhs field in range, emit lhsfield = *(rhs + fieldoffset)
   by bumping the DEREF constraint's offset.  */
3031 for (; p && p->offset < last; p = p->next)
3034 struct constraint_expr templhs = lhs;
3035 struct constraint_expr temprhs = rhs;
3036 unsigned HOST_WIDE_INT fieldoffset;
3039 if (templhs.type == SCALAR)
3040 templhs.var = p->id;
3042 templhs.offset = p->offset;
3044 q = get_varinfo (temprhs.var);
3045 fieldoffset = p->offset - pstart;
3046 temprhs.offset += fieldoffset;
3047 process_constraint (new_constraint (templhs, temprhs));
3051 /* Handle the structure copy case where we have a structure copy
3052 between an aggregate on the RHS and a dereference of a pointer on
3053 the LHS that is of SIZE (in bits)
3055 For each field of the rhs variable (rhsfield)
3056 lhs.offset = rhsfield->offset
3057 add the constraint lhs = rhsfield
3061 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3062 const struct constraint_expr rhs,
3063 const unsigned HOST_WIDE_INT size)
3065 varinfo_t p = get_varinfo (rhs.var);
3066 unsigned HOST_WIDE_INT pstart,last;
/* NOTE(review): pstart initialization not visible in this listing —
   mirror of do_rhs_deref_structure_copy; confirm against the full file.  */
3068 last = p->offset + size;
/* Mirror image of do_rhs_deref_structure_copy: iterate rhs fields and
   bump the lhs DEREF constraint's offset per field.  */
3070 for (; p && p->offset < last; p = p->next)
3073 struct constraint_expr templhs = lhs;
3074 struct constraint_expr temprhs = rhs;
3075 unsigned HOST_WIDE_INT fieldoffset;
3078 if (temprhs.type == SCALAR)
3079 temprhs.var = p->id;
3081 temprhs.offset = p->offset;
3083 q = get_varinfo (templhs.var);
3084 fieldoffset = p->offset - pstart;
3085 templhs.offset += fieldoffset;
3086 process_constraint (new_constraint (templhs, temprhs));
3090 /* Sometimes, frontends like to give us bad type information. This
3091 function will collapse all the fields from VAR to the end of VAR,
3092 into VAR, so that we treat those fields as a single variable.
3093 We return the variable they were collapsed into. */
3096 collapse_rest_of_var (unsigned int var)
3098 varinfo_t currvar = get_varinfo (var);
/* Mark every trailing field as collapsed into VAR.  */
3101 for (field = currvar->next; field; field = field->next)
3104 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3105 field->name, currvar->name);
3107 gcc_assert (!field->collapsed_to);
3108 field->collapsed_to = currvar;
/* Detach the field chain and grow VAR to cover the remainder of the
   full variable.  */
3111 currvar->next = NULL;
3112 currvar->size = currvar->fullsize - currvar->offset;
3117 /* Handle aggregate copies by expanding into copies of the respective
3118 fields of the structures. */
3121 do_structure_copy (tree lhsop, tree rhsop)
3123 struct constraint_expr lhs, rhs, tmp;
3124 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3126 unsigned HOST_WIDE_INT lhssize;
3127 unsigned HOST_WIDE_INT rhssize;
/* Both sides of an aggregate copy must reduce to exactly one
   constraint expression.  */
3129 get_constraint_for (lhsop, &lhsc);
3130 get_constraint_for (rhsop, &rhsc);
3131 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3132 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3133 lhs = *(VEC_last (ce_s, lhsc));
3134 rhs = *(VEC_last (ce_s, rhsc));
3136 VEC_free (ce_s, heap, lhsc);
3137 VEC_free (ce_s, heap, rhsc);
3139 /* If we have special var = x, swap it around. */
3140 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3147 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3148 possible it's something we could handle. However, most cases falling
3149 into this are dealing with transparent unions, which are slightly
3151 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3153 rhs.type = ADDRESSOF;
3154 rhs.var = anything_id;
3157 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3158 that special var. */
3159 if (rhs.var <= integer_id)
3161 for (p = get_varinfo (lhs.var); p; p = p->next)
3163 struct constraint_expr templhs = lhs;
3164 struct constraint_expr temprhs = rhs;
3166 if (templhs.type == SCALAR )
3167 templhs.var = p->id;
3169 templhs.offset += p->offset;
3170 process_constraint (new_constraint (templhs, temprhs));
3175 tree rhstype = TREE_TYPE (rhsop);
3176 tree lhstype = TREE_TYPE (lhsop);
/* DECL_SIZE is per-decl (may differ from the type's size for
   variable-sized decls); fall back to TYPE_SIZE otherwise.  */
3180 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3181 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3183 /* If we have a variably sized types on the rhs or lhs, and a deref
3184 constraint, add the constraint, lhsconstraint = &ANYTHING.
3185 This is conservatively correct because either the lhs is an unknown
3186 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3187 constraint, and every variable it can point to must be unknown sized
3188 anyway, so we don't need to worry about fields at all. */
3189 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3190 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3192 rhs.var = anything_id;
3193 rhs.type = ADDRESSOF;
3195 process_constraint (new_constraint (lhs, rhs));
3199 /* The size only really matters insofar as we don't set more or less of
3200 the variable. If we hit an unknown size var, the size should be the
3201 whole darn thing. */
3202 if (get_varinfo (rhs.var)->is_unknown_size_var)
3205 rhssize = TREE_INT_CST_LOW (rhstypesize);
3207 if (get_varinfo (lhs.var)->is_unknown_size_var)
3210 lhssize = TREE_INT_CST_LOW (lhstypesize);
/* Dispatch on SCALAR/DEREF combinations; the min of both sizes bounds
   how many fields are copied.  */
3213 if (rhs.type == SCALAR && lhs.type == SCALAR)
3215 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
/* Field-wise copy failed (type unsafety); collapse both sides to
   single variables and emit one blob-to-blob constraint.  */
3217 lhs.var = collapse_rest_of_var (lhs.var);
3218 rhs.var = collapse_rest_of_var (rhs.var);
3223 process_constraint (new_constraint (lhs, rhs));
3226 else if (lhs.type != DEREF && rhs.type == DEREF)
3227 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3228 else if (lhs.type == DEREF && rhs.type != DEREF)
3229 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
/* *lhs = *rhs: split through a temporary so each half is a
   single-deref copy, then recurse.  */
3232 tree pointedtotype = lhstype;
3235 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3236 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3237 do_structure_copy (tmpvar, rhsop);
3238 do_structure_copy (lhsop, tmpvar);
3244 /* Update related alias information kept in AI. This is used when
3245 building name tags, alias sets and deciding grouping heuristics.
3246 STMT is the statement to process. This function also updates
3247 ADDRESSABLE_VARS. */
3250 update_alias_info (tree stmt, struct alias_info *ai)
3253 use_operand_p use_p;
3255 bool stmt_dereferences_ptr_p;
3256 enum escape_type stmt_escape_type = is_escape_site (stmt);
3257 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
3259 stmt_dereferences_ptr_p = false;
/* Bump the per-function memory-reference statistics for call and asm
   sites; these feed the memory partitioning heuristics.  */
3261 if (stmt_escape_type == ESCAPE_TO_CALL
3262 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
3264 mem_ref_stats->num_call_sites++;
3265 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
3266 mem_ref_stats->num_pure_const_call_sites++;
3268 else if (stmt_escape_type == ESCAPE_TO_ASM)
3269 mem_ref_stats->num_asm_sites++;
3271 /* Mark all the variables whose address are taken by the statement. */
3272 addr_taken = addresses_taken (stmt);
3275 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
3277 /* If STMT is an escape point, all the addresses taken by it are
3279 if (stmt_escape_type != NO_ESCAPE)
3284 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
3286 tree rvar = referenced_var (i);
3287 if (!unmodifiable_var_p (rvar))
3288 mark_call_clobbered (rvar, stmt_escape_type);
3293 /* Process each operand use. For pointers, determine whether they
3294 are dereferenced by the statement, or whether their value
3296 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
3300 struct ptr_info_def *pi;
3301 unsigned num_uses, num_loads, num_stores;
3303 op = USE_FROM_PTR (use_p);
3305 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3306 to the set of addressable variables. */
3307 if (TREE_CODE (op) == ADDR_EXPR)
3309 bitmap addressable_vars = gimple_addressable_vars (cfun);
3311 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
3312 gcc_assert (addressable_vars);
3314 /* PHI nodes don't have annotations for pinning the set
3315 of addresses taken, so we collect them here.
3317 FIXME, should we allow PHI nodes to have annotations
3318 so that they can be treated like regular statements?
3319 Currently, they are treated as second-class
3321 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
3325 /* Ignore constants (they may occur in PHI node arguments). */
3326 if (TREE_CODE (op) != SSA_NAME)
3329 var = SSA_NAME_VAR (op);
3330 v_ann = var_ann (var);
3332 /* The base variable of an SSA name must be a GIMPLE register, and thus
3333 it cannot be aliased. */
3334 gcc_assert (!may_be_aliased (var));
3336 /* We are only interested in pointers. */
3337 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3340 pi = get_ptr_info (op);
3342 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3343 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3345 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3346 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
3349 /* If STMT is a PHI node, then it will not have pointer
3350 dereferences and it will not be an escape point. */
3351 if (TREE_CODE (stmt) == PHI_NODE)
3354 /* Determine whether OP is a dereferenced pointer, and if STMT
3355 is an escape point, whether OP escapes. */
3356 count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
3358 /* For directly dereferenced pointers we can apply
3359 TBAA-pruning to their points-to set. We may not count the
3360 implicit dereferences &PTR->FLD here. */
3361 if (num_loads + num_stores > 0)
3362 pi->is_dereferenced = 1;
3364 /* Handle a corner case involving address expressions of the
3365 form '&PTR->FLD'. The problem with these expressions is that
3366 they do not represent a dereference of PTR. However, if some
3367 other transformation propagates them into an INDIRECT_REF
3368 expression, we end up with '*(&PTR->FLD)' which is folded
3371 So, if the original code had no other dereferences of PTR,
3372 the aliaser will not create memory tags for it, and when
3373 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3374 memory operations will receive no VDEF/VUSE operands.
3376 One solution would be to have count_uses_and_derefs consider
3377 &PTR->FLD a dereference of PTR. But that is wrong, since it
3378 is not really a dereference but an offset calculation.
3380 What we do here is to recognize these special ADDR_EXPR
3381 nodes. Since these expressions are never GIMPLE values (they
3382 are not GIMPLE invariants), they can only appear on the RHS
3383 of an assignment and their base address is always an
3384 INDIRECT_REF expression. */
3385 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3386 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
3387 && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
3389 /* If the RHS if of the form &PTR->FLD and PTR == OP, then
3390 this represents a potential dereference of PTR. */
3391 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3392 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3393 if (TREE_CODE (base) == INDIRECT_REF
3394 && TREE_OPERAND (base, 0) == op)
3398 if (num_loads + num_stores > 0)
3400 /* Mark OP as dereferenced. In a subsequent pass,
3401 dereferenced pointers that point to a set of
3402 variables will be assigned a name tag to alias
3403 all the variables OP points to. */
3404 pi->memory_tag_needed = 1;
3406 /* ??? For always executed direct dereferences we can
3407 apply TBAA-pruning to their escape set. */
3409 /* If this is a store operation, mark OP as being
3410 dereferenced to store, otherwise mark it as being
3411 dereferenced to load. */
3413 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3415 pointer_set_insert (ai->dereferenced_ptrs_load, var);
3417 /* Update the frequency estimate for all the dereferences of
3419 update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);
3421 /* Indicate that STMT contains pointer dereferences. */
3422 stmt_dereferences_ptr_p = true;
3425 if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
3427 /* If STMT is an escape point and STMT contains at
3428 least one direct use of OP, then the value of OP
3429 escapes and so the pointed-to variables need to
3430 be marked call-clobbered. */
3431 pi->value_escapes_p = 1;
3432 pi->escape_mask |= stmt_escape_type;
3434 /* If the statement makes a function call, assume
3435 that pointer OP will be dereferenced in a store
3436 operation inside the called function. */
3437 if (get_call_expr_in (stmt)
3438 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3440 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3441 pi->memory_tag_needed = 1;
/* PHI nodes never reference memory directly; skip the statistics
   update below for them.  */
3446 if (TREE_CODE (stmt) == PHI_NODE)
3449 /* Mark stored variables in STMT as being written to and update the
3450 memory reference stats for all memory symbols referenced by STMT. */
3451 if (stmt_references_memory_p (stmt))
3456 mem_ref_stats->num_mem_stmts++;
3458 /* Notice that we only update memory reference stats for symbols
3459 loaded and stored by the statement if the statement does not
3460 contain pointer dereferences and it is not a call/asm site.
3461 This is to avoid double accounting problems when creating
3462 memory partitions. After computing points-to information,
3463 pointer dereference statistics are used to update the
3464 reference stats of the pointed-to variables, so here we
3465 should only update direct references to symbols.
3467 Indirect references are not updated here for two reasons: (1)
3468 The first time we compute alias information, the sets
3469 LOADED/STORED are empty for pointer dereferences, (2) After
3470 partitioning, LOADED/STORED may have references to
3471 partitions, not the original pointed-to variables. So, if we
3472 always counted LOADED/STORED here and during partitioning, we
3473 would count many symbols more than once.
3475 This does cause some imprecision when a statement has a
3476 combination of direct symbol references and pointer
3477 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
3478 memory symbols in its argument list, but these cases do not
3479 occur so frequently as to constitute a serious problem. */
3480 if (STORED_SYMS (stmt))
3481 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3483 tree sym = referenced_var (i);
3484 pointer_set_insert (ai->written_vars, sym);
3485 if (!stmt_dereferences_ptr_p
3486 && stmt_escape_type != ESCAPE_TO_CALL
3487 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3488 && stmt_escape_type != ESCAPE_TO_ASM)
3489 update_mem_sym_stats_from_stmt (sym, stmt, 0, 1);
3492 if (!stmt_dereferences_ptr_p
3493 && LOADED_SYMS (stmt)
3494 && stmt_escape_type != ESCAPE_TO_CALL
3495 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3496 && stmt_escape_type != ESCAPE_TO_ASM)
3497 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
3498 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
3503 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3504 Expressions of the type PTR + CST can be handled in two ways:
3506 1- If the constraint for PTR is ADDRESSOF for a non-structure
3507 variable, then we can use it directly because adding or
3508 subtracting a constant may not alter the original ADDRESSOF
3509 constraint (i.e., pointer arithmetic may not legally go outside
3510 an object's boundaries).
3512 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3513 then if CST is a compile-time constant that can be used as an
3514 offset, we can determine which sub-variable will be pointed-to
3517 Return true if the expression is handled. For any other kind of
3518 expression, return false so that each operand can be added as a
3519 separate constraint by the caller. */
3522 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3525 struct constraint_expr *c, *c2;
3528 VEC (ce_s, heap) *temp = NULL;
3529 unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
/* Only POINTER_PLUS_EXPR is recognized here; anything else is left to
   the caller.  */
3531 if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
3534 op0 = TREE_OPERAND (expr, 0);
3535 op1 = TREE_OPERAND (expr, 1);
3536 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
3538 /* If the offset is not a non-negative integer constant that fits
3539 in a HOST_WIDE_INT, we cannot handle it here. */
3540 if (!host_integerp (op1, 1))
3543 /* Make sure the bit-offset also fits. */
3544 rhsunitoffset = TREE_INT_CST_LOW (op1);
3545 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
/* Overflow check: round-tripping through the multiplication must
   reproduce the byte offset.  */
3546 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3549 get_constraint_for (op0, &temp);
/* Cross-product of lhs constraints with the pointer's constraints,
   shifting each by the constant bit offset.  */
3551 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3552 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3554 if (c2->type == ADDRESSOF && rhsoffset != 0)
3556 varinfo_t temp = get_varinfo (c2->var);
3558 /* An access one after the end of an array is valid,
3559 so simply punt on accesses we cannot resolve. */
3560 temp = first_vi_for_offset (temp, rhsoffset);
3567 c2->offset = rhsoffset;
3568 process_constraint (new_constraint (*c, *c2));
3571 VEC_free (ce_s, heap, temp);
3576 /* For non-IPA mode, generate constraints necessary for a call on the
3580 handle_rhs_call (tree rhs)
3583 call_expr_arg_iterator iter;
3584 struct constraint_expr rhsc;
/* Without interprocedural information, anything passed to a call may
   end up pointing anywhere afterwards.  */
3586 rhsc.var = anything_id;
3588 rhsc.type = ADDRESSOF;
3590 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
3592 VEC(ce_s, heap) *lhsc = NULL;
3594 /* Find those pointers being passed, and make sure they end up
3595 pointing to anything. */
3596 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3599 struct constraint_expr *lhsp;
3601 get_constraint_for (arg, &lhsc);
/* from_call == true: these constraints originate at a call site.  */
3603 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3604 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3605 VEC_free (ce_s, heap, lhsc);
3610 /* For non-IPA mode, generate constraints necessary for a call
3611 that returns a pointer and assigns it to LHS. This simply makes
3612 the LHS point to anything. */
3615 handle_lhs_call (tree lhs)
3617 VEC(ce_s, heap) *lhsc = NULL;
3618 struct constraint_expr rhsc;
3620 struct constraint_expr *lhsp;
/* LHS = &ANYTHING: the call's return value is unknown.  */
3622 rhsc.var = anything_id;
3624 rhsc.type = ADDRESSOF;
3625 get_constraint_for (lhs, &lhsc);
3626 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3627 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3628 VEC_free (ce_s, heap, lhsc);
3631 /* Walk statement T setting up aliasing constraints according to the
3632 references found in T. This function is the main part of the
3633 constraint builder. AI points to auxiliary alias information used
3634 when building alias sets and computing alias grouping heuristics. */
3637 find_func_aliases (tree origt)
3640 VEC(ce_s, heap) *lhsc = NULL;
3641 VEC(ce_s, heap) *rhsc = NULL;
3642 struct constraint_expr *c;
/* A RETURN_EXPR wrapping a value is processed as its operand.  */
3644 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3645 t = TREE_OPERAND (t, 0);
3647 /* Now build constraints expressions. */
3648 if (TREE_CODE (t) == PHI_NODE)
3650 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3652 /* Only care about pointers and structures containing
3654 if (could_have_pointers (PHI_RESULT (t)))
3659 /* For a phi node, assign all the arguments to
3661 get_constraint_for (PHI_RESULT (t), &lhsc);
3662 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3665 tree strippedrhs = PHI_ARG_DEF (t, i);
3667 STRIP_NOPS (strippedrhs);
3668 rhstype = TREE_TYPE (strippedrhs);
3669 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
/* result = arg for every (lhs constraint, rhs constraint) pair.  */
3671 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3673 struct constraint_expr *c2;
3674 while (VEC_length (ce_s, rhsc) > 0)
3676 c2 = VEC_last (ce_s, rhsc);
3677 process_constraint (new_constraint (*c, *c2));
3678 VEC_pop (ce_s, rhsc);
3684 /* In IPA mode, we need to generate constraints to pass call
3685 arguments through their calls. There are two cases, either a
3686 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3687 CALL_EXPR when we are not.
3689 In non-ipa mode, we need to generate constraints for each
3690 pointer passed by address. */
3691 else if (((TREE_CODE (t) == GIMPLE_MODIFY_STMT
3692 && TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
3693 && !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
3694 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3695 || (TREE_CODE (t) == CALL_EXPR
3696 && !(call_expr_flags (t)
3697 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
/* Non-IPA branch: make passed pointers and a pointer result point
   to ANYTHING.  */
3701 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3703 handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
3704 if (could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3705 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0));
3708 handle_rhs_call (t);
/* IPA branch: wire actual arguments to the callee's formals.  */
3715 call_expr_arg_iterator iter;
3719 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3721 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3722 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3729 decl = get_callee_fndecl (rhsop);
3731 /* If we can directly resolve the function being called, do so.
3732 Otherwise, it must be some sort of indirect expression that
3733 we should still be able to handle. */
3736 fi = get_vi_for_tree (decl);
3740 decl = CALL_EXPR_FN (rhsop);
3741 fi = get_vi_for_tree (decl);
3744 /* Assign all the passed arguments to the appropriate incoming
3745 parameters of the function. */
3747 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3749 struct constraint_expr lhs ;
3750 struct constraint_expr *rhsp;
3752 get_constraint_for (arg, &rhsc);
3753 if (TREE_CODE (decl) != FUNCTION_DECL)
3762 lhs.var = first_vi_for_offset (fi, i)->id;
3765 while (VEC_length (ce_s, rhsc) != 0)
3767 rhsp = VEC_last (ce_s, rhsc);
3768 process_constraint (new_constraint (lhs, *rhsp));
3769 VEC_pop (ce_s, rhsc);
3774 /* If we are returning a value, assign it to the result. */
3777 struct constraint_expr rhs;
3778 struct constraint_expr *lhsp;
3781 get_constraint_for (lhsop, &lhsc);
3782 if (TREE_CODE (decl) != FUNCTION_DECL)
3791 rhs.var = first_vi_for_offset (fi, i)->id;
3794 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3795 process_constraint (new_constraint (*lhsp, rhs));
3799 /* Otherwise, just a regular assignment statement. */
3800 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3802 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3803 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
/* An aggregate-to-aggregate (or complex) assignment is expanded
   field-wise.  Fixed: the second clause previously re-tested
   TREE_TYPE (lhsop) for COMPLEX_TYPE instead of rhsop, so a
   complex (non-aggregate) RHS was not recognized by this test;
   the parallel structure clearly intends rhsop here.  */
3806 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3807 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
3808 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
3809 || TREE_CODE (TREE_TYPE (rhsop)) == COMPLEX_TYPE))
3811 do_structure_copy (lhsop, rhsop);
3815 /* Only care about operations with pointers, structures
3816 containing pointers, dereferences, and call expressions. */
3817 if (could_have_pointers (lhsop)
3818 || TREE_CODE (rhsop) == CALL_EXPR)
3820 get_constraint_for (lhsop, &lhsc);
3821 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3823 /* RHS that consist of unary operations,
3824 exceptional types, or bare decls/constants, get
3825 handled directly by get_constraint_for. */
3827 case tcc_declaration:
3829 case tcc_exceptional:
3830 case tcc_expression:
3836 get_constraint_for (rhsop, &rhsc);
3837 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3839 struct constraint_expr *c2;
3842 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3843 process_constraint (new_constraint (*c, *c2));
3851 /* For pointer arithmetic of the form
3852 PTR + CST, we can simply use PTR's
3853 constraint because pointer arithmetic is
3854 not allowed to go out of bounds. */
3855 if (handle_ptr_arith (lhsc, rhsop))
3860 /* Otherwise, walk each operand. Notice that we
3861 can't use the operand interface because we need
3862 to process expressions other than simple operands
3863 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3865 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
3867 tree op = TREE_OPERAND (rhsop, i);
3870 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3871 get_constraint_for (op, &rhsc);
3872 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3874 struct constraint_expr *c2;
3875 while (VEC_length (ce_s, rhsc) > 0)
3877 c2 = VEC_last (ce_s, rhsc);
3878 process_constraint (new_constraint (*c, *c2));
3879 VEC_pop (ce_s, rhsc);
/* CHANGE_DYNAMIC_TYPE_EXPR: the pointed-to object's dynamic type
   changes, so disable TBAA pruning for the affected variables.  */
3887 else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
3891 get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
3892 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3893 get_varinfo (c->var)->no_tbaa_pruning = true;
3896 /* After promoting variables and computing aliasing we will
3897 need to re-scan most statements. FIXME: Try to minimize the
3898 number of statements re-scanned. It's not really necessary to
3899 re-scan *all* statements. */
3900 mark_stmt_modified (origt);
3901 VEC_free (ce_s, heap, rhsc);
3902 VEC_free (ce_s, heap, lhsc);
3906 /* Find the first varinfo in the same variable as START that overlaps with
3908 Effectively, walk the chain of fields for the variable START to find the
3909 first field that overlaps with OFFSET.
3910 Return NULL if we can't find one. */
3913 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3915 varinfo_t curr = start;
3918 /* We may not find a variable in the field list with the actual
3919 offset when when we have glommed a structure to a variable.
3920 In that case, however, offset should still be within the size
3922 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3930 /* Insert the varinfo FIELD into the field list for BASE, at the front
3934 insert_into_field_list (varinfo_t base, varinfo_t field)
3936 varinfo_t prev = base;
3937 varinfo_t curr = base->next;
3943 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3947 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3949 varinfo_t prev = base;
3950 varinfo_t curr = base->next;
3961 if (field->offset <= curr->offset)
3966 field->next = prev->next;
3971 /* This structure is used during pushing fields onto the fieldstack
3972 to track the offset of the field, since bitpos_of_field gives it
3973 relative to its immediate containing type, and we want it relative
3974 to the ultimate containing object. */
3978 /* Type of the field. */
3981 /* Size, in bits, of the field. */
3987 /* Offset from the base of the base containing object to this field. */
3988 HOST_WIDE_INT offset;
3990 typedef struct fieldoff fieldoff_s;
3992 DEF_VEC_O(fieldoff_s);
3993 DEF_VEC_ALLOC_O(fieldoff_s,heap);
3995 /* qsort comparison function for two fieldoff's PA and PB */
3998 fieldoff_compare (const void *pa, const void *pb)
4000 const fieldoff_s *foa = (const fieldoff_s *)pa;
4001 const fieldoff_s *fob = (const fieldoff_s *)pb;
4002 HOST_WIDE_INT foasize, fobsize;
4004 if (foa->offset != fob->offset)
4005 return foa->offset - fob->offset;
4007 foasize = TREE_INT_CST_LOW (foa->size);
4008 fobsize = TREE_INT_CST_LOW (fob->size);
4009 return foasize - fobsize;
4012 /* Sort a fieldstack according to the field offset and sizes. */
4014 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4016 qsort (VEC_address (fieldoff_s, fieldstack),
4017 VEC_length (fieldoff_s, fieldstack),
4018 sizeof (fieldoff_s),
4022 /* Return true if V is a tree that we can have subvars for.
4023 Normally, this is any aggregate type. Also complex
4024 types which are not gimple registers can have subvars. */
4027 var_can_have_subvars (const_tree v)
4029 /* Volatile variables should never have subvars. */
4030 if (TREE_THIS_VOLATILE (v))
4033 /* Non decls or memory tags can never have subvars. */
4034 if (!DECL_P (v) || MTAG_P (v))
4037 /* Aggregates without overlapping fields can have subvars. */
4038 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
/* NOTE(review): this listing is elided (non-contiguous embedded line
   numbers); code lines are kept verbatim, only comments added.  */
4044 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4045 the fields of TYPE onto fieldstack, recording their offsets along
4048 OFFSET is used to keep track of the offset in this entire
4049 structure, rather than just the immediately containing structure.
4050 Returns the number of fields pushed.
4052 HAS_UNION is set to true if we find a union type as a field of
4056 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4057 HOST_WIDE_INT offset, bool *has_union)
/* Only RECORD_TYPEs are decomposed field-by-field here.  */
4062 if (TREE_CODE (type) != RECORD_TYPE)
4065 /* If the vector of fields is growing too big, bail out early.
4066 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
4068 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
/* Walk every FIELD_DECL of the record.  */
4071 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4072 if (TREE_CODE (field) == FIELD_DECL)
/* Record when a union is seen among the fields.  */
4078 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4079 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
4082 if (!var_can_have_subvars (field))
/* Recurse into subfields; the field's bit position is folded into
   the running OFFSET so recorded offsets are relative to the
   outermost object.  */
4084 else if (!(pushed = push_fields_onto_fieldstack
4087 offset + bitpos_of_field (field),
4089 && (DECL_SIZE (field)
4090 && !integer_zerop (DECL_SIZE (field))))
4091 /* Empty structures may have actual size, like in C++. So
4092 see if we didn't push any subfields and the size is
4093 nonzero, push the field onto the stack. */
4100 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4101 pair->type = TREE_TYPE (field);
4102 pair->size = DECL_SIZE (field);
4104 pair->offset = offset + bitpos_of_field (field);
4114 /* Create a constraint from ANYTHING variable to VI. */
4116 make_constraint_from_anything (varinfo_t vi)
4118 struct constraint_expr lhs, rhs;
4124 rhs.var = anything_id;
4126 rhs.type = ADDRESSOF;
4127 process_constraint (new_constraint (lhs, rhs));
4130 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4131 if it is a varargs function. */
4134 count_num_arguments (tree decl, bool *is_varargs)
4139 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4143 if (TREE_VALUE (t) == void_type_node)
/* NOTE(review): elided listing (non-contiguous embedded line numbers);
   code lines kept verbatim, comments only added.  Creates one varinfo
   for the function plus one per argument and one for the result, all
   chained as "fields" of the function variable.  */
4153 /* Creation function node for DECL, using NAME, and return the index
4154 of the variable we've created for the function. */
4157 create_function_info_for (tree decl, const char *name)
4159 unsigned int index = VEC_length (varinfo_t, varmap);
4163 bool is_varargs = false;
4165 /* Create the variable info. */
4167 vi = new_var_info (decl, index, name);
/* fullsize = argument count + 1; offset 0 is the function itself,
   offsets 1..n are the arguments (see the loop below).  */
4172 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4173 insert_vi_for_tree (vi->decl, vi);
4174 VEC_safe_push (varinfo_t, heap, varmap, vi);
4178 /* If it's varargs, we don't know how many arguments it has, so we
4185 vi->is_unknown_size_var = true;
4190 arg = DECL_ARGUMENTS (decl);
4192 /* Set up variables for each argument. */
4193 for (i = 1; i < vi->fullsize; i++)
4196 const char *newname;
4198 unsigned int newindex;
4199 tree argdecl = decl;
4204 newindex = VEC_length (varinfo_t, varmap);
/* Name argument variables FUNC.arg0, FUNC.arg1, ...  */
4205 asprintf (&tempname, "%s.arg%d", name, i-1);
4206 newname = ggc_strdup (tempname);
4209 argvi = new_var_info (argdecl, newindex, newname);
4210 argvi->decl = argdecl;
4211 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4214 argvi->fullsize = vi->fullsize;
4215 argvi->has_union = false;
/* Arguments live in the function variable's field list, sorted by
   offset, so first_vi_for_offset can find them.  */
4216 insert_into_field_list_sorted (vi, argvi);
4217 stats.total_vars ++;
4220 insert_vi_for_tree (arg, argvi);
4221 arg = TREE_CHAIN (arg);
4225 /* Create a variable for the return var. */
4226 if (DECL_RESULT (decl) != NULL
4227 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4230 const char *newname;
4232 unsigned int newindex;
4233 tree resultdecl = decl;
4237 if (DECL_RESULT (decl))
4238 resultdecl = DECL_RESULT (decl);
4240 newindex = VEC_length (varinfo_t, varmap);
4241 asprintf (&tempname, "%s.result", name);
4242 newname = ggc_strdup (tempname);
4245 resultvi = new_var_info (resultdecl, newindex, newname);
4246 resultvi->decl = resultdecl;
4247 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
/* The result occupies the offset after the last argument.  */
4248 resultvi->offset = i;
4250 resultvi->fullsize = vi->fullsize;
4251 resultvi->has_union = false;
4252 insert_into_field_list_sorted (vi, resultvi);
4253 stats.total_vars ++;
4254 if (DECL_RESULT (decl))
4255 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4261 /* Return true if FIELDSTACK contains fields that overlap.
4262 FIELDSTACK is assumed to be sorted by offset. */
4265 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4267 fieldoff_s *fo = NULL;
4269 HOST_WIDE_INT lastoffset = -1;
4271 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4273 if (fo->offset == lastoffset)
4275 lastoffset = fo->offset;
/* NOTE(review): elided listing (non-contiguous embedded line numbers);
   code lines kept verbatim, comments only added.  */
4280 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4281 This will also create any varinfo structures necessary for fields
4285 create_variable_info_for (tree decl, const char *name)
4287 unsigned int index = VEC_length (varinfo_t, varmap);
4289 tree decltype = TREE_TYPE (decl);
4290 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
4291 bool notokay = false;
4293 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4294 VEC (fieldoff_s,heap) *fieldstack = NULL;
/* Functions get their own dedicated varinfo layout in IPA mode.  */
4296 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4297 return create_function_info_for (decl, name);
4299 hasunion = TREE_CODE (decltype) == UNION_TYPE
4300 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
/* Field-sensitive handling is only attempted for union-free types.  */
4301 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
4303 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
4306 VEC_free (fieldoff_s, heap, fieldstack);
4311 /* If the variable doesn't have subvars, we may end up needing to
4312 sort the field list and create fake variables for all the
4314 vi = new_var_info (decl, index, name);
4317 vi->has_union = hasunion;
/* Variables with non-constant size or union type are tracked as a
   single blob of unknown size.  */
4319 || TREE_CODE (declsize) != INTEGER_CST
4320 || TREE_CODE (decltype) == UNION_TYPE
4321 || TREE_CODE (decltype) == QUAL_UNION_TYPE
4323 vi->is_unknown_size_var = true;
4329 vi->fullsize = TREE_INT_CST_LOW (declsize);
4330 vi->size = vi->fullsize;
4333 insert_vi_for_tree (vi->decl, vi);
4334 VEC_safe_push (varinfo_t, heap, varmap, vi);
/* Globals can be pointed to by anything unless whole-program IPA says
   otherwise.  */
4335 if (is_global && (!flag_whole_program || !in_ipa_mode))
4336 make_constraint_from_anything (vi);
4339 if (use_field_sensitive
4341 && !vi->is_unknown_size_var
4342 && var_can_have_subvars (decl)
4343 && VEC_length (fieldoff_s, fieldstack) > 1
4344 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4346 unsigned int newindex = VEC_length (varinfo_t, varmap);
4347 fieldoff_s *fo = NULL;
/* Reject the field-sensitive representation if any field has a
   non-constant size.  */
4350 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4353 || TREE_CODE (fo->size) != INTEGER_CST
4361 /* We can't sort them if we have a field with a variable sized type,
4362 which will make notokay = true. In that case, we are going to return
4363 without creating varinfos for the fields anyway, so sorting them is a
4367 sort_fieldstack (fieldstack);
4368 /* Due to some C++ FE issues, like PR 22488, we might end up with
4369 what appear to be overlapping fields even though they,
4370 in reality, do not overlap. Until the C++ FE is fixed,
4371 we will simply disable field-sensitivity for these cases. */
4372 notokay = check_for_overlaps (fieldstack);
4376 if (VEC_length (fieldoff_s, fieldstack) != 0)
4377 fo = VEC_index (fieldoff_s, fieldstack, 0)
4379 if (fo == NULL || notokay)
4381 vi->is_unknown_size_var = 1;
4384 VEC_free (fieldoff_s, heap, fieldstack);
/* The base varinfo covers the first field; subsequent fields get
   their own chained varinfos below.  */
4388 vi->size = TREE_INT_CST_LOW (fo->size);
4389 vi->offset = fo->offset;
4390 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4391 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4395 const char *newname = "NULL";
4398 newindex = VEC_length (varinfo_t, varmap);
/* Name fields VAR.FIELD when a decl is available, else VAR.OFFSET.  */
4402 asprintf (&tempname, "%s.%s",
4403 vi->name, alias_get_name (fo->decl));
4405 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4406 vi->name, fo->offset);
4407 newname = ggc_strdup (tempname);
4410 newvi = new_var_info (decl, newindex, newname);
4411 newvi->offset = fo->offset;
4412 newvi->size = TREE_INT_CST_LOW (fo->size);
4413 newvi->fullsize = vi->fullsize;
4414 insert_into_field_list (vi, newvi);
4415 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4416 if (is_global && (!flag_whole_program || !in_ipa_mode))
4417 make_constraint_from_anything (newvi);
4423 VEC_free (fieldoff_s, heap, fieldstack);
4428 /* Print out the points-to solution for VAR to FILE. */
4431 dump_solution_for_var (FILE *file, unsigned int var)
4433 varinfo_t vi = get_varinfo (var);
4437 if (find (var) != var)
4439 varinfo_t vipt = get_varinfo (find (var));
4440 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4444 fprintf (file, "%s = { ", vi->name);
4445 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4447 fprintf (file, "%s ", get_varinfo (i)->name);
4449 fprintf (file, "}");
4450 if (vi->no_tbaa_pruning)
4451 fprintf (file, " no-tbaa-pruning");
4452 fprintf (file, "\n");
/* Print the points-to solution for VAR to stdout.  */

void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}
/* NOTE(review): elided listing (non-contiguous embedded line numbers);
   code lines kept verbatim, comments only added.  */
4464 /* Create varinfo structures for all of the variables in the
4465 function for intraprocedural mode. */
4468 intra_create_variable_infos (void)
4471 struct constraint_expr lhs, rhs;
4473 /* For each incoming pointer argument arg, create the constraint ARG
4474 = ANYTHING or a dummy variable if flag_argument_noalias is set. */
4475 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4479 if (!could_have_pointers (t))
4482 /* If flag_argument_noalias is set, then function pointer
4483 arguments are guaranteed not to point to each other. In that
4484 case, create an artificial variable PARM_NOALIAS and the
4485 constraint ARG = &PARM_NOALIAS. */
4486 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4489 tree heapvar = heapvar_lookup (t);
4493 lhs.var = get_vi_for_tree (t)->id;
/* Create the artificial heap variable lazily, once per parm.  */
4495 if (heapvar == NULL_TREE)
4498 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4500 DECL_EXTERNAL (heapvar) = 1;
4501 if (gimple_referenced_vars (cfun))
4502 add_referenced_var (heapvar);
4504 heapvar_insert (t, heapvar);
/* Record the no-alias strength requested on the command line.  */
4506 ann = get_var_ann (heapvar);
4507 if (flag_argument_noalias == 1)
4508 ann->noalias_state = NO_ALIAS;
4509 else if (flag_argument_noalias == 2)
4510 ann->noalias_state = NO_ALIAS_GLOBAL;
4511 else if (flag_argument_noalias == 3)
4512 ann->noalias_state = NO_ALIAS_ANYTHING;
4517 vi = get_vi_for_tree (heapvar);
4518 vi->is_artificial_var = 1;
4519 vi->is_heap_var = 1;
4521 rhs.type = ADDRESSOF;
/* Apply the constraint to the parm's varinfo and all its fields.  */
4523 for (p = get_varinfo (lhs.var); p; p = p->next)
4525 struct constraint_expr temp = lhs;
4527 process_constraint (new_constraint (temp, rhs));
/* Without flag_argument_noalias, incoming pointer args may point
   anywhere.  */
4532 varinfo_t arg_vi = get_vi_for_tree (t);
4534 for (p = arg_vi; p; p = p->next)
4535 make_constraint_from_anything (p);
4540 /* Structure used to put solution bitmaps in a hashtable so they can
4541 be shared among variables with the same points-to set. */
4543 typedef struct shared_bitmap_info
4547 } *shared_bitmap_info_t;
4548 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4550 static htab_t shared_bitmap_table;
4552 /* Hash function for a shared_bitmap_info_t */
4555 shared_bitmap_hash (const void *p)
4557 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4558 return bi->hashcode;
4561 /* Equality function for two shared_bitmap_info_t's. */
4564 shared_bitmap_eq (const void *p1, const void *p2)
4566 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4567 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4568 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4571 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4572 existing instance if there is one, NULL otherwise. */
4575 shared_bitmap_lookup (bitmap pt_vars)
4578 struct shared_bitmap_info sbi;
4580 sbi.pt_vars = pt_vars;
4581 sbi.hashcode = bitmap_hash (pt_vars);
4583 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4584 sbi.hashcode, NO_INSERT);
4588 return ((shared_bitmap_info_t) *slot)->pt_vars;
4592 /* Add a bitmap to the shared bitmap hashtable. */
4595 shared_bitmap_add (bitmap pt_vars)
4598 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4600 sbi->pt_vars = pt_vars;
4601 sbi->hashcode = bitmap_hash (pt_vars);
4603 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4604 sbi->hashcode, INSERT);
4605 gcc_assert (!*slot);
4606 *slot = (void *) sbi;
/* NOTE(review): elided listing (non-contiguous embedded line numbers);
   code lines kept verbatim, comments only added.  */
4610 /* Set bits in INTO corresponding to the variable uids in solution set
4611 FROM, which came from variable PTR.
4612 For variables that are actually dereferenced, we also use type
4613 based alias analysis to prune the points-to sets.
4614 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4615 help determine whether we are allowed to prune using TBAA.
4616 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4620 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4621 bool no_tbaa_pruning)
4626 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
4628 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4630 varinfo_t vi = get_varinfo (i);
4632 /* The only artificial variables that are allowed in a may-alias
4633 set are heap variables. */
4634 if (vi->is_artificial_var && !vi->is_heap_var)
4637 if (TREE_CODE (vi->decl) == VAR_DECL
4638 || TREE_CODE (vi->decl) == PARM_DECL
4639 || TREE_CODE (vi->decl) == RESULT_DECL)
4641 /* Just add VI->DECL to the alias set.
4642 Don't type prune artificial vars or points-to sets
4643 for pointers that have not been dereferenced or with
4644 type-based pruning disabled. */
4645 if (vi->is_artificial_var
4648 bitmap_set_bit (into, DECL_UID (vi->decl));
/* Otherwise use TBAA: only keep the decl if its alias set may
   conflict with the pointed-to type's alias set.  */
4651 alias_set_type var_alias_set, mem_alias_set;
4652 var_alias_set = get_alias_set (vi->decl);
4653 mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
4654 if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set,
4655 vi->decl, var_alias_set, true))
4656 bitmap_set_bit (into, DECL_UID (vi->decl));
/* NOTE(review): appears to gate points-to queries — find_what_p_points_to
   and clobber_what_p_points_to bail out when this is false; confirm where
   it is set (outside this excerpt).  */
4663 static bool have_alias_info = false;
4665 /* The list of SMT's that are in use by our pointer variables. This
4666 is the set of SMT's for all pointers that can point to anything. */
4667 static bitmap used_smts;
4669 /* Due to the ordering of points-to set calculation and SMT
4670 calculation being a bit co-dependent, we can't just calculate SMT
4671 used info whenever we want, we have to calculate it around the time
4672 that find_what_p_points_to is called. */
/* NOTE(review): elided listing (non-contiguous embedded line numbers);
   code lines kept verbatim, comments only added.  */
4674 /* Mark which SMT's are in use by points-to anything variables. */
4677 set_used_smts (void)
4681 used_smts = BITMAP_ALLOC (&pta_obstack);
4683 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4685 tree var = vi->decl;
4686 varinfo_t withsolution = get_varinfo (find (i));
4689 struct ptr_info_def *pi = NULL;
4691 /* For parm decls, the pointer info may be under the default
4693 if (TREE_CODE (vi->decl) == PARM_DECL
4694 && gimple_default_def (cfun, var))
4695 pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4696 else if (TREE_CODE (var) == SSA_NAME)
4697 pi = SSA_NAME_PTR_INFO (var);
4699 /* Skip the special variables and those that can't be aliased. */
4700 if (vi->is_special_var
4702 || (pi && !pi->memory_tag_needed)
4703 || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4704 || !POINTER_TYPE_P (TREE_TYPE (var)))
4707 if (TREE_CODE (var) == SSA_NAME)
4708 var = SSA_NAME_VAR (var);
/* Record the SMT of any pointer whose solution includes ANYTHING.  */
4714 smt = va->symbol_mem_tag;
4715 if (smt && bitmap_bit_p (withsolution->solution, anything_id))
4716 bitmap_set_bit (used_smts, DECL_UID (smt));
/* NOTE(review): elided listing (non-contiguous embedded line numbers);
   code lines kept verbatim, comments only added.  */
4720 /* Given a pointer variable P, fill in its points-to set, or return
4722 Rather than return false for variables that point-to anything, we
4723 instead find the corresponding SMT, and merge in its aliases. In
4724 addition to these aliases, we also set the bits for the SMT's
4725 themselves and their subsets, as SMT's are still in use by
4726 non-SSA_NAME's, and pruning may eliminate every one of their
4727 aliases. In such a case, if we did not include the right set of
4728 SMT's in the points-to set of the variable, we'd end up with
4729 statements that do not conflict but should. */
4732 find_what_p_points_to (tree p)
4737 if (!have_alias_info)
4740 /* For parameters, get at the points-to set for the actual parm
4742 if (TREE_CODE (p) == SSA_NAME
4743 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4744 && SSA_NAME_IS_DEFAULT_DEF (p))
4745 lookup_p = SSA_NAME_VAR (p);
4747 vi = lookup_vi_for_tree (lookup_p);
4750 if (vi->is_artificial_var)
4753 /* See if this is a field or a structure. */
4754 if (vi->size != vi->fullsize)
4756 /* Nothing currently asks about structure fields directly,
4757 but when they do, we need code here to hand back the
4763 struct ptr_info_def *pi = get_ptr_info (p);
4766 bool was_pt_anything = false;
4767 bitmap finished_solution;
4770 if (!pi->memory_tag_needed)
4773 /* This variable may have been collapsed, let's get the real
4775 vi = get_varinfo (find (vi->id));
4777 /* Translate artificial variables into SSA_NAME_PTR_INFO
4779 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4781 varinfo_t vi = get_varinfo (i);
4783 if (vi->is_artificial_var)
4785 /* FIXME. READONLY should be handled better so that
4786 flow insensitive aliasing can disregard writable
4788 if (vi->id == nothing_id)
4790 else if (vi->id == anything_id)
4791 was_pt_anything = 1;
4792 else if (vi->id == readonly_id)
4793 was_pt_anything = 1;
4794 else if (vi->id == integer_id)
4795 was_pt_anything = 1;
4796 else if (vi->is_heap_var)
4797 pi->pt_global_mem = 1;
4801 /* Instead of doing extra work, simply do not create
4802 points-to information for pt_anything pointers. This
4803 will cause the operand scanner to fall back to the
4804 type-based SMT and its aliases. Which is the best
4805 we could do here for the points-to set as well. */
4806 if (was_pt_anything)
4809 /* Share the final set of variables when possible. */
4810 finished_solution = BITMAP_GGC_ALLOC ();
4811 stats.points_to_sets_created++;
4813 set_uids_in_ptset (p, finished_solution, vi->solution,
4814 pi->is_dereferenced,
4815 vi->no_tbaa_pruning);
/* Reuse an identical previously-computed bitmap if one exists,
   freeing the freshly built copy.  */
4816 result = shared_bitmap_lookup (finished_solution);
4820 shared_bitmap_add (finished_solution);
4821 pi->pt_vars = finished_solution;
4825 pi->pt_vars = result;
4826 bitmap_clear (finished_solution);
4829 if (bitmap_empty_p (pi->pt_vars))
/* NOTE(review): elided listing (non-contiguous embedded line numbers);
   code lines kept verbatim, comments only added.  */
4839 /* Mark everything that p points to as call clobbered. Returns true
4840 if everything is done and false if all addressable variables need to
4841 be clobbered because p points to anything. */
4844 clobber_what_p_points_to (tree p)
4848 struct ptr_info_def *pi;
4852 if (!have_alias_info)
4855 /* For parameters, get at the points-to set for the actual parm
4857 if (TREE_CODE (p) == SSA_NAME
4858 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4859 && SSA_NAME_IS_DEFAULT_DEF (p))
4860 lookup_p = SSA_NAME_VAR (p);
4862 vi = lookup_vi_for_tree (lookup_p);
4866 /* We are asking for the points-to solution of pointers. */
4867 gcc_assert (!vi->is_artificial_var
4868 && vi->size == vi->fullsize);
4870 pi = get_ptr_info (p);
4872 /* This variable may have been collapsed, let's get the real
4874 vi = get_varinfo (find (vi->id));
4876 /* Mark variables in the solution call-clobbered. */
4877 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4879 varinfo_t vi = get_varinfo (i);
4881 if (vi->is_artificial_var)
4883 /* nothing_id and readonly_id do not cause any
4884 call clobber ops. For anything_id and integer_id
4885 we need to clobber all addressable vars. */
4886 if (vi->id == anything_id
4887 || vi->id == integer_id)
4891 /* Only artificial heap-vars are further interesting. */
4892 if (vi->is_artificial_var && !vi->is_heap_var)
/* Clobber real decls in the solution unless they are known
   unmodifiable.  */
4895 if ((TREE_CODE (vi->decl) == VAR_DECL
4896 || TREE_CODE (vi->decl) == PARM_DECL
4897 || TREE_CODE (vi->decl) == RESULT_DECL)
4898 && !unmodifiable_var_p (vi->decl))
4899 mark_call_clobbered (vi->decl, pi->escape_mask);
/* NOTE(review): elided listing (non-contiguous embedded line numbers);
   code lines kept verbatim, comments only added.  */
4905 /* Dump points-to information to OUTFILE. */
4908 dump_sa_points_to_info (FILE *outfile)
4912 fprintf (outfile, "\nPoints-to sets\n\n");
/* Solver statistics are only printed when -fdump-...-stats is given.  */
4914 if (dump_flags & TDF_STATS)
4916 fprintf (outfile, "Stats:\n");
4917 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4918 fprintf (outfile, "Non-pointer vars: %d\n",
4919 stats.nonpointer_vars);
4920 fprintf (outfile, "Statically unified vars: %d\n",
4921 stats.unified_vars_static);
4922 fprintf (outfile, "Dynamically unified vars: %d\n",
4923 stats.unified_vars_dynamic);
4924 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4925 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4926 fprintf (outfile, "Number of implicit edges: %d\n",
4927 stats.num_implicit_edges);
/* Dump the solution of every variable in the map.  */
4930 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4931 dump_solution_for_var (outfile, i);
/* Debug points-to information to stderr.  */

void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
/* NOTE(review): elided listing (non-contiguous embedded line numbers);
   code lines kept verbatim, comments only added.  */
4944 /* Initialize the always-existing constraint variables for NULL
4945 ANYTHING, READONLY, and INTEGER */
4948 init_base_vars (void)
4950 struct constraint_expr lhs, rhs;
4952 /* Create the NULL variable, used to represent that a variable points
4954 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
4955 var_nothing = new_var_info (nothing_tree, 0, "NULL");
4956 insert_vi_for_tree (nothing_tree, var_nothing);
4957 var_nothing->is_artificial_var = 1;
4958 var_nothing->offset = 0;
4959 var_nothing->size = ~0;
4960 var_nothing->fullsize = ~0;
4961 var_nothing->is_special_var = 1;
4963 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
4965 /* Create the ANYTHING variable, used to represent that a variable
4966 points to some unknown piece of memory. */
4967 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
4968 var_anything = new_var_info (anything_tree, 1, "ANYTHING");
4969 insert_vi_for_tree (anything_tree, var_anything);
4970 var_anything->is_artificial_var = 1;
4971 var_anything->size = ~0;
4972 var_anything->offset = 0;
4973 var_anything->next = NULL;
4974 var_anything->fullsize = ~0;
4975 var_anything->is_special_var = 1;
4978 /* Anything points to anything. This makes deref constraints just
4979 work in the presence of linked list and other p = *p type loops,
4980 by saying that *ANYTHING = ANYTHING. */
4981 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
4983 lhs.var = anything_id;
4985 rhs.type = ADDRESSOF;
4986 rhs.var = anything_id;
4989 /* This specifically does not use process_constraint because
4990 process_constraint ignores all anything = anything constraints, since all
4991 but this one are redundant. */
4992 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
4994 /* Create the READONLY variable, used to represent that a variable
4995 points to readonly memory. */
4996 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
4997 var_readonly = new_var_info (readonly_tree, 2, "READONLY");
4998 var_readonly->is_artificial_var = 1;
4999 var_readonly->offset = 0;
5000 var_readonly->size = ~0;
5001 var_readonly->fullsize = ~0;
5002 var_readonly->next = NULL;
5003 var_readonly->is_special_var = 1;
5004 insert_vi_for_tree (readonly_tree, var_readonly);
5006 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5008 /* readonly memory points to anything, in order to make deref
5009 easier. In reality, it points to anything the particular
5010 readonly variable can point to, but we don't track this
5013 lhs.var = readonly_id;
5015 rhs.type = ADDRESSOF;
5016 rhs.var = anything_id;
5019 process_constraint (new_constraint (lhs, rhs));
5021 /* Create the INTEGER variable, used to represent that a variable points
5023 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5024 var_integer = new_var_info (integer_tree, 3, "INTEGER");
5025 insert_vi_for_tree (integer_tree, var_integer);
5026 var_integer->is_artificial_var = 1;
5027 var_integer->size = ~0;
5028 var_integer->fullsize = ~0;
5029 var_integer->offset = 0;
5030 var_integer->next = NULL;
5031 var_integer->is_special_var = 1;
5033 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5035 /* INTEGER = ANYTHING, because we don't know where a dereference of
5036 a random integer will point to. */
5038 lhs.var = integer_id;
5040 rhs.type = ADDRESSOF;
5041 rhs.var = anything_id;
5043 process_constraint (new_constraint (lhs, rhs));
/* NOTE(review): elided listing (non-contiguous embedded line numbers);
   code lines kept verbatim, comments only added.  */
5046 /* Initialize things necessary to perform PTA */
5049 init_alias_vars (void)
/* Obstacks backing the solver's bitmaps; released per-pass.  */
5051 bitmap_obstack_initialize (&pta_obstack);
5052 bitmap_obstack_initialize (&oldpta_obstack);
5053 bitmap_obstack_initialize (&predbitmap_obstack);
5055 constraint_pool = create_alloc_pool ("Constraint pool",
5056 sizeof (struct constraint), 30);
5057 variable_info_pool = create_alloc_pool ("Variable info pool",
5058 sizeof (struct variable_info), 30);
5059 constraints = VEC_alloc (constraint_t, heap, 8);
5060 varmap = VEC_alloc (varinfo_t, heap, 8);
5061 vi_for_tree = pointer_map_create ();
5063 memset (&stats, 0, sizeof (stats));
/* Table for sharing identical points-to bitmaps (see shared_bitmap_*).  */
5064 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5065 shared_bitmap_eq, free);
/* NOTE(review): elided listing (non-contiguous embedded line numbers);
   code lines kept verbatim, comments only added.  */
5069 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5070 predecessor edges. */
5073 remove_preds_and_fake_succs (constraint_graph_t graph)
5077 /* Clear the implicit ref and address nodes from the successor
5079 for (i = 0; i < FIRST_REF_NODE; i++)
5081 if (graph->succs[i])
5082 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5083 FIRST_REF_NODE * 2);
5086 /* Free the successor list for the non-ref nodes. */
5087 for (i = FIRST_REF_NODE; i < graph->size; i++)
5089 if (graph->succs[i])
5090 BITMAP_FREE (graph->succs[i]);
5093 /* Now reallocate the size of the successor list as, and blow away
5094 the predecessor bitmaps. */
5095 graph->size = VEC_length (varinfo_t, varmap);
5096 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5098 free (graph->implicit_preds);
5099 graph->implicit_preds = NULL;
5100 free (graph->preds);
5101 graph->preds = NULL;
/* All predecessor bitmaps lived on this obstack.  */
5102 bitmap_obstack_release (&predbitmap_obstack);
/* NOTE(review): elided listing (non-contiguous embedded line numbers);
   code lines kept verbatim, comments only added.  Propagates the
   no_tbaa_pruning flag forward through copy constraints and graph
   successor edges until a fixed point is reached.  */
5105 /* Compute the set of variables we can't TBAA prune. */
5108 compute_tbaa_pruning (void)
5110 unsigned int size = VEC_length (varinfo_t, varmap);
5115 changed = sbitmap_alloc (size);
5116 sbitmap_zero (changed);
5118 /* Mark all initial no_tbaa_pruning nodes as changed. */
5120 for (i = 0; i < size; ++i)
5122 varinfo_t ivi = get_varinfo (i);
/* Only representatives with outgoing work are worth seeding.  */
5124 if (find (i) == i && ivi->no_tbaa_pruning)
5127 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5128 || VEC_length (constraint_t, graph->complex[i]) > 0)
5130 SET_BIT (changed, i);
/* Worklist loop: iterate in topological order until no node's flag
   changes.  */
5136 while (changed_count > 0)
5138 struct topo_info *ti = init_topo_info ();
5141 compute_topo_order (graph, ti);
5143 while (VEC_length (unsigned, ti->topo_order) != 0)
5147 i = VEC_pop (unsigned, ti->topo_order);
5149 /* If this variable is not a representative, skip it. */
5153 /* If the node has changed, we need to process the complex
5154 constraints and outgoing edges again. */
5155 if (TEST_BIT (changed, i))
5159 VEC(constraint_t,heap) *complex = graph->complex[i];
5161 RESET_BIT (changed, i);
5164 /* Process the complex copy constraints. */
5165 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5167 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5169 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5171 if (!lhsvi->no_tbaa_pruning)
5173 lhsvi->no_tbaa_pruning = true;
5174 if (!TEST_BIT (changed, lhsvi->id))
5176 SET_BIT (changed, lhsvi->id);
5183 /* Propagate to all successors. */
5184 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5186 unsigned int to = find (j);
5187 varinfo_t tovi = get_varinfo (to);
5189 /* Don't propagate to ourselves. */
5193 if (!tovi->no_tbaa_pruning)
5195 tovi->no_tbaa_pruning = true;
5196 if (!TEST_BIT (changed, to))
5198 SET_BIT (changed, to);
5206 free_topo_info (ti);
5209 sbitmap_free (changed);
/* Finally, push the per-varinfo result onto the underlying decls so
   the RTL alias machinery honors it.  */
5213 for (i = 0; i < size; ++i)
5215 varinfo_t ivi = get_varinfo (i);
5216 varinfo_t ivip = get_varinfo (find (i));
5218 if (ivip->no_tbaa_pruning)
5220 tree var = ivi->decl;
5222 if (TREE_CODE (var) == SSA_NAME)
5223 var = SSA_NAME_VAR (var);
5225 if (POINTER_TYPE_P (TREE_TYPE (var)))
5227 DECL_NO_TBAA_P (var) = 1;
5229 /* Tell the RTL layer that this pointer can alias
5231 DECL_POINTER_ALIAS_SET (var) = 0;
5238 /* Create points-to sets for the current function. See the comments
5239 at the start of the file for an algorithmic overview. */
/* AI carries the alias information being built for this function;
   update_alias_info records escape/load/store facts into it.
   NOTE(review): several original lines are elided from this excerpt;
   only the visible statements are annotated.  */
5242 compute_points_to_sets (struct alias_info *ai)
5244 struct scc_info *si;
/* Bracket the whole analysis in the TV_TREE_PTA timevar (popped at
   the end of the function).  */
5247 timevar_push (TV_TREE_PTA);
5250 init_alias_heapvars ();
5252 intra_create_variable_infos ();
5254 /* Now walk all statements and derive aliases. */
5257 block_stmt_iterator bsi;
/* Phase 1: constraint generation.  Visit every PHI and statement,
   emitting inclusion constraints via find_func_aliases.  */
5260 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5262 if (is_gimple_reg (PHI_RESULT (phi)))
5264 find_func_aliases (phi);
5266 /* Update various related attributes like escaped
5267 addresses, pointer dereferences for loads and stores.
5268 This is used when creating name tags and alias
5270 update_alias_info (phi, ai);
/* No bsi_next here: the iterator is advanced conditionally below,
   since bsi_remove already moves it past a deleted statement.
   NOTE(review): the advancing branch is in elided lines -- confirm.  */
5274 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
5276 tree stmt = bsi_stmt (bsi);
5278 find_func_aliases (stmt);
5280 /* Update various related attributes like escaped
5281 addresses, pointer dereferences for loads and stores.
5282 This is used when creating name tags and alias
5284 update_alias_info (stmt, ai);
5286 /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
5287 been captured, and we can remove them. */
5288 if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
5289 bsi_remove (&bsi, true);
5298 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5299 dump_constraints (dump_file);
5304 "\nCollapsing static cycles and doing variable "
/* Phase 2: build and simplify the constraint graph.  The graph is
   sized at twice the variable count (extra FIRST_REF_NODE slots).  */
5307 init_graph (VEC_length (varinfo_t, varmap) * 2);
5310 fprintf (dump_file, "Building predecessor graph\n");
5311 build_pred_graph ();
5314 fprintf (dump_file, "Detecting pointer and location "
/* Offline variable substitution: detect pointer/location
   equivalences, then rewrite constraints in terms of the unified
   representatives.  */
5316 si = perform_var_substitution (graph);
5319 fprintf (dump_file, "Rewriting constraints and unifying "
5321 rewrite_constraints (graph, si);
5322 free_var_substitution_info (si);
5324 build_succ_graph ();
5325 move_complex_constraints (graph);
5328 fprintf (dump_file, "Uniting pointer but not location equivalent "
5330 unite_pointer_equivalences (graph);
5333 fprintf (dump_file, "Finding indirect cycles\n");
5334 find_indirect_cycles (graph);
5336 /* Implicit nodes and predecessors are no longer necessary at this
5338 remove_preds_and_fake_succs (graph);
5341 fprintf (dump_file, "Solving graph\n");
/* Phase 3: solve to a fixed point, then compute which variables
   must be exempted from TBAA pruning.  */
5343 solve_graph (graph);
5345 compute_tbaa_pruning ();
5348 dump_sa_points_to_info (dump_file);
/* Mark the alias information as available for consumers.  */
5350 have_alias_info = true;
5352 timevar_pop (TV_TREE_PTA);
5356 /* Delete created points-to sets. */
/* Tears down every structure built by compute_points_to_sets /
   ipa_pta_execute: hash tables, the constraint graph, the variable
   map, and the backing allocation pools.  NOTE(review): some lines
   (e.g. freeing graph->rep and the graph itself) are elided from
   this excerpt.  */
5359 delete_points_to_sets (void)
5363 htab_delete (shared_bitmap_table);
/* Optionally report how many distinct points-to sets were built.  */
5364 if (dump_file && (dump_flags & TDF_STATS))
5365 fprintf (dump_file, "Points to sets created:%d\n",
5366 stats.points_to_sets_created);
5368 pointer_map_destroy (vi_for_tree);
5369 bitmap_obstack_release (&pta_obstack);
5370 VEC_free (constraint_t, heap, constraints);
/* Free the per-node complex-constraint vectors before the array
   that holds them.  */
5372 for (i = 0; i < graph->size; i++)
5373 VEC_free (constraint_t, heap, graph->complex[i]);
5374 free (graph->complex);
5377 free (graph->succs);
5379 free (graph->pe_rep);
5380 free (graph->indirect_cycles);
5383 VEC_free (varinfo_t, heap, varmap);
5384 free_alloc_pool (variable_info_pool);
5385 free_alloc_pool (constraint_pool);
/* Consumers must no longer trust the (now freed) alias data.  */
5386 have_alias_info = false;
5389 /* Return true if we should execute IPA PTA. */
/* Whole-program points-to analysis only makes sense in unit-at-a-time
   mode; it is also skipped once any error has been diagnosed.
   NOTE(review): the function header line and the flag_ipa_pta check
   (if any) are elided from this excerpt.  */
5393 return (flag_unit_at_a_time != 0
5395 /* Don't bother doing anything if the program has errors. */
5396 && !(errorcount || sorrycount));
5399 /* Execute the driver for IPA PTA. */
/* Whole-program (interprocedural) points-to analysis: create
   per-function variable infos for every call-graph node, generate
   constraints from each analyzed function body, then build and solve
   one global constraint graph.  NOTE(review): several original lines
   are elided from this excerpt; only visible statements are
   annotated.  */
5401 ipa_pta_execute (void)
5403 struct cgraph_node *node;
5404 struct scc_info *si;
5407 init_alias_heapvars ();
/* Pass 1 over the call graph: create a variable info (one per
   parameter/result field) for each function.  */
5410 for (node = cgraph_nodes; node; node = node->next)
5412 if (!node->analyzed || cgraph_is_master_clone (node))
5416 varid = create_function_info_for (node->decl,
5417 cgraph_node_name (node));
/* Externally visible functions can be called from anywhere, so
   every field of their function info points to ANYTHING.  */
5418 if (node->local.externally_visible)
5420 varinfo_t fi = get_varinfo (varid);
5421 for (; fi; fi = fi->next)
5422 make_constraint_from_anything (fi);
/* Pass 2: generate constraints from the bodies of analyzed master
   clones, temporarily switching current_function_decl so the
   constraint builder sees the right function context.  */
5426 for (node = cgraph_nodes; node; node = node->next)
5428 if (node->analyzed && cgraph_is_master_clone (node))
5430 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
5432 tree old_func_decl = current_function_decl;
5435 "Generating constraints for %s\n",
5436 cgraph_node_name (node));
5438 current_function_decl = node->decl;
5440 FOR_EACH_BB_FN (bb, func)
5442 block_stmt_iterator bsi;
5445 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5447 if (is_gimple_reg (PHI_RESULT (phi)))
5449 find_func_aliases (phi);
5453 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5455 tree stmt = bsi_stmt (bsi);
5456 find_func_aliases (stmt);
/* Restore the caller's function context.  */
5459 current_function_decl = old_func_decl;
5464 /* Make point to anything. */
5470 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5471 dump_constraints (dump_file);
5476 "\nCollapsing static cycles and doing variable "
/* Graph construction and simplification -- same pipeline as the
   intraprocedural driver compute_points_to_sets, minus its dump
   messages: build, substitute, rewrite, unify, find cycles.  */
5479 init_graph (VEC_length (varinfo_t, varmap) * 2);
5480 build_pred_graph ();
5481 si = perform_var_substitution (graph);
5482 rewrite_constraints (graph, si);
5483 free_var_substitution_info (si);
5485 build_succ_graph ();
5486 move_complex_constraints (graph);
5487 unite_pointer_equivalences (graph);
5488 find_indirect_cycles (graph);
5490 /* Implicit nodes and predecessors are no longer necessary at this
5492 remove_preds_and_fake_succs (graph);
5495 fprintf (dump_file, "\nSolving graph\n");
5497 solve_graph (graph);
5500 dump_sa_points_to_info (dump_file);
/* IPA mode frees everything immediately; results are consumed via
   the dump only at this point in GCC's development.  */
5503 delete_alias_heapvars ();
5504 delete_points_to_sets ();
/* Pass descriptor registering IPA points-to analysis with the pass
   manager.  NOTE(review): several designator lines (pass name, sub,
   next, properties) are elided from this excerpt.  */
5508 struct simple_ipa_opt_pass pass_ipa_pta =
5513 gate_ipa_pta, /* gate */
5514 ipa_pta_execute, /* execute */
5517 0, /* static_pass_number */
5518 TV_IPA_PTA, /* tv_id */
5519 0, /* properties_required */
5520 0, /* properties_provided */
5521 0, /* properties_destroyed */
5522 0, /* todo_flags_start */
5523 TODO_update_ssa /* todo_flags_finish */
5527 /* Initialize the heapvar for statement mapping. */
/* Lazily creates the GC-managed hash table mapping statements to
   their synthetic heap variables; idempotent across repeated calls.
   NOTE(review): the remaining arguments of the htab_create_ggc call
   are on lines elided from this excerpt.  */
5529 init_alias_heapvars (void)
5531 if (!heapvar_for_stmt)
5532 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
/* Dispose of the statement->heapvar mapping created by
   init_alias_heapvars.  Nulling the pointer lets a later
   init_alias_heapvars call recreate it and prevents a dangling
   reference to the freed table.  */
5537 delete_alias_heapvars (void)
5539 htab_delete (heapvar_for_stmt);
5540 heapvar_for_stmt = NULL;
5544 #include "gt-tree-ssa-structalias.h"