1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
35 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "tree-inline.h"
42 #include "tree-gimple.h"
46 #include "tree-pass.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
51 #include "tree-ssa-structalias.h"
54 #include "pointer-set.h"
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
68 See "Efficient Field-sensitive pointer analysis for C" by "David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
73 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
93 Each variable for a structure field has
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize", that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
115 In order to solve the system of set constraints, the following is
118 1. Each constraint variable x has a solution set associated with it,
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
146 8. The process of walking the graph is iterated until no solution
149 Prior to walking the graph in steps 6 and 7, We perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
195 static struct constraint_stats
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
209 /* ID of this variable */
212 /* Name of this variable */
215 /* Tree that this variable is associated with. */
218 /* Offset of this variable, in bits, from the base variable */
219 unsigned HOST_WIDE_INT offset;
221 /* Size of the variable, in bits. */
222 unsigned HOST_WIDE_INT size;
224 /* Full size of the base variable, in bits. */
225 unsigned HOST_WIDE_INT fullsize;
227 /* A link to the variable for the next field in this structure. */
228 struct variable_info *next;
230 /* True if the variable is directly the target of a dereference.
231 This is used to track which variables are *actually* dereferenced
232 so we can prune their points to listed. */
233 unsigned int directly_dereferenced:1;
235 /* True if this is a variable created by the constraint analysis, such as
236 heap variables and constraints we had to break up. */
237 unsigned int is_artificial_var:1;
239 /* True if this is a special variable whose solution set should not be
241 unsigned int is_special_var:1;
243 /* True for variables whose size is not known or variable. */
244 unsigned int is_unknown_size_var:1;
246 /* True for variables that have unions somewhere in them. */
247 unsigned int has_union:1;
249 /* True if this is a heap variable. */
250 unsigned int is_heap_var:1;
252 /* True if we may not use TBAA to prune references to this
253 variable. This is used for C++ placement new. */
254 unsigned int no_tbaa_pruning : 1;
256 /* Points-to set for this variable. */
259 /* Old points-to set for this variable. */
262 /* Variable id this was collapsed to due to type unsafety. This
263 should be unused completely after build_succ_graph, or something
265 struct variable_info *collapsed_to;
267 typedef struct variable_info *varinfo_t;
269 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
271 /* Pool of variable info structures. */
272 static alloc_pool variable_info_pool;
274 DEF_VEC_P(varinfo_t);
276 DEF_VEC_ALLOC_P(varinfo_t, heap);
278 /* Table of variable info structures for constraint variables.
279 Indexed directly by variable info id. */
280 static VEC(varinfo_t,heap) *varmap;
282 /* Return the varmap element N */
284 static inline varinfo_t
285 get_varinfo (unsigned int n)
287 return VEC_index (varinfo_t, varmap, n);
290 /* Return the varmap element N, following the collapsed_to link. */
292 static inline varinfo_t
293 get_varinfo_fc (unsigned int n)
295 varinfo_t v = VEC_index (varinfo_t, varmap, n);
298 return v->collapsed_to;
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
305 static unsigned int anything_id;
307 /* Variable that represents the NULL pointer. */
308 static varinfo_t var_nothing;
309 static tree nothing_tree;
310 static unsigned int nothing_id;
312 /* Variable that represents read only memory. */
313 static varinfo_t var_readonly;
314 static tree readonly_tree;
315 static unsigned int readonly_id;
317 /* Variable that represents integers. This is used for when people do things
319 static varinfo_t var_integer;
320 static tree integer_tree;
321 static unsigned int integer_id;
323 /* Lookup a heap var for FROM, and return it if we find one. */
326 heapvar_lookup (tree from)
328 struct tree_map *h, in;
331 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
332 htab_hash_pointer (from));
338 /* Insert a mapping FROM->TO in the heap var for statement
342 heapvar_insert (tree from, tree to)
347 h = GGC_NEW (struct tree_map);
348 h->hash = htab_hash_pointer (from);
351 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
352 *(struct tree_map **) loc = h;
355 /* Return a new variable info structure consisting for a variable
356 named NAME, and using constraint graph node NODE. */
359 new_var_info (tree t, unsigned int id, const char *name)
361 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
367 ret->directly_dereferenced = false;
368 ret->is_artificial_var = false;
369 ret->is_heap_var = false;
370 ret->is_special_var = false;
371 ret->is_unknown_size_var = false;
372 ret->has_union = false;
374 if (TREE_CODE (var) == SSA_NAME)
375 var = SSA_NAME_VAR (var);
376 ret->no_tbaa_pruning = (DECL_P (var)
377 && POINTER_TYPE_P (TREE_TYPE (var))
378 && DECL_NO_TBAA_P (var));
379 ret->solution = BITMAP_ALLOC (&pta_obstack);
380 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
382 ret->collapsed_to = NULL;
386 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
388 /* An expression that appears in a constraint. */
390 struct constraint_expr
392 /* Constraint type. */
393 constraint_expr_type type;
395 /* Variable we are referring to in the constraint. */
398 /* Offset, in bits, of this constraint from the beginning of
399 variables it ends up referring to.
401 IOW, in a deref constraint, we would deref, get the result set,
402 then add OFFSET to each member. */
403 unsigned HOST_WIDE_INT offset;
406 typedef struct constraint_expr ce_s;
408 DEF_VEC_ALLOC_O(ce_s, heap);
409 static void get_constraint_for (tree, VEC(ce_s, heap) **);
410 static void do_deref (VEC (ce_s, heap) **);
412 /* Our set constraints are made up of two constraint expressions, one
415 As described in the introduction, our set constraints each represent an
416 operation between set valued variables.
420 struct constraint_expr lhs;
421 struct constraint_expr rhs;
424 /* List of constraints that we use to build the constraint graph from. */
426 static VEC(constraint_t,heap) *constraints;
427 static alloc_pool constraint_pool;
431 DEF_VEC_ALLOC_I(int, heap);
433 /* The constraint graph is represented as an array of bitmaps
434 containing successor nodes. */
436 struct constraint_graph
438 /* Size of this graph, which may be different than the number of
439 nodes in the variable map. */
442 /* Explicit successors of each node. */
445 /* Implicit predecessors of each node (Used for variable
447 bitmap *implicit_preds;
449 /* Explicit predecessors of each node (Used for variable substitution). */
452 /* Indirect cycle representatives, or -1 if the node has no indirect
454 int *indirect_cycles;
456 /* Representative node for a node. rep[a] == a unless the node has
460 /* Equivalence class representative for a label. This is used for
461 variable substitution. */
464 /* Pointer equivalence label for a node. All nodes with the same
465 pointer equivalence label can be unified together at some point
466 (either during constraint optimization or after the constraint
470 /* Pointer equivalence representative for a label. This is used to
471 handle nodes that are pointer equivalent but not location
472 equivalent. We can unite these once the addressof constraints
473 are transformed into initial points-to sets. */
476 /* Pointer equivalence label for each node, used during variable
478 unsigned int *pointer_label;
480 /* Location equivalence label for each node, used during location
481 equivalence finding. */
482 unsigned int *loc_label;
484 /* Pointed-by set for each node, used during location equivalence
485 finding. This is pointed-by rather than pointed-to, because it
486 is constructed using the predecessor graph. */
489 /* Points to sets for pointer equivalence. This is *not* the actual
490 points-to sets for nodes. */
493 /* Bitmap of nodes where the bit is set if the node is a direct
494 node. Used for variable substitution. */
495 sbitmap direct_nodes;
497 /* Bitmap of nodes where the bit is set if the node is address
498 taken. Used for variable substitution. */
499 bitmap address_taken;
501 /* True if points_to bitmap for this node is stored in the hash
505 /* Number of incoming edges remaining to be processed by pointer
507 Used for variable substitution. */
508 unsigned int *number_incoming;
511 /* Vector of complex constraints for each graph node. Complex
512 constraints are those involving dereferences or offsets that are
514 VEC(constraint_t,heap) **complex;
517 static constraint_graph_t graph;
519 /* During variable substitution and the offline version of indirect
520 cycle finding, we create nodes to represent dereferences and
521 address taken constraints. These represent where these start and
523 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
524 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
526 /* Return the representative node for NODE, if NODE has been unioned
528 This function performs path compression along the way to finding
529 the representative. */
532 find (unsigned int node)
534 gcc_assert (node < graph->size);
535 if (graph->rep[node] != node)
536 return graph->rep[node] = find (graph->rep[node]);
540 /* Union the TO and FROM nodes to the TO nodes.
541 Note that at some point in the future, we may want to do
542 union-by-rank, in which case we are going to have to return the
543 node we unified to. */
546 unite (unsigned int to, unsigned int from)
548 gcc_assert (to < graph->size && from < graph->size);
549 if (to != from && graph->rep[from] != to)
551 graph->rep[from] = to;
557 /* Create a new constraint consisting of LHS and RHS expressions. */
560 new_constraint (const struct constraint_expr lhs,
561 const struct constraint_expr rhs)
563 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
569 /* Print out constraint C to FILE. */
572 dump_constraint (FILE *file, constraint_t c)
574 if (c->lhs.type == ADDRESSOF)
576 else if (c->lhs.type == DEREF)
578 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
579 if (c->lhs.offset != 0)
580 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
581 fprintf (file, " = ");
582 if (c->rhs.type == ADDRESSOF)
584 else if (c->rhs.type == DEREF)
586 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
587 if (c->rhs.offset != 0)
588 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
589 fprintf (file, "\n");
592 /* Print out constraint C to stderr. */
595 debug_constraint (constraint_t c)
597 dump_constraint (stderr, c);
600 /* Print out all constraints to FILE */
603 dump_constraints (FILE *file)
607 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
608 dump_constraint (file, c);
611 /* Print out all constraints to stderr. */
614 debug_constraints (void)
616 dump_constraints (stderr);
621 The solver is a simple worklist solver, that works on the following
624 sbitmap changed_nodes = all zeroes;
626 For each node that is not already collapsed:
628 set bit in changed nodes
630 while (changed_count > 0)
632 compute topological ordering for constraint graph
634 find and collapse cycles in the constraint graph (updating
635 changed if necessary)
637 for each node (n) in the graph in topological order:
640 Process each complex constraint associated with the node,
641 updating changed if necessary.
643 For each outgoing edge from n, propagate the solution from n to
644 the destination of the edge, updating changed as necessary.
648 /* Return true if two constraint expressions A and B are equal. */
651 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
653 return a.type == b.type && a.var == b.var && a.offset == b.offset;
656 /* Return true if constraint expression A is less than constraint expression
657 B. This is just arbitrary, but consistent, in order to give them an
661 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
663 if (a.type == b.type)
666 return a.offset < b.offset;
668 return a.var < b.var;
671 return a.type < b.type;
674 /* Return true if constraint A is less than constraint B. This is just
675 arbitrary, but consistent, in order to give them an ordering. */
678 constraint_less (const constraint_t a, const constraint_t b)
680 if (constraint_expr_less (a->lhs, b->lhs))
682 else if (constraint_expr_less (b->lhs, a->lhs))
685 return constraint_expr_less (a->rhs, b->rhs);
688 /* Return true if two constraints A and B are equal. */
691 constraint_equal (struct constraint a, struct constraint b)
693 return constraint_expr_equal (a.lhs, b.lhs)
694 && constraint_expr_equal (a.rhs, b.rhs);
698 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
701 constraint_vec_find (VEC(constraint_t,heap) *vec,
702 struct constraint lookfor)
710 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
711 if (place >= VEC_length (constraint_t, vec))
713 found = VEC_index (constraint_t, vec, place);
714 if (!constraint_equal (*found, lookfor))
719 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
722 constraint_set_union (VEC(constraint_t,heap) **to,
723 VEC(constraint_t,heap) **from)
728 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
730 if (constraint_vec_find (*to, *c) == NULL)
732 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
734 VEC_safe_insert (constraint_t, heap, *to, place, c);
739 /* Take a solution set SET, add OFFSET to each member of the set, and
740 overwrite SET with the result when done. */
743 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
745 bitmap result = BITMAP_ALLOC (&iteration_obstack);
749 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
751 /* If this is a properly sized variable, only add offset if it's
752 less than end. Otherwise, it is globbed to a single
755 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
757 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
758 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
761 bitmap_set_bit (result, v->id);
763 else if (get_varinfo (i)->is_artificial_var
764 || get_varinfo (i)->has_union
765 || get_varinfo (i)->is_unknown_size_var)
767 bitmap_set_bit (result, i);
771 bitmap_copy (set, result);
772 BITMAP_FREE (result);
775 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
779 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
782 return bitmap_ior_into (to, from);
788 tmp = BITMAP_ALLOC (&iteration_obstack);
789 bitmap_copy (tmp, from);
790 solution_set_add (tmp, inc);
791 res = bitmap_ior_into (to, tmp);
797 /* Insert constraint C into the list of complex constraints for graph
801 insert_into_complex (constraint_graph_t graph,
802 unsigned int var, constraint_t c)
804 VEC (constraint_t, heap) *complex = graph->complex[var];
805 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
808 /* Only insert constraints that do not already exist. */
809 if (place >= VEC_length (constraint_t, complex)
810 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
811 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
815 /* Condense two variable nodes into a single variable node, by moving
816 all associated info from SRC to TO. */
819 merge_node_constraints (constraint_graph_t graph, unsigned int to,
825 gcc_assert (find (from) == to);
827 /* Move all complex constraints from src node into to node */
828 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
830 /* In complex constraints for node src, we may have either
831 a = *src, and *src = a, or an offseted constraint which are
832 always added to the rhs node's constraints. */
834 if (c->rhs.type == DEREF)
836 else if (c->lhs.type == DEREF)
841 constraint_set_union (&graph->complex[to], &graph->complex[from]);
842 VEC_free (constraint_t, heap, graph->complex[from]);
843 graph->complex[from] = NULL;
847 /* Remove edges involving NODE from GRAPH. */
850 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
852 if (graph->succs[node])
853 BITMAP_FREE (graph->succs[node]);
856 /* Merge GRAPH nodes FROM and TO into node TO. */
859 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
862 if (graph->indirect_cycles[from] != -1)
864 /* If we have indirect cycles with the from node, and we have
865 none on the to node, the to node has indirect cycles from the
866 from node now that they are unified.
867 If indirect cycles exist on both, unify the nodes that they
868 are in a cycle with, since we know they are in a cycle with
870 if (graph->indirect_cycles[to] == -1)
871 graph->indirect_cycles[to] = graph->indirect_cycles[from];
874 /* Merge all the successor edges. */
875 if (graph->succs[from])
877 if (!graph->succs[to])
878 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
879 bitmap_ior_into (graph->succs[to],
883 clear_edges_for_node (graph, from);
887 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
888 it doesn't exist in the graph already. */
891 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
897 if (!graph->implicit_preds[to])
898 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
900 if (!bitmap_bit_p (graph->implicit_preds[to], from))
902 stats.num_implicit_edges++;
903 bitmap_set_bit (graph->implicit_preds[to], from);
907 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
908 it doesn't exist in the graph already.
909 Return false if the edge already existed, true otherwise. */
912 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
915 if (!graph->preds[to])
916 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
917 if (!bitmap_bit_p (graph->preds[to], from))
918 bitmap_set_bit (graph->preds[to], from);
921 /* Add a graph edge to GRAPH, going from FROM to TO if
922 it doesn't exist in the graph already.
923 Return false if the edge already existed, true otherwise. */
926 add_graph_edge (constraint_graph_t graph, unsigned int to,
937 if (!graph->succs[from])
938 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
939 if (!bitmap_bit_p (graph->succs[from], to))
942 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
944 bitmap_set_bit (graph->succs[from], to);
951 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
954 valid_graph_edge (constraint_graph_t graph, unsigned int src,
957 return (graph->succs[dest]
958 && bitmap_bit_p (graph->succs[dest], src));
961 /* Initialize the constraint graph structure to contain SIZE nodes. */
964 init_graph (unsigned int size)
968 graph = XCNEW (struct constraint_graph);
970 graph->succs = XCNEWVEC (bitmap, graph->size);
971 graph->indirect_cycles = XNEWVEC (int, graph->size);
972 graph->rep = XNEWVEC (unsigned int, graph->size);
973 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
974 graph->pe = XCNEWVEC (unsigned int, graph->size);
975 graph->pe_rep = XNEWVEC (int, graph->size);
977 for (j = 0; j < graph->size; j++)
980 graph->pe_rep[j] = -1;
981 graph->indirect_cycles[j] = -1;
985 /* Build the constraint graph, adding only predecessor edges right now. */
988 build_pred_graph (void)
994 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
995 graph->preds = XCNEWVEC (bitmap, graph->size);
996 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
997 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
998 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
999 graph->points_to = XCNEWVEC (bitmap, graph->size);
1000 graph->eq_rep = XNEWVEC (int, graph->size);
1001 graph->direct_nodes = sbitmap_alloc (graph->size);
1002 graph->pt_used = sbitmap_alloc (graph->size);
1003 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1004 graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
1005 sbitmap_zero (graph->direct_nodes);
1006 sbitmap_zero (graph->pt_used);
1008 for (j = 0; j < FIRST_REF_NODE; j++)
1010 if (!get_varinfo (j)->is_special_var)
1011 SET_BIT (graph->direct_nodes, j);
1014 for (j = 0; j < graph->size; j++)
1015 graph->eq_rep[j] = -1;
1017 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1018 graph->indirect_cycles[j] = -1;
1020 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1022 struct constraint_expr lhs = c->lhs;
1023 struct constraint_expr rhs = c->rhs;
1024 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1025 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1027 if (lhs.type == DEREF)
1030 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1031 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1033 else if (rhs.type == DEREF)
1036 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1037 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1039 RESET_BIT (graph->direct_nodes, lhsvar);
1041 else if (rhs.type == ADDRESSOF)
1044 if (graph->points_to[lhsvar] == NULL)
1045 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1046 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1048 if (graph->pointed_by[rhsvar] == NULL)
1049 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1050 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1052 /* Implicitly, *x = y */
1053 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1055 RESET_BIT (graph->direct_nodes, rhsvar);
1056 bitmap_set_bit (graph->address_taken, rhsvar);
1058 else if (lhsvar > anything_id
1059 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1062 add_pred_graph_edge (graph, lhsvar, rhsvar);
1063 /* Implicitly, *x = *y */
1064 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1065 FIRST_REF_NODE + rhsvar);
1067 else if (lhs.offset != 0 || rhs.offset != 0)
1069 if (rhs.offset != 0)
1070 RESET_BIT (graph->direct_nodes, lhs.var);
1071 else if (lhs.offset != 0)
1072 RESET_BIT (graph->direct_nodes, rhs.var);
1077 /* Build the constraint graph, adding successor edges. */
1080 build_succ_graph (void)
1085 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1087 struct constraint_expr lhs;
1088 struct constraint_expr rhs;
1089 unsigned int lhsvar;
1090 unsigned int rhsvar;
1097 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1098 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1100 if (lhs.type == DEREF)
1102 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1103 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1105 else if (rhs.type == DEREF)
1107 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1108 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1110 else if (rhs.type == ADDRESSOF)
1113 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1114 == get_varinfo_fc (rhs.var)->id);
1115 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1117 else if (lhsvar > anything_id
1118 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1120 add_graph_edge (graph, lhsvar, rhsvar);
1126 /* Changed variables on the last iteration. */
1127 static unsigned int changed_count;
1128 static sbitmap changed;
1130 DEF_VEC_I(unsigned);
1131 DEF_VEC_ALLOC_I(unsigned,heap);
1134 /* Strongly Connected Component visitation info. */
1141 unsigned int *node_mapping;
1143 VEC(unsigned,heap) *scc_stack;
1147 /* Recursive routine to find strongly connected components in GRAPH.
1148 SI is the SCC info to store the information in, and N is the id of current
1149 graph node we are processing.
1151 This is Tarjan's strongly connected component finding algorithm, as
1152 modified by Nuutila to keep only non-root nodes on the stack.
1153 The algorithm can be found in "On finding the strongly connected
1154 connected components in a directed graph" by Esko Nuutila and Eljas
1155 Soisalon-Soininen, in Information Processing Letters volume 49,
1156 number 1, pages 9-14. */
1159 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1163 unsigned int my_dfs;
1165 SET_BIT (si->visited, n);
1166 si->dfs[n] = si->current_index ++;
1167 my_dfs = si->dfs[n];
1169 /* Visit all the successors. */
1170 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1174 if (i > LAST_REF_NODE)
1178 if (TEST_BIT (si->deleted, w))
1181 if (!TEST_BIT (si->visited, w))
1182 scc_visit (graph, si, w);
1184 unsigned int t = find (w);
1185 unsigned int nnode = find (n);
1186 gcc_assert (nnode == n);
1188 if (si->dfs[t] < si->dfs[nnode])
1189 si->dfs[n] = si->dfs[t];
1193 /* See if any components have been identified. */
1194 if (si->dfs[n] == my_dfs)
1196 if (VEC_length (unsigned, si->scc_stack) > 0
1197 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1199 bitmap scc = BITMAP_ALLOC (NULL);
1200 bool have_ref_node = n >= FIRST_REF_NODE;
1201 unsigned int lowest_node;
1204 bitmap_set_bit (scc, n);
1206 while (VEC_length (unsigned, si->scc_stack) != 0
1207 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1209 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1211 bitmap_set_bit (scc, w);
1212 if (w >= FIRST_REF_NODE)
1213 have_ref_node = true;
1216 lowest_node = bitmap_first_set_bit (scc);
1217 gcc_assert (lowest_node < FIRST_REF_NODE);
1219 /* Collapse the SCC nodes into a single node, and mark the
1221 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1223 if (i < FIRST_REF_NODE)
1225 if (unite (lowest_node, i))
1226 unify_nodes (graph, lowest_node, i, false);
1230 unite (lowest_node, i);
1231 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1235 SET_BIT (si->deleted, n);
1238 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1241 /* Unify node FROM into node TO, updating the changed count if
1242 necessary when UPDATE_CHANGED is true. */
1245 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1246 bool update_changed)
/* TO must already be its own representative in the union-find
   structure; FROM is folded into it.  */
1249 gcc_assert (to != from && find (to) == to);
1250 if (dump_file && (dump_flags & TDF_DETAILS))
1251 fprintf (dump_file, "Unifying %s to %s\n",
1252 get_varinfo (from)->name,
1253 get_varinfo (to)->name);
1256 stats.unified_vars_dynamic++;
1258 stats.unified_vars_static++;
/* Merge graph edges and complex constraints from FROM into TO.  */
1260 merge_graph_nodes (graph, to, from);
1261 merge_node_constraints (graph, to, from);
/* no_tbaa_pruning is sticky: if either member of the merged node had
   it set, the result must keep it.  */
1263 if (get_varinfo (from)->no_tbaa_pruning)
1264 get_varinfo (to)->no_tbaa_pruning = true;
1266 /* Mark TO as changed if FROM was changed.  If TO was already marked
1267 as changed, decrease the changed count. */
1269 if (update_changed && TEST_BIT (changed, from))
1271 RESET_BIT (changed, from);
1272 if (!TEST_BIT (changed, to))
1273 SET_BIT (changed, to);
1276 gcc_assert (changed_count > 0);
1280 if (get_varinfo (from)->solution)
1282 /* If the solution changes because of the merging, we need to mark
1283 the variable as changed. */
1284 if (bitmap_ior_into (get_varinfo (to)->solution,
1285 get_varinfo (from)->solution))
1287 if (update_changed && !TEST_BIT (changed, to))
1289 SET_BIT (changed, to);
/* FROM's solution bitmaps are dead once merged into TO.  */
1294 BITMAP_FREE (get_varinfo (from)->solution);
1295 BITMAP_FREE (get_varinfo (from)->oldsolution);
1297 if (stats.iterations > 0)
1299 BITMAP_FREE (get_varinfo (to)->oldsolution);
1300 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
/* A self-edge on the merged node is useless; remove it.  */
1303 if (valid_graph_edge (graph, to, to))
1305 if (graph->succs[to])
1306 bitmap_clear_bit (graph->succs[to], to);
1310 /* Information needed to compute the topological ordering of a graph. */
1314 /* sbitmap of visited nodes. */
1316 /* Array that stores the topological order of the graph, *in
/* Filled by topo_visit; consumed in reverse (via VEC_pop) by
   solve_graph.  */
1318 VEC(unsigned,heap) *topo_order;
1322 /* Initialize and return a topological info structure. */
1324 static struct topo_info *
1325 init_topo_info (void)
/* Sized to the current constraint graph; caller frees with
   free_topo_info.  */
1327 size_t size = graph->size;
1328 struct topo_info *ti = XNEW (struct topo_info);
1329 ti->visited = sbitmap_alloc (size);
1330 sbitmap_zero (ti->visited);
1331 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1336 /* Free the topological sort info pointed to by TI. */
1339 free_topo_info (struct topo_info *ti)
1341 sbitmap_free (ti->visited)_
1342 VEC_free (unsigned, heap, ti->topo_order);
1346 /* Visit the graph in topological order, and store the order in the
1347 topo_info structure. */
1350 topo_visit (constraint_graph_t graph, struct topo_info *ti,
/* Standard DFS post-order: a node is pushed only after all its
   successors have been visited.  */
1356 SET_BIT (ti->visited, n);
1358 if (graph->succs[n])
1359 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1361 if (!TEST_BIT (ti->visited, j))
1362 topo_visit (graph, ti, j);
1365 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1368 /* Return true if variable N + OFFSET is a legal field of N. */
1371 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1373 varinfo_t ninfo = get_varinfo (n);
1375 /* For things we've globbed to single variables, any offset into the
1376 variable acts like the entire variable, so that it becomes offset
1378 if (ninfo->is_special_var
1379 || ninfo->is_artificial_var
1380 || ninfo->is_unknown_size_var)
/* Otherwise the offset is only legal if it stays within the
   variable's full size.  */
1385 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1388 /* Process a constraint C that represents x = *y, using DELTA as the
1389 starting solution. */
1392 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1395 unsigned int lhs = c->lhs.var;
1397 bitmap sol = get_varinfo (lhs)->solution;
/* If y points to ANYTHING, x points to ANYTHING too; nothing more
   precise can be derived.  */
1401 if (bitmap_bit_p (delta, anything_id))
1403 flag = !bitmap_bit_p (sol, anything_id);
1405 bitmap_set_bit (sol, anything_id);
1408 /* For each variable j in delta (Sol(y)), add
1409 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1410 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1412 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1413 if (type_safe (j, &roffset))
/* Resolve j + offset to the variable for the field actually being
   dereferenced.  */
1416 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1419 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1424 /* Adding edges from the special vars is pointless.
1425 They don't have sets that can change. */
1426 if (get_varinfo (t) ->is_special_var)
1427 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
/* add_graph_edge returns true for a new edge; only then do we need
   to pull in t's solution here (otherwise propagation handles it).  */
1428 else if (add_graph_edge (graph, lhs, t))
1429 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1434 /* If the LHS solution changed, mark the var as changed. */
1437 get_varinfo (lhs)->solution = sol;
1438 if (!TEST_BIT (changed, lhs))
1440 SET_BIT (changed, lhs);
1446 /* Process a constraint C that represents *x = y. */
1449 do_ds_constraint (constraint_t c, bitmap delta)
1451 unsigned int rhs = c->rhs.var;
1452 bitmap sol = get_varinfo (rhs)->solution;
/* If y points to ANYTHING, then everything x may point to becomes
   ANYTHING as well.  */
1456 if (bitmap_bit_p (sol, anything_id))
1458 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1460 varinfo_t jvi = get_varinfo (j);
1462 unsigned int loff = c->lhs.offset;
1463 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1466 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
/* Only set the bit (and re-queue t) if it was not already
   ANYTHING, to avoid useless churn.  */
1471 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1473 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1474 if (!TEST_BIT (changed, t))
1476 SET_BIT (changed, t);
1484 /* For each member j of delta (Sol(x)), add an edge from y to j and
1485 union Sol(y) into Sol(j) */
1486 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1488 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1489 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1493 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1496 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1500 tmp = get_varinfo (t)->solution;
1502 if (set_union_with_increment (tmp, sol, 0))
1504 get_varinfo (t)->solution = tmp;
/* Re-fetch SOL: the union above may have changed RHS's solution
   bitmap (e.g. if t aliases rhs).  */
1506 sol = get_varinfo (rhs)->solution;
1507 if (!TEST_BIT (changed, t))
1509 SET_BIT (changed, t);
1517 /* Handle a non-simple (simple meaning requires no iteration),
1518 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1521 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
/* Dispatch on the constraint shape: *x = ... forms first.  */
1523 if (c->lhs.type == DEREF)
1525 if (c->rhs.type == ADDRESSOF)
1532 do_ds_constraint (c, delta);
1535 else if (c->rhs.type == DEREF)
/* x = *y.  Skip if the LHS is a special variable (its solution is
   fixed).  */
1538 if (!(get_varinfo (c->lhs.var)->is_special_var))
1539 do_sd_constraint (graph, c, delta);
/* Remaining case: x = y with a non-zero offset involved.  */
1547 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1548 solution = get_varinfo (c->rhs.var)->solution;
1549 tmp = get_varinfo (c->lhs.var)->solution;
1551 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1555 get_varinfo (c->lhs.var)->solution = tmp;
1556 if (!TEST_BIT (changed, c->lhs.var))
1558 SET_BIT (changed, c->lhs.var);
1565 /* Initialize and return a new SCC info structure. */
1567 static struct scc_info *
1568 init_scc_info (size_t size)
1570 struct scc_info *si = XNEW (struct scc_info);
1573 si->current_index = 0;
1574 si->visited = sbitmap_alloc (size);
1575 sbitmap_zero (si->visited);
1576 si->deleted = sbitmap_alloc (size);
1577 sbitmap_zero (si->deleted);
1578 si->node_mapping = XNEWVEC (unsigned int, size);
1579 si->dfs = XCNEWVEC (unsigned int, size);
/* Initially every node maps to itself; SCC collapsing updates this.  */
1581 for (i = 0; i < size; i++)
1582 si->node_mapping[i] = i;
1584 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1588 /* Free an SCC info structure pointed to by SI */
1591 free_scc_info (struct scc_info *si)
1593 sbitmap_free (si->visited);
1594 sbitmap_free (si->deleted);
1595 free (si->node_mapping);
1597 VEC_free (unsigned, heap, si->scc_stack);
1602 /* Find indirect cycles in GRAPH that occur, using strongly connected
1603 components, and note them in the indirect cycles map.
1605 This technique comes from Ben Hardekopf and Calvin Lin,
1606 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1607 Lines of Code", submitted to PLDI 2007. */
1610 find_indirect_cycles (constraint_graph_t graph)
1613 unsigned int size = graph->size;
1614 struct scc_info *si = init_scc_info (size);
/* Only visit representative, non-REF nodes; scc_visit reaches the
   rest.  */
1616 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1617 if (!TEST_BIT (si->visited, i) && find (i) == i)
1618 scc_visit (graph, si, i);
1623 /* Compute a topological ordering for GRAPH, and store the result in the
1624 topo_info structure TI. */
1627 compute_topo_order (constraint_graph_t graph,
1628 struct topo_info *ti)
1631 unsigned int size = graph->size;
/* Start a DFS from every unvisited representative node.  */
1633 for (i = 0; i != size; ++i)
1634 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1635 topo_visit (graph, ti, i);
1638 /* Structure used to for hash value numbering of pointer equivalence
1641 typedef struct equiv_class_label
1643 unsigned int equivalence_class;
1646 } *equiv_class_label_t;
1647 typedef const struct equiv_class_label *const_equiv_class_label_t;
1649 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1651 static htab_t pointer_equiv_class_table;
1653 /* A hashtable for mapping a bitmap of labels->location equivalence
1655 static htab_t location_equiv_class_table;
1657 /* Hash function for a equiv_class_label_t */
1660 equiv_class_label_hash (const void *p)
1662 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
/* Hash was precomputed from the label bitmap when the entry was
   built; just return it.  */
1663 return ecl->hashcode;
1666 /* Equality function for two equiv_class_label_t's. */
1669 equiv_class_label_eq (const void *p1, const void *p2)
1671 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1672 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
/* Two entries are equal iff their label bitmaps are identical.  */
1673 return bitmap_equal_p (eql1->labels, eql2->labels);
1676 /* Lookup a equivalence class in TABLE by the bitmap of LABELS it
1680 equiv_class_lookup (htab_t table, bitmap labels)
/* Build a stack-allocated probe key; NO_INSERT means a miss leaves
   the table untouched.  */
1683 struct equiv_class_label ecl;
1685 ecl.labels = labels;
1686 ecl.hashcode = bitmap_hash (labels);
1688 slot = htab_find_slot_with_hash (table, &ecl,
1689 ecl.hashcode, NO_INSERT);
1693 return ((equiv_class_label_t) *slot)->equivalence_class;
1697 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1701 equiv_class_add (htab_t table, unsigned int equivalence_class,
/* Heap-allocate the entry; the table owns it (freed via the table's
   free function).  */
1705 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1707 ecl->labels = labels;
1708 ecl->equivalence_class = equivalence_class;
1709 ecl->hashcode = bitmap_hash (labels);
1711 slot = htab_find_slot_with_hash (table, ecl,
1712 ecl->hashcode, INSERT);
/* Caller must only add classes that are not yet in the table.  */
1713 gcc_assert (!*slot);
1714 *slot = (void *) ecl;
1717 /* Perform offline variable substitution.
1719 This is a worst case quadratic time way of identifying variables
1720 that must have equivalent points-to sets, including those caused by
1721 static cycles, and single entry subgraphs, in the constraint graph.
1723 The technique is described in "Exploiting Pointer and Location
1724 Equivalence to Optimize Pointer Analysis. In the 14th International
1725 Static Analysis Symposium (SAS), August 2007." It is known as the
1726 "HU" algorithm, and is equivalent to value numbering the collapsed
1727 constraint graph including evaluating unions.
1729 The general method of finding equivalence classes is as follows:
1730 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1731 Initialize all non-REF nodes to be direct nodes.
1732 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1734 For each constraint containing the dereference, we also do the same
1737 We then compute SCC's in the graph and unify nodes in the same SCC,
1740 For each non-collapsed node x:
1741 Visit all unvisited explicit incoming edges.
1742 Ignoring all non-pointers, set pts(x) = Union of pts(y) for each y
1744 Lookup the equivalence class for pts(x).
1745 If we found one, equivalence_class(x) = found class.
1746 Otherwise, equivalence_class(x) = new class, and new_class is
1747 added to the lookup table.
1749 All direct nodes with the same equivalence class can be replaced
1750 with a single representative node.
1751 All unlabeled nodes (label == 0) are not pointers and all edges
1752 involving them can be eliminated.
1753 We perform these optimizations during rewrite_constraints
1755 In addition to pointer equivalence class finding, we also perform
1756 location equivalence class finding. This is the set of variables
1757 that always appear together in points-to sets. We use this to
1758 compress the size of the points-to sets. */
1760 /* Current maximum pointer equivalence class id. */
1761 static int pointer_equiv_class;
1763 /* Current maximum location equivalence class id. */
1764 static int location_equiv_class;
1766 /* Recursive routine to find strongly connected components in GRAPH,
1767 and label its nodes with DFS numbers. */
1770 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1774 unsigned int my_dfs;
/* Tarjan-style SCC discovery over the *predecessor* graph; N must be
   its own representative on entry.  */
1776 gcc_assert (si->node_mapping[n] == n);
1777 SET_BIT (si->visited, n);
1778 si->dfs[n] = si->current_index ++;
1779 my_dfs = si->dfs[n];
1781 /* Visit all the successors. */
1782 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1784 unsigned int w = si->node_mapping[i];
1786 if (TEST_BIT (si->deleted, w))
1789 if (!TEST_BIT (si->visited, w))
1790 condense_visit (graph, si, w);
1792 unsigned int t = si->node_mapping[w];
1793 unsigned int nnode = si->node_mapping[n];
1794 gcc_assert (nnode == n);
/* Propagate the lowest reachable DFS number up to N (lowlink).  */
1796 if (si->dfs[t] < si->dfs[nnode])
1797 si->dfs[n] = si->dfs[t];
1801 /* Visit all the implicit predecessors. */
1802 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1804 unsigned int w = si->node_mapping[i];
1806 if (TEST_BIT (si->deleted, w))
1809 if (!TEST_BIT (si->visited, w))
1810 condense_visit (graph, si, w);
1812 unsigned int t = si->node_mapping[w];
1813 unsigned int nnode = si->node_mapping[n];
1814 gcc_assert (nnode == n);
1816 if (si->dfs[t] < si->dfs[nnode])
1817 si->dfs[n] = si->dfs[t];
1821 /* See if any components have been identified. */
1822 if (si->dfs[n] == my_dfs)
/* N is the root of an SCC: pop the component off the stack and
   collapse every member into N.  */
1824 while (VEC_length (unsigned, si->scc_stack) != 0
1825 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1827 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1828 si->node_mapping[w] = n;
/* Directness is sticky-false: one indirect member makes the whole
   collapsed node indirect.  */
1830 if (!TEST_BIT (graph->direct_nodes, w))
1831 RESET_BIT (graph->direct_nodes, n);
1833 /* Unify our nodes. */
1834 if (graph->preds[w])
1836 if (!graph->preds[n])
1837 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1838 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1840 if (graph->implicit_preds[w])
1842 if (!graph->implicit_preds[n])
1843 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1844 bitmap_ior_into (graph->implicit_preds[n],
1845 graph->implicit_preds[w]);
1847 if (graph->points_to[w])
1849 if (!graph->points_to[n])
1850 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1851 bitmap_ior_into (graph->points_to[n],
1852 graph->points_to[w]);
/* Count incoming edges per representative, used later by
   label_visit to free points-to sets early.  */
1854 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1856 unsigned int rep = si->node_mapping[i];
1857 graph->number_incoming[rep]++;
1860 SET_BIT (si->deleted, n);
1863 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1866 /* Label pointer equivalences. */
1869 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1873 SET_BIT (si->visited, n);
1875 if (!graph->points_to[n])
1876 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1878 /* Label and union our incoming edges' points-to sets. */
1879 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1881 unsigned int w = si->node_mapping[i];
1882 if (!TEST_BIT (si->visited, w))
1883 label_visit (graph, si, w);
1885 /* Skip unused edges */
1886 if (w == n || graph->pointer_label[w] == 0)
/* Still account for the skipped edge in the incoming count.  */
1888 graph->number_incoming[w]--;
1891 if (graph->points_to[w])
1892 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
1894 /* If all incoming edges to w have been processed and
1895 graph->points_to[w] was not stored in the hash table, we can
1897 graph->number_incoming[w]--;
1898 if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
1900 BITMAP_FREE (graph->points_to[w]);
1903 /* Indirect nodes get fresh variables. */
1904 if (!TEST_BIT (graph->direct_nodes, n))
1905 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
/* An empty points-to set means label 0, i.e. a non-pointer node.  */
1907 if (!bitmap_empty_p (graph->points_to[n]))
1909 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
1910 graph->points_to[n]);
/* No existing class for this set: mint a new label and remember the
   bitmap is now referenced by the hash table.  */
1913 SET_BIT (graph->pt_used, n);
1914 label = pointer_equiv_class++;
1915 equiv_class_add (pointer_equiv_class_table,
1916 label, graph->points_to[n]);
1918 graph->pointer_label[n] = label;
1922 /* Perform offline variable substitution, discovering equivalence
1923 classes, and eliminating non-pointer variables. */
1925 static struct scc_info *
1926 perform_var_substitution (constraint_graph_t graph)
1929 unsigned int size = graph->size;
1930 struct scc_info *si = init_scc_info (size);
1932 bitmap_obstack_initialize (&iteration_obstack);
1933 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
1934 equiv_class_label_eq, free);
1935 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
1936 equiv_class_label_eq, free);
/* Class 0 is reserved to mean "non-pointer", so labels start at 1.  */
1937 pointer_equiv_class = 1;
1938 location_equiv_class = 1;
1940 /* Condense the nodes, which means to find SCC's, count incoming
1941 predecessors, and unite nodes in SCC's. */
1942 for (i = 0; i < FIRST_REF_NODE; i++)
1943 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1944 condense_visit (graph, si, si->node_mapping[i]);
1946 sbitmap_zero (si->visited);
1947 /* Actually label the nodes for pointer equivalences */
1948 for (i = 0; i < FIRST_REF_NODE; i++)
1949 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1950 label_visit (graph, si, si->node_mapping[i]);
1952 /* Calculate location equivalence labels. */
1953 for (i = 0; i < FIRST_REF_NODE; i++)
1960 if (!graph->pointed_by[i])
1962 pointed_by = BITMAP_ALLOC (&iteration_obstack);
1964 /* Translate the pointed-by mapping for pointer equivalence
1966 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
1968 bitmap_set_bit (pointed_by,
1969 graph->pointer_label[si->node_mapping[j]]);
1971 /* The original pointed_by is now dead. */
1972 BITMAP_FREE (graph->pointed_by[i]);
1974 /* Look up the location equivalence label if one exists, or make
1976 label = equiv_class_lookup (location_equiv_class_table,
1980 label = location_equiv_class++;
1981 equiv_class_add (location_equiv_class_table,
1986 if (dump_file && (dump_flags & TDF_DETAILS))
1987 fprintf (dump_file, "Found location equivalence for node %s\n",
1988 get_varinfo (i)->name);
1989 BITMAP_FREE (pointed_by);
1991 graph->loc_label[i] = label;
1995 if (dump_file && (dump_flags & TDF_DETAILS))
1996 for (i = 0; i < FIRST_REF_NODE; i++)
1998 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2000 "Equivalence classes for %s node id %d:%s are pointer: %d"
2002 direct_node ? "Direct node" : "Indirect node", i,
2003 get_varinfo (i)->name,
2004 graph->pointer_label[si->node_mapping[i]],
2005 graph->loc_label[si->node_mapping[i]]);
2008 /* Quickly eliminate our non-pointer variables. */
2010 for (i = 0; i < FIRST_REF_NODE; i++)
2012 unsigned int node = si->node_mapping[i];
/* Label 0 marks a non-pointer; all its edges can be dropped.  */
2014 if (graph->pointer_label[node] == 0)
2016 if (dump_file && (dump_flags & TDF_DETAILS))
2018 "%s is a non-pointer variable, eliminating edges.\n",
2019 get_varinfo (node)->name);
2020 stats.nonpointer_vars++;
2021 clear_edges_for_node (graph, node);
2028 /* Free information that was only necessary for variable
2032 free_var_substitution_info (struct scc_info *si)
2035 free (graph->pointer_label);
2036 free (graph->loc_label);
2037 free (graph->pointed_by);
2038 free (graph->points_to);
2039 free (graph->number_incoming);
2040 free (graph->eq_rep);
2041 sbitmap_free (graph->direct_nodes);
2042 sbitmap_free (graph->pt_used);
/* The hash tables free their entries via the 'free' callback passed
   to htab_create.  */
2043 htab_delete (pointer_equiv_class_table);
2044 htab_delete (location_equiv_class_table);
2045 bitmap_obstack_release (&iteration_obstack);
2048 /* Return an existing node that is equivalent to NODE, which has
2049 equivalence class LABEL, if one exists. Return NODE otherwise. */
2052 find_equivalent_node (constraint_graph_t graph,
2053 unsigned int node, unsigned int label)
2055 /* If the address version of this variable is unused, we can
2056 substitute it for anything else with the same label.
2057 Otherwise, we know the pointers are equivalent, but not the
2058 locations, and we can unite them later. */
2060 if (!bitmap_bit_p (graph->address_taken, node))
2062 gcc_assert (label < graph->size);
2064 if (graph->eq_rep[label] != -1)
2066 /* Unify the two variables since we know they are equivalent. */
2067 if (unite (graph->eq_rep[label], node))
2068 unify_nodes (graph, graph->eq_rep[label], node, false);
2069 return graph->eq_rep[label];
/* First node seen with this label becomes the representative.  */
2073 graph->eq_rep[label] = node;
2074 graph->pe_rep[label] = node;
/* Address-taken: record the pointer-equivalence label only; the
   actual uniting happens later in unite_pointer_equivalences.  */
2079 gcc_assert (label < graph->size);
2080 graph->pe[node] = label;
2081 if (graph->pe_rep[label] == -1)
2082 graph->pe_rep[label] = node;
2088 /* Unite pointer equivalent but not location equivalent nodes in
2089 GRAPH. This may only be performed once variable substitution is
2093 unite_pointer_equivalences (constraint_graph_t graph)
2097 /* Go through the pointer equivalences and unite them to their
2098 representative, if they aren't already. */
2099 for (i = 0; i < FIRST_REF_NODE; i++)
2101 unsigned int label = graph->pe[i];
2104 int label_rep = graph->pe_rep[label];
/* -1 means no representative was recorded for this label.  */
2106 if (label_rep == -1)
2109 label_rep = find (label_rep);
2110 if (label_rep >= 0 && unite (label_rep, find (i)))
2111 unify_nodes (graph, label_rep, i, false);
2116 /* Move complex constraints to the GRAPH nodes they belong to. */
2119 move_complex_constraints (constraint_graph_t graph)
2124 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2128 struct constraint_expr lhs = c->lhs;
2129 struct constraint_expr rhs = c->rhs;
/* *x = y constraints live on the LHS variable's node ...  */
2131 if (lhs.type == DEREF)
2133 insert_into_complex (graph, lhs.var, c);
/* ... x = *y constraints on the RHS variable's node.  */
2135 else if (rhs.type == DEREF)
2137 if (!(get_varinfo (lhs.var)->is_special_var))
2138 insert_into_complex (graph, rhs.var, c);
/* Scalar copies with offsets are also complex.  */
2140 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2141 && (lhs.offset != 0 || rhs.offset != 0))
2143 insert_into_complex (graph, rhs.var, c);
2150 /* Optimize and rewrite complex constraints while performing
2151 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2152 result of perform_variable_substitution. */
2155 rewrite_constraints (constraint_graph_t graph,
2156 struct scc_info *si)
/* Sanity check: at this point every node must be its own
   representative.  */
2162 for (j = 0; j < graph->size; j++)
2163 gcc_assert (find (j) == j);
2165 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2167 struct constraint_expr lhs = c->lhs;
2168 struct constraint_expr rhs = c->rhs;
2169 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2170 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2171 unsigned int lhsnode, rhsnode;
2172 unsigned int lhslabel, rhslabel;
2174 lhsnode = si->node_mapping[lhsvar];
2175 rhsnode = si->node_mapping[rhsvar];
2176 lhslabel = graph->pointer_label[lhsnode];
2177 rhslabel = graph->pointer_label[rhsnode];
2179 /* See if it is really a non-pointer variable, and if so, ignore
/* Label 0 on either side means the constraint involves a
   non-pointer and can be dropped entirely (replaced with NULL).  */
2183 if (dump_file && (dump_flags & TDF_DETAILS))
2186 fprintf (dump_file, "%s is a non-pointer variable,"
2187 "ignoring constraint:",
2188 get_varinfo (lhs.var)->name);
2189 dump_constraint (dump_file, c);
2191 VEC_replace (constraint_t, constraints, i, NULL);
2197 if (dump_file && (dump_flags & TDF_DETAILS))
2200 fprintf (dump_file, "%s is a non-pointer variable,"
2201 "ignoring constraint:",
2202 get_varinfo (rhs.var)->name);
2203 dump_constraint (dump_file, c);
2205 VEC_replace (constraint_t, constraints, i, NULL);
/* Rewrite both sides to their equivalence-class representatives.  */
2209 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2210 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2211 c->lhs.var = lhsvar;
2212 c->rhs.var = rhsvar;
2217 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2218 part of an SCC, false otherwise. */
2221 eliminate_indirect_cycles (unsigned int node)
2223 if (graph->indirect_cycles[node] != -1
2224 && !bitmap_empty_p (get_varinfo (node)->solution)_
2227 VEC(unsigned,heap) *queue = NULL;
2229 unsigned int to = find (graph->indirect_cycles[node]);
2232 /* We can't touch the solution set and call unify_nodes
2233 at the same time, because unify_nodes is going to do
2234 bitmap unions into it. */
/* So first collect the representatives to merge into a queue ...  */
2236 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2238 if (find (i) == i && i != to)
2241 VEC_safe_push (unsigned, heap, queue, i);
/* ... then unify them in a second pass.  */
2246 VEC_iterate (unsigned, queue, queuepos, i);
2249 unify_nodes (graph, to, i, true);
2251 VEC_free (unsigned, heap, queue);
2257 /* Solve the constraint graph GRAPH using our worklist solver.
2258 This is based on the PW* family of solvers from the "Efficient Field
2259 Sensitive Pointer Analysis for C" paper.
2260 It works by iterating over all the graph nodes, processing the complex
2261 constraints and propagating the copy constraints, until everything stops
2262 changed. This corresponds to steps 6-8 in the solving list given above. */
2265 solve_graph (constraint_graph_t graph)
2267 unsigned int size = graph->size;
2272 changed = sbitmap_alloc (size);
2273 sbitmap_zero (changed);
2275 /* Mark all initial non-collapsed nodes as changed. */
2276 for (i = 0; i < size; i++)
2278 varinfo_t ivi = get_varinfo (i);
/* Seed the worklist: representatives with a non-empty solution and
   either outgoing edges or complex constraints.  */
2279 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2280 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2281 || VEC_length (constraint_t, graph->complex[i]) > 0))
2283 SET_BIT (changed, i);
2288 /* Allocate a bitmap to be used to store the changed bits. */
2289 pts = BITMAP_ALLOC (&pta_obstack);
/* Fixed-point loop: iterate until no node's solution changes.  */
2291 while (changed_count > 0)
2294 struct topo_info *ti = init_topo_info ();
2297 bitmap_obstack_initialize (&iteration_obstack);
/* Recompute the topological order each iteration, since
   unifications can restructure the graph.  */
2299 compute_topo_order (graph, ti);
2301 while (VEC_length (unsigned, ti->topo_order) != 0)
2304 i = VEC_pop (unsigned, ti->topo_order);
2306 /* If this variable is not a representative, skip it. */
2310 /* In certain indirect cycle cases, we may merge this
2311 variable to another. */
2312 if (eliminate_indirect_cycles (i) && find (i) != i)
2315 /* If the node has changed, we need to process the
2316 complex constraints and outgoing edges again. */
2317 if (TEST_BIT (changed, i))
2322 VEC(constraint_t,heap) *complex = graph->complex[i];
2323 bool solution_empty;
2325 RESET_BIT (changed, i);
2328 /* Compute the changed set of solution bits. */
/* PTS = solution - oldsolution: only the delta needs to be
   processed and propagated (difference propagation).  */
2329 bitmap_and_compl (pts, get_varinfo (i)->solution,
2330 get_varinfo (i)->oldsolution);
2332 if (bitmap_empty_p (pts))
2335 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2337 solution = get_varinfo (i)->solution;
2338 solution_empty = bitmap_empty_p (solution);
2340 /* Process the complex constraints */
2341 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2343 /* XXX: This is going to unsort the constraints in
2344 some cases, which will occasionally add duplicate
2345 constraints during unification. This does not
2346 affect correctness. */
2347 c->lhs.var = find (c->lhs.var);
2348 c->rhs.var = find (c->rhs.var);
2350 /* The only complex constraint that can change our
2351 solution to non-empty, given an empty solution,
2352 is a constraint where the lhs side is receiving
2353 some set from elsewhere. */
2354 if (!solution_empty || c->lhs.type != DEREF)
2355 do_complex_constraint (graph, c, pts);
2358 solution_empty = bitmap_empty_p (solution);
2360 if (!solution_empty)
2364 /* Propagate solution to all successors. */
2365 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2371 unsigned int to = find (j);
2372 tmp = get_varinfo (to)->solution;
2375 /* Don't try to propagate to ourselves. */
/* Union only the delta (PTS) into the successor; re-queue the
   successor if its solution grew.  */
2379 flag = set_union_with_increment (tmp, pts, 0);
2383 get_varinfo (to)->solution = tmp;
2384 if (!TEST_BIT (changed, to))
2386 SET_BIT (changed, to);
2394 free_topo_info (ti);
2395 bitmap_obstack_release (&iteration_obstack);
2399 sbitmap_free (changed);
2400 bitmap_obstack_release (&oldpta_obstack);
2403 /* Map from trees to variable infos. */
2404 static struct pointer_map_t *vi_for_tree;
2407 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2410 insert_vi_for_tree (tree t, varinfo_t vi)
2412 void **slot = pointer_map_insert (vi_for_tree, t);
/* Each tree may be inserted at most once.  */
2414 gcc_assert (*slot == NULL);
2418 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2419 exist in the map, return NULL, otherwise, return the varinfo we found. */
2422 lookup_vi_for_tree (tree t)
2424 void **slot = pointer_map_contains (vi_for_tree, t);
2428 return (varinfo_t) *slot;
2431 /* Return a printable name for DECL */
2434 alias_get_name (tree decl)
2436 const char *res = get_name (decl);
2438 int num_printed = 0;
/* SSA names print as "base_version"; anonymous decls as "D.uid".  */
2447 if (TREE_CODE (decl) == SSA_NAME)
2449 num_printed = asprintf (&temp, "%s_%u",
2450 alias_get_name (SSA_NAME_VAR (decl)),
2451 SSA_NAME_VERSION (decl));
2453 else if (DECL_P (decl))
2455 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
/* asprintf returns < 0 on failure; only use TEMP on success.  The
   result is copied into GC memory so TEMP can be released.  */
2457 if (num_printed > 0)
2459 res = ggc_strdup (temp);
2465 /* Find the variable id for tree T in the map.
2466 If T doesn't exist in the map, create an entry for it and return it. */
2469 get_vi_for_tree (tree t)
2471 void **slot = pointer_map_contains (vi_for_tree, t);
/* Miss: create a fresh variable info (which also registers it).  */
2473 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2475 return (varinfo_t) *slot;
2478 /* Get a constraint expression from an SSA_VAR_P node. */
2480 static struct constraint_expr
2481 get_constraint_exp_from_ssa_var (tree t)
2483 struct constraint_expr cexpr;
2485 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2487 /* For parameters, get at the points-to set for the actual parm
/* Default-definition SSA names of PARM_DECLs are treated as the
   underlying parameter itself (recurse on the PARM_DECL).  */
2489 if (TREE_CODE (t) == SSA_NAME
2490 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2491 && SSA_NAME_IS_DEFAULT_DEF (t))
2492 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2494 cexpr.type = SCALAR;
2496 cexpr.var = get_vi_for_tree (t)->id;
2497 /* If we determine the result is "anything", and we know this is readonly,
2498 say it points to readonly memory instead. */
2499 if (cexpr.var == anything_id && TREE_READONLY (t))
2501 cexpr.type = ADDRESSOF;
2502 cexpr.var = readonly_id;
2509 /* Process a completed constraint T, and add it to the constraint
2510 list. FROM_CALL is true if this is a constraint coming from a
2511 call, which means any DEREFs we see are "may-deref's", not
2515 process_constraint_1 (constraint_t t, bool from_call)
2517 struct constraint_expr rhs = t->rhs;
2518 struct constraint_expr lhs = t->lhs;
2520 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2521 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
/* Record direct dereferences (only when not from a call).  */
2525 if (lhs.type == DEREF)
2526 get_varinfo (lhs.var)->directly_dereferenced = true;
2527 if (rhs.type == DEREF)
2528 get_varinfo (rhs.var)->directly_dereferenced = true;
2531 if (!use_field_sensitive)
2537 /* ANYTHING == ANYTHING is pointless. */
2538 if (lhs.var == anything_id && rhs.var == anything_id)
2541 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
2542 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
/* Re-process after swapping sides.  */
2547 process_constraint_1 (t, from_call);
2549 /* This can happen in our IR with things like n->a = *p */
2550 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2552 /* Split into tmp = *rhs, *lhs = tmp */
2553 tree rhsdecl = get_varinfo (rhs.var)->decl;
2554 tree pointertype = TREE_TYPE (rhsdecl);
2555 tree pointedtotype = TREE_TYPE (pointertype);
2556 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2557 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2559 /* If this is an aggregate of known size, we should have passed
2560 this off to do_structure_copy, and it should have broken it
2562 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2563 || get_varinfo (rhs.var)->is_unknown_size_var);
2565 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2566 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2568 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2570 /* Split into tmp = &rhs, *lhs = tmp */
2571 tree rhsdecl = get_varinfo (rhs.var)->decl;
2572 tree pointertype = TREE_TYPE (rhsdecl);
2573 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2574 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2576 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2577 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
/* Base case: a well-formed simple/complex constraint; record it.  */
2581 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2582 VEC_safe_push (constraint_t, heap, constraints, t);
2587 /* Process constraint T, performing various simplifications and then
2588 adding it to our list of overall constraints. */
2591 process_constraint (constraint_t t)
/* Non-call wrapper around process_constraint_1.  */
2593 process_constraint_1 (t, false);
2596 /* Return true if T is a variable of a type that could contain
2600 could_have_pointers (tree t)
2602 tree type = TREE_TYPE (t);
/* Pointers, aggregates (which may embed pointers), and complex
   types are all pointer-bearing.  */
2604 if (POINTER_TYPE_P (type)
2605 || AGGREGATE_TYPE_P (type)
2606 || TREE_CODE (type) == COMPLEX_TYPE)
2612 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2615 static unsigned HOST_WIDE_INT
2616 bitpos_of_field (const tree fdecl)
/* Bail out when either component of the field position is not a
   compile-time constant (e.g. variably-placed fields).  The value
   returned for that case is not visible in this view.  */
2619 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2620 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
/* Byte offset scaled to bits plus the residual bit offset.
   NOTE(review): uses a literal 8 rather than BITS_PER_UNIT — confirm
   that is intentional for the targets this builds for.  */
2623 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2624 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2628 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
/* Computes the constraint for a component reference by resolving the
   base object and bit range with get_ref_base_and_extent, then mapping
   the resulting bit offset onto the first overlapping field variable.
   Results are appended to *RESULTS.  */
2631 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2634 HOST_WIDE_INT bitsize = -1;
2635 HOST_WIDE_INT bitmaxsize = -1;
2636 HOST_WIDE_INT bitpos;
2638 struct constraint_expr *result;
2639 unsigned int beforelength = VEC_length (ce_s, *results);
2641 /* Some people like to do cute things like take the address of
/* Strip down to the innermost base to detect references rooted at a
   literal zero (e.g. offsetof-style tricks on a null "object").  */
2644 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2645 forzero = TREE_OPERAND (forzero, 0);
2647 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2649 struct constraint_expr temp;
/* A zero base is treated like an integer constant pointer.  */
2652 temp.var = integer_id;
2654 VEC_safe_push (ce_s, heap, *results, &temp);
2658 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2660 /* String constants are readonly, so there is nothing to really do
2662 if (TREE_CODE (t) == STRING_CST)
/* Get the constraint for the base object, then refine its offset.  */
2665 get_constraint_for (t, results);
2666 result = VEC_last (ce_s, *results);
2667 result->offset = bitpos;
/* The base must have produced exactly one constraint.  */
2669 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2671 /* This can also happen due to weird offsetof type macros. */
2672 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2673 result->type = SCALAR;
2675 if (result->type == SCALAR)
2677 /* In languages like C, you can access one past the end of an
2678 array. You aren't allowed to dereference it, so we can
2679 ignore this constraint. When we handle pointer subtraction,
2680 we may have to do something cute here. */
2682 if (result->offset < get_varinfo (result->var)->fullsize
2685 /* It's also not true that the constraint will actually start at the
2686 right offset, it may start in some padding. We only care about
2687 setting the constraint to the first actual field it touches, so
/* Walk the field chain of the base variable and pick the first field
   whose bit range overlaps the access.  */
2690 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2692 if (ranges_overlap_p (curr->offset, curr->size,
2693 result->offset, bitmaxsize))
2695 result->var = curr->id;
2699 /* assert that we found *some* field there. The user couldn't be
2700 accessing *only* padding. */
2701 /* Still the user could access one past the end of an array
2702 embedded in a struct resulting in accessing *only* padding. */
2703 gcc_assert (curr || ref_contains_array_ref (orig_t));
2705 else if (bitmaxsize == 0)
2707 if (dump_file && (dump_flags & TDF_DETAILS))
2708 fprintf (dump_file, "Access to zero-sized part of variable,"
2712 if (dump_file && (dump_flags & TDF_DETAILS))
2713 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2717 else if (bitmaxsize == -1)
2719 /* We can't handle DEREF constraints with unknown size, we'll
2720 get the wrong answer. Punt and return anything. */
2721 result->var = anything_id;
2727 /* Dereference the constraint expression CONS, and return the result.
2728 DEREF (ADDRESSOF) = SCALAR
2729 DEREF (SCALAR) = DEREF
2730 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2731 This is needed so that we can handle dereferencing DEREF constraints. */
2734 do_deref (VEC (ce_s, heap) **constraints)
2736 struct constraint_expr *c;
/* Apply the table above in place to every constraint in the vector.  */
2739 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2741 if (c->type == SCALAR)
2743 else if (c->type == ADDRESSOF)
2745 else if (c->type == DEREF)
/* Double dereference: introduce a temporary, constrain it to the
   single dereference, and rewrite C to dereference the temporary.  */
2747 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2748 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2749 process_constraint (new_constraint (tmplhs, *c));
2750 c->var = tmplhs.var;
2757 /* Given a tree T, return the constraint expression for it. */
/* Main entry for turning an expression into constraint expressions,
   appended to *RESULTS.  Dispatches on TREE_CODE_CLASS; unhandled
   cases conservatively fall back to ADDRESSOF of ANYTHING.  */
2760 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2762 struct constraint_expr temp;
2764 /* x = integer is all glommed to a single variable, which doesn't
2765 point to anything by itself. That is, of course, unless it is an
2766 integer constant being treated as a pointer, in which case, we
2767 will return that this is really the addressof anything. This
2768 happens below, since it will fall into the default case. The only
2769 case we know something about an integer treated like a pointer is
2770 when it is the NULL pointer, and then we just say it points to
2772 if (TREE_CODE (t) == INTEGER_CST
2773 && integer_zerop (t))
/* NULL pointer: points to NOTHING.  */
2775 temp.var = nothing_id;
2776 temp.type = ADDRESSOF;
2778 VEC_safe_push (ce_s, heap, *results, &temp);
2782 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2784 case tcc_expression:
2787 switch (TREE_CODE (t))
/* Address-taking expression (presumably ADDR_EXPR; the case label is
   not visible in this view): compute the operand's constraint, then
   turn DEREFs into SCALARs and SCALARs into ADDRESSOFs.  */
2791 struct constraint_expr *c;
2793 tree exp = TREE_OPERAND (t, 0);
2794 tree pttype = TREE_TYPE (TREE_TYPE (t));
2796 get_constraint_for (exp, results);
2799 /* Complex types are special. Taking the address of one
2800 allows you to access either part of it through that
2802 if (VEC_length (ce_s, *results) == 1 &&
2803 TREE_CODE (pttype) == COMPLEX_TYPE)
2805 struct constraint_expr *origrhs;
2807 struct constraint_expr tmp;
2809 gcc_assert (VEC_length (ce_s, *results) == 1);
2810 origrhs = VEC_last (ce_s, *results);
/* Replace the single result with one constraint per sub-variable
   (both parts of the complex value).  */
2812 VEC_pop (ce_s, *results);
2813 origvar = get_varinfo (origrhs->var);
2814 for (; origvar; origvar = origvar->next)
2816 tmp.var = origvar->id;
2817 VEC_safe_push (ce_s, heap, *results, &tmp);
2821 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2823 if (c->type == DEREF)
2826 c->type = ADDRESSOF;
2832 /* XXX: In interprocedural mode, if we didn't have the
2833 body, we would need to do *each pointer argument =
/* Calls to allocation-like functions get a synthetic HEAP variable
   so the result is modeled as pointing to fresh storage.  */
2835 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2838 tree heapvar = heapvar_lookup (t);
2840 if (heapvar == NULL)
2842 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2843 DECL_EXTERNAL (heapvar) = 1;
2844 get_var_ann (heapvar)->is_heapvar = 1;
2845 if (gimple_referenced_vars (cfun))
2846 add_referenced_var (heapvar);
/* Cache the heap var per call so repeated walks reuse it.  */
2847 heapvar_insert (t, heapvar);
2850 temp.var = create_variable_info_for (heapvar,
2851 alias_get_name (heapvar));
2853 vi = get_varinfo (temp.var);
2854 vi->is_artificial_var = 1;
2855 vi->is_heap_var = 1;
2856 temp.type = ADDRESSOF;
2858 VEC_safe_push (ce_s, heap, *results, &temp);
/* Non-allocation calls: conservatively anything.  */
2863 temp.var = anything_id;
2866 VEC_safe_push (ce_s, heap, *results, &temp);
/* Default for unhandled tcc_expression codes: anything.  */
2872 temp.type = ADDRESSOF;
2873 temp.var = anything_id;
2875 VEC_safe_push (ce_s, heap, *results, &temp);
2882 switch (TREE_CODE (t))
/* Reference class (the case label is elided): dereference or
   component lookup depending on the code.  */
2886 get_constraint_for (TREE_OPERAND (t, 0), results);
2891 case ARRAY_RANGE_REF:
2893 get_constraint_for_component_ref (t, results);
2897 temp.type = ADDRESSOF;
2898 temp.var = anything_id;
2900 VEC_safe_push (ce_s, heap, *results, &temp);
2907 switch (TREE_CODE (t))
2911 case NON_LVALUE_EXPR:
2913 tree op = TREE_OPERAND (t, 0);
2915 /* Cast from non-pointer to pointers are bad news for us.
2916 Anything else, we see through */
2917 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2918 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2920 get_constraint_for (op, results);
2928 temp.type = ADDRESSOF;
2929 temp.var = anything_id;
2931 VEC_safe_push (ce_s, heap, *results, &temp);
2936 case tcc_exceptional:
2938 switch (TREE_CODE (t))
/* PHI node: its constraint is that of its result.  */
2942 get_constraint_for (PHI_RESULT (t), results);
/* SSA name (label elided): direct variable constraint.  */
2948 struct constraint_expr temp;
2949 temp = get_constraint_exp_from_ssa_var (t);
2950 VEC_safe_push (ce_s, heap, *results, &temp);
2956 temp.type = ADDRESSOF;
2957 temp.var = anything_id;
2959 VEC_safe_push (ce_s, heap, *results, &temp);
2964 case tcc_declaration:
2966 struct constraint_expr temp;
2967 temp = get_constraint_exp_from_ssa_var (t);
2968 VEC_safe_push (ce_s, heap, *results, &temp);
/* Final fallback for any unhandled class: anything.  */
2973 temp.type = ADDRESSOF;
2974 temp.var = anything_id;
2976 VEC_safe_push (ce_s, heap, *results, &temp);
2983 /* Handle the structure copy case where we have a simple structure copy
2984 between LHS and RHS that is of SIZE (in bits)
2986 For each field of the lhs variable (lhsfield)
2987 For each field of the rhs variable at lhsfield.offset (rhsfield)
2988 add the constraint lhsfield = rhsfield
2990 If we fail due to some kind of type unsafety or other thing we
2991 can't handle, return false. We expect the caller to collapse the
2992 variable in that case. */
2995 do_simple_structure_copy (const struct constraint_expr lhs,
2996 const struct constraint_expr rhs,
2997 const unsigned HOST_WIDE_INT size)
2999 varinfo_t p = get_varinfo (lhs.var);
3000 unsigned HOST_WIDE_INT pstart, last;
/* Only copy fields falling within the first SIZE bits of the LHS.
   NOTE(review): pstart's initialization is not visible in this view;
   presumably it is set to p->offset before the loop.  */
3002 last = p->offset + size;
3003 for (; p && p->offset < last; p = p->next)
3006 struct constraint_expr templhs = lhs;
3007 struct constraint_expr temprhs = rhs;
3008 unsigned HOST_WIDE_INT fieldoffset;
/* Match each LHS field with the RHS field at the same relative
   offset and emit a field-to-field copy constraint.  */
3010 templhs.var = p->id;
3011 q = get_varinfo (temprhs.var);
3012 fieldoffset = p->offset - pstart;
3013 q = first_vi_for_offset (q, q->offset + fieldoffset);
3016 temprhs.var = q->id;
3017 process_constraint (new_constraint (templhs, temprhs));
3023 /* Handle the structure copy case where we have a structure copy between a
3024 aggregate on the LHS and a dereference of a pointer on the RHS
3025 that is of SIZE (in bits)
3027 For each field of the lhs variable (lhsfield)
3028 rhs.offset = lhsfield->offset
3029 add the constraint lhsfield = rhs
3033 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3034 const struct constraint_expr rhs,
3035 const unsigned HOST_WIDE_INT size)
3037 varinfo_t p = get_varinfo (lhs.var);
3038 unsigned HOST_WIDE_INT pstart,last;
/* NOTE(review): pstart's initialization is not visible here;
   presumably set to p->offset, mirroring do_simple_structure_copy.  */
3040 last = p->offset + size;
3042 for (; p && p->offset < last; p = p->next)
3045 struct constraint_expr templhs = lhs;
3046 struct constraint_expr temprhs = rhs;
3047 unsigned HOST_WIDE_INT fieldoffset;
/* A SCALAR lhs is narrowed to the field variable; otherwise the
   field offset is folded into the constraint offset.  */
3050 if (templhs.type == SCALAR)
3051 templhs.var = p->id;
3053 templhs.offset = p->offset;
/* Offset the dereferenced RHS by the field's relative position.  */
3055 q = get_varinfo (temprhs.var);
3056 fieldoffset = p->offset - pstart;
3057 temprhs.offset += fieldoffset;
3058 process_constraint (new_constraint (templhs, temprhs));
3062 /* Handle the structure copy case where we have a structure copy
3063 between an aggregate on the RHS and a dereference of a pointer on
3064 the LHS that is of SIZE (in bits)
3066 For each field of the rhs variable (rhsfield)
3067 lhs.offset = rhsfield->offset
3068 add the constraint lhs = rhsfield
3072 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3073 const struct constraint_expr rhs,
3074 const unsigned HOST_WIDE_INT size)
3076 varinfo_t p = get_varinfo (rhs.var);
3077 unsigned HOST_WIDE_INT pstart,last;
/* Mirror image of do_rhs_deref_structure_copy: iterate the RHS
   fields and offset the dereferenced LHS correspondingly.
   NOTE(review): pstart's initialization is not visible here.  */
3079 last = p->offset + size;
3081 for (; p && p->offset < last; p = p->next)
3084 struct constraint_expr templhs = lhs;
3085 struct constraint_expr temprhs = rhs;
3086 unsigned HOST_WIDE_INT fieldoffset;
3089 if (temprhs.type == SCALAR)
3090 temprhs.var = p->id;
3092 temprhs.offset = p->offset;
3094 q = get_varinfo (templhs.var);
3095 fieldoffset = p->offset - pstart;
3096 templhs.offset += fieldoffset;
3097 process_constraint (new_constraint (templhs, temprhs));
3101 /* Sometimes, frontends like to give us bad type information. This
3102 function will collapse all the fields from VAR to the end of VAR,
3103 into VAR, so that we treat those fields as a single variable.
3104 We return the variable they were collapsed into. */
3107 collapse_rest_of_var (unsigned int var)
3109 varinfo_t currvar = get_varinfo (var);
/* Redirect every trailing field to point at VAR itself.  */
3112 for (field = currvar->next; field; field = field->next)
3115 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3116 field->name, currvar->name);
/* A field must not be collapsed twice.  */
3118 gcc_assert (!field->collapsed_to);
3119 field->collapsed_to = currvar;
/* Detach the field chain and grow VAR to cover the remainder.  */
3122 currvar->next = NULL;
3123 currvar->size = currvar->fullsize - currvar->offset;
3128 /* Handle aggregate copies by expanding into copies of the respective
3129 fields of the structures. */
/* Top-level dispatcher for LHSOP = RHSOP aggregate assignments.
   Chooses among the simple, rhs-deref, lhs-deref, and deref-deref
   copy strategies based on the constraint types of the two sides.  */
3132 do_structure_copy (tree lhsop, tree rhsop)
3134 struct constraint_expr lhs, rhs, tmp;
3135 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3137 unsigned HOST_WIDE_INT lhssize;
3138 unsigned HOST_WIDE_INT rhssize;
3140 get_constraint_for (lhsop, &lhsc);
3141 get_constraint_for (rhsop, &rhsc);
/* Aggregate copies are expected to yield exactly one constraint
   per side.  */
3142 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3143 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3144 lhs = *(VEC_last (ce_s, lhsc));
3145 rhs = *(VEC_last (ce_s, rhsc));
3147 VEC_free (ce_s, heap, lhsc);
3148 VEC_free (ce_s, heap, rhsc);
3150 /* If we have special var = x, swap it around. */
3151 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3158 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3159 possible it's something we could handle. However, most cases falling
3160 into this are dealing with transparent unions, which are slightly
3162 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3164 rhs.type = ADDRESSOF;
3165 rhs.var = anything_id;
3168 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3169 that special var. */
3170 if (rhs.var <= integer_id)
3172 for (p = get_varinfo (lhs.var); p; p = p->next)
3174 struct constraint_expr templhs = lhs;
3175 struct constraint_expr temprhs = rhs;
3177 if (templhs.type == SCALAR )
3178 templhs.var = p->id;
3180 templhs.offset += p->offset;
3181 process_constraint (new_constraint (templhs, temprhs));
3186 tree rhstype = TREE_TYPE (rhsop);
3187 tree lhstype = TREE_TYPE (lhsop);
/* Use DECL_SIZE for decls (honors declared size) and TYPE_SIZE
   otherwise.  */
3191 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3192 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3194 /* If we have a variably sized types on the rhs or lhs, and a deref
3195 constraint, add the constraint, lhsconstraint = &ANYTHING.
3196 This is conservatively correct because either the lhs is an unknown
3197 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3198 constraint, and every variable it can point to must be unknown sized
3199 anyway, so we don't need to worry about fields at all. */
3200 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3201 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3203 rhs.var = anything_id;
3204 rhs.type = ADDRESSOF;
3206 process_constraint (new_constraint (lhs, rhs));
3210 /* The size only really matters insofar as we don't set more or less of
3211 the variable. If we hit an unknown size var, the size should be the
3212 whole darn thing. */
3213 if (get_varinfo (rhs.var)->is_unknown_size_var)
3216 rhssize = TREE_INT_CST_LOW (rhstypesize);
3218 if (get_varinfo (lhs.var)->is_unknown_size_var)
3221 lhssize = TREE_INT_CST_LOW (lhstypesize);
3224 if (rhs.type == SCALAR && lhs.type == SCALAR)
3226 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
/* Type-unsafe copy: collapse both sides and fall back to a single
   variable-to-variable constraint.  */
3228 lhs.var = collapse_rest_of_var (lhs.var);
3229 rhs.var = collapse_rest_of_var (rhs.var);
3234 process_constraint (new_constraint (lhs, rhs));
3237 else if (lhs.type != DEREF && rhs.type == DEREF)
3238 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3239 else if (lhs.type == DEREF && rhs.type != DEREF)
3240 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3243 tree pointedtotype = lhstype;
/* *lhs = *rhs: split through a temporary so each half is one of the
   simpler cases handled above (recursive calls).  */
3246 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3247 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3248 do_structure_copy (tmpvar, rhsop);
3249 do_structure_copy (lhsop, tmpvar);
3255 /* Update related alias information kept in AI. This is used when
3256 building name tags, alias sets and deciding grouping heuristics.
3257 STMT is the statement to process. This function also updates
3258 ADDRESSABLE_VARS. */
3261 update_alias_info (tree stmt, struct alias_info *ai)
3264 use_operand_p use_p;
3266 bool stmt_dereferences_ptr_p;
3267 enum escape_type stmt_escape_type = is_escape_site (stmt);
3268 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
3270 stmt_dereferences_ptr_p = false;
/* Count call/asm sites for the memory-reference statistics.  */
3272 if (stmt_escape_type == ESCAPE_TO_CALL
3273 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
3275 mem_ref_stats->num_call_sites++;
3276 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
3277 mem_ref_stats->num_pure_const_call_sites++;
3279 else if (stmt_escape_type == ESCAPE_TO_ASM)
3280 mem_ref_stats->num_asm_sites++;
3282 /* Mark all the variables whose address are taken by the statement. */
3283 addr_taken = addresses_taken (stmt);
3286 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
3288 /* If STMT is an escape point, all the addresses taken by it are
3290 if (stmt_escape_type != NO_ESCAPE)
/* Mark each address-taken variable call-clobbered, unless it is
   unmodifiable (e.g. read-only).  */
3295 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
3297 tree rvar = referenced_var (i);
3298 if (!unmodifiable_var_p (rvar))
3299 mark_call_clobbered (rvar, stmt_escape_type);
3304 /* Process each operand use. For pointers, determine whether they
3305 are dereferenced by the statement, or whether their value
3307 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
3311 struct ptr_info_def *pi;
3312 unsigned num_uses, num_loads, num_stores;
3314 op = USE_FROM_PTR (use_p);
3316 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3317 to the set of addressable variables. */
3318 if (TREE_CODE (op) == ADDR_EXPR)
3320 bitmap addressable_vars = gimple_addressable_vars (cfun);
3322 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
3323 gcc_assert (addressable_vars);
3325 /* PHI nodes don't have annotations for pinning the set
3326 of addresses taken, so we collect them here.
3328 FIXME, should we allow PHI nodes to have annotations
3329 so that they can be treated like regular statements?
3330 Currently, they are treated as second-class
3332 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
3336 /* Ignore constants (they may occur in PHI node arguments). */
3337 if (TREE_CODE (op) != SSA_NAME)
3340 var = SSA_NAME_VAR (op);
3341 v_ann = var_ann (var);
3343 /* The base variable of an SSA name must be a GIMPLE register, and thus
3344 it cannot be aliased. */
3345 gcc_assert (!may_be_aliased (var));
3347 /* We are only interested in pointers. */
3348 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3351 pi = get_ptr_info (op);
3353 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3354 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3356 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3357 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
3360 /* If STMT is a PHI node, then it will not have pointer
3361 dereferences and it will not be an escape point. */
3362 if (TREE_CODE (stmt) == PHI_NODE)
3365 /* Determine whether OP is a dereferenced pointer, and if STMT
3366 is an escape point, whether OP escapes. */
3367 count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
3369 /* Handle a corner case involving address expressions of the
3370 form '&PTR->FLD'. The problem with these expressions is that
3371 they do not represent a dereference of PTR. However, if some
3372 other transformation propagates them into an INDIRECT_REF
3373 expression, we end up with '*(&PTR->FLD)' which is folded
3376 So, if the original code had no other dereferences of PTR,
3377 the aliaser will not create memory tags for it, and when
3378 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3379 memory operations will receive no VDEF/VUSE operands.
3381 One solution would be to have count_uses_and_derefs consider
3382 &PTR->FLD a dereference of PTR. But that is wrong, since it
3383 is not really a dereference but an offset calculation.
3385 What we do here is to recognize these special ADDR_EXPR
3386 nodes. Since these expressions are never GIMPLE values (they
3387 are not GIMPLE invariants), they can only appear on the RHS
3388 of an assignment and their base address is always an
3389 INDIRECT_REF expression. */
3390 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3391 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
3392 && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
3394 /* If the RHS if of the form &PTR->FLD and PTR == OP, then
3395 this represents a potential dereference of PTR. */
3396 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3397 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3398 if (TREE_CODE (base) == INDIRECT_REF
3399 && TREE_OPERAND (base, 0) == op)
3403 if (num_loads + num_stores > 0)
3405 /* Mark OP as dereferenced. In a subsequent pass,
3406 dereferenced pointers that point to a set of
3407 variables will be assigned a name tag to alias
3408 all the variables OP points to. */
3409 pi->is_dereferenced = 1;
3411 /* If this is a store operation, mark OP as being
3412 dereferenced to store, otherwise mark it as being
3413 dereferenced to load. */
3415 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3417 pointer_set_insert (ai->dereferenced_ptrs_load, var);
3419 /* Update the frequency estimate for all the dereferences of
3421 update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);
3423 /* Indicate that STMT contains pointer dereferences. */
3424 stmt_dereferences_ptr_p = true;
3427 if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
3429 /* If STMT is an escape point and STMT contains at
3430 least one direct use of OP, then the value of OP
3431 escapes and so the pointed-to variables need to
3432 be marked call-clobbered. */
3433 pi->value_escapes_p = 1;
3434 pi->escape_mask |= stmt_escape_type;
3436 /* If the statement makes a function call, assume
3437 that pointer OP will be dereferenced in a store
3438 operation inside the called function. */
3439 if (get_call_expr_in (stmt)
3440 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3442 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3443 pi->is_dereferenced = 1;
/* PHI nodes reference no memory; nothing further to record.  */
3448 if (TREE_CODE (stmt) == PHI_NODE)
3451 /* Mark stored variables in STMT as being written to and update the
3452 memory reference stats for all memory symbols referenced by STMT. */
3453 if (stmt_references_memory_p (stmt))
3458 mem_ref_stats->num_mem_stmts++;
3460 /* Notice that we only update memory reference stats for symbols
3461 loaded and stored by the statement if the statement does not
3462 contain pointer dereferences and it is not a call/asm site.
3463 This is to avoid double accounting problems when creating
3464 memory partitions. After computing points-to information,
3465 pointer dereference statistics are used to update the
3466 reference stats of the pointed-to variables, so here we
3467 should only update direct references to symbols.
3469 Indirect references are not updated here for two reasons: (1)
3470 The first time we compute alias information, the sets
3471 LOADED/STORED are empty for pointer dereferences, (2) After
3472 partitioning, LOADED/STORED may have references to
3473 partitions, not the original pointed-to variables. So, if we
3474 always counted LOADED/STORED here and during partitioning, we
3475 would count many symbols more than once.
3477 This does cause some imprecision when a statement has a
3478 combination of direct symbol references and pointer
3479 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
3480 memory symbols in its argument list, but these cases do not
3481 occur so frequently as to constitute a serious problem. */
3482 if (STORED_SYMS (stmt))
3483 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3485 tree sym = referenced_var (i);
3486 pointer_set_insert (ai->written_vars, sym);
3487 if (!stmt_dereferences_ptr_p
3488 && stmt_escape_type != ESCAPE_TO_CALL
3489 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3490 && stmt_escape_type != ESCAPE_TO_ASM)
3491 update_mem_sym_stats_from_stmt (sym, stmt, 0, 1);
3494 if (!stmt_dereferences_ptr_p
3495 && LOADED_SYMS (stmt)
3496 && stmt_escape_type != ESCAPE_TO_CALL
3497 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3498 && stmt_escape_type != ESCAPE_TO_ASM)
3499 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
3500 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
3505 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3506 Expressions of the type PTR + CST can be handled in two ways:
3508 1- If the constraint for PTR is ADDRESSOF for a non-structure
3509 variable, then we can use it directly because adding or
3510 subtracting a constant may not alter the original ADDRESSOF
3511 constraint (i.e., pointer arithmetic may not legally go outside
3512 an object's boundaries).
3514 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3515 then if CST is a compile-time constant that can be used as an
3516 offset, we can determine which sub-variable will be pointed-to
3519 Return true if the expression is handled. For any other kind of
3520 expression, return false so that each operand can be added as a
3521 separate constraint by the caller. */
3524 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3527 struct constraint_expr *c, *c2;
3530 VEC (ce_s, heap) *temp = NULL;
3531 unsigned int rhsoffset = 0;
3532 bool unknown_addend = false;
/* Only POINTER_PLUS_EXPR is handled; anything else is rejected.  */
3534 if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
3537 op0 = TREE_OPERAND (expr, 0);
3538 op1 = TREE_OPERAND (expr, 1);
3539 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
3541 get_constraint_for (op0, &temp);
3543 /* Handle non-constants by making constraints from integer. */
3544 if (TREE_CODE (op1) == INTEGER_CST)
/* Byte offset scaled to bits to match field offsets.  */
3545 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3547 unknown_addend = true;
/* Cross product of LHS constraints with the pointer's constraints.  */
3549 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3550 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3552 if (c2->type == ADDRESSOF && rhsoffset != 0)
3554 varinfo_t temp = get_varinfo (c2->var);
3556 /* An access one after the end of an array is valid,
3557 so simply punt on accesses we cannot resolve. */
3558 temp = first_vi_for_offset (temp, rhsoffset);
3564 else if (unknown_addend)
3566 /* Can't handle *a + integer where integer is unknown. */
3567 if (c2->type != SCALAR)
3569 struct constraint_expr intc;
3570 intc.var = integer_id;
3573 process_constraint (new_constraint (*c, intc));
3577 /* We known it lives somewhere within c2->var. */
3578 varinfo_t tmp = get_varinfo (c2->var);
/* Unknown constant offset within a known variable: constrain
   against every field of that variable.  */
3579 for (; tmp; tmp = tmp->next)
3581 struct constraint_expr tmpc = *c2;
3584 process_constraint (new_constraint (*c, tmpc));
/* Known offset: simply record it on the constraint.  */
3589 c2->offset = rhsoffset;
3590 process_constraint (new_constraint (*c, *c2));
3593 VEC_free (ce_s, heap, temp);
3598 /* For non-IPA mode, generate constraints necessary for a call on the
/* Every pointer argument passed to the call is made to point to
   ANYTHING, since without the callee body we know nothing more.  */
3602 handle_rhs_call (tree rhs)
3605 call_expr_arg_iterator iter;
3606 struct constraint_expr rhsc;
3608 rhsc.var = anything_id;
3610 rhsc.type = ADDRESSOF;
3612 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
3614 VEC(ce_s, heap) *lhsc = NULL;
3616 /* Find those pointers being passed, and make sure they end up
3617 pointing to anything. */
3618 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3621 struct constraint_expr *lhsp;
3623 get_constraint_for (arg, &lhsc);
/* from_call == true: these constraints originate at a call site.  */
3625 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3626 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3627 VEC_free (ce_s, heap, lhsc);
3632 /* For non-IPA mode, generate constraints necessary for a call
3633 that returns a pointer and assigns it to LHS. This simply makes
3634 the LHS point to anything. */
3637 handle_lhs_call (tree lhs)
3639 VEC(ce_s, heap) *lhsc = NULL;
3640 struct constraint_expr rhsc;
3642 struct constraint_expr *lhsp;
/* RHS is &ANYTHING — the call result is completely unknown.  */
3644 rhsc.var = anything_id;
3646 rhsc.type = ADDRESSOF;
3647 get_constraint_for (lhs, &lhsc)<;
3648 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3649 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3650 VEC_free (ce_s, heap, lhsc);
3653 /* Walk statement T setting up aliasing constraints according to the
3654 references found in T. This function is the main part of the
3655 constraint builder. AI points to auxiliary alias information used
3656 when building alias sets and computing alias grouping heuristics. */
3659 find_func_aliases (tree origt)
3662 VEC(ce_s, heap) *lhsc = NULL;
3663 VEC(ce_s, heap) *rhsc = NULL;
3664 struct constraint_expr *c;
/* Look through RETURN_EXPR to the value being returned.  */
3666 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3667 t = TREE_OPERAND (t, 0);
3669 /* Now build constraints expressions. */
3670 if (TREE_CODE (t) == PHI_NODE)
3672 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3674 /* Only care about pointers and structures containing
3676 if (could_have_pointers (PHI_RESULT (t)))
3681 /* For a phi node, assign all the arguments to
3683 get_constraint_for (PHI_RESULT (t), &lhsc);
3684 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3687 tree strippedrhs = PHI_ARG_DEF (t, i);
3689 STRIP_NOPS (strippedrhs);
3690 rhstype = TREE_TYPE (strippedrhs);
3691 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
/* Cross product: every lhs constraint gets every rhs constraint;
   the rhs vector is drained as it is consumed.  */
3693 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3695 struct constraint_expr *c2;
3696 while (VEC_length (ce_s, rhsc) > 0)
3698 c2 = VEC_last (ce_s, rhsc);
3699 process_constraint (new_constraint (*c, *c2));
3700 VEC_pop (ce_s, rhsc);
3706 /* In IPA mode, we need to generate constraints to pass call
3707 arguments through their calls. There are two cases, either a
3708 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3709 CALL_EXPR when we are not.
3711 In non-ipa mode, we need to generate constraints for each
3712 pointer passed by address. */
3713 else if (((TREE_CODE (t) == GIMPLE_MODIFY_STMT
3714 && TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
3715 && !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
3716 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3717 || (TREE_CODE (t) == CALL_EXPR
3718 && !(call_expr_flags (t)
3719 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
/* Non-IPA path: punt pointer args and, for pointer-returning
   calls, the result, to ANYTHING.  */
3723 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3725 handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
3726 if (POINTER_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (t, 1))))
3727 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0));
3730 handle_rhs_call (t);
/* IPA path (condition elided): wire actual arguments to the
   callee's incoming parameters.  */
3737 call_expr_arg_iterator iter;
3741 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3743 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3744 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3751 decl = get_callee_fndecl (rhsop);
3753 /* If we can directly resolve the function being called, do so.
3754 Otherwise, it must be some sort of indirect expression that
3755 we should still be able to handle. */
3758 fi = get_vi_for_tree (decl);
3762 decl = CALL_EXPR_FN (rhsop);
3763 fi = get_vi_for_tree (decl);
3766 /* Assign all the passed arguments to the appropriate incoming
3767 parameters of the function. */
3769 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3771 struct constraint_expr lhs ;
3772 struct constraint_expr *rhsp;
3774 get_constraint_for (arg, &rhsc)<;
3775 if (TREE_CODE (decl) != FUNCTION_DECL)
3784 lhs.var = first_vi_for_offset (fi, i)->id;
3787 while (VEC_length (ce_s, rhsc) != 0)
3789 rhsp = VEC_last (ce_s, rhsc);
3790 process_constraint (new_constraint (lhs, *rhsp));
3791 VEC_pop (ce_s, rhsc);
3796 /* If we are returning a value, assign it to the result. */
3799 struct constraint_expr rhs;
3800 struct constraint_expr *lhsp;
3803 get_constraint_for (lhsop, &lhsc);
3804 if (TREE_CODE (decl) != FUNCTION_DECL)
3813 rhs.var = first_vi_for_offset (fi, i)->id;
3816 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3817 process_constraint (new_constraint (*lhsp, rhs));
3821 /* Otherwise, just a regular assignment statement. */
3822 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3824 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3825 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
/* Aggregate-to-aggregate (or complex) assignment is expanded
   field-by-field via do_structure_copy.  */
3828 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3829 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
3830 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
3831 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
3833 do_structure_copy (lhsop, rhsop);
3837 /* Only care about operations with pointers, structures
3838 containing pointers, dereferences, and call expressions. */
3839 if (could_have_pointers (lhsop)
3840 || TREE_CODE (rhsop) == CALL_EXPR)
3842 get_constraint_for (lhsop, &lhsc);
3843 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3845 /* RHS that consist of unary operations,
3846 exceptional types, or bare decls/constants, get
3847 handled directly by get_constraint_for. */
3849 case tcc_declaration:
3851 case tcc_exceptional:
3852 case tcc_expression:
3858 get_constraint_for (rhsop, &rhsc);
3859 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3861 struct constraint_expr *c2;
3864 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3865 process_constraint (new_constraint (*c, *c2));
3873 /* For pointer arithmetic of the form
3874 PTR + CST, we can simply use PTR's
3875 constraint because pointer arithmetic is
3876 not allowed to go out of bounds. */
3877 if (handle_ptr_arith (lhsc, rhsop))
3882 /* Otherwise, walk each operand. Notice that we
3883 can't use the operand interface because we need
3884 to process expressions other than simple operands
3885 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3887 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
3889 tree op = TREE_OPERAND (rhsop, i);
/* rhsc must have been fully drained by the previous iteration.  */
3892 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3893 get_constraint_for (op, &rhsc);
3894 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3896 struct constraint_expr *c2;
3897 while (VEC_length (ce_s, rhsc) > 0)
3899 c2 = VEC_last (ce_s, rhsc);
3900 process_constraint (new_constraint (*c, *c2));
3901 VEC_pop (ce_s, rhsc);
/* Dynamic type changes defeat TBAA pruning for the location.  */
3909 else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
3913 get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
3914 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3915 get_varinfo (c->var)->no_tbaa_pruning = true;
3918 /* After promoting variables and computing aliasing we will
3919 need to re-scan most statements. FIXME: Try to minimize the
3920 number of statements re-scanned. It's not really necessary to
3921 re-scan *all* statements. */
3922 mark_stmt_modified (origt);
3923 VEC_free (ce_s, heap, rhsc);
3924 VEC_free (ce_s, heap, lhsc);
3928 /* Find the first varinfo in the same variable as START that overlaps with
3930    Effectively, walk the chain of fields for the variable START to find the
3931    first field that overlaps with OFFSET.
3932    Return NULL if we can't find one.  */
3935 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3937   varinfo_t curr = start;
3940       /* We may not find a variable in the field list with the actual
3941 offset when we have glommed a structure to a variable.
3942 	 In that case, however, offset should still be within the size
3944       /* Half-open overlap test: OFFSET lies in
	 [curr->offset, curr->offset + curr->size).  */
      if (offset >= curr->offset && offset < (curr->offset + curr->size))
3952 /* Insert the varinfo FIELD into the field list for BASE, at the front
     of the list (no ordering is maintained by this variant).  */
3956 insert_into_field_list (varinfo_t base, varinfo_t field)
3958   varinfo_t prev = base;
3959   varinfo_t curr = base->next;
3965 /* Insert the varinfo FIELD into the field list for BASE, ordered by
     field offset (ascending).  */
3969 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3971   varinfo_t prev = base;
3972   varinfo_t curr = base->next;
3983       /* Found the first field at or past FIELD's offset; insert
	 FIELD just before it (i.e. after PREV).  */
      if (field->offset <= curr->offset)
3988   field->next = prev->next;
3993 /* qsort comparison function for two fieldoff's PA and PB.
     Primary key: field offset; secondary key: field size.  */
3996 fieldoff_compare (const void *pa, const void *pb)
3998   const fieldoff_s *foa = (const fieldoff_s *)pa;
3999   const fieldoff_s *fob = (const fieldoff_s *)pb;
4000   HOST_WIDE_INT foasize, fobsize;
4002   if (foa->offset != fob->offset)
4003     /* NOTE(review): the HOST_WIDE_INT difference is implicitly
	truncated to the qsort int return type; could misorder for
	differences that do not fit in int -- confirm callers only
	sort fields with bounded offsets.  */
    return foa->offset - fob->offset;
4005   foasize = TREE_INT_CST_LOW (foa->size);
4006   fobsize = TREE_INT_CST_LOW (fob->size);
4007   return foasize - fobsize;
4010 /* Sort a fieldstack according to the field offset and sizes,
     using fieldoff_compare as the ordering.  */
4012 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4014   qsort (VEC_address (fieldoff_s, fieldstack),
4015 	 VEC_length (fieldoff_s, fieldstack),
4016 	 sizeof (fieldoff_s),
4020 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4021    the fields of TYPE onto fieldstack, recording their offsets along
4024    OFFSET is used to keep track of the offset in this entire
4025    structure, rather than just the immediately containing structure.
4026    Returns the number of fields pushed.
4028    HAS_UNION is set to true if we find a union type as a field of
4031    ADDRESSABLE_TYPE is the type of the outermost object that could
4032    have its address taken.  */
4035 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4036 HOST_WIDE_INT offset, bool *has_union,
4037 tree addressable_type)
4041   unsigned int first_element = VEC_length (fieldoff_s, *fieldstack);
4043   /* If the vector of fields is growing too big, bail out early.
4044      Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
4046   if (first_element > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4049   /* A complex value is modeled as two fields: real part followed
     immediately by the imaginary part.  */
  if (TREE_CODE (type) == COMPLEX_TYPE)
4051       fieldoff_s *real_part, *img_part;
4052       real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4053       real_part->type = TREE_TYPE (type);
4054       real_part->size = TYPE_SIZE (TREE_TYPE (type));
4055       real_part->offset = offset;
4056       real_part->decl = NULL_TREE;
4057       real_part->alias_set = -1;
4058       real_part->base_for_components = false;
4060       img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4061       img_part->type = TREE_TYPE (type);
4062       img_part->size = TYPE_SIZE (TREE_TYPE (type));
4063       img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
4064       img_part->decl = NULL_TREE;
4065       img_part->alias_set = -1;
4066       img_part->base_for_components = false;
4071   else if (TREE_CODE (type) == ARRAY_TYPE)
4073       tree sz = TYPE_SIZE (type);
4074       tree elsz = TYPE_SIZE (TREE_TYPE (type));
4079 	  || ! host_integerp (sz, 1)
4080 	  || TREE_INT_CST_LOW (sz) == 0
4082 	  || ! host_integerp (elsz, 1)
4083 	  || TREE_INT_CST_LOW (elsz) == 0)
4086       /* Number of array elements; only expand small arrays
	 element-by-element.  */
      nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
4087       if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
4090       for (i = 0; i < nr; ++i)
4096 	      && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
4097 		  || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
4100 	  if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
4102 	  else if (!(pushed = push_fields_onto_fieldstack
4105 		      offset + i * TREE_INT_CST_LOW (elsz),
4107 		      (TYPE_NONALIASED_COMPONENT (type)
4109 		       : TREE_TYPE (type)))))
4110 	    /* Empty structures may have actual size, like in C++.  So
4111 	       see if we didn't push any subfields and the size is
4112 	       nonzero, push the field onto the stack */
4119 	      pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4120 	      pair->type = TREE_TYPE (type);
4122 	      pair->decl = NULL_TREE;
4123 	      pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
4124 	      if (TYPE_NONALIASED_COMPONENT (type))
4125 		pair->alias_set = get_alias_set (addressable_type);
4127 		pair->alias_set = -1;
4128 	      pair->base_for_components = false;
4138     for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4139       if (TREE_CODE (field) == FIELD_DECL)
4145 	      && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4146 		  || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
4149 	  if (!var_can_have_subvars (field))
4151 	  else if (!(pushed = push_fields_onto_fieldstack
4154 		      offset + bitpos_of_field (field),
4156 		      (DECL_NONADDRESSABLE_P (field)
4158 		       : TREE_TYPE (field))))
4159 		   && DECL_SIZE (field)
4160 		   && !integer_zerop (DECL_SIZE (field)))
4161 	    /* Empty structures may have actual size, like in C++.  So
4162 	       see if we didn't push any subfields and the size is
4163 	       nonzero, push the field onto the stack */
4170 	      pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4171 	      pair->type = TREE_TYPE (field);
4172 	      pair->size = DECL_SIZE (field);
4174 	      pair->offset = offset + bitpos_of_field (field);
4175 	      if (DECL_NONADDRESSABLE_P (field))
4176 		pair->alias_set = get_alias_set (addressable_type);
4178 		pair->alias_set = -1;
4179 	      pair->base_for_components = false;
4187   /* Make sure the first pushed field is marked as eligible for
4188      being a base for component references.  */
4190     VEC_index (fieldoff_s, *fieldstack, first_element)->base_for_components = true;
4195 /* Create a constraint from ANYTHING variable to VI, i.e.
     VI = &ANYTHING, meaning VI may point anywhere.  */
4197 make_constraint_from_anything (varinfo_t vi)
4199   struct constraint_expr lhs, rhs;
4205   rhs.var = anything_id;
4207   rhs.type = ADDRESSOF;
4208   process_constraint (new_constraint (lhs, rhs));
4211 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4212    if it is a varargs function.  */
4215 count_num_arguments (tree decl, bool *is_varargs)
4220   for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4224       /* A trailing void_type_node terminates a fixed-arity
	 argument list; its absence indicates varargs.  */
      if (TREE_VALUE (t) == void_type_node)
4234 /* Create the variable info node for function DECL, using NAME, and
4235    return the index of the variable we've created for the function.
     The function itself, each of its parameters, and its result each
     get a field-like varinfo, chained together sorted by offset.  */
4238 create_function_info_for (tree decl, const char *name)
4240   unsigned int index = VEC_length (varinfo_t, varmap);
4244   bool is_varargs = false;
4246   /* Create the variable info.  */
4248   vi = new_var_info (decl, index, name);
4253   /* Fullsize is #arguments + 1 to leave room for the result slot.  */
  vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4254   insert_vi_for_tree (vi->decl, vi);
4255   VEC_safe_push (varinfo_t, heap, varmap, vi);
4259   /* If it's varargs, we don't know how many arguments it has, so we
4266       vi->is_unknown_size_var = true;
4271   arg = DECL_ARGUMENTS (decl);
4273   /* Set up variables for each argument.  */
4274   for (i = 1; i < vi->fullsize; i++)
4277       const char *newname;
4279       unsigned int newindex;
4280       tree argdecl = decl;
4285       newindex = VEC_length (varinfo_t, varmap);
4286       asprintf (&tempname, "%s.arg%d", name, i-1);
4287       newname = ggc_strdup (tempname);
4290       argvi = new_var_info (argdecl, newindex, newname);
4291       argvi->decl = argdecl;
4292       VEC_safe_push (varinfo_t, heap, varmap, argvi);
4295       argvi->fullsize = vi->fullsize;
4296       argvi->has_union = false;
4297       insert_into_field_list_sorted (vi, argvi);
4298       stats.total_vars ++;
4301 	  insert_vi_for_tree (arg, argvi);
4302 	  arg = TREE_CHAIN (arg);
4306   /* Create a variable for the return var.  */
4307   if (DECL_RESULT (decl) != NULL
4308       || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4311       const char *newname;
4313       unsigned int newindex;
4314       tree resultdecl = decl;
4318       if (DECL_RESULT (decl))
4319 	resultdecl = DECL_RESULT (decl);
4321       newindex = VEC_length (varinfo_t, varmap);
4322       asprintf (&tempname, "%s.result", name);
4323       newname = ggc_strdup (tempname);
4326       resultvi = new_var_info (resultdecl, newindex, newname);
4327       resultvi->decl = resultdecl;
4328       VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4329       resultvi->offset = i;
4331       resultvi->fullsize = vi->fullsize;
4332       resultvi->has_union = false;
4333       insert_into_field_list_sorted (vi, resultvi);
4334       stats.total_vars ++;
4335       if (DECL_RESULT (decl))
4336 	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4342 /* Return true if FIELDSTACK contains fields that overlap.
4343    FIELDSTACK is assumed to be sorted by offset, so two fields with
     equal offsets (adjacent after sorting) constitute an overlap.  */
4346 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4348   fieldoff_s *fo = NULL;
4350   HOST_WIDE_INT lastoffset = -1;
4352   for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4354       if (fo->offset == lastoffset)
4356       lastoffset = fo->offset;
4361 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4362    This will also create any varinfo structures necessary for fields
     when field-sensitive analysis is in effect.  */
4366 create_variable_info_for (tree decl, const char *name)
4368   unsigned int index = VEC_length (varinfo_t, varmap);
4370   tree decltype = TREE_TYPE (decl);
4371   tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
4372   bool notokay = false;
4374   bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4375   VEC (fieldoff_s,heap) *fieldstack = NULL;
4377   /* Functions get their own dedicated creation path in IPA mode.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4378     return create_function_info_for (decl, name);
4380   hasunion = TREE_CODE (decltype) == UNION_TYPE
4381 	     || TREE_CODE (decltype) == QUAL_UNION_TYPE;
4382   if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
4384       push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion,
4388 	  VEC_free (fieldoff_s, heap, fieldstack);
4394   /* If the variable doesn't have subvars, we may end up needing to
4395      sort the field list and create fake variables for all the
4397   vi = new_var_info (decl, index, name);
4400   vi->has_union = hasunion;
4402       || TREE_CODE (declsize) != INTEGER_CST
4403       || TREE_CODE (decltype) == UNION_TYPE
4404       || TREE_CODE (decltype) == QUAL_UNION_TYPE)
4406       vi->is_unknown_size_var = true;
4412       vi->fullsize = TREE_INT_CST_LOW (declsize);
4413       vi->size = vi->fullsize;
4416   insert_vi_for_tree (vi->decl, vi);
4417   VEC_safe_push (varinfo_t, heap, varmap, vi);
4418   if (is_global && (!flag_whole_program || !in_ipa_mode))
4419     make_constraint_from_anything (vi);
4422   if (use_field_sensitive
4424       && !vi->is_unknown_size_var
4425       && var_can_have_subvars (decl)
4426       && VEC_length (fieldoff_s, fieldstack) > 1
4427       && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4429       unsigned int newindex = VEC_length (varinfo_t, varmap);
4430       fieldoff_s *fo = NULL;
4433       for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4436 	      || TREE_CODE (fo->size) != INTEGER_CST
4444       /* We can't sort them if we have a field with a variable sized type,
4445 	 which will make notokay = true.  In that case, we are going to return
4446 	 without creating varinfos for the fields anyway, so sorting them is a
4450 	  sort_fieldstack (fieldstack);
4451 	  /* Due to some C++ FE issues, like PR 22488, we might end up
4452 	     with what appear to be overlapping fields even though they,
4453 	     in reality, do not overlap.  Until the C++ FE is fixed,
4454 	     we will simply disable field-sensitivity for these cases.  */
4455 	  notokay = check_for_overlaps (fieldstack);
4459       if (VEC_length (fieldoff_s, fieldstack) != 0)
4460 	fo = VEC_index (fieldoff_s, fieldstack, 0);
4462       if (fo == NULL || notokay)
4464 	  vi->is_unknown_size_var = 1;
4467 	  VEC_free (fieldoff_s, heap, fieldstack);
4471       vi->size = TREE_INT_CST_LOW (fo->size);
4472       vi->offset = fo->offset;
4473       for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4474 	   i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4478 	  const char *newname = "NULL";
4481 	  newindex = VEC_length (varinfo_t, varmap);
4485 		asprintf (&tempname, "%s.%s",
4486 			  vi->name, alias_get_name (fo->decl));
4488 		asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4489 			  vi->name, fo->offset);
4490 	      newname = ggc_strdup (tempname);
4493 	  newvi = new_var_info (decl, newindex, newname);
4494 	  newvi->offset = fo->offset;
4495 	  newvi->size = TREE_INT_CST_LOW (fo->size);
4496 	  newvi->fullsize = vi->fullsize;
4497 	  insert_into_field_list (vi, newvi);
4498 	  VEC_safe_push (varinfo_t, heap, varmap, newvi);
4499 	  if (is_global && (!flag_whole_program || !in_ipa_mode))
4500 	    make_constraint_from_anything (newvi);
4506   VEC_free (fieldoff_s, heap, fieldstack);
4511 /* Print out the points-to solution for VAR to FILE.  */
4514 dump_solution_for_var (FILE *file, unsigned int var)
4516   varinfo_t vi = get_varinfo (var);
4520   /* If VAR was unified with another node, report the representative
     instead of a solution bitmap of its own.  */
  if (find (var) != var)
4522       varinfo_t vipt = get_varinfo (find (var));
4523       fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4527       fprintf (file, "%s = { ", vi->name);
4528       EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4530 	  fprintf (file, "%s ", get_varinfo (i)->name);
4532       fprintf (file, "}");
4533       if (vi->no_tbaa_pruning)
4534 	fprintf (file, " no-tbaa-pruning");
4535       fprintf (file, "\n");
4539 /* Print the points-to solution for VAR to stdout (debugger helper).  */
4542 debug_solution_for_var (unsigned int var)
4544   dump_solution_for_var (stdout, var);
4547 /* Create varinfo structures for all of the variables in the
4548    function for intraprocedural mode.  */
4551 intra_create_variable_infos (void)
4554   struct constraint_expr lhs, rhs;
4556   /* For each incoming pointer argument arg, create the constraint ARG
4557      = ANYTHING or a dummy variable if flag_argument_noalias is set.  */
4558   for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4562       if (!could_have_pointers (t))
4565       /* If flag_argument_noalias is set, then function pointer
4566 	 arguments are guaranteed not to point to each other.  In that
4567 	 case, create an artificial variable PARM_NOALIAS and the
4568 	 constraint ARG = &PARM_NOALIAS.  */
4569       if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4572 	  tree heapvar = heapvar_lookup (t);
4576 	  lhs.var = get_vi_for_tree (t)->id;
4578 	  if (heapvar == NULL_TREE)
4581 	      heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4583 	      DECL_EXTERNAL (heapvar) = 1;
4584 	      if (gimple_referenced_vars (cfun))
4585 		add_referenced_var (heapvar);
4587 	      heapvar_insert (t, heapvar);
4589 	      ann = get_var_ann (heapvar);
4590 	      /* Encode the strength of the noalias guarantee requested
		 on the command line in the heap variable's annotation.  */
	      if (flag_argument_noalias == 1)
4591 		ann->noalias_state = NO_ALIAS;
4592 	      else if (flag_argument_noalias == 2)
4593 		ann->noalias_state = NO_ALIAS_GLOBAL;
4594 	      else if (flag_argument_noalias == 3)
4595 		ann->noalias_state = NO_ALIAS_ANYTHING;
4600 	  vi = get_vi_for_tree (heapvar);
4601 	  vi->is_artificial_var = 1;
4602 	  vi->is_heap_var = 1;
4604 	  rhs.type = ADDRESSOF;
4606 	  for (p = get_varinfo (lhs.var); p; p = p->next)
4608 	      struct constraint_expr temp = lhs;
4610 	      process_constraint (new_constraint (temp, rhs));
4615 	  varinfo_t arg_vi = get_vi_for_tree (t);
4617 	  /* Otherwise each field of the argument may point anywhere.  */
	  for (p = arg_vi; p; p = p->next)
4618 	    make_constraint_from_anything (p);
4623 /* Structure used to put solution bitmaps in a hashtable so they can
4624    be shared among variables with the same points-to set.  */
4626 typedef struct shared_bitmap_info
4630 } *shared_bitmap_info_t;
4631 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4633 /* Table mapping a bitmap's hash to a canonical shared instance.  */
static htab_t shared_bitmap_table;
4635 /* Hash function for a shared_bitmap_info_t; returns the cached
     hash code computed when the entry was created.  */
4638 shared_bitmap_hash (const void *p)
4640   const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4641   return bi->hashcode;
4644 /* Equality function for two shared_bitmap_info_t's: entries are
     equal iff their points-to bitmaps have the same bits set.  */
4647 shared_bitmap_eq (const void *p1, const void *p2)
4649   const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4650   const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4651   return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4654 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4655    existing instance if there is one, NULL otherwise.  */
4658 shared_bitmap_lookup (bitmap pt_vars)
4661   struct shared_bitmap_info sbi;
4663   sbi.pt_vars = pt_vars;
4664   sbi.hashcode = bitmap_hash (pt_vars);
4666   /* NO_INSERT: this is a pure query; shared_bitmap_add does the
     insertion.  */
  slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4667 				   sbi.hashcode, NO_INSERT);
4671     return ((shared_bitmap_info_t) *slot)->pt_vars;
4675 /* Add a bitmap to the shared bitmap hashtable.  The table takes
     ownership of the heap-allocated info record (freed via the
     table's free function).  */
4678 shared_bitmap_add (bitmap pt_vars)
4681   shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4683   sbi->pt_vars = pt_vars;
4684   sbi->hashcode = bitmap_hash (pt_vars);
4686   slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4687 				   sbi->hashcode, INSERT);
4688   gcc_assert (!*slot);
4689   *slot = (void *) sbi;
4693 /* Set bits in INTO corresponding to the variable uids in solution set
4694    FROM, which came from variable PTR.
4695    For variables that are actually dereferenced, we also use type
4696    based alias analysis to prune the points-to sets.
4697    IS_DEREFED is true if PTR was directly dereferenced, which we use to
4698    help determine whether we are allowed to prune using TBAA.
4699    If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4703 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4704 		   bool no_tbaa_pruning)
4708   alias_set_type ptr_alias_set;
4710   gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
4711   ptr_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
4713   EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4715       varinfo_t vi = get_varinfo (i);
4716       alias_set_type var_alias_set;
4718       /* The only artificial variables that are allowed in a may-alias
4719 	 set are heap variables.  */
4720       if (vi->is_artificial_var && !vi->is_heap_var)
4723       if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
4727 	  subvar_t sv = get_subvars_for_var (vi->decl);
4729 	  /* Variables containing unions may need to be converted to
4730 	     their SFT's, because SFT's can have unions and we cannot.  */
4731 	  /* NOTE(review): this inner loop reuses index I; presumably an
	     inner-scope declaration of I shadows the outer bitmap
	     iterator here -- confirm against the full source.  */
	  for (i = 0; VEC_iterate (tree, sv, i, subvar); ++i)
4732 	    bitmap_set_bit (into, DECL_UID (subvar));
4734       else if (TREE_CODE (vi->decl) == VAR_DECL
4735 	       || TREE_CODE (vi->decl) == PARM_DECL
4736 	       || TREE_CODE (vi->decl) == RESULT_DECL)
4739 	  if (var_can_have_subvars (vi->decl)
4740 	      && (sv = get_subvars_for_var (vi->decl)))
4742 	      /* If VI->DECL is an aggregate for which we created
4743 		 SFTs, add the SFT corresponding to VI->OFFSET.
4744 		 If we didn't do field-sensitive PTA we need to
4745 		 add all overlapping SFTs.  */
4747 	      tree sft = get_first_overlapping_subvar (sv, vi->offset,
4750 	      for (; VEC_iterate (tree, sv, j, sft); ++j)
4752 		  if (SFT_OFFSET (sft) > vi->offset
4753 		      && vi->size <= SFT_OFFSET (sft) - vi->offset)
4756 		  var_alias_set = get_alias_set (sft);
4758 		      || (!is_derefed && !vi->directly_dereferenced)
4759 		      || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4761 		      bitmap_set_bit (into, DECL_UID (sft));
4763 		      /* Pointed-to SFTs are needed by the operand scanner
4764 			 to adjust offsets when adding operands to memory
4765 			 expressions that dereference PTR.  This means
4766 			 that memory partitioning may not partition
4767 			 this SFT because the operand scanner will not
4768 			 be able to find the other SFTs next to this
4769 			 one.  But we only need to do this if the pointed
4770 			 to type is aggregate.  */
4771 		      if (SFT_BASE_FOR_COMPONENTS_P (sft))
4772 			SFT_UNPARTITIONABLE_P (sft) = true;
4778 	      /* Otherwise, just add VI->DECL to the alias set.
4779 		 Don't type prune artificial vars.  */
4780 	      if (vi->is_artificial_var)
4781 		bitmap_set_bit (into, DECL_UID (vi->decl));
4784 		  var_alias_set = get_alias_set (vi->decl);
4786 		      || (!is_derefed && !vi->directly_dereferenced)
4787 		      || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4788 		    bitmap_set_bit (into, DECL_UID (vi->decl));
4796 /* True once compute_points_to_sets has produced usable alias info.  */
static bool have_alias_info = false;
4798 /* The list of SMT's that are in use by our pointer variables.  This
4799    is the set of SMT's for all pointers that can point to anything.  */
4800 static bitmap used_smts;
4802 /* Due to the ordering of points-to set calculation and SMT
4803    calculation being a bit co-dependent, we can't just calculate SMT
4804    used info whenever we want, we have to calculate it around the time
4805    that find_what_p_points_to is called.  */
4807 /* Mark which SMT's are in use by points-to anything variables.  */
4810 set_used_smts (void)
4814   used_smts = BITMAP_ALLOC (&pta_obstack);
4816   for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4818       tree var = vi->decl;
4819       varinfo_t withsolution = get_varinfo (find (i));
4822       struct ptr_info_def *pi = NULL;
4824       /* For parm decls, the pointer info may be under the default
4826       if (TREE_CODE (vi->decl) == PARM_DECL
4827 	  && gimple_default_def (cfun, var))
4828 	pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4829       else if (TREE_CODE (var) == SSA_NAME)
4830 	pi = SSA_NAME_PTR_INFO (var);
4832       /* Skip the special variables and those that can't be aliased.  */
4833       if (vi->is_special_var
4835 	  || (pi && !pi->is_dereferenced)
4836 	  || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4837 	  || !POINTER_TYPE_P (TREE_TYPE (var)))
4840       if (TREE_CODE (var) == SSA_NAME)
4841 	var = SSA_NAME_VAR (var);
4847       smt = va->symbol_mem_tag;
4848       /* Record the SMT only if the (representative's) solution
	 includes ANYTHING.  */
      if (smt && bitmap_bit_p (withsolution->solution, anything_id))
4849 	bitmap_set_bit (used_smts, DECL_UID (smt));
4853 /* Merge the necessary SMT's into the bitmap INTO, which is
4854    P's varinfo.  This involves merging all SMT's that are a subset of
4855    the SMT necessary for P.  */
4858 merge_smts_into (tree p, bitmap solution)
4866   if (TREE_CODE (p) == SSA_NAME)
4867     var = SSA_NAME_VAR (p);
4869   smt = var_ann (var)->symbol_mem_tag;
4872       alias_set_type smtset = get_alias_set (TREE_TYPE (smt));
4874       /* Need to set the SMT subsets first before this
4875 	 will work properly.  */
4876       bitmap_set_bit (solution, DECL_UID (smt));
4877       /* Pull in every used SMT whose alias set is a subset of ours.  */
      EXECUTE_IF_SET_IN_BITMAP (used_smts, 0, i, bi)
4879 	  tree newsmt = referenced_var (i);
4880 	  tree newsmttype = TREE_TYPE (newsmt);
4882 	  if (alias_set_subset_of (get_alias_set (newsmttype),
4884 	    bitmap_set_bit (solution, i);
4887       aliases = MTAG_ALIASES (smt);
4889 	bitmap_ior_into (solution, aliases);
4893 /* Given a pointer variable P, fill in its points-to set, or return
4895    Rather than return false for variables that point-to anything, we
4896    instead find the corresponding SMT, and merge in its aliases.  In
4897    addition to these aliases, we also set the bits for the SMT's
4898    themselves and their subsets, as SMT's are still in use by
4899    non-SSA_NAME's, and pruning may eliminate every one of their
4900    aliases.  In such a case, if we did not include the right set of
4901    SMT's in the points-to set of the variable, we'd end up with
4902    statements that do not conflict but should.  */
4905 find_what_p_points_to (tree p)
4910   if (!have_alias_info)
4913   /* For parameters, get at the points-to set for the actual parm
4915   if (TREE_CODE (p) == SSA_NAME
4916       && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4917       && SSA_NAME_IS_DEFAULT_DEF (p))
4918     lookup_p = SSA_NAME_VAR (p);
4920   vi = lookup_vi_for_tree (lookup_p);
4923       if (vi->is_artificial_var)
4926       /* See if this is a field or a structure.  */
4927       if (vi->size != vi->fullsize)
4929 	  /* Nothing currently asks about structure fields directly,
4930 	     but when they do, we need code here to hand back the
4932 	  if (!var_can_have_subvars (vi->decl)
4933 	      || get_subvars_for_var (vi->decl) == NULL)
4938 	  struct ptr_info_def *pi = get_ptr_info (p);
4941 	  bool was_pt_anything = false;
4942 	  bitmap finished_solution;
4945 	  if (!pi->is_dereferenced)
4948 	  /* This variable may have been collapsed, let's get the real
4950 	  vi = get_varinfo (find (vi->id));
4952 	  /* Translate artificial variables into SSA_NAME_PTR_INFO
4954 	  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4956 	      varinfo_t vi = get_varinfo (i);
4958 	      if (vi->is_artificial_var)
4960 		  /* FIXME.  READONLY should be handled better so that
4961 		     flow insensitive aliasing can disregard writable
4963 		  if (vi->id == nothing_id)
4965 		  else if (vi->id == anything_id)
4966 		    was_pt_anything = 1;
4967 		  else if (vi->id == readonly_id)
4968 		    was_pt_anything = 1;
4969 		  else if (vi->id == integer_id)
4970 		    was_pt_anything = 1;
4971 		  else if (vi->is_heap_var)
4972 		    pi->pt_global_mem = 1;
4976 	  /* Share the final set of variables when possible.  */
4977 	  finished_solution = BITMAP_GGC_ALLOC ();
4978 	  stats.points_to_sets_created++;
4980 	  /* Instead of using pt_anything, we merge in the SMT aliases
4981 	     for the underlying SMT.  In addition, if they could have
4982 	     pointed to anything, they could point to global memory.
4983 	     But we cannot do that for ref-all pointers because these
4984 	     aliases have not been computed yet.  */
4985 	  if (was_pt_anything)
4987 	      if (PTR_IS_REF_ALL (p))
4989 		  pi->pt_anything = 1;
4993 	      merge_smts_into (p, finished_solution);
4994 	      pi->pt_global_mem = 1;
4997 	  set_uids_in_ptset (p, finished_solution, vi->solution,
4998 			     vi->directly_dereferenced,
4999 			     vi->no_tbaa_pruning);
5000 	  /* Reuse an identical bitmap from the shared table if one
	     exists; otherwise register this one.  */
	  result = shared_bitmap_lookup (finished_solution);
5004 	      shared_bitmap_add (finished_solution);
5005 	      pi->pt_vars = finished_solution;
5009 	      pi->pt_vars = result;
5010 	      bitmap_clear (finished_solution);
5013 	  if (bitmap_empty_p (pi->pt_vars))
5025 /* Dump points-to information to OUTFILE, including solver statistics
     when TDF_STATS is set in dump_flags.  */
5028 dump_sa_points_to_info (FILE *outfile)
5032   fprintf (outfile, "\nPoints-to sets\n\n");
5034   if (dump_flags & TDF_STATS)
5036       fprintf (outfile, "Stats:\n");
5037       fprintf (outfile, "Total vars:               %d\n", stats.total_vars);
5038       fprintf (outfile, "Non-pointer vars:          %d\n",
5039 	       stats.nonpointer_vars);
5040       fprintf (outfile, "Statically unified vars:  %d\n",
5041 	       stats.unified_vars_static);
5042       fprintf (outfile, "Dynamically unified vars: %d\n",
5043 	       stats.unified_vars_dynamic);
5044       fprintf (outfile, "Iterations:               %d\n", stats.iterations);
5045       fprintf (outfile, "Number of edges:          %d\n", stats.num_edges);
5046       fprintf (outfile, "Number of implicit edges: %d\n",
5047 	       stats.num_implicit_edges);
5050   for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5051     dump_solution_for_var (outfile, i);
5055 /* Debug points-to information to stderr (debugger helper).  */
5058 debug_sa_points_to_info (void)
5060   dump_sa_points_to_info (stderr);
5064 /* Initialize the always-existing constraint variables for NULL
5065    ANYTHING, READONLY, and INTEGER.  These occupy the first four
     slots of VARMAP (ids 0..3).  */
5068 init_base_vars (void)
5070   struct constraint_expr lhs, rhs;
5072   /* Create the NULL variable, used to represent that a variable points
5074   nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5075   var_nothing = new_var_info (nothing_tree, 0, "NULL");
5076   insert_vi_for_tree (nothing_tree, var_nothing);
5077   var_nothing->is_artificial_var = 1;
5078   var_nothing->offset = 0;
5079   var_nothing->size = ~0;
5080   var_nothing->fullsize = ~0;
5081   var_nothing->is_special_var = 1;
5083   VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5085   /* Create the ANYTHING variable, used to represent that a variable
5086      points to some unknown piece of memory.  */
5087   anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5088   var_anything = new_var_info (anything_tree, 1, "ANYTHING");
5089   insert_vi_for_tree (anything_tree, var_anything);
5090   var_anything->is_artificial_var = 1;
5091   var_anything->size = ~0;
5092   var_anything->offset = 0;
5093   var_anything->next = NULL;
5094   var_anything->fullsize = ~0;
5095   var_anything->is_special_var = 1;
5098   /* Anything points to anything.  This makes deref constraints just
5099      work in the presence of linked list and other p = *p type loops,
5100      by saying that *ANYTHING = ANYTHING.  */
5101   VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5103   lhs.var = anything_id;
5105   rhs.type = ADDRESSOF;
5106   rhs.var = anything_id;
5109   /* This specifically does not use process_constraint because
5110      process_constraint ignores all anything = anything constraints, since all
5111      but this one are redundant.  */
5112   VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5114   /* Create the READONLY variable, used to represent that a variable
5115      points to readonly memory.  */
5116   readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5117   var_readonly = new_var_info (readonly_tree, 2, "READONLY");
5118   var_readonly->is_artificial_var = 1;
5119   var_readonly->offset = 0;
5120   var_readonly->size = ~0;
5121   var_readonly->fullsize = ~0;
5122   var_readonly->next = NULL;
5123   var_readonly->is_special_var = 1;
5124   insert_vi_for_tree (readonly_tree, var_readonly);
5126   VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5128   /* readonly memory points to anything, in order to make deref
5129      easier.  In reality, it points to anything the particular
5130      readonly variable can point to, but we don't track this
5133   lhs.var = readonly_id;
5135   rhs.type = ADDRESSOF;
5136   rhs.var = anything_id;
5139   process_constraint (new_constraint (lhs, rhs));
5141   /* Create the INTEGER variable, used to represent that a variable points
5143   integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5144   var_integer = new_var_info (integer_tree, 3, "INTEGER");
5145   insert_vi_for_tree (integer_tree, var_integer);
5146   var_integer->is_artificial_var = 1;
5147   var_integer->size = ~0;
5148   var_integer->fullsize = ~0;
5149   var_integer->offset = 0;
5150   var_integer->next = NULL;
5151   var_integer->is_special_var = 1;
5153   VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5155   /* INTEGER = ANYTHING, because we don't know where a dereference of
5156      a random integer will point to.  */
5158   lhs.var = integer_id;
5160   rhs.type = ADDRESSOF;
5161   rhs.var = anything_id;
5163   process_constraint (new_constraint (lhs, rhs));
5166 /* Initialize things necessary to perform PTA: bitmap obstacks,
     allocation pools, the constraint and varinfo vectors, the
     tree -> varinfo map, and the shared-bitmap hashtable.  */
5169 init_alias_vars (void)
5171   bitmap_obstack_initialize (&pta_obstack);
5172   bitmap_obstack_initialize (&oldpta_obstack);
5173   bitmap_obstack_initialize (&predbitmap_obstack);
5175   constraint_pool = create_alloc_pool ("Constraint pool",
5176 				       sizeof (struct constraint), 30);
5177   variable_info_pool = create_alloc_pool ("Variable info pool",
5178 					  sizeof (struct variable_info), 30);
5179   constraints = VEC_alloc (constraint_t, heap, 8);
5180   varmap = VEC_alloc (varinfo_t, heap, 8);
5181   vi_for_tree = pointer_map_create ();
5183   memset (&stats, 0, sizeof (stats));
5184   /* Entries are freed with free(); see shared_bitmap_add.  */
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5185 				     shared_bitmap_eq, free);
5189 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5190    predecessor edges.  Frees the predecessor structures and shrinks
     the successor array back to the real variable count.  */
5193 remove_preds_and_fake_succs (constraint_graph_t graph)
5197   /* Clear the implicit ref and address nodes from the successor
5199   for (i = 0; i < FIRST_REF_NODE; i++)
5201       if (graph->succs[i])
5202 	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5203 			    FIRST_REF_NODE * 2);
5206   /* Free the successor list for the non-ref nodes.  */
5207   for (i = FIRST_REF_NODE; i < graph->size; i++)
5209       if (graph->succs[i])
5210 	BITMAP_FREE (graph->succs[i]);
5213   /* Now reallocate the size of the successor list as, and blow away
5214      the predecessor bitmaps.  */
5215   graph->size = VEC_length (varinfo_t, varmap);
5216   graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5218   free (graph->implicit_preds);
5219   graph->implicit_preds = NULL;
5220   free (graph->preds);
5221   graph->preds = NULL;
5222   bitmap_obstack_release (&predbitmap_obstack);
5225 /* Compute the set of variables we can't TBAA prune. */
5228 compute_tbaa_pruning (void)
5230 unsigned int size = VEC_length (varinfo_t, varmap);
5235 changed = sbitmap_alloc (size);
5236 sbitmap_zero (changed);
5238 /* Mark all initial no_tbaa_pruning nodes as changed. */
5240 for (i = 0; i < size; ++i)
5242 varinfo_t ivi = get_varinfo (i);
5244 if (find (i) == i && ivi->no_tbaa_pruning)
5247 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5248 || VEC_length (constraint_t, graph->complex[i]) > 0)
5250 SET_BIT (changed, i);
5256 while (changed_count > 0)
5258 struct topo_info *ti = init_topo_info ();
5261 compute_topo_order (graph, ti);
5263 while (VEC_length (unsigned, ti->topo_order) != 0)
5267 i = VEC_pop (unsigned, ti->topo_order);
5269 /* If this variable is not a representative, skip it. */
5273 /* If the node has changed, we need to process the complex
5274 constraints and outgoing edges again. */
5275 if (TEST_BIT (changed, i))
5279 VEC(constraint_t,heap) *complex = graph->complex[i];
5281 RESET_BIT (changed, i);
5284 /* Process the complex copy constraints. */
5285 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5287 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5289 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5291 if (!lhsvi->no_tbaa_pruning)
5293 lhsvi->no_tbaa_pruning = true;
5294 if (!TEST_BIT (changed, lhsvi->id))
5296 SET_BIT (changed, lhsvi->id);
5303 /* Propagate to all successors. */
5304 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5306 unsigned int to = find (j);
5307 varinfo_t tovi = get_varinfo (to);
5309 /* Don't propagate to ourselves. */
5313 if (!tovi->no_tbaa_pruning)
5315 tovi->no_tbaa_pruning = true;
5316 if (!TEST_BIT (changed, to))
5318 SET_BIT (changed, to);
5326 free_topo_info (ti);
5329 sbitmap_free (changed);
5333 for (i = 0; i < size; ++i)
5335 varinfo_t ivi = get_varinfo (i);
5336 varinfo_t ivip = get_varinfo (find (i));
5338 if (ivip->no_tbaa_pruning)
5340 tree var = ivi->decl;
5342 if (TREE_CODE (var) == SSA_NAME)
5343 var = SSA_NAME_VAR (var);
5345 if (POINTER_TYPE_P (TREE_TYPE (var)))
5347 DECL_NO_TBAA_P (var) = 1;
5349 /* Tell the RTL layer that this pointer can alias
5351 DECL_POINTER_ALIAS_SET (var) = 0;
5358 /* Create points-to sets for the current function. See the comments
5359 at the start of the file for an algorithmic overview. */
5362 compute_points_to_sets (struct alias_info *ai)
5364 struct scc_info *si;
5367 timevar_push (TV_TREE_PTA);
5370 init_alias_heapvars ();
5372 intra_create_variable_infos ();
5374 /* Now walk all statements and derive aliases. */
5377 block_stmt_iterator bsi;
5380 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5382 if (is_gimple_reg (PHI_RESULT (phi)))
5384 find_func_aliases (phi);
5386 /* Update various related attributes like escaped
5387 addresses, pointer dereferences for loads and stores.
5388 This is used when creating name tags and alias
5390 update_alias_info (phi, ai);
5394 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
5396 tree stmt = bsi_stmt (bsi);
5398 find_func_aliases (stmt);
5400 /* Update various related attributes like escaped
5401 addresses, pointer dereferences for loads and stores.
5402 This is used when creating name tags and alias
5404 update_alias_info (stmt, ai);
5406 /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
5407 been captured, and we can remove them. */
5408 if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
5409 bsi_remove (&bsi, true);
5418 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5419 dump_constraints (dump_file);
5424 "\nCollapsing static cycles and doing variable "
5427 init_graph (VEC_length (varinfo_t, varmap) * 2);
5430 fprintf (dump_file, "Building predecessor graph\n");
5431 build_pred_graph ();
5434 fprintf (dump_file, "Detecting pointer and location "
5436 si = perform_var_substitution (graph);
5439 fprintf (dump_file, "Rewriting constraints and unifying "
5441 rewrite_constraints (graph, si);
5442 free_var_substitution_info (si);
5444 build_succ_graph ();
5445 move_complex_constraints (graph);
5448 fprintf (dump_file, "Uniting pointer but not location equivalent "
5450 unite_pointer_equivalences (graph);
5453 fprintf (dump_file, "Finding indirect cycles\n");
5454 find_indirect_cycles (graph);
5456 /* Implicit nodes and predecessors are no longer necessary at this
5458 remove_preds_and_fake_succs (graph);
5461 fprintf (dump_file, "Solving graph\n");
5463 solve_graph (graph);
5465 compute_tbaa_pruning ();
5468 dump_sa_points_to_info (dump_file);
5470 have_alias_info = true;
5472 timevar_pop (TV_TREE_PTA);
5476 /* Delete created points-to sets. */
5479 delete_points_to_sets (void)
5483 htab_delete (shared_bitmap_table);
5484 if (dump_file && (dump_flags & TDF_STATS))
5485 fprintf (dump_file, "Points to sets created:%d\n",
5486 stats.points_to_sets_created);
5488 pointer_map_destroy (vi_for_tree);
5489 bitmap_obstack_release (&pta_obstack);
5490 VEC_free (constraint_t, heap, constraints);
5492 for (i = 0; i < graph->size; i++)
5493 VEC_free (constraint_t, heap, graph->complex[i]);
5494 free (graph->complex);
5497 free (graph->succs);
5499 free (graph->pe_rep);
5500 free (graph->indirect_cycles);
5503 VEC_free (varinfo_t, heap, varmap);
5504 free_alloc_pool (variable_info_pool);
5505 free_alloc_pool (constraint_pool);
5506 have_alias_info = false;
5509 /* Return true if we should execute IPA PTA. */
5513 return (flag_unit_at_a_time != 0
5515 /* Don't bother doing anything if the program has errors. */
5516 && !(errorcount || sorrycount));
5519 /* Execute the driver for IPA PTA. */
5521 ipa_pta_execute (void)
5523 struct cgraph_node *node;
5524 struct scc_info *si;
5527 init_alias_heapvars ();
5530 for (node = cgraph_nodes; node; node = node->next)
5532 if (!node->analyzed || cgraph_is_master_clone (node))
5536 varid = create_function_info_for (node->decl,
5537 cgraph_node_name (node));
5538 if (node->local.externally_visible)
5540 varinfo_t fi = get_varinfo (varid);
5541 for (; fi; fi = fi->next)
5542 make_constraint_from_anything (fi);
5546 for (node = cgraph_nodes; node; node = node->next)
5548 if (node->analyzed && cgraph_is_master_clone (node))
5550 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
5552 tree old_func_decl = current_function_decl;
5555 "Generating constraints for %s\n",
5556 cgraph_node_name (node));
5558 current_function_decl = node->decl;
5560 FOR_EACH_BB_FN (bb, func)
5562 block_stmt_iterator bsi;
5565 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5567 if (is_gimple_reg (PHI_RESULT (phi)))
5569 find_func_aliases (phi);
5573 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5575 tree stmt = bsi_stmt (bsi);
5576 find_func_aliases (stmt);
5579 current_function_decl = old_func_decl;
5584 /* Make point to anything. */
5590 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5591 dump_constraints (dump_file);
5596 "\nCollapsing static cycles and doing variable "
5599 init_graph (VEC_length (varinfo_t, varmap) * 2);
5600 build_pred_graph ();
5601 si = perform_var_substitution (graph);
5602 rewrite_constraints (graph, si);
5603 free_var_substitution_info (si);
5605 build_succ_graph ();
5606 move_complex_constraints (graph);
5607 unite_pointer_equivalences (graph);
5608 find_indirect_cycles (graph);
5610 /* Implicit nodes and predecessors are no longer necessary at this
5612 remove_preds_and_fake_succs (graph);
5615 fprintf (dump_file, "\nSolving graph\n");
5617 solve_graph (graph);
5620 dump_sa_points_to_info (dump_file);
5623 delete_alias_heapvars ();
5624 delete_points_to_sets ();
5628 struct simple_ipa_opt_pass pass_ipa_pta =
5633 gate_ipa_pta, /* gate */
5634 ipa_pta_execute, /* execute */
5637 0, /* static_pass_number */
5638 TV_IPA_PTA, /* tv_id */
5639 0, /* properties_required */
5640 0, /* properties_provided */
5641 0, /* properties_destroyed */
5642 0, /* todo_flags_start */
5643 TODO_update_ssa /* todo_flags_finish */
5647 /* Initialize the heapvar for statement mapping. */
5649 init_alias_heapvars (void)
5651 if (!heapvar_for_stmt)
5652 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5657 delete_alias_heapvars (void)
5659 htab_delete (heapvar_for_stmt);
5660 heapvar_for_stmt = NULL;
5664 #include "gt-tree-ssa-structalias.h"