1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
35 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "tree-inline.h"
42 #include "tree-gimple.h"
46 #include "tree-pass.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
51 #include "tree-ssa-structalias.h"
54 #include "pointer-set.h"
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
68 See "Efficient Field-sensitive pointer analysis for C" by "David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
73 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
93 Each variable for a structure field has
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize", that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
115 In order to solve the system of set constraints, the following is
118 1. Each constraint variable x has a solution set associated with it,
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
146 8. The process of walking the graph is iterated until no solution
149 Prior to walking the graph in steps 6 and 7, We perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
/* GC-managed hash table mapping statements/trees to the artificial heap
   variables created for them (see heapvar_lookup/heapvar_insert below).  */
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
/* Whether structure fields get their own constraint variables.  */
169 static bool use_field_sensitive = true;
/* Nonzero when running the interprocedural (IPA) variant of the analysis.  */
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
/* Forward declarations for mutually-referencing routines.  */
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
/* Iterate over bitmap A only when it is non-NULL.
   NOTE(review): the guarding "if (a)" line appears to be missing from
   this extract of the macro definition.  */
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
/* Bookkeeping counters for dump output.  NOTE(review): the closing
   "} stats;" declarator is not visible in this extract, but the solver
   code below references a "stats" object with these fields.  */
195 static struct constraint_stats
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
/* Per-constraint-variable record.  NOTE(review): the "struct variable_info"
   opener and the id field declaration appear to be missing from this
   extract; the field comments below match their declarations.  */
209 /* ID of this variable */
212 /* True if this is a variable created by the constraint analysis, such as
213 heap variables and constraints we had to break up. */
214 unsigned int is_artificial_var:1;
216 /* True if this is a special variable whose solution set should not be
218 unsigned int is_special_var:1;
220 /* True for variables whose size is not known or variable. */
221 unsigned int is_unknown_size_var:1;
223 /* True if this is a heap variable. */
224 unsigned int is_heap_var:1;
226 /* True if we may not use TBAA to prune references to this
227 variable. This is used for C++ placement new. */
228 unsigned int no_tbaa_pruning : 1;
230 /* Variable id this was collapsed to due to type unsafety. Zero if
231 this variable was not collapsed. This should be unused completely
232 after build_succ_graph, or something is broken. */
233 unsigned int collapsed_to;
235 /* A link to the variable for the next field in this structure. */
236 struct variable_info *next;
238 /* Offset of this variable, in bits, from the base variable */
239 unsigned HOST_WIDE_INT offset;
241 /* Size of the variable, in bits. */
242 unsigned HOST_WIDE_INT size;
244 /* Full size of the base variable, in bits. */
245 unsigned HOST_WIDE_INT fullsize;
247 /* Name of this variable */
250 /* Tree that this variable is associated with. */
253 /* Points-to set for this variable. */
256 /* Old points-to set for this variable. */
259 typedef struct variable_info *varinfo_t;
/* Forward declarations used before their definitions later in the file.  */
261 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
262 static varinfo_t lookup_vi_for_tree (tree);
264 /* Pool of variable info structures. */
265 static alloc_pool variable_info_pool;
267 DEF_VEC_P(varinfo_t);
269 DEF_VEC_ALLOC_P(varinfo_t, heap);
271 /* Table of variable info structures for constraint variables.
272 Indexed directly by variable info id. */
273 static VEC(varinfo_t,heap) *varmap;
275 /* Return the varmap element N */
277 static inline varinfo_t
278 get_varinfo (unsigned int n)
/* Plain index into the global VARMAP; no collapsed_to chasing here.  */
280 return VEC_index (varinfo_t, varmap, n);
283 /* Return the varmap element N, following the collapsed_to link. */
285 static inline varinfo_t
286 get_varinfo_fc (unsigned int n)
288 varinfo_t v = VEC_index (varinfo_t, varmap, n);
/* collapsed_to of zero means "not collapsed" (id 0 is the special
   "nothing" variable, so it can never be a collapse target); otherwise
   redirect to the representative the variable was collapsed into.  */
290 if (v->collapsed_to != 0)
291 return get_varinfo (v->collapsed_to);
295 /* Static IDs for the special variables. */
296 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
297 escaped_id = 3, nonlocal_id = 4, callused_id = 5, integer_id = 6 };
/* The enum ids above index varmap; each special variable also has a
   cached varinfo and an associated decl tree, declared below.  */
299 /* Variable that represents the unknown pointer. */
300 static varinfo_t var_anything;
301 static tree anything_tree;
303 /* Variable that represents the NULL pointer. */
304 static varinfo_t var_nothing;
305 static tree nothing_tree;
307 /* Variable that represents read only memory. */
308 static varinfo_t var_readonly;
309 static tree readonly_tree;
311 /* Variable that represents escaped memory. */
312 static varinfo_t var_escaped;
313 static tree escaped_tree;
315 /* Variable that represents nonlocal memory. */
316 static varinfo_t var_nonlocal;
317 static tree nonlocal_tree;
319 /* Variable that represents call-used memory. */
320 static varinfo_t var_callused;
321 static tree callused_tree;
323 /* Variable that represents integers. This is used for when people do things
325 static varinfo_t var_integer;
326 static tree integer_tree;
328 /* Lookup a heap var for FROM, and return it if we find one. */
331 heapvar_lookup (tree from)
333 struct tree_map *h, in;
/* Key the lookup on FROM's pointer identity.  NOTE(review): the lines
   filling IN's fields and returning the result appear to be missing
   from this extract.  */
336 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
337 htab_hash_pointer (from));
343 /* Insert a mapping FROM->TO in the heap var for statement
347 heapvar_insert (tree from, tree to)
/* Allocate the map entry in GC memory so heapvar_for_stmt's if_marked
   machinery can manage its lifetime.  */
352 h = GGC_NEW (struct tree_map);
353 h->hash = htab_hash_pointer (from);
/* Store (overwriting any prior slot) under FROM's pointer hash.  */
356 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
357 *(struct tree_map **) loc = h;
360 /* Return a new variable info structure consisting for a variable
361 named NAME, and using constraint graph node NODE. */
364 new_var_info (tree t, unsigned int id, const char *name)
366 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
/* All classification flags start false; callers flip them as needed.  */
372 ret->is_artificial_var = false;
373 ret->is_heap_var = false;
374 ret->is_special_var = false;
375 ret->is_unknown_size_var = false;
/* For SSA names, base TBAA decisions on the underlying declaration.  */
377 if (TREE_CODE (var) == SSA_NAME)
378 var = SSA_NAME_VAR (var);
/* Disable TBAA-based pruning for decls explicitly marked no-TBAA
   (used for C++ placement new, per the field comment above).  */
379 ret->no_tbaa_pruning = (DECL_P (var)
380 && POINTER_TYPE_P (TREE_TYPE (var))
381 && DECL_NO_TBAA_P (var));
/* Fresh, empty points-to sets on the long-lived PTA obstacks.  */
382 ret->solution = BITMAP_ALLOC (&pta_obstack);
383 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
385 ret->collapsed_to = 0;
389 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
391 /* An expression that appears in a constraint. */
393 struct constraint_expr
395 /* Constraint type. */
396 constraint_expr_type type;
398 /* Variable we are referring to in the constraint. */
401 /* Offset, in bits, of this constraint from the beginning of
402 variables it ends up referring to.
404 IOW, in a deref constraint, we would deref, get the result set,
405 then add OFFSET to each member. */
406 unsigned HOST_WIDE_INT offset;
409 typedef struct constraint_expr ce_s;
/* Forward declarations for the constraint-building entry points.  */
411 DEF_VEC_ALLOC_O(ce_s, heap);
412 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
413 static void get_constraint_for (tree, VEC(ce_s, heap) **);
414 static void do_deref (VEC (ce_s, heap) **);
416 /* Our set constraints are made up of two constraint expressions, one
419 As described in the introduction, our set constraints each represent an
420 operation between set valued variables.
/* A constraint is "lhs OP rhs" where each side is a constraint_expr.  */
424 struct constraint_expr lhs;
425 struct constraint_expr rhs;
428 /* List of constraints that we use to build the constraint graph from. */
430 static VEC(constraint_t,heap) *constraints;
431 static alloc_pool constraint_pool;
435 DEF_VEC_ALLOC_I(int, heap);
437 /* The constraint graph is represented as an array of bitmaps
438 containing successor nodes. */
440 struct constraint_graph
442 /* Size of this graph, which may be different than the number of
443 nodes in the variable map. */
446 /* Explicit successors of each node. */
449 /* Implicit predecessors of each node (Used for variable
451 bitmap *implicit_preds;
453 /* Explicit predecessors of each node (Used for variable substitution). */
456 /* Indirect cycle representatives, or -1 if the node has no indirect
458 int *indirect_cycles;
460 /* Representative node for a node. rep[a] == a unless the node has
464 /* Equivalence class representative for a label. This is used for
465 variable substitution. */
468 /* Pointer equivalence label for a node. All nodes with the same
469 pointer equivalence label can be unified together at some point
470 (either during constraint optimization or after the constraint
474 /* Pointer equivalence representative for a label. This is used to
475 handle nodes that are pointer equivalent but not location
476 equivalent. We can unite these once the addressof constraints
477 are transformed into initial points-to sets. */
480 /* Pointer equivalence label for each node, used during variable
482 unsigned int *pointer_label;
484 /* Location equivalence label for each node, used during location
485 equivalence finding. */
486 unsigned int *loc_label;
488 /* Pointed-by set for each node, used during location equivalence
489 finding. This is pointed-by rather than pointed-to, because it
490 is constructed using the predecessor graph. */
493 /* Points to sets for pointer equivalence. This is *not* the actual
494 points-to sets for nodes. */
497 /* Bitmap of nodes where the bit is set if the node is a direct
498 node. Used for variable substitution. */
499 sbitmap direct_nodes;
501 /* Bitmap of nodes where the bit is set if the node is address
502 taken. Used for variable substitution. */
503 bitmap address_taken;
505 /* True if points_to bitmap for this node is stored in the hash
509 /* Number of incoming edges remaining to be processed by pointer
511 Used for variable substitution. */
512 unsigned int *number_incoming;
515 /* Vector of complex constraints for each graph node. Complex
516 constraints are those involving dereferences or offsets that are
518 VEC(constraint_t,heap) **complex;
/* The single global constraint graph operated on by everything below.  */
521 static constraint_graph_t graph;
523 /* During variable substitution and the offline version of indirect
524 cycle finding, we create nodes to represent dereferences and
525 address taken constraints. These represent where these start and
/* REF node for variable V is FIRST_REF_NODE + V; ids in
   [FIRST_REF_NODE, LAST_REF_NODE] are therefore dereference nodes.  */
527 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
528 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
530 /* Return the representative node for NODE, if NODE has been unioned
532 This function performs path compression along the way to finding
533 the representative. */
536 find (unsigned int node)
538 gcc_assert (node < graph->size);
/* Union-find with full path compression: as the recursion unwinds,
   every node on the chain is repointed directly at the root.  */
539 if (graph->rep[node] != node)
540 return graph->rep[node] = find (graph->rep[node]);
544 /* Union the TO and FROM nodes to the TO nodes.
545 Note that at some point in the future, we may want to do
546 union-by-rank, in which case we are going to have to return the
547 node we unified to. */
550 unite (unsigned int to, unsigned int from)
552 gcc_assert (to < graph->size && from < graph->size);
/* Only link if the two are not already the same node; callers (e.g.
   scc_visit) test the result to know whether a union happened.
   NOTE(review): the return statements are missing from this extract.  */
553 if (to != from && graph->rep[from] != to)
555 graph->rep[from] = to;
561 /* Create a new constraint consisting of LHS and RHS expressions. */
564 new_constraint (const struct constraint_expr lhs,
565 const struct constraint_expr rhs)
/* Constraints live in CONSTRAINT_POOL.  NOTE(review): the lines copying
   LHS/RHS into RET and returning it are missing from this extract.  */
567 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
573 /* Print out constraint C to FILE. */
576 dump_constraint (FILE *file, constraint_t c)
/* Format is "lhs = rhs" with optional " + offset" on either side.
   NOTE(review): the branches printing the "&" (ADDRESSOF) and "*"
   (DEREF) prefixes appear to be missing from this extract.  */
578 if (c->lhs.type == ADDRESSOF)
580 else if (c->lhs.type == DEREF)
582 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
583 if (c->lhs.offset != 0)
584 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
585 fprintf (file, " = ");
586 if (c->rhs.type == ADDRESSOF)
588 else if (c->rhs.type == DEREF)
590 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
591 if (c->rhs.offset != 0)
592 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
593 fprintf (file, "\n");
596 /* Print out constraint C to stderr.  Convenience wrapper for use from
   a debugger. */
599 debug_constraint (constraint_t c)
601 dump_constraint (stderr, c);
604 /* Print out all constraints to FILE */
607 dump_constraints (FILE *file)
/* Walk the global CONSTRAINTS vector in insertion order.  */
611 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
612 dump_constraint (file, c);
615 /* Print out all constraints to stderr.  Convenience wrapper for use
   from a debugger. */
618 debug_constraints (void)
620 dump_constraints (stderr);
625 The solver is a simple worklist solver, that works on the following
628 sbitmap changed_nodes = all zeroes;
630 For each node that is not already collapsed:
632 set bit in changed nodes
634 while (changed_count > 0)
636 compute topological ordering for constraint graph
638 find and collapse cycles in the constraint graph (updating
639 changed if necessary)
641 for each node (n) in the graph in topological order:
644 Process each complex constraint associated with the node,
645 updating changed if necessary.
647 For each outgoing edge from n, propagate the solution from n to
648 the destination of the edge, updating changed as necessary.
652 /* Return true if two constraint expressions A and B are equal. */
/* Field-wise comparison of all three members of constraint_expr.  */
655 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
657 return a.type == b.type && a.var == b.var && a.offset == b.offset;
660 /* Return true if constraint expression A is less than constraint expression
661 B. This is just arbitrary, but consistent, in order to give them an
/* Lexicographic order: type first, then var, then offset.
   NOTE(review): the intermediate "if (a.var == b.var)" test appears to
   be missing from this extract.  */
665 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
667 if (a.type == b.type)
670 return a.offset < b.offset;
672 return a.var < b.var;
675 return a.type < b.type;
678 /* Return true if constraint A is less than constraint B. This is just
679 arbitrary, but consistent, in order to give them an ordering. */
/* Lexicographic order on (lhs, rhs) using constraint_expr_less.
   NOTE(review): the "return true"/"return false" branch bodies are
   missing from this extract.  */
682 constraint_less (const constraint_t a, const constraint_t b)
684 if (constraint_expr_less (a->lhs, b->lhs))
686 else if (constraint_expr_less (b->lhs, a->lhs))
689 return constraint_expr_less (a->rhs, b->rhs);
692 /* Return true if two constraints A and B are equal. */
/* Equal iff both sides compare equal expression-wise.  */
695 constraint_equal (struct constraint a, struct constraint b)
697 return constraint_expr_equal (a.lhs, b.lhs)
698 && constraint_expr_equal (a.rhs, b.rhs);
702 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
705 constraint_vec_find (VEC(constraint_t,heap) *vec,
706 struct constraint lookfor)
/* Binary search via VEC_lower_bound; the vector must be kept sorted by
   constraint_less.  NOTE(review): the NULL returns for "not found" are
   missing from this extract.  */
714 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
715 if (place >= VEC_length (constraint_t, vec))
717 found = VEC_index (constraint_t, vec, place);
718 if (!constraint_equal (*found, lookfor))
723 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
726 constraint_set_union (VEC(constraint_t,heap) **to,
727 VEC(constraint_t,heap) **from)
732 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
/* Insert each FROM constraint into TO at its sorted position, skipping
   duplicates so TO stays a sorted set.  */
734 if (constraint_vec_find (*to, *c) == NULL)
736 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
738 VEC_safe_insert (constraint_t, heap, *to, place, c);
743 /* Take a solution set SET, add OFFSET to each member of the set, and
744 overwrite SET with the result when done. */
747 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
/* Scratch bitmap; freed before returning, allocated on the
   per-iteration obstack.  */
749 bitmap result = BITMAP_ALLOC (&iteration_obstack);
753 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
755 /* If this is a properly sized variable, only add offset if it's
756 less than end. Otherwise, it is globbed to a single
759 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
/* Translate the member to the field variable covering the offsetted
   position within the same base variable.  */
761 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
762 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
765 bitmap_set_bit (result, v->id);
/* Artificial or unknown-size variables are kept as-is (globbed).  */
767 else if (get_varinfo (i)->is_artificial_var
768 || get_varinfo (i)->is_unknown_size_var)
770 bitmap_set_bit (result, i);
/* Replace SET's contents with the offsetted version.  */
774 bitmap_copy (set, result);
775 BITMAP_FREE (result);
778 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
/* Fast path: zero increment is a plain bitmap union.
   NOTE(review): the "if (inc == 0)" guard line is missing from this
   extract.  */
782 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
785 return bitmap_ior_into (to, from);
/* Otherwise offset a temporary copy of FROM, then union it into TO.  */
791 tmp = BITMAP_ALLOC (&iteration_obstack);
792 bitmap_copy (tmp, from);
793 solution_set_add (tmp, inc);
794 res = bitmap_ior_into (to, tmp);
800 /* Insert constraint C into the list of complex constraints for graph
804 insert_into_complex (constraint_graph_t graph,
805 unsigned int var, constraint_t c)
/* Keep each node's complex list sorted (by constraint_less) and
   duplicate-free, mirroring constraint_set_union's invariant.  */
807 VEC (constraint_t, heap) *complex = graph->complex[var];
808 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
811 /* Only insert constraints that do not already exist. */
812 if (place >= VEC_length (constraint_t, complex)
813 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
814 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
818 /* Condense two variable nodes into a single variable node, by moving
819 all associated info from SRC to TO. */
822 merge_node_constraints (constraint_graph_t graph, unsigned int to,
/* Caller must already have united FROM into TO in the union-find.  */
828 gcc_assert (find (from) == to);
830 /* Move all complex constraints from src node into to node */
831 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
833 /* In complex constraints for node src, we may have either
834 a = *src, and *src = a, or an offseted constraint which are
835 always added to the rhs node's constraints. */
/* NOTE(review): the statements retargeting c->rhs.var / c->lhs.var to
   TO appear to be missing from this extract.  */
837 if (c->rhs.type == DEREF)
839 else if (c->lhs.type == DEREF)
/* Merge the (retargeted) list into TO's sorted set and drop FROM's.  */
844 constraint_set_union (&graph->complex[to], &graph->complex[from]);
845 VEC_free (constraint_t, heap, graph->complex[from]);
846 graph->complex[from] = NULL;
850 /* Remove edges involving NODE from GRAPH. */
853 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
/* Only the successor bitmap is owned here; freeing it drops all of
   NODE's outgoing edges.  */
855 if (graph->succs[node])
856 BITMAP_FREE (graph->succs[node]);
859 /* Merge GRAPH nodes FROM and TO into node TO. */
862 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
865 if (graph->indirect_cycles[from] != -1)
867 /* If we have indirect cycles with the from node, and we have
868 none on the to node, the to node has indirect cycles from the
869 from node now that they are unified.
870 If indirect cycles exist on both, unify the nodes that they
871 are in a cycle with, since we know they are in a cycle with
873 if (graph->indirect_cycles[to] == -1)
874 graph->indirect_cycles[to] = graph->indirect_cycles[from];
877 /* Merge all the successor edges. */
878 if (graph->succs[from])
/* TO's successor bitmap is allocated lazily; union FROM's edges in.  */
880 if (!graph->succs[to])
881 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
882 bitmap_ior_into (graph->succs[to],
/* Finally drop FROM's own edges; it is now fully represented by TO.  */
886 clear_edges_for_node (graph, from);
890 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
891 it doesn't exist in the graph already. */
894 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
/* Implicit-predecessor bitmaps are allocated lazily per node.  */
900 if (!graph->implicit_preds[to])
901 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
/* bitmap_set_bit returns true only when the bit was newly set, so the
   statistic counts distinct edges.  */
903 if (bitmap_set_bit (graph->implicit_preds[to], from))
904 stats.num_implicit_edges++;
907 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
908 it doesn't exist in the graph already.
909 Return false if the edge already existed, true otherwise. */
912 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
/* Predecessor bitmaps are allocated lazily per node.  */
915 if (!graph->preds[to])
916 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
917 bitmap_set_bit (graph->preds[to], from);
920 /* Add a graph edge to GRAPH, going from FROM to TO if
921 it doesn't exist in the graph already.
922 Return false if the edge already existed, true otherwise. */
925 add_graph_edge (constraint_graph_t graph, unsigned int to,
/* Successor bitmaps are allocated lazily; bitmap_set_bit's result
   distinguishes a new edge from an existing one.  */
936 if (!graph->succs[from])
937 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
938 if (bitmap_set_bit (graph->succs[from], to))
/* Only count edges between real (non-REF) variable nodes.  */
941 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
949 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
/* Note the argument order: the edge tested is DEST -> SRC in the
   successor bitmaps.  */
952 valid_graph_edge (constraint_graph_t graph, unsigned int src,
955 return (graph->succs[dest]
956 && bitmap_bit_p (graph->succs[dest], src));
959 /* Initialize the constraint graph structure to contain SIZE nodes. */
962 init_graph (unsigned int size)
966 graph = XCNEW (struct constraint_graph);
/* NOTE(review): the "graph->size = size" assignment appears to be
   missing from this extract; the allocations below rely on it.  */
968 graph->succs = XCNEWVEC (bitmap, graph->size);
969 graph->indirect_cycles = XNEWVEC (int, graph->size);
970 graph->rep = XNEWVEC (unsigned int, graph->size);
971 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
972 graph->pe = XCNEWVEC (unsigned int, graph->size);
973 graph->pe_rep = XNEWVEC (int, graph->size);
/* Every node starts as its own union-find representative with no
   pointer-equivalence rep and no indirect cycle.  NOTE(review): the
   "graph->rep[j] = j" line appears to be missing from this extract.  */
975 for (j = 0; j < graph->size; j++)
978 graph->pe_rep[j] = -1;
979 graph->indirect_cycles[j] = -1;
983 /* Build the constraint graph, adding only predecessor edges right now. */
986 build_pred_graph (void)
/* Allocate all per-node arrays used by offline variable substitution.  */
992 graph->implicit_preds = XCNEWVEC (bitmap, graph->size)
993 graph->preds = XCNEWVEC (bitmap, graph->size);
994 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
995 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
996 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
997 graph->points_to = XCNEWVEC (bitmap, graph->size);
998 graph->eq_rep = XNEWVEC (int, graph->size);
999 graph->direct_nodes = sbitmap_alloc (graph->size);
1000 graph->pt_used = sbitmap_alloc (graph->size);
1001 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1002 graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
1003 sbitmap_zero (graph->direct_nodes);
1004 sbitmap_zero (graph->pt_used);
/* All non-special variable nodes start out "direct".  */
1006 for (j = 0; j < FIRST_REF_NODE; j++)
1008 if (!get_varinfo (j)->is_special_var)
1009 SET_BIT (graph->direct_nodes, j);
1012 for (j = 0; j < graph->size; j++)
1013 graph->eq_rep[j] = -1;
1015 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1016 graph->indirect_cycles[j] = -1;
/* Translate each constraint into predecessor edges, using REF nodes
   (FIRST_REF_NODE + v) to stand for *v.  */
1018 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1020 struct constraint_expr lhs = c->lhs;
1021 struct constraint_expr rhs = c->rhs;
1022 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1023 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
/* *x = y: edge from y into x's REF node.  */
1025 if (lhs.type == DEREF)
1028 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1029 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
/* x = *y: edge from y's REF node into x; x is no longer direct.  */
1031 else if (rhs.type == DEREF)
1034 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1035 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1037 RESET_BIT (graph->direct_nodes, lhsvar);
/* x = &y: record the points-to/pointed-by pair; y is address-taken
   and hence no longer direct.  */
1039 else if (rhs.type == ADDRESSOF)
1042 if (graph->points_to[lhsvar] == NULL)
1043 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1044 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1046 if (graph->pointed_by[rhsvar] == NULL)
1047 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1048 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1050 /* Implicitly, *x = y */
1051 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1053 RESET_BIT (graph->direct_nodes, rhsvar);
1054 bitmap_set_bit (graph->address_taken, rhsvar);
/* Plain copy x = y (skipping the special "anything" variable).  */
1056 else if (lhsvar > anything_id
1057 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1060 add_pred_graph_edge (graph, lhsvar, rhsvar);
1061 /* Implicitly, *x = *y */
1062 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1063 FIRST_REF_NODE + rhsvar);
/* Offsetted copies disqualify the offsetted side from being direct.  */
1065 else if (lhs.offset != 0 || rhs.offset != 0)
1067 if (rhs.offset != 0)
1068 RESET_BIT (graph->direct_nodes, lhs.var);
1069 else if (lhs.offset != 0)
1070 RESET_BIT (graph->direct_nodes, rhs.var);
1075 /* Build the constraint graph, adding successor edges. */
1078 build_succ_graph (void)
/* Mirror of build_pred_graph, but adding successor edges between
   union-find representatives, after substitution has unified nodes.  */
1083 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1085 struct constraint_expr lhs;
1086 struct constraint_expr rhs;
1087 unsigned int lhsvar;
1088 unsigned int rhsvar;
/* Resolve both sides to their current representatives.  */
1095 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1096 rhsvar = find (get_varinfo_fc (rhs.var)->id);
/* *x = y: successor edge from y into x's REF node.  */
1098 if (lhs.type == DEREF)
1100 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1101 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
/* x = *y: successor edge from y's REF node into x.  */
1103 else if (rhs.type == DEREF)
1105 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1106 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
/* x = &y becomes an initial points-to member, not an edge; by this
   point collapsed_to must no longer redirect (see the field comment
   on collapsed_to above).  */
1108 else if (rhs.type == ADDRESSOF)
1111 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1112 == get_varinfo_fc (rhs.var)->id);
1113 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
/* Plain copy x = y.  */
1115 else if (lhsvar > anything_id
1116 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1118 add_graph_edge (graph, lhsvar, rhsvar);
1124 /* Changed variables on the last iteration. */
1125 static unsigned int changed_count;
1126 static sbitmap changed;
1128 DEF_VEC_I(unsigned);
1129 DEF_VEC_ALLOC_I(unsigned,heap);
1132 /* Strongly Connected Component visitation info. */
/* NOTE(review): the struct opener and the visited/deleted/dfs/
   current_index members referenced by scc_visit below appear to be
   missing from this extract.  */
1139 unsigned int *node_mapping;
1141 VEC(unsigned,heap) *scc_stack;
1145 /* Recursive routine to find strongly connected components in GRAPH.
1146 SI is the SCC info to store the information in, and N is the id of current
1147 graph node we are processing.
1149 This is Tarjan's strongly connected component finding algorithm, as
1150 modified by Nuutila to keep only non-root nodes on the stack.
1151 The algorithm can be found in "On finding the strongly connected
1152 connected components in a directed graph" by Esko Nuutila and Eljas
1153 Soisalon-Soininen, in Information Processing Letters volume 49,
1154 number 1, pages 9-14. */
1157 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1161 unsigned int my_dfs;
/* Assign N its DFS number on first visit.  */
1163 SET_BIT (si->visited, n);
1164 si->dfs[n] = si->current_index ++;
1165 my_dfs = si->dfs[n];
1167 /* Visit all the successors. */
1168 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
/* Ids beyond LAST_REF_NODE are not graph nodes; skip them.
   NOTE(review): the "w = find (i)" mapping and break/continue bodies
   appear to be missing from this extract.  */
1172 if (i > LAST_REF_NODE)
1176 if (TEST_BIT (si->deleted, w))
1179 if (!TEST_BIT (si->visited, w))
1180 scc_visit (graph, si, w);
/* Pull N's lowlink down to the smallest DFS number reachable.  */
1182 unsigned int t = find (w);
1183 unsigned int nnode = find (n);
1184 gcc_assert (nnode == n);
1186 if (si->dfs[t] < si->dfs[nnode])
1187 si->dfs[n] = si->dfs[t];
1191 /* See if any components have been identified. */
1192 if (si->dfs[n] == my_dfs)
/* N is an SCC root: pop every stack node with DFS number >= my_dfs;
   together with N they form one component.  */
1194 if (VEC_length (unsigned, si->scc_stack) > 0
1195 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1197 bitmap scc = BITMAP_ALLOC (NULL);
1198 bool have_ref_node = n >= FIRST_REF_NODE;
1199 unsigned int lowest_node;
1202 bitmap_set_bit (scc, n);
1204 while (VEC_length (unsigned, si->scc_stack) != 0
1205 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1207 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1209 bitmap_set_bit (scc, w);
1210 if (w >= FIRST_REF_NODE)
1211 have_ref_node = true;
/* Collapse onto the lowest-numbered member, which must be a real
   (non-REF) variable node.  */
1214 lowest_node = bitmap_first_set_bit (scc);
1215 gcc_assert (lowest_node < FIRST_REF_NODE);
1217 /* Collapse the SCC nodes into a single node, and mark the
1219 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
/* Real nodes are unified immediately; REF nodes instead record the
   indirect-cycle representative for later online processing.  */
1221 if (i < FIRST_REF_NODE)
1223 if (unite (lowest_node, i))
1224 unify_nodes (graph, lowest_node, i, false);
1228 unite (lowest_node, i);
1229 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
/* Roots are marked deleted; non-roots stay on the stack (Nuutila).  */
1233 SET_BIT (si->deleted, n);
1236 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1239 /* Unify node FROM into node TO, updating the changed count if
1240 necessary when UPDATE_CHANGED is true. */
1243 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1244 bool update_changed)
/* TO must already be its own representative and distinct from FROM.  */
1247 gcc_assert (to != from && find (to) == to);
1248 if (dump_file && (dump_flags & TDF_DETAILS))
1249 fprintf (dump_file, "Unifying %s to %s\n",
1250 get_varinfo (from)->name,
1251 get_varinfo (to)->name);
/* UPDATE_CHANGED distinguishes solver-time (dynamic) unification from
   offline (static) unification in the statistics.  */
1254 stats.unified_vars_dynamic++;
1256 stats.unified_vars_static++;
/* Fold FROM's edges and complex constraints into TO.  */
1258 merge_graph_nodes (graph, to, from);
1259 merge_node_constraints (graph, to, from);
/* The no-TBAA-pruning property is sticky across unification.  */
1261 if (get_varinfo (from)->no_tbaa_pruning)
1262 get_varinfo (to)->no_tbaa_pruning = true;
1264 /* Mark TO as changed if FROM was changed. If TO was already marked
1265 as changed, decrease the changed count. */
1267 if (update_changed && TEST_BIT (changed, from))
1269 RESET_BIT (changed, from);
1270 if (!TEST_BIT (changed, to))
1271 SET_BIT (changed, to);
1274 gcc_assert (changed_count > 0);
/* Merge solutions; a grown TO solution must be (re)flagged changed.  */
1278 if (get_varinfo (from)->solution)
1280 /* If the solution changes because of the merging, we need to mark
1281 the variable as changed. */
1282 if (bitmap_ior_into (get_varinfo (to)->solution,
1283 get_varinfo (from)->solution))
1285 if (update_changed && !TEST_BIT (changed, to))
1287 SET_BIT (changed, to);
/* FROM's bitmaps are dead now; release them.  */
1292 BITMAP_FREE (get_varinfo (from)->solution);
1293 BITMAP_FREE (get_varinfo (from)->oldsolution);
/* After the first iteration, reset TO's oldsolution so the next
   propagation pass sees the merged set as new.  */
1295 if (stats.iterations > 0)
1297 BITMAP_FREE (get_varinfo (to)->oldsolution);
1298 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
/* Drop any self-edge the merge may have created on TO.  */
1301 if (valid_graph_edge (graph, to, to))
1303 if (graph->succs[to])
1304 bitmap_clear_bit (graph->succs[to], to);
1308 /* Information needed to compute the topological ordering of a graph. */
1312 /* sbitmap of visited nodes. */
1314 /* Array that stores the topological order of the graph, *in
1316 VEC(unsigned,heap) *topo_order;
1320 /* Initialize and return a topological info structure. */
1322 static struct topo_info *
1323 init_topo_info (void)
/* NOTE: reads the file-scope constraint graph `graph' for its size.  */
1325 size_t size = graph->size;
1326 struct topo_info *ti = XNEW (struct topo_info);
1327 ti->visited = sbitmap_alloc (size);
1328 sbitmap_zero (ti->visited);
1329 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1334 /* Free the topological sort info pointed to by TI. */
1337 free_topo_info (struct topo_info *ti)
1339 sbitmap_free (ti->visited);
1340 VEC_free (unsigned, heap, ti->topo_order);
1344 /* Visit the graph in topological order, and store the order in the
1345 topo_info structure. */
1348 topo_visit (constraint_graph_t graph, struct topo_info *ti,
/* Recursive DFS over successor edges; a node is pushed only after all
   of its successors have been visited (post-order).  */
1354 SET_BIT (ti->visited, n);
1356 if (graph->succs[n])
1357 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1359 if (!TEST_BIT (ti->visited, j))
1360 topo_visit (graph, ti, j);
1363 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1366 /* Return true if variable N + OFFSET is a legal field of N. */
1369 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1371 varinfo_t ninfo = get_varinfo (n);
1373 /* For things we've globbed to single variables, any offset into the
1374 variable acts like the entire variable, so that it becomes offset
/* For special/artificial/unknown-size variables the offset is
   presumably normalized to 0 and the check succeeds -- the body of this
   branch is not visible here (TODO confirm).  */
1376 if (ninfo->is_special_var
1377 || ninfo->is_artificial_var
1378 || ninfo->is_unknown_size_var)
/* Otherwise the adjusted offset must stay within the variable's
   full size.  */
1383 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1386 /* Process a constraint C that represents x = *y, using DELTA as the
1387 starting solution. */
1390 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1393 unsigned int lhs = c->lhs.var;
1395 bitmap sol = get_varinfo (lhs)->solution;
/* If y can point to ANYTHING, so can x; nothing finer to compute.  */
1399 if (bitmap_bit_p (delta, anything_id))
1401 flag |= bitmap_set_bit (sol, anything_id);
1405 /* For x = *ESCAPED and x = *CALLUSED we want to compute the
1406 reachability set of the rhs var. As a pointer to a sub-field
1407 of a variable can also reach all other fields of the variable
1408 we simply have to expand the solution to contain all sub-fields
1409 if one sub-field is contained. */
1410 if (c->rhs.var == escaped_id
1411 || c->rhs.var == callused_id)
1414 /* In a first pass record all variables we need to add all
1415 sub-fields off. This avoids quadratic behavior. */
1416 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1418 varinfo_t v = lookup_vi_for_tree (get_varinfo (j)->decl);
1419 if (v->next != NULL)
1422 vars = BITMAP_ALLOC (NULL);
1423 bitmap_set_bit (vars, v->id);
1426 /* In the second pass now do the addition to the solution and
1427 to speed up solving add it to the delta as well. */
1430 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
1432 varinfo_t v = get_varinfo (j);
/* Walk the field chain of V, adding every sub-field to both the
   solution and DELTA.  */
1433 for (; v != NULL; v = v->next)
1435 if (bitmap_set_bit (sol, v->id))
1438 bitmap_set_bit (delta, v->id);
1446 /* For each variable j in delta (Sol(y)), add
1447 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1448 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1450 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1451 if (type_safe (j, &roffset))
/* Find the field of J at the constraint's offset; T below is
   presumably its representative (assignment lines elided).  */
1454 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1457 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1462 /* Adding edges from the special vars is pointless.
1463 They don't have sets that can change. */
1464 if (get_varinfo (t)->is_special_var)
1465 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1466 /* Merging the solution from ESCAPED needlessly increases
1467 the set. Use ESCAPED as representative instead.
1468 Same for CALLUSED. */
1469 else if (get_varinfo (t)->id == escaped_id
1470 || get_varinfo (t)->id == callused_id)
1471 flag |= bitmap_set_bit (sol, get_varinfo (t)->id);
1472 else if (add_graph_edge (graph, lhs, t))
1473 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1478 /* If the LHS solution changed, mark the var as changed. */
1481 get_varinfo (lhs)->solution = sol;
1482 if (!TEST_BIT (changed, lhs))
1484 SET_BIT (changed, lhs);
1490 /* Process a constraint C that represents *x = y. */
1493 do_ds_constraint (constraint_t c, bitmap delta)
1495 unsigned int rhs = c->rhs.var;
1496 bitmap sol = get_varinfo (rhs)->solution;
/* If y points to ANYTHING, every field reachable through x does too.  */
1500 if (bitmap_bit_p (sol, anything_id))
1502 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1504 varinfo_t jvi = get_varinfo (j);
1506 unsigned int loff = c->lhs.offset;
1507 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1510 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
/* T is presumably the representative of the field V found above
   (assignment elided).  Mark it changed if ANYTHING was new.  */
1515 if (bitmap_set_bit (get_varinfo (t)->solution, anything_id)
1516 && !TEST_BIT (changed, t))
1518 SET_BIT (changed, t);
1525 /* For each member j of delta (Sol(x)), add an edge from y to j and
1526 union Sol(y) into Sol(j) */
1527 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1529 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1530 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1534 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1537 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1541 tmp = get_varinfo (t)->solution;
1543 if (set_union_with_increment (tmp, sol, 0))
1545 get_varinfo (t)->solution = tmp;
/* Re-load SOL: unification during solving can move the rhs's
   solution bitmap.  */
1547 sol = get_varinfo (rhs)->solution;
1548 if (!TEST_BIT (changed, t))
1550 SET_BIT (changed, t);
1558 /* Handle a non-simple (simple meaning requires no iteration),
1559 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1562 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
/* Dispatch on the shape of the constraint: *x = &y / *x = y are store
   constraints, x = *y is a load, and the remainder is scalar copy with
   offset.  */
1564 if (c->lhs.type == DEREF)
1566 if (c->rhs.type == ADDRESSOF)
1573 do_ds_constraint (c, delta);
1576 else if (c->rhs.type == DEREF)
/* Loads into special vars are pointless; their sets never change.  */
1579 if (!(get_varinfo (c->lhs.var)->is_special_var))
1580 do_sd_constraint (graph, c, delta);
1588 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1589 solution = get_varinfo (c->rhs.var)->solution;
1590 tmp = get_varinfo (c->lhs.var)->solution;
1592 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1596 get_varinfo (c->lhs.var)->solution = tmp;
1597 if (!TEST_BIT (changed, c->lhs.var))
1599 SET_BIT (changed, c->lhs.var);
1606 /* Initialize and return a new SCC info structure. */
1608 static struct scc_info *
1609 init_scc_info (size_t size)
1611 struct scc_info *si = XNEW (struct scc_info);
1614 si->current_index = 0;
1615 si->visited = sbitmap_alloc (size);
1616 sbitmap_zero (si->visited);
1617 si->deleted = sbitmap_alloc (size);
1618 sbitmap_zero (si->deleted);
1619 si->node_mapping = XNEWVEC (unsigned int, size);
1620 si->dfs = XCNEWVEC (unsigned int, size);
/* Initially every node is its own representative.  */
1622 for (i = 0; i < size; i++)
1623 si->node_mapping[i] = i;
1625 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1629 /* Free an SCC info structure pointed to by SI */
1632 free_scc_info (struct scc_info *si)
1634 sbitmap_free (si->visited);
1635 sbitmap_free (si->deleted);
1636 free (si->node_mapping);
/* NOTE(review): si->dfs and SI itself are presumably freed on lines not
   visible in this excerpt -- confirm no leak.  */
1638 VEC_free (unsigned, heap, si->scc_stack);
1643 /* Find indirect cycles in GRAPH that occur, using strongly connected
1644 components, and note them in the indirect cycles map.
1646 This technique comes from Ben Hardekopf and Calvin Lin,
1647 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1648 Lines of Code", submitted to PLDI 2007. */
1651 find_indirect_cycles (constraint_graph_t graph)
1654 unsigned int size = graph->size;
1655 struct scc_info *si = init_scc_info (size);
/* Run Tarjan-style SCC detection from every unvisited representative
   node; scc_visit records indirect cycles as it collapses SCCs.  */
1657 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1658 if (!TEST_BIT (si->visited, i) && find (i) == i)
1659 scc_visit (graph, si, i);
1664 /* Compute a topological ordering for GRAPH, and store the result in the
1665 topo_info structure TI. */
1668 compute_topo_order (constraint_graph_t graph,
1669 struct topo_info *ti)
1672 unsigned int size = graph->size;
/* Start a DFS from every unvisited representative node.  */
1674 for (i = 0; i != size; ++i)
1675 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1676 topo_visit (graph, ti, i);
1679 /* Structure used to for hash value numbering of pointer equivalence
1682 typedef struct equiv_class_label
1684 unsigned int equivalence_class;
1687 } *equiv_class_label_t;
1688 typedef const struct equiv_class_label *const_equiv_class_label_t;
1690 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1692 static htab_t pointer_equiv_class_table;
1694 /* A hashtable for mapping a bitmap of labels->location equivalence
1696 static htab_t location_equiv_class_table;
1698 /* Hash function for a equiv_class_label_t */
1701 equiv_class_label_hash (const void *p)
1703 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
/* Hash is precomputed at insert/lookup time (bitmap_hash).  */
1704 return ecl->hashcode;
1707 /* Equality function for two equiv_class_label_t's. */
1710 equiv_class_label_eq (const void *p1, const void *p2)
1712 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1713 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
/* Two entries are equal iff their label bitmaps are equal.  */
1714 return bitmap_equal_p (eql1->labels, eql2->labels);
1717 /* Lookup a equivalence class in TABLE by the bitmap of LABELS it
1721 equiv_class_lookup (htab_t table, bitmap labels)
1724 struct equiv_class_label ecl;
1726 ecl.labels = labels;
1727 ecl.hashcode = bitmap_hash (labels);
/* NO_INSERT: a miss presumably returns 0 (the "no class" label) on an
   elided line -- confirm.  */
1729 slot = htab_find_slot_with_hash (table, &ecl,
1730 ecl.hashcode, NO_INSERT);
1734 return ((equiv_class_label_t) *slot)->equivalence_class;
1738 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1742 equiv_class_add (htab_t table, unsigned int equivalence_class,
1746 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1748 ecl->labels = labels;
1749 ecl->equivalence_class = equivalence_class;
1750 ecl->hashcode = bitmap_hash (labels);
/* The class must not already exist; entries are freed by the table's
   free function on htab_delete.  */
1752 slot = htab_find_slot_with_hash (table, ecl,
1753 ecl->hashcode, INSERT);
1754 gcc_assert (!*slot);
1755 *slot = (void *) ecl;
1758 /* Perform offline variable substitution.
1760 This is a worst case quadratic time way of identifying variables
1761 that must have equivalent points-to sets, including those caused by
1762 static cycles, and single entry subgraphs, in the constraint graph.
1764 The technique is described in "Exploiting Pointer and Location
1765 Equivalence to Optimize Pointer Analysis. In the 14th International
1766 Static Analysis Symposium (SAS), August 2007." It is known as the
1767 "HU" algorithm, and is equivalent to value numbering the collapsed
1768 constraint graph including evaluating unions.
1770 The general method of finding equivalence classes is as follows:
1771 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1772 Initialize all non-REF nodes to be direct nodes.
1773 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1775 For each constraint containing the dereference, we also do the same
1778 We then compute SCC's in the graph and unify nodes in the same SCC,
1781 For each non-collapsed node x:
1782 Visit all unvisited explicit incoming edges.
1783 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1785 Lookup the equivalence class for pts(x).
1786 If we found one, equivalence_class(x) = found class.
1787 Otherwise, equivalence_class(x) = new class, and new_class is
1788 added to the lookup table.
1790 All direct nodes with the same equivalence class can be replaced
1791 with a single representative node.
1792 All unlabeled nodes (label == 0) are not pointers and all edges
1793 involving them can be eliminated.
1794 We perform these optimizations during rewrite_constraints
1796 In addition to pointer equivalence class finding, we also perform
1797 location equivalence class finding. This is the set of variables
1798 that always appear together in points-to sets. We use this to
1799 compress the size of the points-to sets. */
1801 /* Current maximum pointer equivalence class id. */
/* Counters start at 1 in perform_var_substitution; label 0 means
   "not a pointer".  */
1802 static int pointer_equiv_class;
1804 /* Current maximum location equivalence class id. */
1805 static int location_equiv_class;
1807 /* Recursive routine to find strongly connected components in GRAPH,
1808 and label it's nodes with DFS numbers. */
1811 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1815 unsigned int my_dfs;
1817 gcc_assert (si->node_mapping[n] == n);
1818 SET_BIT (si->visited, n);
1819 si->dfs[n] = si->current_index ++;
1820 my_dfs = si->dfs[n];
1822 /* Visit all the successors. */
/* Note: despite the comment above, this walks the PREDECESSOR edges --
   condensation runs on the offline (predecessor) graph.  */
1823 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1825 unsigned int w = si->node_mapping[i];
1827 if (TEST_BIT (si->deleted, w))
1830 if (!TEST_BIT (si->visited, w))
1831 condense_visit (graph, si, w);
1833 unsigned int t = si->node_mapping[w];
1834 unsigned int nnode = si->node_mapping[n];
1835 gcc_assert (nnode == n);
1837 if (si->dfs[t] < si->dfs[nnode])
1838 si->dfs[n] = si->dfs[t];
1842 /* Visit all the implicit predecessors. */
1843 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1845 unsigned int w = si->node_mapping[i];
1847 if (TEST_BIT (si->deleted, w))
1850 if (!TEST_BIT (si->visited, w))
1851 condense_visit (graph, si, w);
1853 unsigned int t = si->node_mapping[w];
1854 unsigned int nnode = si->node_mapping[n];
1855 gcc_assert (nnode == n);
1857 if (si->dfs[t] < si->dfs[nnode])
1858 si->dfs[n] = si->dfs[t];
1862 /* See if any components have been identified. */
1863 if (si->dfs[n] == my_dfs)
/* N is the root of an SCC: pop its members and fold their edge and
   points-to bitmaps into N.  */
1865 while (VEC_length (unsigned, si->scc_stack) != 0
1866 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1868 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1869 si->node_mapping[w] = n;
/* Any indirect member makes the whole SCC indirect.  */
1871 if (!TEST_BIT (graph->direct_nodes, w))
1872 RESET_BIT (graph->direct_nodes, n);
1874 /* Unify our nodes. */
1875 if (graph->preds[w])
1877 if (!graph->preds[n])
1878 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1879 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1881 if (graph->implicit_preds[w])
1883 if (!graph->implicit_preds[n])
1884 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1885 bitmap_ior_into (graph->implicit_preds[n],
1886 graph->implicit_preds[w]);
1888 if (graph->points_to[w])
1890 if (!graph->points_to[n])
1891 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1892 bitmap_ior_into (graph->points_to[n],
1893 graph->points_to[w]);
/* Count incoming edges per representative; label_visit uses these
   counts to free points-to bitmaps eagerly.  */
1895 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1897 unsigned int rep = si->node_mapping[i];
1898 graph->number_incoming[rep]++;
1901 SET_BIT (si->deleted, n);
1904 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1907 /* Label pointer equivalences. */
1910 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1914 SET_BIT (si->visited, n);
1916 if (!graph->points_to[n])
1917 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1919 /* Label and union our incoming edges's points to sets. */
1920 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1922 unsigned int w = si->node_mapping[i];
1923 if (!TEST_BIT (si->visited, w))
1924 label_visit (graph, si, w);
1926 /* Skip unused edges */
/* Predecessors with label 0 are non-pointers and contribute nothing,
   but their incoming-edge count must still be decremented.  */
1927 if (w == n || graph->pointer_label[w] == 0)
1929 graph->number_incoming[w]--;
1932 if (graph->points_to[w])
1933 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
1935 /* If all incoming edges to w have been processed and
1936 graph->points_to[w] was not stored in the hash table, we can
1938 graph->number_incoming[w]--;
1939 if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
1941 BITMAP_FREE (graph->points_to[w]);
1944 /* Indirect nodes get fresh variables. */
1945 if (!TEST_BIT (graph->direct_nodes, n))
1946 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
/* An empty points-to set means N is not a pointer; it keeps label 0.  */
1948 if (!bitmap_empty_p (graph->points_to[n]))
1950 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
1951 graph->points_to[n]);
/* No existing class for this set: mint a new label and remember that
   this bitmap is now owned by the hash table.  */
1954 SET_BIT (graph->pt_used, n);
1955 label = pointer_equiv_class++;
1956 equiv_class_add (pointer_equiv_class_table,
1957 label, graph->points_to[n]);
1959 graph->pointer_label[n] = label;
1963 /* Perform offline variable substitution, discovering equivalence
1964 classes, and eliminating non-pointer variables. */
1966 static struct scc_info *
1967 perform_var_substitution (constraint_graph_t graph)
1970 unsigned int size = graph->size;
1971 struct scc_info *si = init_scc_info (size);
1973 bitmap_obstack_initialize (&iteration_obstack);
1974 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
1975 equiv_class_label_eq, free);
1976 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
1977 equiv_class_label_eq, free);
/* Class ids start at 1; 0 is reserved for "not a pointer".  */
1978 pointer_equiv_class = 1;
1979 location_equiv_class = 1;
1981 /* Condense the nodes, which means to find SCC's, count incoming
1982 predecessors, and unite nodes in SCC's. */
1983 for (i = 0; i < FIRST_REF_NODE; i++)
1984 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1985 condense_visit (graph, si, si->node_mapping[i]);
1987 sbitmap_zero (si->visited);
1988 /* Actually the label the nodes for pointer equivalences */
1989 for (i = 0; i < FIRST_REF_NODE; i++)
1990 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1991 label_visit (graph, si, si->node_mapping[i]);
1993 /* Calculate location equivalence labels. */
1994 for (i = 0; i < FIRST_REF_NODE; i++)
/* Variables never pointed to get no location label.  */
2001 if (!graph->pointed_by[i])
2003 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2005 /* Translate the pointed-by mapping for pointer equivalence
2007 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2009 bitmap_set_bit (pointed_by,
2010 graph->pointer_label[si->node_mapping[j]]);
2012 /* The original pointed_by is now dead. */
2013 BITMAP_FREE (graph->pointed_by[i]);
2015 /* Look up the location equivalence label if one exists, or make
2017 label = equiv_class_lookup (location_equiv_class_table,
2021 label = location_equiv_class++;
2022 equiv_class_add (location_equiv_class_table,
2027 if (dump_file && (dump_flags & TDF_DETAILS))
2028 fprintf (dump_file, "Found location equivalence for node %s\n",
2029 get_varinfo (i)->name);
2030 BITMAP_FREE (pointed_by);
2032 graph->loc_label[i] = label;
2036 if (dump_file && (dump_flags & TDF_DETAILS))
2037 for (i = 0; i < FIRST_REF_NODE; i++)
2039 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2041 "Equivalence classes for %s node id %d:%s are pointer: %d"
2043 direct_node ? "Direct node" : "Indirect node", i,
2044 get_varinfo (i)->name,
2045 graph->pointer_label[si->node_mapping[i]],
2046 graph->loc_label[si->node_mapping[i]]);
2049 /* Quickly eliminate our non-pointer variables. */
2051 for (i = 0; i < FIRST_REF_NODE; i++)
2053 unsigned int node = si->node_mapping[i];
/* Label 0 means the points-to set was empty, i.e. a non-pointer:
   drop all of its edges.  */
2055 if (graph->pointer_label[node] == 0)
2057 if (dump_file && (dump_flags & TDF_DETAILS))
2059 "%s is a non-pointer variable, eliminating edges.\n",
2060 get_varinfo (node)->name);
2061 stats.nonpointer_vars++;
2062 clear_edges_for_node (graph, node);
2069 /* Free information that was only necessary for variable
2073 free_var_substitution_info (struct scc_info *si)
/* NOTE(review): SI itself is presumably released via free_scc_info on an
   elided line -- confirm.  */
2076 free (graph->pointer_label);
2077 free (graph->loc_label);
2078 free (graph->pointed_by);
2079 free (graph->points_to);
2080 free (graph->number_incoming);
2081 free (graph->eq_rep);
2082 sbitmap_free (graph->direct_nodes);
2083 sbitmap_free (graph->pt_used);
/* The tables were created with `free' as the element free function, so
   deleting them releases the equiv_class_label entries.  */
2084 htab_delete (pointer_equiv_class_table);
2085 htab_delete (location_equiv_class_table);
2086 bitmap_obstack_release (&iteration_obstack);
2089 /* Return an existing node that is equivalent to NODE, which has
2090 equivalence class LABEL, if one exists. Return NODE otherwise. */
2093 find_equivalent_node (constraint_graph_t graph,
2094 unsigned int node, unsigned int label)
2096 /* If the address version of this variable is unused, we can
2097 substitute it for anything else with the same label.
2098 Otherwise, we know the pointers are equivalent, but not the
2099 locations, and we can unite them later. */
2101 if (!bitmap_bit_p (graph->address_taken, node))
2103 gcc_assert (label < graph->size);
/* eq_rep[label] == -1 means no representative chosen yet.  */
2105 if (graph->eq_rep[label] != -1)
2107 /* Unify the two variables since we know they are equivalent. */
2108 if (unite (graph->eq_rep[label], node))
2109 unify_nodes (graph, graph->eq_rep[label], node, false);
2110 return graph->eq_rep[label];
/* First node seen with this label becomes both the full and the
   pointer-equivalence representative.  */
2114 graph->eq_rep[label] = node;
2115 graph->pe_rep[label] = node;
/* Address-taken: record the pointer-equivalence label only; the nodes
   are united later by unite_pointer_equivalences.  */
2120 gcc_assert (label < graph->size);
2121 graph->pe[node] = label;
2122 if (graph->pe_rep[label] == -1)
2123 graph->pe_rep[label] = node;
2129 /* Unite pointer equivalent but not location equivalent nodes in
2130 GRAPH. This may only be performed once variable substitution is
2134 unite_pointer_equivalences (constraint_graph_t graph)
2138 /* Go through the pointer equivalences and unite them to their
2139 representative, if they aren't already. */
2140 for (i = 0; i < FIRST_REF_NODE; i++)
2142 unsigned int label = graph->pe[i];
2145 int label_rep = graph->pe_rep[label];
/* -1 means the label never got a representative; nothing to unite.  */
2147 if (label_rep == -1)
2150 label_rep = find (label_rep);
2151 if (label_rep >= 0 && unite (label_rep, find (i)))
2152 unify_nodes (graph, label_rep, i, false);
2157 /* Move complex constraints to the GRAPH nodes they belong to. */
2160 move_complex_constraints (constraint_graph_t graph)
2165 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2169 struct constraint_expr lhs = c->lhs;
2170 struct constraint_expr rhs = c->rhs;
/* Store constraints (*x = ...) live on the lhs variable; load
   constraints (x = *y) and offset copies live on the rhs variable.  */
2172 if (lhs.type == DEREF)
2174 insert_into_complex (graph, lhs.var, c);
2176 else if (rhs.type == DEREF)
2178 if (!(get_varinfo (lhs.var)->is_special_var))
2179 insert_into_complex (graph, rhs.var, c);
2181 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2182 && (lhs.offset != 0 || rhs.offset != 0))
2184 insert_into_complex (graph, rhs.var, c);
2191 /* Optimize and rewrite complex constraints while performing
2192 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2193 result of perform_variable_substitution. */
2196 rewrite_constraints (constraint_graph_t graph,
2197 struct scc_info *si)
/* Sanity: at this point no node may already have been collapsed.  */
2203 for (j = 0; j < graph->size; j++)
2204 gcc_assert (find (j) == j);
2206 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2208 struct constraint_expr lhs = c->lhs;
2209 struct constraint_expr rhs = c->rhs;
2210 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2211 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2212 unsigned int lhsnode, rhsnode;
2213 unsigned int lhslabel, rhslabel;
2215 lhsnode = si->node_mapping[lhsvar];
2216 rhsnode = si->node_mapping[rhsvar];
2217 lhslabel = graph->pointer_label[lhsnode];
2218 rhslabel = graph->pointer_label[rhsnode];
2220 /* See if it is really a non-pointer variable, and if so, ignore
/* Label 0 on either side means a non-pointer is involved; the
   constraint is dead and is removed from the vector.  */
2224 if (dump_file && (dump_flags & TDF_DETAILS))
2227 fprintf (dump_file, "%s is a non-pointer variable,"
2228 "ignoring constraint:",
2229 get_varinfo (lhs.var)->name);
2230 dump_constraint (dump_file, c);
2232 VEC_replace (constraint_t, constraints, i, NULL);
2238 if (dump_file && (dump_flags & TDF_DETAILS))
2241 fprintf (dump_file, "%s is a non-pointer variable,"
2242 "ignoring constraint:",
2243 get_varinfo (rhs.var)->name);
2244 dump_constraint (dump_file, c);
2246 VEC_replace (constraint_t, constraints, i, NULL);
/* Replace both sides by their equivalence-class representatives.  */
2250 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2251 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2252 c->lhs.var = lhsvar;
2253 c->rhs.var = rhsvar;
2258 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2259 part of an SCC, false otherwise. */
2262 eliminate_indirect_cycles (unsigned int node)
2264 if (graph->indirect_cycles[node] != -1
2265 && !bitmap_empty_p (get_varinfo (node)->solution))
2268 VEC(unsigned,heap) *queue = NULL;
2270 unsigned int to = find (graph->indirect_cycles[node]);
2273 /* We can't touch the solution set and call unify_nodes
2274 at the same time, because unify_nodes is going to do
2275 bitmap unions into it. */
/* First collect the representatives to merge, then unify them in a
   second pass once the bitmap walk is finished.  */
2277 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2279 if (find (i) == i && i != to)
2282 VEC_safe_push (unsigned, heap, queue, i);
2287 VEC_iterate (unsigned, queue, queuepos, i);
2290 unify_nodes (graph, to, i, true);
2292 VEC_free (unsigned, heap, queue);
2298 /* Solve the constraint graph GRAPH using our worklist solver.
2299 This is based on the PW* family of solvers from the "Efficient Field
2300 Sensitive Pointer Analysis for C" paper.
2301 It works by iterating over all the graph nodes, processing the complex
2302 constraints and propagating the copy constraints, until everything stops
2303 changed. This corresponds to steps 6-8 in the solving list given above. */
2306 solve_graph (constraint_graph_t graph)
2308 unsigned int size = graph->size;
2313 changed = sbitmap_alloc (size);
2314 sbitmap_zero (changed);
2316 /* Mark all initial non-collapsed nodes as changed. */
2317 for (i = 0; i < size; i++)
2319 varinfo_t ivi = get_varinfo (i);
/* Only representatives with a non-empty solution and outgoing work
   (successors or complex constraints) need processing.  */
2320 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2321 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2322 || VEC_length (constraint_t, graph->complex[i]) > 0))
2324 SET_BIT (changed, i);
2329 /* Allocate a bitmap to be used to store the changed bits. */
2330 pts = BITMAP_ALLOC (&pta_obstack);
/* Outer fixpoint loop: each iteration recomputes a topological order
   and sweeps every changed node until nothing changes.  */
2332 while (changed_count > 0)
2335 struct topo_info *ti = init_topo_info ();
2338 bitmap_obstack_initialize (&iteration_obstack);
2340 compute_topo_order (graph, ti);
2342 while (VEC_length (unsigned, ti->topo_order) != 0)
2345 i = VEC_pop (unsigned, ti->topo_order);
2347 /* If this variable is not a representative, skip it. */
2351 /* In certain indirect cycle cases, we may merge this
2352 variable to another. */
2353 if (eliminate_indirect_cycles (i) && find (i) != i)
2356 /* If the node has changed, we need to process the
2357 complex constraints and outgoing edges again. */
2358 if (TEST_BIT (changed, i))
2363 VEC(constraint_t,heap) *complex = graph->complex[i];
2364 bool solution_empty;
2366 RESET_BIT (changed, i);
2369 /* Compute the changed set of solution bits. */
/* PTS = solution - oldsolution; only the delta is propagated.  */
2370 bitmap_and_compl (pts, get_varinfo (i)->solution,
2371 get_varinfo (i)->oldsolution);
2373 if (bitmap_empty_p (pts))
2376 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2378 solution = get_varinfo (i)->solution;
2379 solution_empty = bitmap_empty_p (solution);
2381 /* Process the complex constraints */
2382 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2384 /* XXX: This is going to unsort the constraints in
2385 some cases, which will occasionally add duplicate
2386 constraints during unification. This does not
2387 affect correctness. */
2388 c->lhs.var = find (c->lhs.var);
2389 c->rhs.var = find (c->rhs.var);
2391 /* The only complex constraint that can change our
2392 solution to non-empty, given an empty solution,
2393 is a constraint where the lhs side is receiving
2394 some set from elsewhere. */
2395 if (!solution_empty || c->lhs.type != DEREF)
2396 do_complex_constraint (graph, c, pts);
2399 solution_empty = bitmap_empty_p (solution);
2402 /* Do not propagate the ESCAPED/CALLUSED solutions. */
2404 && i != callused_id)
2408 /* Propagate solution to all successors. */
2409 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2415 unsigned int to = find (j);
2416 tmp = get_varinfo (to)->solution;
2419 /* Don't try to propagate to ourselves. */
/* Union only the delta (PTS) into the successor's solution.  */
2423 flag = set_union_with_increment (tmp, pts, 0);
2427 get_varinfo (to)->solution = tmp;
2428 if (!TEST_BIT (changed, to))
2430 SET_BIT (changed, to);
2438 free_topo_info (ti);
2439 bitmap_obstack_release (&iteration_obstack);
2443 sbitmap_free (changed);
2444 bitmap_obstack_release (&oldpta_obstack);
2447 /* Map from trees to variable infos. */
2448 static struct pointer_map_t *vi_for_tree;
2451 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2454 insert_vi_for_tree (tree t, varinfo_t vi)
2456 void **slot = pointer_map_insert (vi_for_tree, t);
/* A tree may be inserted at most once.  */
2458 gcc_assert (*slot == NULL);
2462 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2463 exist in the map, return NULL, otherwise, return the varinfo we found. */
2466 lookup_vi_for_tree (tree t)
/* Pure lookup; unlike get_vi_for_tree this never creates an entry.  */
2468 void **slot = pointer_map_contains (vi_for_tree, t);
2472 return (varinfo_t) *slot;
2475 /* Return a printable name for DECL */
2478 alias_get_name (tree decl)
2480 const char *res = get_name (decl);
2482 int num_printed = 0;
/* SSA names print as "<base>_<version>"; other decls without a name
   print as "D.<uid>".  */
2491 if (TREE_CODE (decl) == SSA_NAME)
2493 num_printed = asprintf (&temp, "%s_%u",
2494 alias_get_name (SSA_NAME_VAR (decl)),
2495 SSA_NAME_VERSION (decl));
2497 else if (DECL_P (decl))
2499 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
/* asprintf may fail; only adopt the buffer on success.  The result is
   copied into GC memory so TEMP can be released.  */
2501 if (num_printed > 0)
2503 res = ggc_strdup (temp);
2509 /* Find the variable id for tree T in the map.
2510 If T doesn't exist in the map, create an entry for it and return it. */
2513 get_vi_for_tree (tree t)
2515 void **slot = pointer_map_contains (vi_for_tree, t);
/* Miss: create a fresh variable info for T on the fly.  */
2517 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2519 return (varinfo_t) *slot;
2522 /* Get a constraint expression for a new temporary variable. */
2524 static struct constraint_expr
2525 get_constraint_exp_for_temp (tree t)
2527 struct constraint_expr cexpr;
2529 gcc_assert (SSA_VAR_P (t));
/* Temporaries are plain scalars; offset is presumably zeroed on an
   elided line -- confirm.  */
2531 cexpr.type = SCALAR;
2532 cexpr.var = get_vi_for_tree (t)->id;
2538 /* Get a constraint expression vector from an SSA_VAR_P node.
2539 If address_p is true, the result will be taken its address of. */
2542 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2544 struct constraint_expr cexpr;
2547 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2548 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2550 /* For parameters, get at the points-to set for the actual parm
2552 if (TREE_CODE (t) == SSA_NAME
2553 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2554 && SSA_NAME_IS_DEFAULT_DEF (t))
/* Recurse on the underlying PARM_DECL instead of the SSA name.  */
2556 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2560 vi = get_vi_for_tree (t);
2562 cexpr.type = SCALAR;
2564 /* If we determine the result is "anything", and we know this is readonly,
2565 say it points to readonly memory instead. */
2566 if (cexpr.var == anything_id && TREE_READONLY (t))
2569 cexpr.type = ADDRESSOF;
2570 cexpr.var = readonly_id;
2573 /* If we are not taking the address of the constraint expr, add all
2574 sub-fiels of the variable as well. */
2577 for (; vi; vi = vi->next)
/* One result entry per field; cexpr.var is presumably updated to
   vi->id inside this loop (line elided) -- confirm.  */
2580 VEC_safe_push (ce_s, heap, *results, &cexpr);
2585 VEC_safe_push (ce_s, heap, *results, &cexpr);
2588 /* Process constraint T, performing various simplifications and then
2589 adding it to our list of overall constraints. */
2592 process_constraint (constraint_t t)
2594 struct constraint_expr rhs = t->rhs;
2595 struct constraint_expr lhs = t->lhs;
2597 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2598 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
/* Without field sensitivity, offsets are presumably discarded here
   (lines elided) -- confirm.  */
2600 if (!use_field_sensitive)
2606 /* ANYTHING == ANYTHING is pointless. */
2607 if (lhs.var == anything_id && rhs.var == anything_id)
2610 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
2611 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
/* Recurse once on the swapped constraint.  */
2616 process_constraint (t);
2618 /* This can happen in our IR with things like n->a = *p */
2619 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2621 /* Split into tmp = *rhs, *lhs = tmp */
2622 tree rhsdecl = get_varinfo (rhs.var)->decl;
2623 tree pointertype = TREE_TYPE (rhsdecl);
2624 tree pointedtotype = TREE_TYPE (pointertype);
2625 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2626 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2628 process_constraint (new_constraint (tmplhs, rhs));
2629 process_constraint (new_constraint (lhs, tmplhs));
2631 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2633 /* Split into tmp = &rhs, *lhs = tmp */
2634 tree rhsdecl = get_varinfo (rhs.var)->decl;
2635 tree pointertype = TREE_TYPE (rhsdecl);
2636 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2637 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2639 process_constraint (new_constraint (tmplhs, rhs));
2640 process_constraint (new_constraint (lhs, tmplhs));
/* Base case: the constraint is already simple enough; record it.  */
2644 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2645 VEC_safe_push (constraint_t, heap, constraints, t);
2649 /* Return true if T is a variable of a type that could contain
/* pointers: a pointer type, an aggregate, or a complex type. */
2653 could_have_pointers (tree t)
2655 tree type = TREE_TYPE (t);
2657 if (POINTER_TYPE_P (type)
2658 || AGGREGATE_TYPE_P (type)
2659 || TREE_CODE (type) == COMPLEX_TYPE)
2665 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2668 static HOST_WIDE_INT
2669 bitpos_of_field (const tree fdecl)
/* Punt (the elided branch) when either offset does not fit a host int. */
2672 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2673 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
/* DECL_FIELD_OFFSET is in bytes; the * 8 converts it to bits.
NOTE(review): hard-codes 8 rather than BITS_PER_UNIT — confirm intended. */
2676 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
2677 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2681 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
2682 If address_p is true the result will have its address taken. */
2685 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
2689 HOST_WIDE_INT bitsize = -1;
2690 HOST_WIDE_INT bitmaxsize = -1;
2691 HOST_WIDE_INT bitpos;
2693 struct constraint_expr *result;
2695 /* Some people like to do cute things like take the address of
/* a member of a constant expression; walk down to the base object. */
2698 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2699 forzero = TREE_OPERAND (forzero, 0);
/* An offset-of-NULL style reference resolves to the INTEGER variable. */
2701 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2703 struct constraint_expr temp;
2706 temp.var = integer_id;
2708 VEC_safe_push (ce_s, heap, *results, &temp);
2712 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2714 /* Pretend to take the address of the base, we'll take care of
2715 adding the required subset of sub-fields below. */
2716 get_constraint_for_1 (t, results, true);
2717 result = VEC_last (ce_s, *results);
2719 gcc_assert (VEC_length (ce_s, *results) == 1);
2721 /* This can also happen due to weird offsetof type macros. */
2722 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2723 result->type = SCALAR;
2725 if (result->type == SCALAR)
2727 /* In languages like C, you can access one past the end of an
2728 array. You aren't allowed to dereference it, so we can
2729 ignore this constraint. When we handle pointer subtraction,
2730 we may have to do something cute here. */
2732 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
2735 /* It's also not true that the constraint will actually start at the
2736 right offset, it may start in some padding. We only care about
2737 setting the constraint to the first actual field it touches, so
2739 struct constraint_expr cexpr = *result;
2741 VEC_pop (ce_s, *results);
/* Push one constraint per sub-field overlapping [bitpos, bitmaxsize). */
2743 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
2745 if (ranges_overlap_p (curr->offset, curr->size,
2746 bitpos, bitmaxsize))
2748 cexpr.var = curr->id;
2749 VEC_safe_push (ce_s, heap, *results, &cexpr);
2754 /* assert that we found *some* field there. The user couldn't be
2755 accessing *only* padding. */
2756 /* Still the user could access one past the end of an array
2757 embedded in a struct resulting in accessing *only* padding. */
2758 gcc_assert (VEC_length (ce_s, *results) >= 1
2759 || ref_contains_array_ref (orig_t));
2761 else if (bitmaxsize == 0)
2763 if (dump_file && (dump_flags & TDF_DETAILS))
2764 fprintf (dump_file, "Access to zero-sized part of variable,"
2768 if (dump_file && (dump_flags & TDF_DETAILS))
2769 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2771 else if (bitmaxsize == -1)
2773 /* We can't handle DEREF constraints with unknown size, we'll
2774 get the wrong answer. Punt and return anything. */
2775 result->var = anything_id;
2779 result->offset = bitpos;
2783 /* Dereference the constraint expression CONS, and return the result.
2784 DEREF (ADDRESSOF) = SCALAR
2785 DEREF (SCALAR) = DEREF
2786 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2787 This is needed so that we can handle dereferencing DEREF constraints. */
2790 do_deref (VEC (ce_s, heap) **constraints)
2792 struct constraint_expr *c;
/* Rewrite every element of *constraints in place per the table above. */
2795 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2797 if (c->type == SCALAR)
2799 else if (c->type == ADDRESSOF)
2801 else if (c->type == DEREF)
/* Double deref: route through a fresh temporary so the element
stays a single-level DEREF. */
2803 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2804 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2805 process_constraint (new_constraint (tmplhs, *c));
2806 c->var = tmplhs.var;
2813 /* Given a tree T, return the constraint expression for it.
Dispatches on the tree-code class; results are appended to *RESULTS.
If ADDRESS_P is true the caller will take the address of the result. */
2816 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
2818 struct constraint_expr temp;
2820 /* x = integer is all glommed to a single variable, which doesn't
2821 point to anything by itself. That is, of course, unless it is an
2822 integer constant being treated as a pointer, in which case, we
2823 will return that this is really the addressof anything. This
2824 happens below, since it will fall into the default case. The only
2825 case we know something about an integer treated like a pointer is
2826 when it is the NULL pointer, and then we just say it points to
2828 if (TREE_CODE (t) == INTEGER_CST
2829 && integer_zerop (t))
2831 temp.var = nothing_id;
2832 temp.type = ADDRESSOF;
2834 VEC_safe_push (ce_s, heap, *results, &temp);
2838 /* String constants are read-only. */
2839 if (TREE_CODE (t) == STRING_CST)
2841 temp.var = readonly_id;
2844 VEC_safe_push (ce_s, heap, *results, &temp);
2848 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2850 case tcc_expression:
2853 switch (TREE_CODE (t))
/* Taking the address of something: constrain the operand, then
convert any DEREF results into ADDRESSOF. */
2857 struct constraint_expr *c;
2859 tree exp = TREE_OPERAND (t, 0);
2861 get_constraint_for_1 (exp, results, true);
2863 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2865 if (c->type == DEREF)
2868 c->type = ADDRESSOF;
2874 /* XXX: In interprocedural mode, if we didn't have the
2875 body, we would need to do *each pointer argument =
2877 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
/* Allocation site: model the result as the address of a
synthetic heap variable, created once per call site.
NOTE(review): this HEAP-var creation duplicates the logic in
handle_lhs_call; a shared helper would keep them in sync. */
2880 tree heapvar = heapvar_lookup (t);
2882 if (heapvar == NULL)
2884 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2885 DECL_EXTERNAL (heapvar) = 1;
2886 get_var_ann (heapvar)->is_heapvar = 1;
2887 if (gimple_referenced_vars (cfun))
2888 add_referenced_var (heapvar);
2889 heapvar_insert (t, heapvar);
2892 temp.var = create_variable_info_for (heapvar,
2893 alias_get_name (heapvar));
2895 vi = get_varinfo (temp.var);
2896 vi->is_artificial_var = 1;
2897 vi->is_heap_var = 1;
2898 temp.type = ADDRESSOF;
2900 VEC_safe_push (ce_s, heap, *results, &temp);
/* Non-allocating call: result may point anywhere. */
2905 temp.var = anything_id;
2908 VEC_safe_push (ce_s, heap, *results, &temp);
/* Unhandled expression: conservatively &ANYTHING. */
2914 temp.type = ADDRESSOF;
2915 temp.var = anything_id;
2917 VEC_safe_push (ce_s, heap, *results, &temp);
2924 switch (TREE_CODE (t))
/* Dereference: constrain the pointed-to operand. */
2928 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
2933 case ARRAY_RANGE_REF:
2935 get_constraint_for_component_ref (t, results, address_p);
2939 temp.type = ADDRESSOF;
2940 temp.var = anything_id;
2942 VEC_safe_push (ce_s, heap, *results, &temp);
2949 switch (TREE_CODE (t))
2953 tree op = TREE_OPERAND (t, 0);
2955 /* Cast from non-pointer to pointers are bad news for us.
2956 Anything else, we see through */
2957 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2958 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2960 get_constraint_for_1 (op, results, address_p);
2968 temp.type = ADDRESSOF;
2969 temp.var = anything_id;
2971 VEC_safe_push (ce_s, heap, *results, &temp);
2976 case tcc_exceptional:
2978 switch (TREE_CODE (t))
2982 get_constraint_for_1 (PHI_RESULT (t), results, address_p);
2988 get_constraint_for_ssa_var (t, results, address_p);
2994 temp.type = ADDRESSOF;
2995 temp.var = anything_id;
2997 VEC_safe_push (ce_s, heap, *results, &temp);
3002 case tcc_declaration:
3004 get_constraint_for_ssa_var (t, results, address_p);
/* Fallback for any other tree-code class: &ANYTHING. */
3009 temp.type = ADDRESSOF;
3010 temp.var = anything_id;
3012 VEC_safe_push (ce_s, heap, *results, &temp);
3018 /* Given a gimple tree T, return the constraint expression vector for it.
Thin wrapper over get_constraint_for_1 with address_p = false;
*RESULTS must be empty on entry. */
3021 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3023 gcc_assert (VEC_length (ce_s, *results) == 0);
3025 get_constraint_for_1 (t, results, false);
3028 /* Handle the structure copy case where we have a simple structure copy
3029 between LHS and RHS that is of SIZE (in bits)
3031 For each field of the lhs variable (lhsfield)
3032 For each field of the rhs variable at lhsfield.offset (rhsfield)
3033 add the constraint lhsfield = rhsfield
3035 If we fail due to some kind of type unsafety or other thing we
3036 can't handle, return false. We expect the caller to collapse the
3037 variable in that case. */
3040 do_simple_structure_copy (const struct constraint_expr lhs,
3041 const struct constraint_expr rhs,
3042 const unsigned HOST_WIDE_INT size)
3044 varinfo_t p = get_varinfo (lhs.var);
3045 unsigned HOST_WIDE_INT pstart, last;
/* NOTE(review): pstart is read below but its initialization is not
visible in this excerpt — presumably p->offset; confirm upstream. */
3047 last = p->offset + size;
3048 for (; p && p->offset < last; p = p->next)
3051 struct constraint_expr templhs = lhs;
3052 struct constraint_expr temprhs = rhs;
3053 unsigned HOST_WIDE_INT fieldoffset;
3055 templhs.var = p->id;
3056 q = get_varinfo (temprhs.var);
/* Find the rhs field that sits at the same relative offset. */
3057 fieldoffset = p->offset - pstart;
3058 q = first_vi_for_offset (q, q->offset + fieldoffset);
3061 temprhs.var = q->id;
3062 process_constraint (new_constraint (templhs, temprhs));
3068 /* Handle the structure copy case where we have a structure copy between an
3069 aggregate on the LHS and a dereference of a pointer on the RHS
3070 that is of SIZE (in bits)
3072 For each field of the lhs variable (lhsfield)
3073 rhs.offset = lhsfield->offset
3074 add the constraint lhsfield = rhs
3078 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3079 const struct constraint_expr rhs,
3080 const unsigned HOST_WIDE_INT size)
3082 varinfo_t p = get_varinfo (lhs.var);
3083 unsigned HOST_WIDE_INT pstart,last;
/* NOTE(review): pstart initialization not visible in this excerpt. */
3085 last = p->offset + size;
3087 for (; p && p->offset < last; p = p->next)
3090 struct constraint_expr templhs = lhs;
3091 struct constraint_expr temprhs = rhs;
3092 unsigned HOST_WIDE_INT fieldoffset;
/* SCALAR lhs: retarget to the field; otherwise shift by its offset. */
3095 if (templhs.type == SCALAR)
3096 templhs.var = p->id;
3098 templhs.offset = p->offset;
3100 q = get_varinfo (temprhs.var);
3101 fieldoffset = p->offset - pstart;
3102 temprhs.offset += fieldoffset;
3103 process_constraint (new_constraint (templhs, temprhs));
3107 /* Handle the structure copy case where we have a structure copy
3108 between an aggregate on the RHS and a dereference of a pointer on
3109 the LHS that is of SIZE (in bits)
3111 For each field of the rhs variable (rhsfield)
3112 lhs.offset = rhsfield->offset
3113 add the constraint lhs = rhsfield
3117 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3118 const struct constraint_expr rhs,
3119 const unsigned HOST_WIDE_INT size)
3121 varinfo_t p = get_varinfo (rhs.var);
3122 unsigned HOST_WIDE_INT pstart,last;
/* NOTE(review): pstart initialization not visible in this excerpt.
Mirror image of do_rhs_deref_structure_copy, iterating rhs fields. */
3124 last = p->offset + size;
3126 for (; p && p->offset < last; p = p->next)
3129 struct constraint_expr templhs = lhs;
3130 struct constraint_expr temprhs = rhs;
3131 unsigned HOST_WIDE_INT fieldoffset;
3134 if (temprhs.type == SCALAR)
3135 temprhs.var = p->id;
3137 temprhs.offset = p->offset;
3139 q = get_varinfo (templhs.var);
3140 fieldoffset = p->offset - pstart;
3141 templhs.offset += fieldoffset;
3142 process_constraint (new_constraint (templhs, temprhs));
3146 /* Sometimes, frontends like to give us bad type information. This
3147 function will collapse all the fields from VAR to the end of VAR,
3148 into VAR, so that we treat those fields as a single variable.
3149 We return the variable they were collapsed into. */
3152 collapse_rest_of_var (unsigned int var)
3154 varinfo_t currvar = get_varinfo (var);
/* Mark every trailing field as collapsed into VAR. */
3157 for (field = currvar->next; field; field = field->next)
3160 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3161 field->name, currvar->name);
3163 gcc_assert (field->collapsed_to == 0);
3164 field->collapsed_to = currvar->id;
/* Detach the field chain and grow VAR to cover the remaining bits. */
3167 currvar->next = NULL;
3168 currvar->size = currvar->fullsize - currvar->offset;
3173 /* Handle aggregate copies by expanding into copies of the respective
3174 fields of the structures. */
3177 do_structure_copy (tree lhsop, tree rhsop)
3179 struct constraint_expr lhs, rhs, tmp;
3180 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3182 unsigned HOST_WIDE_INT lhssize;
3183 unsigned HOST_WIDE_INT rhssize;
3185 /* Pretend we are taking the address of the constraint exprs.
3186 We deal with walking the sub-fields ourselves. */
3187 get_constraint_for_1 (lhsop, &lhsc, true);
3188 get_constraint_for_1 (rhsop, &rhsc, true);
3189 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3190 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3191 lhs = *(VEC_last (ce_s, lhsc));
3192 rhs = *(VEC_last (ce_s, rhsc));
3194 VEC_free (ce_s, heap, lhsc);
3195 VEC_free (ce_s, heap, rhsc);
3197 /* If we have special var = x, swap it around. */
3198 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3205 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3206 possible it's something we could handle. However, most cases falling
3207 into this are dealing with transparent unions, which are slightly
3209 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3211 rhs.type = ADDRESSOF;
3212 rhs.var = anything_id;
3215 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3216 that special var. */
3217 if (rhs.var <= integer_id)
3219 for (p = get_varinfo (lhs.var); p; p = p->next)
3221 struct constraint_expr templhs = lhs;
3222 struct constraint_expr temprhs = rhs;
3224 if (templhs.type == SCALAR )
3225 templhs.var = p->id;
3227 templhs.offset += p->offset;
3228 process_constraint (new_constraint (templhs, temprhs));
3233 tree rhstype = TREE_TYPE (rhsop);
3234 tree lhstype = TREE_TYPE (lhsop);
3238 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3239 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3241 /* If we have a variably sized types on the rhs or lhs, and a deref
3242 constraint, add the constraint, lhsconstraint = &ANYTHING.
3243 This is conservatively correct because either the lhs is an unknown
3244 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3245 constraint, and every variable it can point to must be unknown sized
3246 anyway, so we don't need to worry about fields at all. */
3247 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3248 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3250 rhs.var = anything_id;
3251 rhs.type = ADDRESSOF;
3253 process_constraint (new_constraint (lhs, rhs));
3257 /* The size only really matters insofar as we don't set more or less of
3258 the variable. If we hit an unknown size var, the size should be the
3259 whole darn thing. */
3260 if (get_varinfo (rhs.var)->is_unknown_size_var)
3263 rhssize = TREE_INT_CST_LOW (rhstypesize);
3265 if (get_varinfo (lhs.var)->is_unknown_size_var)
3268 lhssize = TREE_INT_CST_LOW (lhstypesize);
/* Dispatch to the right field-wise copy helper based on which sides
are plain SCALAR variables and which are pointer dereferences. */
3271 if (rhs.type == SCALAR && lhs.type == SCALAR)
3273 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
/* Type-unsafe copy: collapse both sides and emit one constraint. */
3275 lhs.var = collapse_rest_of_var (lhs.var);
3276 rhs.var = collapse_rest_of_var (rhs.var);
3281 process_constraint (new_constraint (lhs, rhs));
3284 else if (lhs.type != DEREF && rhs.type == DEREF)
3285 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3286 else if (lhs.type == DEREF && rhs.type != DEREF)
3287 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3290 tree pointedtotype = lhstype;
/* Both sides DEREF: go through a temporary aggregate. */
3293 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3294 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3295 do_structure_copy (tmpvar, rhsop);
3296 do_structure_copy (lhsop, tmpvar);
3302 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3303 Expressions of the type PTR + CST can be handled in two ways:
3305 1- If the constraint for PTR is ADDRESSOF for a non-structure
3306 variable, then we can use it directly because adding or
3307 subtracting a constant may not alter the original ADDRESSOF
3308 constraint (i.e., pointer arithmetic may not legally go outside
3309 an object's boundaries).
3311 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3312 then if CST is a compile-time constant that can be used as an
3313 offset, we can determine which sub-variable will be pointed-to
3316 Return true if the expression is handled. For any other kind of
3317 expression, return false so that each operand can be added as a
3318 separate constraint by the caller. */
3321 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3324 struct constraint_expr *c, *c2;
3327 VEC (ce_s, heap) *temp = NULL;
3328 unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
3330 if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
3333 op0 = TREE_OPERAND (expr, 0);
3334 op1 = TREE_OPERAND (expr, 1);
3335 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
3337 /* If the offset is not a non-negative integer constant that fits
3338 in a HOST_WIDE_INT, we cannot handle it here. */
3339 if (!host_integerp (op1, 1))
3342 /* Make sure the bit-offset also fits. */
3343 rhsunitoffset = TREE_INT_CST_LOW (op1);
/* Overflow check: the round-trip fails iff the multiply wrapped. */
3344 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3345 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3348 get_constraint_for (op0, &temp);
/* Pair every lhs constraint with every constraint for PTR. */
3350 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3351 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3353 if (c2->type == ADDRESSOF && rhsoffset != 0)
3355 varinfo_t temp = get_varinfo (c2->var);
3357 /* An access one after the end of an array is valid,
3358 so simply punt on accesses we cannot resolve. */
3359 temp = first_vi_for_offset (temp, rhsoffset);
3366 c2->offset = rhsoffset;
3367 process_constraint (new_constraint (*c, *c2));
3370 VEC_free (ce_s, heap, temp);
3375 /* Create a constraint ID = OP.
ID is an existing varinfo id (e.g. escaped_id); OP is any tree a
constraint vector can be computed for. */
3378 make_constraint_to (unsigned id, tree op)
3380 VEC(ce_s, heap) *rhsc = NULL;
3381 struct constraint_expr *c;
3382 struct constraint_expr includes;
3386 includes.offset = 0;
3387 includes.type = SCALAR;
/* One constraint per element of OP's constraint vector. */
3389 get_constraint_for (op, &rhsc);
3390 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3391 process_constraint (new_constraint (includes, *c));
3392 VEC_free (ce_s, heap, rhsc);
3395 /* Make constraints necessary to make OP escape.
Convenience wrapper: ESCAPED = OP. */
3398 make_escape_constraint (tree op)
3400 make_constraint_to (escaped_id, op);
3403 /* For non-IPA mode, generate constraints necessary for a call on the
/* RHS: every pointer argument (and the static chain) escapes. */
3407 handle_rhs_call (tree rhs)
3410 call_expr_arg_iterator iter;
3412 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
3413 /* Find those pointers being passed, and make sure they end up
3414 pointing to anything. */
3415 if (could_have_pointers (arg))
3416 make_escape_constraint (arg);
3418 /* The static chain escapes as well. */
3419 if (CALL_EXPR_STATIC_CHAIN (rhs))
3420 make_escape_constraint (CALL_EXPR_STATIC_CHAIN (rhs));
3423 /* For non-IPA mode, generate constraints necessary for a call
3424 that returns a pointer and assigns it to LHS. This simply makes
3425 the LHS point to global and escaped variables. */
3428 handle_lhs_call (tree lhs, int flags)
3430 VEC(ce_s, heap) *lhsc = NULL;
3431 struct constraint_expr rhsc;
3433 struct constraint_expr *lhsp;
3435 get_constraint_for (lhs, &lhsc);
3437 if (flags & ECF_MALLOC)
/* Allocating call: point LHS at a per-call-site synthetic heap var.
NOTE(review): duplicates the HEAP-var creation in get_constraint_for_1;
keep the two paths in sync (or factor out a helper). */
3439 tree heapvar = heapvar_lookup (lhs);
3442 if (heapvar == NULL)
3444 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
3445 DECL_EXTERNAL (heapvar) = 1;
3446 get_var_ann (heapvar)->is_heapvar = 1;
3447 if (gimple_referenced_vars (cfun))
3448 add_referenced_var (heapvar);
3449 heapvar_insert (lhs, heapvar);
3452 rhsc.var = create_variable_info_for (heapvar,
3453 alias_get_name (heapvar));
3454 vi = get_varinfo (rhsc.var);
3455 vi->is_artificial_var = 1;
3456 vi->is_heap_var = 1;
3457 rhsc.type = ADDRESSOF;
/* Otherwise the result may point to anything that has escaped. */
3462 rhsc.var = escaped_id;
3464 rhsc.type = ADDRESSOF;
3466 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3467 process_constraint (new_constraint (*lhsp, rhsc));
3468 VEC_free (ce_s, heap, lhsc);
3471 /* For non-IPA mode, generate constraints necessary for a call of a
3472 const function that returns a pointer in the statement STMT. */
3475 handle_const_call (tree stmt)
3477 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3478 tree call = get_call_expr_in (stmt);
3479 VEC(ce_s, heap) *lhsc = NULL;
3480 struct constraint_expr rhsc;
3482 struct constraint_expr *lhsp;
3484 call_expr_arg_iterator iter;
3485 struct constraint_expr tmpc;
3487 get_constraint_for (lhs, &lhsc);
3489 /* If this is a nested function then it can return anything. */
3490 if (CALL_EXPR_STATIC_CHAIN (call))
3492 rhsc.var = anything_id;
3494 rhsc.type = ADDRESSOF;
3495 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3496 process_constraint (new_constraint (*lhsp, rhsc));
3497 VEC_free (ce_s, heap, lhsc);
3501 /* We always use a temporary here, otherwise we end up with a quadratic
3502 amount of constraints for
3503 large_struct = const_call (large_struct);
3504 in field-sensitive PTA. */
3505 tmpvar = create_tmp_var_raw (ptr_type_node, "consttmp");
3506 tmpc = get_constraint_exp_for_temp (tmpvar);
3508 /* May return addresses of globals. */
3509 rhsc.var = nonlocal_id;
3511 rhsc.type = ADDRESSOF;
3512 process_constraint (new_constraint (tmpc, rhsc));
3514 /* May return arguments. */
3515 FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
3516 if (could_have_pointers (arg))
3518 VEC(ce_s, heap) *argc = NULL;
3519 struct constraint_expr *argp;
3522 get_constraint_for (arg, &argc);
3523 for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
3524 process_constraint (new_constraint (tmpc, *argp));
3525 VEC_free (ce_s, heap, argc);
/* Finally, LHS includes everything the temporary may point to. */
3528 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3529 process_constraint (new_constraint (*lhsp, tmpc));
3531 VEC_free (ce_s, heap, lhsc);
3534 /* For non-IPA mode, generate constraints necessary for a call to a
3535 pure function in statement STMT. */
3538 handle_pure_call (tree stmt)
3540 tree call = get_call_expr_in (stmt);
3542 call_expr_arg_iterator iter;
3544 /* Memory reached from pointer arguments is call-used. */
3545 FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
3546 if (could_have_pointers (arg))
3547 make_constraint_to (callused_id, arg);
3549 /* The static chain is used as well. */
3550 if (CALL_EXPR_STATIC_CHAIN (call))
3551 make_constraint_to (callused_id, CALL_EXPR_STATIC_CHAIN (call));
3553 /* If the call returns a pointer it may point to reachable memory
3554 from the arguments. Not so for malloc functions though. */
3555 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3556 && could_have_pointers (GIMPLE_STMT_OPERAND (stmt, 0))
3557 && !(call_expr_flags (call) & ECF_MALLOC))
3559 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3560 VEC(ce_s, heap) *lhsc = NULL;
3561 struct constraint_expr rhsc;
3562 struct constraint_expr *lhsp;
3565 get_constraint_for (lhs, &lhsc);
3567 /* If this is a nested function then it can return anything. */
3568 if (CALL_EXPR_STATIC_CHAIN (call))
3570 rhsc.var = anything_id;
3572 rhsc.type = ADDRESSOF;
3573 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3574 process_constraint (new_constraint (*lhsp, rhsc));
3575 VEC_free (ce_s, heap, lhsc);
3579 /* Else just add the call-used memory here. Escaped variables
3580 and globals will be dealt with in handle_lhs_call. */
3581 rhsc.var = callused_id;
3583 rhsc.type = ADDRESSOF;
3584 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3585 process_constraint (new_constraint (*lhsp, rhsc));
3586 VEC_free (ce_s, heap, lhsc);
3590 /* Walk statement T setting up aliasing constraints according to the
3591 references found in T. This function is the main part of the
3592 constraint builder. (The former AI auxiliary-alias-information
parameter no longer exists; ORIGT is the only argument.) */
3596 find_func_aliases (tree origt)
3598 tree call, t = origt;
3599 VEC(ce_s, heap) *lhsc = NULL;
3600 VEC(ce_s, heap) *rhsc = NULL;
3601 struct constraint_expr *c;
3602 enum escape_type stmt_escape_type;
/* For RETURN_EXPR, constrain the returned value instead. */
3604 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3605 t = TREE_OPERAND (t, 0);
3607 /* Now build constraints expressions. */
3608 if (TREE_CODE (t) == PHI_NODE)
3610 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3612 /* Only care about pointers and structures containing
3614 if (could_have_pointers (PHI_RESULT (t)))
3619 /* For a phi node, assign all the arguments to
3621 get_constraint_for (PHI_RESULT (t), &lhsc);
3622 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3625 tree strippedrhs = PHI_ARG_DEF (t, i);
3627 STRIP_NOPS (strippedrhs);
3628 rhstype = TREE_TYPE (strippedrhs);
3629 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3631 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3633 struct constraint_expr *c2;
3634 while (VEC_length (ce_s, rhsc) > 0)
3636 c2 = VEC_last (ce_s, rhsc);
3637 process_constraint (new_constraint (*c, *c2));
3638 VEC_pop (ce_s, rhsc);
3644 /* In IPA mode, we need to generate constraints to pass call
3645 arguments through their calls. There are two cases, either a
3646 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3647 CALL_EXPR when we are not.
3649 In non-ipa mode, we need to generate constraints for each
3650 pointer passed by address. */
3651 else if ((call = get_call_expr_in (t)) != NULL_TREE)
3653 int flags = call_expr_flags (call);
3656 /* Const functions can return their arguments and addresses
3657 of global memory but not of escaped memory. */
3658 if (flags & ECF_CONST)
3660 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
3661 && could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3662 handle_const_call (t);
3664 else if (flags & ECF_PURE)
3666 handle_pure_call (t);
3667 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
3668 && could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3669 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0), flags);
3671 /* Pure functions can return addresses in and of memory
3672 reachable from their arguments, but they are not an escape
3673 point for reachable memory of their arguments. But as we
3674 do not compute call-used memory separately we cannot do
3675 something special here. */
3676 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3678 handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
3679 if (could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3680 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0), flags);
3683 handle_rhs_call (t);
/* IPA path (body elided around here): resolve the callee and wire
arguments/return value through its varinfo fields. */
3690 call_expr_arg_iterator iter;
3694 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3696 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3697 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3704 decl = get_callee_fndecl (rhsop);
3706 /* If we can directly resolve the function being called, do so.
3707 Otherwise, it must be some sort of indirect expression that
3708 we should still be able to handle. */
3711 fi = get_vi_for_tree (decl);
3715 decl = CALL_EXPR_FN (rhsop);
3716 fi = get_vi_for_tree (decl);
3719 /* Assign all the passed arguments to the appropriate incoming
3720 parameters of the function. */
3722 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3724 struct constraint_expr lhs ;
3725 struct constraint_expr *rhsp;
3727 get_constraint_for (arg, &rhsc);
3728 if (TREE_CODE (decl) != FUNCTION_DECL)
3737 lhs.var = first_vi_for_offset (fi, i)->id;
3740 while (VEC_length (ce_s, rhsc) != 0)
3742 rhsp = VEC_last (ce_s, rhsc);
3743 process_constraint (new_constraint (lhs, *rhsp));
3744 VEC_pop (ce_s, rhsc);
3749 /* If we are returning a value, assign it to the result. */
3752 struct constraint_expr rhs;
3753 struct constraint_expr *lhsp;
3756 get_constraint_for (lhsop, &lhsc);
3757 if (TREE_CODE (decl) != FUNCTION_DECL)
3766 rhs.var = first_vi_for_offset (fi, i)->id;
3769 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3770 process_constraint (new_constraint (*lhsp, rhs));
3774 /* Otherwise, just a regular assignment statement. */
3775 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3777 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3778 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
3781 if (AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3782 && AGGREGATE_TYPE_P (TREE_TYPE (rhsop)))
3784 do_structure_copy (lhsop, rhsop);
3788 /* Only care about operations with pointers, structures
3789 containing pointers, dereferences, and call expressions. */
3790 if (could_have_pointers (lhsop)
3791 || TREE_CODE (rhsop) == CALL_EXPR)
3793 get_constraint_for (lhsop, &lhsc);
3794 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3796 /* RHS that consist of unary operations,
3797 exceptional types, or bare decls/constants, get
3798 handled directly by get_constraint_for. */
3800 case tcc_declaration:
3802 case tcc_exceptional:
3803 case tcc_expression:
3809 get_constraint_for (rhsop, &rhsc);
3810 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3812 struct constraint_expr *c2;
3815 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3816 process_constraint (new_constraint (*c, *c2));
3824 /* For pointer arithmetic of the form
3825 PTR + CST, we can simply use PTR's
3826 constraint because pointer arithmetic is
3827 not allowed to go out of bounds. */
3828 if (handle_ptr_arith (lhsc, rhsop))
3833 /* Otherwise, walk each operand. Notice that we
3834 can't use the operand interface because we need
3835 to process expressions other than simple operands
3836 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3838 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
3840 tree op = TREE_OPERAND (rhsop, i);
3843 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3844 get_constraint_for (op, &rhsc);
3845 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3847 struct constraint_expr *c2;
3848 while (VEC_length (ce_s, rhsc) > 0)
3850 c2 = VEC_last (ce_s, rhsc);
3851 process_constraint (new_constraint (*c, *c2));
3852 VEC_pop (ce_s, rhsc);
/* Dynamic-type change: disable TBAA pruning on the location vars. */
3860 else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
3864 get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
3865 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3866 get_varinfo (c->var)->no_tbaa_pruning = true;
/* Independently of the statement kind, record escape sites. */
3869 stmt_escape_type = is_escape_site (t);
3870 if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3873 gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
3874 rhs = GIMPLE_STMT_OPERAND (t, 1);
3875 if (TREE_CODE (rhs) == ADDR_EXPR)
3877 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3880 || !is_global_var (base)))
3881 make_escape_constraint (rhs);
3883 else if (TREE_CODE (rhs) == SSA_NAME
3884 && POINTER_TYPE_P (TREE_TYPE (rhs)))
3885 make_escape_constraint (rhs);
3886 else if (could_have_pointers (rhs))
3887 make_escape_constraint (rhs);
3889 else if (stmt_escape_type == ESCAPE_BAD_CAST)
3892 gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
3893 rhs = GIMPLE_STMT_OPERAND (t, 1);
3894 gcc_assert (CONVERT_EXPR_P (rhs)
3895 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR);
3896 rhs = TREE_OPERAND (rhs, 0);
3897 make_escape_constraint (rhs);
3899 else if (stmt_escape_type == ESCAPE_TO_ASM)
3903 for (i = 0, link = ASM_OUTPUTS (t); link; i++, link = TREE_CHAIN (link))
3905 tree op = TREE_VALUE (link);
3906 if (op && could_have_pointers (op))
3907 /* Strictly we'd only need the constraints from ESCAPED and
3909 make_escape_constraint (op);
3911 for (i = 0, link = ASM_INPUTS (t); link; i++, link = TREE_CHAIN (link))
3913 tree op = TREE_VALUE (link);
3914 if (op && could_have_pointers (op))
3915 /* Strictly we'd only need the constraint to ESCAPED. */
3916 make_escape_constraint (op);
3920 /* After promoting variables and computing aliasing we will
3921 need to re-scan most statements. FIXME: Try to minimize the
3922 number of statements re-scanned. It's not really necessary to
3923 re-scan *all* statements. */
3925 mark_stmt_modified (origt);
3926 VEC_free (ce_s, heap, rhsc);
3927 VEC_free (ce_s, heap, lhsc);
3931 /* Find the first varinfo in the same variable as START that overlaps with
3933 Effectively, walk the chain of fields for the variable START to find the
3934 first field that overlaps with OFFSET.
3935 Return NULL if we can't find one. */
3938 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3940 varinfo_t curr = start;
/* NOTE(review): interior lines of the field-chain walk are elided in this
   extract; CURR presumably advances along the ->next chain — confirm against
   the full source.  */
3943 /* We may not find a variable in the field list with the actual
3944 offset when we have glommed a structure to a variable.
3945 In that case, however, offset should still be within the size
/* Overlap test: OFFSET lies in the half-open bit range
   [curr->offset, curr->offset + curr->size).  */
3947 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3955 /* Insert the varinfo FIELD into the field list for BASE, at the front
3959 insert_into_field_list (varinfo_t base, varinfo_t field)
/* Walk from BASE using a prev/curr pair; the actual splice is elided in
   this extract.  */
3961 varinfo_t prev = base;
3962 varinfo_t curr = base->next;
3968 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3972 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3974 varinfo_t prev = base;
3975 varinfo_t curr = base->next;
/* Scan until the first existing field whose offset is >= FIELD's, then
   splice FIELD in after PREV so the list stays offset-ordered.  */
3986 if (field->offset <= curr->offset)
3991 field->next = prev->next;
3996 /* This structure is used during pushing fields onto the fieldstack
3997 to track the offset of the field, since bitpos_of_field gives it
3998 relative to its immediate containing type, and we want it relative
3999 to the ultimate containing object. */
4003 /* Offset from the base of the base containing object to this field. */
4004 HOST_WIDE_INT offset;
4006 /* Size, in bits, of the field. */
4007 unsigned HOST_WIDE_INT size;
/* TRUE if DECL_SIZE of the field is absent or not a host integer
   (set in push_fields_onto_fieldstack); SIZE is then meaningless.  */
4009 unsigned has_unknown_size : 1;
/* TRUE if could_have_pointers() held for the field; adjacent
   pointer-free fields are merged by push_fields_onto_fieldstack.  */
4011 unsigned may_have_pointers : 1;
4013 typedef struct fieldoff fieldoff_s;
4015 DEF_VEC_O(fieldoff_s);
4016 DEF_VEC_ALLOC_O(fieldoff_s,heap);
4018 /* qsort comparison function for two fieldoff's PA and PB */
4021 fieldoff_compare (const void *pa, const void *pb)
4023 const fieldoff_s *foa = (const fieldoff_s *)pa;
4024 const fieldoff_s *fob = (const fieldoff_s *)pb;
4025 unsigned HOST_WIDE_INT foasize, fobsize;
/* Primary key: bit offset.  The return statements for each branch are
   elided in this extract.  */
4027 if (foa->offset < fob->offset)
4029 else if (foa->offset > fob->offset)
/* Secondary key: field size, compared only when offsets are equal.  */
4032 foasize = foa->size;
4033 fobsize = fob->size;
4034 if (foasize < fobsize)
4036 else if (foasize > fobsize)
4041 /* Sort a fieldstack according to the field offset and sizes. */
4043 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
/* In-place qsort over the vector's backing store; the comparator
   argument (presumably fieldoff_compare) is elided in this extract.  */
4045 qsort (VEC_address (fieldoff_s, fieldstack),
4046 VEC_length (fieldoff_s, fieldstack),
4047 sizeof (fieldoff_s),
4051 /* Return true if V is a tree that we can have subvars for.
4052 Normally, this is any aggregate type. Also complex
4053 types which are not gimple registers can have subvars. */
4056 var_can_have_subvars (const_tree v)
4058 /* Volatile variables should never have subvars. */
4059 if (TREE_THIS_VOLATILE (v))
4062 /* Non decls or memory tags can never have subvars. */
4063 if (!DECL_P (v) || MTAG_P (v))
4066 /* Aggregates without overlapping fields can have subvars. */
4067 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4073 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4074 the fields of TYPE onto fieldstack, recording their offsets along
4077 OFFSET is used to keep track of the offset in this entire
4078 structure, rather than just the immediately containing structure.
4079 Returns the number of fields pushed. */
4082 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4083 HOST_WIDE_INT offset)
/* Only RECORD_TYPEs are decomposed field-sensitively.  */
4088 if (TREE_CODE (type) != RECORD_TYPE)
4091 /* If the vector of fields is growing too big, bail out early.
4092 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
4094 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4097 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4098 if (TREE_CODE (field) == FIELD_DECL)
4102 HOST_WIDE_INT foff = bitpos_of_field (field);
/* Unions and fields without subvars are treated as opaque leaves;
   otherwise recurse into the field's type at its absolute offset.  */
4104 if (!var_can_have_subvars (field)
4105 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4106 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
4108 else if (!(pushed = push_fields_onto_fieldstack
4109 (TREE_TYPE (field), fieldstack, offset + foff))
4110 && (DECL_SIZE (field)
4111 && !integer_zerop (DECL_SIZE (field))))
4112 /* Empty structures may have actual size, like in C++. So
4113 see if we didn't push any subfields and the size is
4114 nonzero, push the field onto the stack. */
4119 fieldoff_s *pair = NULL;
4120 bool has_unknown_size = false;
4122 if (!VEC_empty (fieldoff_s, *fieldstack))
4123 pair = VEC_last (fieldoff_s, *fieldstack);
/* A field whose DECL_SIZE is absent or not a host integer gets
   has_unknown_size set and is never merged with a neighbor.  */
4125 if (!DECL_SIZE (field)
4126 || !host_integerp (DECL_SIZE (field), 1))
4127 has_unknown_size = true;
4129 /* If adjacent fields do not contain pointers merge them. */
4131 && !pair->may_have_pointers
4132 && !could_have_pointers (field)
4133 && !pair->has_unknown_size
4134 && !has_unknown_size
4135 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
4137 pair = VEC_last (fieldoff_s, *fieldstack);
4138 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
/* Otherwise record the field as a fresh fieldoff entry.  */
4142 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4143 pair->offset = offset + foff;
4144 pair->has_unknown_size = has_unknown_size;
4145 if (!has_unknown_size)
4146 pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
4149 pair->may_have_pointers = could_have_pointers (field);
4160 /* Create a constraint ID = &FROM. */
4163 make_constraint_from (varinfo_t vi, int from)
4165 struct constraint_expr lhs, rhs;
/* The lhs/rhs var and offset assignments are elided in this extract;
   rhs is an ADDRESSOF of FROM, i.e. VI = &FROM.  */
4173 rhs.type = ADDRESSOF;
4174 process_constraint (new_constraint (lhs, rhs));
4177 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4178 if it is a varargs function. */
4181 count_num_arguments (tree decl, bool *is_varargs)
/* Walk the TYPE_ARG_TYPES chain; a trailing void_type_node marks a
   proper (non-varargs) prototype end.  Loop body/return elided here.  */
4186 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4190 if (TREE_VALUE (t) == void_type_node)
4200 /* Creation function node for DECL, using NAME, and return the index
4201 of the variable we've created for the function. */
4204 create_function_info_for (tree decl, const char *name)
4206 unsigned int index = VEC_length (varinfo_t, varmap);
4210 bool is_varargs = false;
4212 /* Create the variable info. */
4214 vi = new_var_info (decl, index, name);
/* fullsize = argument count + 1; slot 0 is the function itself,
   slots 1..n are the arguments, and the result var goes after them.  */
4218 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4219 insert_vi_for_tree (vi->decl, vi);
4220 VEC_safe_push (varinfo_t, heap, varmap, vi);
4224 /* If it's varargs, we don't know how many arguments it has, so we
4231 vi->is_unknown_size_var = true;
4236 arg = DECL_ARGUMENTS (decl);
4238 /* Set up variables for each argument. */
4239 for (i = 1; i < vi->fullsize; i++)
4242 const char *newname;
4244 unsigned int newindex;
4245 tree argdecl = decl;
4250 newindex = VEC_length (varinfo_t, varmap);
/* Synthesize a name like "foo.arg0" for the i-th argument varinfo.  */
4251 asprintf (&tempname, "%s.arg%d", name, i-1);
4252 newname = ggc_strdup (tempname);
4255 argvi = new_var_info (argdecl, newindex, newname);
4256 argvi->decl = argdecl;
4257 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4260 argvi->fullsize = vi->fullsize;
4261 insert_into_field_list_sorted (vi, argvi);
4262 stats.total_vars ++;
4265 insert_vi_for_tree (arg, argvi);
4266 arg = TREE_CHAIN (arg);
4270 /* Create a variable for the return var. */
4271 if (DECL_RESULT (decl) != NULL
4272 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4275 const char *newname;
4277 unsigned int newindex;
4278 tree resultdecl = decl;
4282 if (DECL_RESULT (decl))
4283 resultdecl = DECL_RESULT (decl);
4285 newindex = VEC_length (varinfo_t, varmap);
4286 asprintf (&tempname, "%s.result", name);
4287 newname = ggc_strdup (tempname);
4290 resultvi = new_var_info (resultdecl, newindex, newname);
4291 resultvi->decl = resultdecl;
4292 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
/* The result occupies the next offset after the last argument.  */
4293 resultvi->offset = i;
4295 resultvi->fullsize = vi->fullsize;
4296 insert_into_field_list_sorted (vi, resultvi);
4297 stats.total_vars ++;
4298 if (DECL_RESULT (decl))
4299 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4305 /* Return true if FIELDSTACK contains fields that overlap.
4306 FIELDSTACK is assumed to be sorted by offset. */
4309 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4311 fieldoff_s *fo = NULL;
/* -1 cannot collide with a real offset, so the first iteration never
   reports a duplicate.  */
4313 HOST_WIDE_INT lastoffset = -1;
4315 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
/* Two consecutive entries at the same offset means an overlap (the
   vector is offset-sorted); the return is elided in this extract.  */
4317 if (fo->offset == lastoffset)
4319 lastoffset = fo->offset;
4324 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4325 This will also create any varinfo structures necessary for fields
4329 create_variable_info_for (tree decl, const char *name)
4331 unsigned int index = VEC_length (varinfo_t, varmap);
4333 tree decltype = TREE_TYPE (decl);
4334 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
4335 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4336 VEC (fieldoff_s,heap) *fieldstack = NULL;
/* In IPA mode, functions get their own specialized varinfo layout.  */
4338 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4339 return create_function_info_for (decl, name);
4341 if (var_can_have_subvars (decl) && use_field_sensitive)
4342 push_fields_onto_fieldstack (decltype, &fieldstack, 0);
4344 /* If the variable doesn't have subvars, we may end up needing to
4345 sort the field list and create fake variables for all the
4347 vi = new_var_info (decl, index, name);
/* No usable compile-time size => treat the whole variable as one blob.  */
4351 || !host_integerp (declsize, 1))
4353 vi->is_unknown_size_var = true;
4359 vi->fullsize = TREE_INT_CST_LOW (declsize);
4360 vi->size = vi->fullsize;
4363 insert_vi_for_tree (vi->decl, vi);
4364 VEC_safe_push (varinfo_t, heap, varmap, vi);
/* Globals that may hold pointers start out pointing to ESCAPED,
   unless whole-program IPA analysis can do better.  */
4365 if (is_global && (!flag_whole_program || !in_ipa_mode)
4366 && could_have_pointers (decl))
4367 make_constraint_from (vi, escaped_id);
4370 if (use_field_sensitive
4371 && !vi->is_unknown_size_var
4372 && var_can_have_subvars (decl)
4373 && VEC_length (fieldoff_s, fieldstack) > 1
4374 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4376 unsigned int newindex = VEC_length (varinfo_t, varmap);
4377 fieldoff_s *fo = NULL;
4378 bool notokay = false;
4381 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4383 if (fo->has_unknown_size
4391 /* We can't sort them if we have a field with a variable sized type,
4392 which will make notokay = true. In that case, we are going to return
4393 without creating varinfos for the fields anyway, so sorting them is a
4397 sort_fieldstack (fieldstack);
4398 /* Due to some C++ FE issues, like PR 22488, we might end up with
4399 what appear to be overlapping fields even though they,
4400 in reality, do not overlap. Until the C++ FE is fixed,
4401 we will simply disable field-sensitivity for these cases. */
4402 notokay = check_for_overlaps (fieldstack);
4406 if (VEC_length (fieldoff_s, fieldstack) != 0)
4407 fo = VEC_index (fieldoff_s, fieldstack, 0)
4409 if (fo == NULL || notokay)
/* Fall back to field-insensitive treatment for this variable.  */
4411 vi->is_unknown_size_var = 1;
4414 VEC_free (fieldoff_s, heap, fieldstack);
/* VI itself becomes the first field; walk the rest back-to-front and
   create one varinfo per fieldoff entry.  */
4418 vi->size = fo->size;
4419 vi->offset = fo->offset;
4420 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4421 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4425 const char *newname = "NULL";
4428 newindex = VEC_length (varinfo_t, varmap);
/* Name subfields like "var.OFFSET+SIZE" for dumps.  */
4431 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
4432 "+" HOST_WIDE_INT_PRINT_DEC,
4433 vi->name, fo->offset, fo->size);
4434 newname = ggc_strdup (tempname);
4437 newvi = new_var_info (decl, newindex, newname);
4438 newvi->offset = fo->offset;
4439 newvi->size = fo->size;
4440 newvi->fullsize = vi->fullsize;
4441 insert_into_field_list (vi, newvi);
4442 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4443 if (is_global && (!flag_whole_program || !in_ipa_mode)
4444 && fo->may_have_pointers)
4445 make_constraint_from (newvi, escaped_id);
4451 VEC_free (fieldoff_s, heap, fieldstack);
4456 /* Print out the points-to solution for VAR to FILE. */
4459 dump_solution_for_var (FILE *file, unsigned int var)
4461 varinfo_t vi = get_varinfo (var);
/* If VAR was unified with another node, report the representative
   instead of a solution set of its own.  */
4465 if (find (var) != var)
4467 varinfo_t vipt = get_varinfo (find (var));
4468 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4472 fprintf (file, "%s = { ", vi->name);
4473 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4475 fprintf (file, "%s ", get_varinfo (i)->name);
4477 fprintf (file, "}");
4478 if (vi->no_tbaa_pruning)
4479 fprintf (file, " no-tbaa-pruning");
4480 fprintf (file, "\n");
4484 /* Print the points-to solution for VAR to stdout. */
4487 debug_solution_for_var (unsigned int var)
4489 dump_solution_for_var (stdout, var);
4492 /* Create varinfo structures for all of the variables in the
4493 function for intraprocedural mode. */
4496 intra_create_variable_infos (void)
4499 struct constraint_expr lhs, rhs;
4501 /* For each incoming pointer argument arg, create the constraint ARG
4502 = NONLOCAL or a dummy variable if flag_argument_noalias is set. */
4503 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
/* Arguments that cannot hold pointers contribute no constraints.  */
4507 if (!could_have_pointers (t))
4510 /* If flag_argument_noalias is set, then function pointer
4511 arguments are guaranteed not to point to each other. In that
4512 case, create an artificial variable PARM_NOALIAS and the
4513 constraint ARG = &PARM_NOALIAS. */
4514 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4517 tree heapvar = heapvar_lookup (t)
4521 lhs.var = get_vi_for_tree (t)->id;
/* Lazily create the artificial heap variable backing this parm.  */
4523 if (heapvar == NULL_TREE)
4526 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4528 DECL_EXTERNAL (heapvar) = 1;
4529 if (gimple_referenced_vars (cfun))
4530 add_referenced_var (heapvar);
4532 heapvar_insert (t, heapvar);
/* Translate -fargument-noalias level into the var annotation.  */
4534 ann = get_var_ann (heapvar);
4535 if (flag_argument_noalias == 1)
4536 ann->noalias_state = NO_ALIAS;
4537 else if (flag_argument_noalias == 2)
4538 ann->noalias_state = NO_ALIAS_GLOBAL;
4539 else if (flag_argument_noalias == 3)
4540 ann->noalias_state = NO_ALIAS_ANYTHING;
4545 vi = get_vi_for_tree (heapvar);
4546 vi->is_artificial_var = 1;
4547 vi->is_heap_var = 1;
4549 rhs.type = ADDRESSOF;
/* Emit ARG.field = &PARM_NOALIAS for every field of the argument.  */
4551 for (p = get_varinfo (lhs.var); p; p = p->next)
4553 struct constraint_expr temp = lhs;
4555 process_constraint (new_constraint (temp, rhs));
/* Default case: every field of the argument points to NONLOCAL.  */
4560 varinfo_t arg_vi = get_vi_for_tree (t);
4562 for (p = arg_vi; p; p = p->next)
4563 make_constraint_from (p, nonlocal_id);
4568 /* Structure used to put solution bitmaps in a hashtable so they can
4569 be shared among variables with the same points-to set. */
4571 typedef struct shared_bitmap_info
4575 } *shared_bitmap_info_t;
4576 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
/* Hash table of shared_bitmap_info entries, keyed by bitmap contents
   (see shared_bitmap_hash / shared_bitmap_eq below).  */
4578 static htab_t shared_bitmap_table;
4580 /* Hash function for a shared_bitmap_info_t */
4583 shared_bitmap_hash (const void *p)
/* The hash is precomputed by bitmap_hash at insert/lookup time.  */
4585 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4586 return bi->hashcode;
4589 /* Equality function for two shared_bitmap_info_t's. */
4592 shared_bitmap_eq (const void *p1, const void *p2)
4594 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4595 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
/* Compare by bitmap contents, not identity.  */
4596 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4599 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4600 existing instance if there is one, NULL otherwise. */
4603 shared_bitmap_lookup (bitmap pt_vars)
/* Probe with a stack-allocated key; NO_INSERT means a miss leaves the
   table unchanged (the NULL-slot early return is elided here).  */
4606 struct shared_bitmap_info sbi;
4608 sbi.pt_vars = pt_vars;
4609 sbi.hashcode = bitmap_hash (pt_vars);
4611 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4612 sbi.hashcode, NO_INSERT);
4616 return ((shared_bitmap_info_t) *slot)->pt_vars;
4620 /* Add a bitmap to the shared bitmap hashtable. */
4623 shared_bitmap_add (bitmap pt_vars)
4626 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4628 sbi->pt_vars = pt_vars;
4629 sbi->hashcode = bitmap_hash (pt_vars);
4631 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4632 sbi->hashcode, INSERT);
/* Caller contract: only add bitmaps that shared_bitmap_lookup missed.  */
4633 gcc_assert (!*slot);
4634 *slot = (void *) sbi;
4638 /* Set bits in INTO corresponding to the variable uids in solution set
4639 FROM, which came from variable PTR.
4640 For variables that are actually dereferenced, we also use type
4641 based alias analysis to prune the points-to sets.
4642 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4643 help determine whether we are allowed to prune using TBAA.
4644 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4648 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4649 bool no_tbaa_pruning)
4654 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
4656 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4658 varinfo_t vi = get_varinfo (i);
4660 /* The only artificial variables that are allowed in a may-alias
4661 set are heap variables. */
4662 if (vi->is_artificial_var && !vi->is_heap_var)
4665 if (TREE_CODE (vi->decl) == VAR_DECL
4666 || TREE_CODE (vi->decl) == PARM_DECL
4667 || TREE_CODE (vi->decl) == RESULT_DECL)
4669 /* Just add VI->DECL to the alias set.
4670 Don't type prune artificial vars or points-to sets
4671 for pointers that have not been dereferenced or with
4672 type-based pruning disabled. */
4673 if (vi->is_artificial_var
4676 bitmap_set_bit (into, DECL_UID (vi->decl));
/* TBAA path: keep VI only if its alias set may conflict with the
   pointed-to type's alias set.  */
4679 alias_set_type var_alias_set, mem_alias_set;
4680 var_alias_set = get_alias_set (vi->decl);
4681 mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
4682 if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set,
4683 vi->decl, var_alias_set, true))
4684 bitmap_set_bit (into, DECL_UID (vi->decl));
/* TRUE while computed points-to results are valid; checked by
   find_what_p_points_to, clobber_what_escaped and
   compute_call_used_vars before answering queries.  (Presumably set
   and cleared by the pass entry/exit code — not visible here.)  */
4691 static bool have_alias_info = false;
4693 /* The list of SMT's that are in use by our pointer variables. This
4694 is the set of SMT's for all pointers that can point to anything. */
4695 static bitmap used_smts;
4697 /* Due to the ordering of points-to set calculation and SMT
4698 calculation being a bit co-dependent, we can't just calculate SMT
4699 used info whenever we want, we have to calculate it around the time
4700 that find_what_p_points_to is called. */
4702 /* Mark which SMT's are in use by points-to anything variables. */
4705 set_used_smts (void)
4709 used_smts = BITMAP_ALLOC (&pta_obstack);
4711 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4713 tree var = vi->decl;
/* Use the unification representative's solution for this node.  */
4714 varinfo_t withsolution = get_varinfo (find (i));
4717 struct ptr_info_def *pi = NULL;
4719 /* For parm decls, the pointer info may be under the default
4721 if (TREE_CODE (vi->decl) == PARM_DECL
4722 && gimple_default_def (cfun, var))
4723 pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4724 else if (TREE_CODE (var) == SSA_NAME)
4725 pi = SSA_NAME_PTR_INFO (var);
4727 /* Skip the special variables and those that can't be aliased. */
4728 if (vi->is_special_var
4730 || (pi && !pi->memory_tag_needed)
4731 || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4732 || !POINTER_TYPE_P (TREE_TYPE (var)))
4735 if (TREE_CODE (var) == SSA_NAME)
4736 var = SSA_NAME_VAR (var);
/* Record the symbol memory tag of any pointer whose solution
   contains ANYTHING.  */
4742 smt = va->symbol_mem_tag;
4743 if (smt && bitmap_bit_p (withsolution->solution, anything_id))
4744 bitmap_set_bit (used_smts, DECL_UID (smt));
4748 /* Given a pointer variable P, fill in its points-to set, or return
4750 Rather than return false for variables that point-to anything, we
4751 instead find the corresponding SMT, and merge in its aliases. In
4752 addition to these aliases, we also set the bits for the SMT's
4753 themselves and their subsets, as SMT's are still in use by
4754 non-SSA_NAME's, and pruning may eliminate every one of their
4755 aliases. In such a case, if we did not include the right set of
4756 SMT's in the points-to set of the variable, we'd end up with
4757 statements that do not conflict but should. */
4760 find_what_p_points_to (tree p)
4765 if (!have_alias_info)
4768 /* For parameters, get at the points-to set for the actual parm
4770 if (TREE_CODE (p) == SSA_NAME
4771 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4772 && SSA_NAME_IS_DEFAULT_DEF (p))
4773 lookup_p = SSA_NAME_VAR (p);
4775 vi = lookup_vi_for_tree (lookup_p);
4778 if (vi->is_artificial_var)
4781 /* See if this is a field or a structure. */
4782 if (vi->size != vi->fullsize)
4784 /* Nothing currently asks about structure fields directly,
4785 but when they do, we need code here to hand back the
4791 struct ptr_info_def *pi = get_ptr_info (p);
4794 bool was_pt_anything = false;
4795 bitmap finished_solution;
4798 if (!pi->memory_tag_needed)
4801 /* This variable may have been collapsed, let's get the real
4803 vi = get_varinfo (find (vi->id));
4805 /* Translate artificial variables into SSA_NAME_PTR_INFO
4807 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4809 varinfo_t vi = get_varinfo (i);
4811 if (vi->is_artificial_var)
4813 /* FIXME. READONLY should be handled better so that
4814 flow insensitive aliasing can disregard writable
4816 if (vi->id == nothing_id)
/* Any of these special nodes makes the pointer effectively
   point-to-anything; see the bail-out below.  */
4818 else if (vi->id == anything_id
4819 || vi->id == nonlocal_id
4820 || vi->id == escaped_id
4821 || vi->id == callused_id)
4822 was_pt_anything = 1;
4823 else if (vi->id == readonly_id)
4824 was_pt_anything = 1;
4825 else if (vi->id == integer_id)
4826 was_pt_anything = 1;
4827 else if (vi->is_heap_var)
4828 pi->pt_global_mem = 1;
4832 /* Instead of doing extra work, simply do not create
4833 points-to information for pt_anything pointers. This
4834 will cause the operand scanner to fall back to the
4835 type-based SMT and its aliases. Which is the best
4836 we could do here for the points-to set as well. */
4837 if (was_pt_anything)
4840 /* Share the final set of variables when possible. */
4841 finished_solution = BITMAP_GGC_ALLOC ();
4842 stats.points_to_sets_created++;
4844 set_uids_in_ptset (p, finished_solution, vi->solution,
4845 pi->is_dereferenced,
4846 vi->no_tbaa_pruning);
/* Reuse an identical existing bitmap if one is in the shared table,
   freeing the freshly built one; otherwise register ours.  */
4847 result = shared_bitmap_lookup (finished_solution);
4851 shared_bitmap_add (finished_solution);
4852 pi->pt_vars = finished_solution;
4856 pi->pt_vars = result;
4857 bitmap_clear (finished_solution);
4860 if (bitmap_empty_p (pi->pt_vars))
4870 /* Mark the ESCAPED solution as call clobbered. Returns false if
4871 pt_anything escaped which needs all locals that have their address
4872 taken marked call clobbered as well. */
4875 clobber_what_escaped (void)
4881 if (!have_alias_info)
4884 /* This variable may have been collapsed, let's get the real
4885 variable for escaped_id. */
4886 vi = get_varinfo (find (escaped_id));
4888 /* If call-used memory escapes we need to include it in the
4889 set of escaped variables. This can happen if a pure
4890 function returns a pointer and this pointer escapes. */
4891 if (bitmap_bit_p (vi->solution, callused_id))
4893 varinfo_t cu_vi = get_varinfo (find (callused_id));
4894 bitmap_ior_into (vi->solution, cu_vi->solution);
4897 /* Mark variables in the solution call-clobbered. */
4898 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4900 varinfo_t vi = get_varinfo (i);
4902 if (vi->is_artificial_var)
4904 /* nothing_id and readonly_id do not cause any
4905 call clobber ops. For anything_id and integer_id
4906 we need to clobber all addressable vars. */
4907 if (vi->id == anything_id
4908 || vi->id == integer_id)
4912 /* Only artificial heap-vars are further interesting. */
4913 if (vi->is_artificial_var && !vi->is_heap_var)
/* Real decls that are modifiable get marked call-clobbered.  */
4916 if ((TREE_CODE (vi->decl) == VAR_DECL
4917 || TREE_CODE (vi->decl) == PARM_DECL
4918 || TREE_CODE (vi->decl) == RESULT_DECL)
4919 && !unmodifiable_var_p (vi->decl))
4920 mark_call_clobbered (vi->decl, ESCAPE_TO_CALL);
4926 /* Compute the call-used variables. */
4929 compute_call_used_vars (void)
4934 bool has_anything_id = false;
4936 if (!have_alias_info)
4939 /* This variable may have been collapsed, let's get the real
4940 variable for callused_id. */
4941 vi = get_varinfo (find (callused_id));
4943 /* Mark variables in the solution call-used. */
4944 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4946 varinfo_t vi = get_varinfo (i);
4948 if (vi->is_artificial_var)
4950 /* For anything_id and integer_id we need to make
4951 all local addressable vars call-used. */
4952 if (vi->id == anything_id
4953 || vi->id == integer_id)
4954 has_anything_id = true;
4957 /* Only artificial heap-vars are further interesting. */
4958 if (vi->is_artificial_var && !vi->is_heap_var)
/* Real modifiable decls join the function's call-used set.  */
4961 if ((TREE_CODE (vi->decl) == VAR_DECL
4962 || TREE_CODE (vi->decl) == PARM_DECL
4963 || TREE_CODE (vi->decl) == RESULT_DECL)
4964 && !unmodifiable_var_p (vi->decl))
4965 bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl));
4968 /* If anything is call-used, add all addressable locals to the set. */
4969 if (has_anything_id)
4970 bitmap_ior_into (gimple_call_used_vars (cfun),
4971 gimple_addressable_vars (cfun));
4975 /* Dump points-to information to OUTFILE. */
4978 dump_sa_points_to_info (FILE *outfile)
4982 fprintf (outfile, "\nPoints-to sets\n\n");
/* Solver statistics are only printed when -fdump-...-stats is given.  */
4984 if (dump_flags & TDF_STATS)
4986 fprintf (outfile, "Stats:\n");
4987 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4988 fprintf (outfile, "Non-pointer vars: %d\n",
4989 stats.nonpointer_vars);
4990 fprintf (outfile, "Statically unified vars: %d\n",
4991 stats.unified_vars_static);
4992 fprintf (outfile, "Dynamically unified vars: %d\n",
4993 stats.unified_vars_dynamic);
4994 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4995 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4996 fprintf (outfile, "Number of implicit edges: %d\n",
4997 stats.num_implicit_edges);
5000 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5001 dump_solution_for_var (outfile, i);
5005 /* Debug points-to information to stderr. */
5008 debug_sa_points_to_info (void)
5010 dump_sa_points_to_info (stderr);
5014 /* Initialize the always-existing constraint variables for NULL
5015 ANYTHING, READONLY, and INTEGER */
5018 init_base_vars (void)
5020 struct constraint_expr lhs, rhs;
5022 /* Create the NULL variable, used to represent that a variable points
5024 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5025 var_nothing = new_var_info (nothing_tree, nothing_id, "NULL");
5026 insert_vi_for_tree (nothing_tree, var_nothing);
5027 var_nothing->is_artificial_var = 1;
5028 var_nothing->offset = 0;
5029 var_nothing->size = ~0;
5030 var_nothing->fullsize = ~0;
5031 var_nothing->is_special_var = 1;
5032 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5034 /* Create the ANYTHING variable, used to represent that a variable
5035 points to some unknown piece of memory. */
5036 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5037 var_anything = new_var_info (anything_tree, anything_id, "ANYTHING");
5038 insert_vi_for_tree (anything_tree, var_anything);
5039 var_anything->is_artificial_var = 1;
5040 var_anything->size = ~0;
5041 var_anything->offset = 0;
5042 var_anything->next = NULL;
5043 var_anything->fullsize = ~0;
5044 var_anything->is_special_var = 1;
5046 /* Anything points to anything. This makes deref constraints just
5047 work in the presence of linked list and other p = *p type loops,
5048 by saying that *ANYTHING = ANYTHING. */
5049 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5051 lhs.var = anything_id;
5053 rhs.type = ADDRESSOF;
5054 rhs.var = anything_id;
5057 /* This specifically does not use process_constraint because
5058 process_constraint ignores all anything = anything constraints, since all
5059 but this one are redundant. */
5060 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5062 /* Create the READONLY variable, used to represent that a variable
5063 points to readonly memory. */
5064 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5065 var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY");
5066 var_readonly->is_artificial_var = 1;
5067 var_readonly->offset = 0;
5068 var_readonly->size = ~0;
5069 var_readonly->fullsize = ~0;
5070 var_readonly->next = NULL;
5071 var_readonly->is_special_var = 1;
5072 insert_vi_for_tree (readonly_tree, var_readonly);
5073 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5075 /* readonly memory points to anything, in order to make deref
5076 easier. In reality, it points to anything the particular
5077 readonly variable can point to, but we don't track this
5080 lhs.var = readonly_id;
5082 rhs.type = ADDRESSOF;
5083 rhs.var = readonly_id; /* FIXME */
5085 process_constraint (new_constraint (lhs, rhs));
5087 /* Create the ESCAPED variable, used to represent the set of escaped
5089 escaped_tree = create_tmp_var_raw (void_type_node, "ESCAPED");
5090 var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED");
5091 insert_vi_for_tree (escaped_tree, var_escaped);
5092 var_escaped->is_artificial_var = 1;
5093 var_escaped->offset = 0;
5094 var_escaped->size = ~0;
5095 var_escaped->fullsize = ~0;
/* Unlike the other base vars, ESCAPED is NOT special: it may appear in
   real may-alias solutions.  */
5096 var_escaped->is_special_var = 0;
5097 VEC_safe_push (varinfo_t, heap, varmap, var_escaped);
/* The solver assumes escaped_id == 3; verify the push order.  */
5098 gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped);
5100 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
5102 lhs.var = escaped_id;
5105 rhs.var = escaped_id;
5107 process_constraint (new_constraint (lhs, rhs));
5109 /* Create the NONLOCAL variable, used to represent the set of nonlocal
5111 nonlocal_tree = create_tmp_var_raw (void_type_node, "NONLOCAL");
5112 var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL");
5113 insert_vi_for_tree (nonlocal_tree, var_nonlocal);
5114 var_nonlocal->is_artificial_var = 1;
5115 var_nonlocal->offset = 0;
5116 var_nonlocal->size = ~0;
5117 var_nonlocal->fullsize = ~0;
5118 var_nonlocal->is_special_var = 1;
5119 VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal);
5121 /* Nonlocal memory points to escaped (which includes nonlocal),
5122 in order to make deref easier. */
5124 lhs.var = nonlocal_id;
5126 rhs.type = ADDRESSOF;
5127 rhs.var = escaped_id;
5129 process_constraint (new_constraint (lhs, rhs));
5131 /* Create the CALLUSED variable, used to represent the set of call-used
5133 callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED");
5134 var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
5135 insert_vi_for_tree (callused_tree, var_callused);
5136 var_callused->is_artificial_var = 1;
5137 var_callused->offset = 0;
5138 var_callused->size = ~0;
5139 var_callused->fullsize = ~0;
/* Like ESCAPED, CALLUSED participates in real solutions.  */
5140 var_callused->is_special_var = 0;
5141 VEC_safe_push (varinfo_t, heap, varmap, var_callused);
5143 /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc. */
5145 lhs.var = callused_id;
5148 rhs.var = callused_id;
5150 process_constraint (new_constraint (lhs, rhs));
5152 /* Create the INTEGER variable, used to represent that a variable points
5154 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5155 var_integer = new_var_info (integer_tree, integer_id, "INTEGER");
5156 insert_vi_for_tree (integer_tree, var_integer);
5157 var_integer->is_artificial_var = 1;
5158 var_integer->size = ~0;
5159 var_integer->fullsize = ~0;
5160 var_integer->offset = 0;
5161 var_integer->next = NULL;
5162 var_integer->is_special_var = 1;
5163 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5165 /* INTEGER = ANYTHING, because we don't know where a dereference of
5166 a random integer will point to. */
5168 lhs.var = integer_id;
5170 rhs.type = ADDRESSOF;
5171 rhs.var = anything_id;
5173 process_constraint (new_constraint (lhs, rhs));
5175 /* *ESCAPED = &ESCAPED. This is true because we have to assume
5176 everything pointed to by escaped can also point to escaped. */
5178 lhs.var = escaped_id;
5180 rhs.type = ADDRESSOF;
5181 rhs.var = escaped_id;
5183 process_constraint (new_constraint (lhs, rhs));
5185 /* *ESCAPED = &NONLOCAL. This is true because we have to assume
5186 everything pointed to by escaped can also point to nonlocal. */
5188 lhs.var = escaped_id;
5190 rhs.type = ADDRESSOF;
5191 rhs.var = nonlocal_id;
5193 process_constraint (new_constraint (lhs, rhs));
5196 /* Initialize things necessary to perform PTA */
5199 init_alias_vars (void)
/* Three obstacks: long-lived PTA bitmaps, previous-iteration bitmaps,
   and predecessor-graph bitmaps (released early by
   remove_preds_and_fake_succs).  */
5201 bitmap_obstack_initialize (&pta_obstack);
5202 bitmap_obstack_initialize (&oldpta_obstack);
5203 bitmap_obstack_initialize (&predbitmap_obstack);
5205 constraint_pool = create_alloc_pool ("Constraint pool",
5206 sizeof (struct constraint), 30);
5207 variable_info_pool = create_alloc_pool ("Variable info pool",
5208 sizeof (struct variable_info), 30);
5209 constraints = VEC_alloc (constraint_t, heap, 8);
5210 varmap = VEC_alloc (varinfo_t, heap, 8);
5211 vi_for_tree = pointer_map_create ();
5213 memset (&stats, 0, sizeof (stats));
/* Table entries are malloc'd (XNEW in shared_bitmap_add), hence the
   'free' deallocator.  */
5214 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5215 shared_bitmap_eq, free);
5219 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5220 predecessor edges. */
/* After variable substitution and indirect-cycle detection the
   predecessor half of the constraint graph and the fake REF-node
   successors are no longer needed; this releases them to save memory
   before the main solver runs.  */
5223 remove_preds_and_fake_succs (constraint_graph_t graph)
5227 /* Clear the implicit ref and address nodes from the successor
/* Nodes in [FIRST_REF_NODE, 2*FIRST_REF_NODE) are the artificial
   "ref" nodes; drop any successor edges that point into that range.  */
5229 for (i = 0; i < FIRST_REF_NODE; i++)
5231 if (graph->succs[i])
5232 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5233 FIRST_REF_NODE * 2);
5236 /* Free the successor list for the non-ref nodes. */
5237 for (i = FIRST_REF_NODE; i < graph->size; i++)
5239 if (graph->succs[i])
5240 BITMAP_FREE (graph->succs[i]);
5243 /* Now reallocate the size of the successor list as, and blow away
5244 the predecessor bitmaps. */
/* Shrink the graph to just the real variables; the ref nodes are gone.  */
5245 graph->size = VEC_length (varinfo_t, varmap);
5246 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5248 free (graph->implicit_preds);
5249 graph->implicit_preds = NULL;
5250 free (graph->preds);
5251 graph->preds = NULL;
/* All predecessor bitmaps were allocated on this obstack, so one
   release frees them en masse.  */
5252 bitmap_obstack_release (&predbitmap_obstack);
5255 /* Compute the set of variables we can't TBAA prune. */
/* Fixed-point propagation of the NO_TBAA_PRUNING flag over the solved
   constraint graph: any variable reachable (via copy constraints or
   successor edges) from a variable initially marked no_tbaa_pruning is
   itself marked, and the corresponding pointer decls are flagged so
   that type-based alias analysis will not prune their points-to sets.  */
5258 compute_tbaa_pruning (void)
5260 unsigned int size = VEC_length (varinfo_t, varmap);
5265 changed = sbitmap_alloc (size);
5266 sbitmap_zero (changed);
5268 /* Mark all initial no_tbaa_pruning nodes as changed. */
5270 for (i = 0; i < size; ++i)
5272 varinfo_t ivi = get_varinfo (i);
/* Only representatives of unification classes carry solver state;
   find (i) == i filters out the merged nodes.  */
5274 if (find (i) == i && ivi->no_tbaa_pruning)
/* Only seed the worklist with nodes that can actually propagate the
   flag onward: those with outgoing edges or complex constraints.  */
5277 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5278 || VEC_length (constraint_t, graph->complex[i]) > 0)
5280 SET_BIT (changed, i);
/* Iterate to a fixed point.  NOTE(review): the lines maintaining
   changed_count are not visible here; presumably it tracks the number
   of bits set in CHANGED -- confirm against the full source.  */
5286 while (changed_count > 0)
5288 struct topo_info *ti = init_topo_info ();
/* Process nodes in topological order so each pass propagates as far
   as possible before re-scanning.  */
5291 compute_topo_order (graph, ti);
5293 while (VEC_length (unsigned, ti->topo_order) != 0)
5297 i = VEC_pop (unsigned, ti->topo_order);
5299 /* If this variable is not a representative, skip it. */
5303 /* If the node has changed, we need to process the complex
5304 constraints and outgoing edges again. */
5305 if (TEST_BIT (changed, i))
5309 VEC(constraint_t,heap) *complex = graph->complex[i];
5311 RESET_BIT (changed, i);
5314 /* Process the complex copy constraints. */
/* A SCALAR = SCALAR complex constraint is a copy; the flag flows
   from the rhs (this node) to the lhs representative.  */
5315 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5317 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5319 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5321 if (!lhsvi->no_tbaa_pruning)
5323 lhsvi->no_tbaa_pruning = true;
5324 if (!TEST_BIT (changed, lhsvi->id))
5326 SET_BIT (changed, lhsvi->id);
5333 /* Propagate to all successors. */
5334 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5336 unsigned int to = find (j);
5337 varinfo_t tovi = get_varinfo (to);
5339 /* Don't propagate to ourselves. */
5343 if (!tovi->no_tbaa_pruning)
5345 tovi->no_tbaa_pruning = true;
5346 if (!TEST_BIT (changed, to))
5348 SET_BIT (changed, to);
5356 free_topo_info (ti);
5359 sbitmap_free (changed);
/* Finally, transfer the flag from representatives to the actual decls:
   every pointer variable whose representative is marked gets
   DECL_NO_TBAA_P set and its RTL alias set zeroed.  */
5363 for (i = 0; i < size; ++i)
5365 varinfo_t ivi = get_varinfo (i);
5366 varinfo_t ivip = get_varinfo (find (i));
5368 if (ivip->no_tbaa_pruning)
5370 tree var = ivi->decl;
/* SSA names share their flag with the underlying variable.  */
5372 if (TREE_CODE (var) == SSA_NAME)
5373 var = SSA_NAME_VAR (var);
5375 if (POINTER_TYPE_P (TREE_TYPE (var)))
5377 DECL_NO_TBAA_P (var) = 1;
5379 /* Tell the RTL layer that this pointer can alias
/* Alias set 0 conflicts with everything.  */
5381 DECL_POINTER_ALIAS_SET (var) = 0;
5388 /* Create points-to sets for the current function. See the comments
5389 at the start of the file for an algorithmic overview. */
/* Driver for intraprocedural points-to analysis: generate constraints
   from every PHI and statement in the current function, build and
   simplify the constraint graph, solve it, and then compute the set of
   variables exempt from TBAA pruning.  */
5392 compute_points_to_sets (void)
5394 struct scc_info *si;
5397 timevar_push (TV_TREE_PTA);
5400 init_alias_heapvars ();
/* Create varinfos for parameters and other incoming values.  */
5402 intra_create_variable_infos ();
5404 /* Now walk all statements and derive aliases. */
5407 block_stmt_iterator bsi;
/* Only PHIs for GIMPLE registers generate constraints here.  */
5410 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5411 if (is_gimple_reg (PHI_RESULT (phi)))
5412 find_func_aliases (phi);
/* Note: the iterator is advanced inside the loop body (not here)
   because statements may be removed below.  */
5414 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
5416 tree stmt = bsi_stmt (bsi);
5418 find_func_aliases (stmt);
5420 /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
5421 been captured, and we can remove them. */
5422 if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
5423 bsi_remove (&bsi, true);
5432 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5433 dump_constraints (dump_file);
5438 "\nCollapsing static cycles and doing variable "
/* Graph is sized 2 * #vars to leave room for the artificial REF
   nodes used during offline variable substitution.  */
5441 init_graph (VEC_length (varinfo_t, varmap) * 2);
5444 fprintf (dump_file, "Building predecessor graph\n");
5445 build_pred_graph ();
5448 fprintf (dump_file, "Detecting pointer and location "
/* Offline variable substitution (pointer/location equivalence).  */
5450 si = perform_var_substitution (graph);
5453 fprintf (dump_file, "Rewriting constraints and unifying "
5455 rewrite_constraints (graph, si);
5456 free_var_substitution_info (si);
5458 build_succ_graph ();
5459 move_complex_constraints (graph);
5462 fprintf (dump_file, "Uniting pointer but not location equivalent "
5464 unite_pointer_equivalences (graph);
5467 fprintf (dump_file, "Finding indirect cycles\n");
5468 find_indirect_cycles (graph);
5470 /* Implicit nodes and predecessors are no longer necessary at this
5472 remove_preds_and_fake_succs (graph);
5475 fprintf (dump_file, "Solving graph\n");
/* Propagate the points-to solution to a fixed point.  */
5477 solve_graph (graph);
5479 compute_tbaa_pruning ();
5482 dump_sa_points_to_info (dump_file);
5484 have_alias_info = true;
5486 timevar_pop (TV_TREE_PTA);
5490 /* Delete created points-to sets. */
/* Tear down all global state created by init_alias_vars and the
   solver: the shared-bitmap cache, the tree -> varinfo map, the
   constraint graph and its per-node vectors, and the allocation
   pools.  Inverse of init_alias_vars.  */
5493 delete_points_to_sets (void)
5497 htab_delete (shared_bitmap_table);
5498 if (dump_file && (dump_flags & TDF_STATS))
5499 fprintf (dump_file, "Points to sets created:%d\n",
5500 stats.points_to_sets_created);
5502 pointer_map_destroy (vi_for_tree);
/* Releasing the obstack frees every points-to bitmap at once.  */
5503 bitmap_obstack_release (&pta_obstack);
5504 VEC_free (constraint_t, heap, constraints);
/* Free the per-node complex-constraint vectors before the arrays
   that hold them.  */
5506 for (i = 0; i < graph->size; i++)
5507 VEC_free (constraint_t, heap, graph->complex[i]);
5508 free (graph->complex);
5511 free (graph->succs);
5513 free (graph->pe_rep);
5514 free (graph->indirect_cycles);
5517 VEC_free (varinfo_t, heap, varmap);
5518 free_alloc_pool (variable_info_pool);
5519 free_alloc_pool (constraint_pool);
5520 have_alias_info = false;
5523 /* Return true if we should execute IPA PTA. */
/* Gate for the IPA points-to pass: requires whole-program (unit-at-a-time)
   compilation and a clean front end.  NOTE(review): the elided middle of
   this condition presumably also tests the flag enabling IPA PTA --
   confirm against the full source.  */
5527 return (flag_unit_at_a_time != 0
5529 /* Don't bother doing anything if the program has errors. */
5530 && !(errorcount || sorrycount));
5533 /* Execute the driver for IPA PTA. */
/* Interprocedural points-to analysis over the whole call graph:
   create function infos for every analyzed node, generate constraints
   from each function body, then build, simplify and solve one global
   constraint graph.  */
5535 ipa_pta_execute (void)
5537 struct cgraph_node *node;
5538 struct scc_info *si;
5541 init_alias_heapvars ();
/* First pass: create a varinfo chain for each function so calls can
   be modeled field-sensitively.  */
5544 for (node = cgraph_nodes; node; node = node->next)
5546 if (!node->analyzed || cgraph_is_master_clone (node))
5550 varid = create_function_info_for (node->decl,
5551 cgraph_node_name (node));
/* Externally visible functions can be called with anything, so every
   component of their function info points to ANYTHING.  */
5552 if (node->local.externally_visible)
5554 varinfo_t fi = get_varinfo (varid);
5555 for (; fi; fi = fi->next)
5556 make_constraint_from (fi, anything_id);
/* Second pass: walk each function body and generate constraints.  */
5560 for (node = cgraph_nodes; node; node = node->next)
5562 if (node->analyzed && cgraph_is_master_clone (node))
5564 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
/* find_func_aliases depends on current_function_decl; save and
   restore it around this function's walk.  */
5566 tree old_func_decl = current_function_decl;
5569 "Generating constraints for %s\n",
5570 cgraph_node_name (node));
5572 current_function_decl = node->decl;
5574 FOR_EACH_BB_FN (bb, func)
5576 block_stmt_iterator bsi;
5579 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5581 if (is_gimple_reg (PHI_RESULT (phi)))
5583 find_func_aliases (phi);
5587 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5589 tree stmt = bsi_stmt (bsi);
5590 find_func_aliases (stmt);
5593 current_function_decl = old_func_decl;
5598 /* Make point to anything. */
5604 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5605 dump_constraints (dump_file);
5610 "\nCollapsing static cycles and doing variable "
/* Same pipeline as compute_points_to_sets: build, substitute,
   rewrite, simplify, then solve.  */
5613 init_graph (VEC_length (varinfo_t, varmap) * 2);
5614 build_pred_graph ();
5615 si = perform_var_substitution (graph);
5616 rewrite_constraints (graph, si);
5617 free_var_substitution_info (si);
5619 build_succ_graph ();
5620 move_complex_constraints (graph);
5621 unite_pointer_equivalences (graph);
5622 find_indirect_cycles (graph);
5624 /* Implicit nodes and predecessors are no longer necessary at this
5626 remove_preds_and_fake_succs (graph);
5629 fprintf (dump_file, "\nSolving graph\n");
5631 solve_graph (graph);
5634 dump_sa_points_to_info (dump_file);
/* IPA PTA currently only dumps its results; free everything again.  */
5637 delete_alias_heapvars ();
5638 delete_points_to_sets ();
/* Pass descriptor registering the IPA points-to analysis with the
   pass manager; gated by gate_ipa_pta and timed under TV_IPA_PTA.  */
5642 struct simple_ipa_opt_pass pass_ipa_pta =
5647 gate_ipa_pta, /* gate */
5648 ipa_pta_execute, /* execute */
5651 0, /* static_pass_number */
5652 TV_IPA_PTA, /* tv_id */
5653 0, /* properties_required */
5654 0, /* properties_provided */
5655 0, /* properties_destroyed */
5656 0, /* todo_flags_start */
5657 TODO_update_ssa /* todo_flags_finish */
5661 /* Initialize the heapvar for statement mapping. */
/* Lazily create the GC-allocated statement -> heap-variable hash
   table; safe to call more than once.  */
5663 init_alias_heapvars (void)
5665 if (!heapvar_for_stmt)
5666 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
/* Destroy the statement -> heap-variable map created by
   init_alias_heapvars; clearing the pointer lets it be recreated.  */
5671 delete_alias_heapvars (void)
5673 htab_delete (heapvar_for_stmt)
5674 heapvar_for_stmt = NULL;
5678 #include "gt-tree-ssa-structalias.h"