1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
35 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "tree-inline.h"
42 #include "tree-gimple.h"
46 #include "tree-pass.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
51 #include "tree-ssa-structalias.h"
54 #include "pointer-set.h"
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
68 See "Efficient Field-sensitive pointer analysis for C" by "David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
73 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
93 Each variable for a structure field has
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize", that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
115 In order to solve the system of set constraints, the following is
118 1. Each constraint variable x has a solution set associated with it,
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
146 8. The process of walking the graph is iterated until no solution
149 Prior to walking the graph in steps 6 and 7, We perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
/* GC-protected hash table mapping statements/decls to their heap variables.  */
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
/* True if the analysis should create one variable per structure field.  */
169 static bool use_field_sensitive = true;
/* Nonzero when running in whole-program (IPA) mode.  */
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
/* Forward declarations for mutually dependent routines below.  */
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
/* Iterate over bitmap A only when it is non-NULL.
   NOTE(review): this listing appears to drop the "if ((a))" guard line
   (original line 192) between the next two lines -- confirm against the
   complete source.  */
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
/* Statistics gathered by the constraint solver, dumped for diagnostics.
   NOTE(review): the opening brace and closing "} stats;" lines appear to
   be missing from this listing.  */
195 static struct constraint_stats
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
/* Per-variable information for the constraint system.
   NOTE(review): the "struct variable_info {" opener and several member
   declarations (the id, name, and decl fields described by the first
   three comments below, plus the solution/oldsolution bitmaps) are
   missing from this listing.  */
209 /* ID of this variable */
212 /* Name of this variable */
215 /* Tree that this variable is associated with. */
218 /* Offset of this variable, in bits, from the base variable */
219 unsigned HOST_WIDE_INT offset;
221 /* Size of the variable, in bits. */
222 unsigned HOST_WIDE_INT size;
224 /* Full size of the base variable, in bits. */
225 unsigned HOST_WIDE_INT fullsize;
227 /* A link to the variable for the next field in this structure. */
228 struct variable_info *next;
230 /* True if this is a variable created by the constraint analysis, such as
231 heap variables and constraints we had to break up. */
232 unsigned int is_artificial_var:1;
234 /* True if this is a special variable whose solution set should not be
236 unsigned int is_special_var:1;
238 /* True for variables whose size is not known or variable. */
239 unsigned int is_unknown_size_var:1;
241 /* True for variables that have unions somewhere in them. */
242 unsigned int has_union:1;
244 /* True if this is a heap variable. */
245 unsigned int is_heap_var:1;
247 /* True if we may not use TBAA to prune references to this
248 variable. This is used for C++ placement new. */
249 unsigned int no_tbaa_pruning : 1;
251 /* Points-to set for this variable. */
254 /* Old points-to set for this variable. */
257 /* Variable id this was collapsed to due to type unsafety. This
258 should be unused completely after build_succ_graph, or something
260 struct variable_info *collapsed_to;
262 typedef struct variable_info *varinfo_t;
/* Return the first field of the variable chain that covers the given
   bit offset.  */
264 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
266 /* Pool of variable info structures. */
267 static alloc_pool variable_info_pool;
269 DEF_VEC_P(varinfo_t);
271 DEF_VEC_ALLOC_P(varinfo_t, heap);
273 /* Table of variable info structures for constraint variables.
274 Indexed directly by variable info id. */
275 static VEC(varinfo_t,heap) *varmap;
277 /* Return the varmap element N */
279 static inline varinfo_t
280 get_varinfo (unsigned int n)
/* NOTE(review): the function's brace lines are missing from this listing.  */
282 return VEC_index (varinfo_t, varmap, n);
285 /* Return the varmap element N, following the collapsed_to link. */
287 static inline varinfo_t
288 get_varinfo_fc (unsigned int n)
290 varinfo_t v = VEC_index (varinfo_t, varmap, n);
/* NOTE(review): the "if (v->collapsed_to)" guard and fallback "return v;"
   (original lines 291-296) appear to be dropped here; as shown this would
   unconditionally return the collapsed_to link.  Confirm against the full
   source.  */
293 return v->collapsed_to;
297 /* Static IDs for the special variables. */
298 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
299 escaped_id = 3, nonlocal_id = 4, callused_id = 5, integer_id = 6 };
301 /* Variable that represents the unknown pointer. */
302 static varinfo_t var_anything;
303 static tree anything_tree;
305 /* Variable that represents the NULL pointer. */
306 static varinfo_t var_nothing;
307 static tree nothing_tree;
309 /* Variable that represents read only memory. */
310 static varinfo_t var_readonly;
311 static tree readonly_tree;
313 /* Variable that represents escaped memory. */
314 static varinfo_t var_escaped;
315 static tree escaped_tree;
317 /* Variable that represents nonlocal memory. */
318 static varinfo_t var_nonlocal;
319 static tree nonlocal_tree;
321 /* Variable that represents call-used memory. */
322 static varinfo_t var_callused;
323 static tree callused_tree;
325 /* Variable that represents integers. This is used for when people do things
   like cast from an integer to a pointer.  (NOTE(review): the continuation
   line of this comment, original line 326, is missing from this listing.) */
327 static varinfo_t var_integer;
328 static tree integer_tree;
330 /* Lookup a heap var for FROM, and return it if we find one. */
/* NOTE(review): the return-type line, braces, the initialization of IN,
   and the final return statement appear to be missing from this listing.  */
333 heapvar_lookup (tree from)
335 struct tree_map *h, in;
/* Hash by the pointer identity of FROM, matching heapvar_insert below.  */
338 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
339 htab_hash_pointer (from));
345 /* Insert a mapping FROM->TO in the heap var for statement
/* NOTE(review): this comment's continuation, the function's return type,
   braces, and the declarations of H and LOC are missing from this
   listing.  */
349 heapvar_insert (tree from, tree to)
/* Allocate the map entry in GC memory; heapvar_for_stmt is GTY-marked.  */
354 h = GGC_NEW (struct tree_map);
355 h->hash = htab_hash_pointer (from);
358 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
359 *(struct tree_map **) loc = h;
362 /* Return a new variable info structure consisting for a variable
363 named NAME, and using constraint graph node NODE. */
/* NOTE(review): this listing drops the "static varinfo_t" return type,
   the braces, the "tree var = t;" declaration, the assignments of the
   id/name/decl/size fields, and the final "return ret;".  */
366 new_var_info (tree t, unsigned int id, const char *name)
368 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
/* All flag bits start cleared; callers set them as appropriate.  */
374 ret->is_artificial_var = false;
375 ret->is_heap_var = false;
376 ret->is_special_var = false;
377 ret->is_unknown_size_var = false;
378 ret->has_union = false;
/* For SSA names the TBAA information lives on the underlying decl.  */
380 if (TREE_CODE (var) == SSA_NAME)
381 var = SSA_NAME_VAR (var);
/* Disable TBAA pruning for pointers explicitly marked DECL_NO_TBAA_P
   (used for C++ placement new).  */
382 ret->no_tbaa_pruning = (DECL_P (var)
383 && POINTER_TYPE_P (TREE_TYPE (var))
384 && DECL_NO_TBAA_P (var));
/* Fresh, empty points-to sets on the dedicated obstacks.  */
385 ret->solution = BITMAP_ALLOC (&pta_obstack);
386 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
388 ret->collapsed_to = NULL;
/* The three kinds of constraint expression: plain variable (x),
   dereference (*x), and address-of (&x).  */
392 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
394 /* An expression that appears in a constraint. */
/* NOTE(review): the struct's braces and the "unsigned int var;" member
   declaration are missing from this listing.  */
396 struct constraint_expr
398 /* Constraint type. */
399 constraint_expr_type type;
401 /* Variable we are referring to in the constraint. */
404 /* Offset, in bits, of this constraint from the beginning of
405 variables it ends up referring to.
407 IOW, in a deref constraint, we would deref, get the result set,
408 then add OFFSET to each member. */
409 unsigned HOST_WIDE_INT offset;
412 typedef struct constraint_expr ce_s;
414 DEF_VEC_ALLOC_O(ce_s, heap);
415 static void get_constraint_for (tree, VEC(ce_s, heap) **);
416 static void do_deref (VEC (ce_s, heap) **);
418 /* Our set constraints are made up of two constraint expressions, one
421 As described in the introduction, our set constraints each represent an
422 operation between set valued variables.
/* NOTE(review): the "struct constraint {" opener is missing from this
   listing; a constraint is the pair LHS = RHS below.  */
426 struct constraint_expr lhs;
427 struct constraint_expr rhs;
430 /* List of constraints that we use to build the constraint graph from. */
432 static VEC(constraint_t,heap) *constraints;
433 static alloc_pool constraint_pool;
439 /* The constraint graph is represented as an array of bitmaps
440 containing successor nodes. */
442 struct constraint_graph
444 /* Size of this graph, which may be different than the number of
445 nodes in the variable map. */
448 /* Explicit successors of each node. */
451 /* Implicit predecessors of each node (Used for variable
453 bitmap *implicit_preds;
455 /* Explicit predecessors of each node (Used for variable substitution). */
458 /* Indirect cycle representatives, or -1 if the node has no indirect
460 int *indirect_cycles;
462 /* Representative node for a node. rep[a] == a unless the node has
466 /* Equivalence class representative for a label. This is used for
467 variable substitution. */
470 /* Pointer equivalence label for a node. All nodes with the same
471 pointer equivalence label can be unified together at some point
472 (either during constraint optimization or after the constraint
476 /* Pointer equivalence representative for a label. This is used to
477 handle nodes that are pointer equivalent but not location
478 equivalent. We can unite these once the addressof constraints
479 are transformed into initial points-to sets. */
482 /* Pointer equivalence label for each node, used during variable
484 unsigned int *pointer_label;
486 /* Location equivalence label for each node, used during location
487 equivalence finding. */
488 unsigned int *loc_label;
490 /* Pointed-by set for each node, used during location equivalence
491 finding. This is pointed-by rather than pointed-to, because it
492 is constructed using the predecessor graph. */
495 /* Points to sets for pointer equivalence. This is *not* the actual
496 points-to sets for nodes. */
499 /* Bitmap of nodes where the bit is set if the node is a direct
500 node. Used for variable substitution. */
501 sbitmap direct_nodes;
503 /* Bitmap of nodes where the bit is set if the node is address
504 taken. Used for variable substitution. */
505 bitmap address_taken;
507 /* True if points_to bitmap for this node is stored in the hash
511 /* Number of incoming edges remaining to be processed by pointer
513 Used for variable substitution. */
514 unsigned int *number_incoming;
517 /* Vector of complex constraints for each graph node. Complex
518 constraints are those involving dereferences or offsets that are
520 VEC(constraint_t,heap) **complex;
523 static constraint_graph_t graph;
525 /* During variable substitution and the offline version of indirect
526 cycle finding, we create nodes to represent dereferences and
527 address taken constraints. These represent where these start and
529 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
530 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
532 /* Return the representative node for NODE, if NODE has been unioned
534 This function performs path compression along the way to finding
535 the representative. */
/* NOTE(review): the "static unsigned int" return type, braces, and the
   base-case "return node;" are missing from this listing.  */
538 find (unsigned int node)
540 gcc_assert (node < graph->size);
/* Path compression: point every node on the chain directly at the root.  */
541 if (graph->rep[node] != node)
542 return graph->rep[node] = find (graph->rep[node]);
546 /* Union the TO and FROM nodes to the TO nodes.
547 Note that at some point in the future, we may want to do
548 union-by-rank, in which case we are going to have to return the
549 node we unified to. */
/* NOTE(review): the "static bool" return type, braces, and the
   "return true;" / "return false;" statements are missing from this
   listing; the function reports whether a union actually happened.  */
552 unite (unsigned int to, unsigned int from)
554 gcc_assert (to < graph->size && from < graph->size);
555 if (to != from && graph->rep[from] != to)
557 graph->rep[from] = to;
563 /* Create a new constraint consisting of LHS and RHS expressions. */
/* NOTE(review): the return type, braces, the assignments of ret->lhs /
   ret->rhs, and "return ret;" are missing from this listing.  */
566 new_constraint (const struct constraint_expr lhs,
567 const struct constraint_expr rhs)
569 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
575 /* Print out constraint C to FILE. */
/* Prints the constraint as "[&|*]lhs [+ off] = [&|*]rhs [+ off]".
   NOTE(review): the fprintf calls emitting "&" and "*" for the
   ADDRESSOF/DEREF branches appear to be dropped from this listing.  */
578 dump_constraint (FILE *file, constraint_t c)
580 if (c->lhs.type == ADDRESSOF)
582 else if (c->lhs.type == DEREF)
584 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
585 if (c->lhs.offset != 0)
586 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
587 fprintf (file, " = ");
588 if (c->rhs.type == ADDRESSOF)
590 else if (c->rhs.type == DEREF)
592 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
593 if (c->rhs.offset != 0)
594 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
595 fprintf (file, "\n");
598 /* Print out constraint C to stderr. */
601 debug_constraint (constraint_t c)
603 dump_constraint (stderr, c);
606 /* Print out all constraints to FILE */
609 dump_constraints (FILE *file)
613 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
614 dump_constraint (file, c);
617 /* Print out all constraints to stderr. */
620 debug_constraints (void)
622 dump_constraints (stderr);
627 The solver is a simple worklist solver, that works on the following
630 sbitmap changed_nodes = all zeroes;
632 For each node that is not already collapsed:
634 set bit in changed nodes
636 while (changed_count > 0)
638 compute topological ordering for constraint graph
640 find and collapse cycles in the constraint graph (updating
641 changed if necessary)
643 for each node (n) in the graph in topological order:
646 Process each complex constraint associated with the node,
647 updating changed if necessary.
649 For each outgoing edge from n, propagate the solution from n to
650 the destination of the edge, updating changed as necessary.
654 /* Return true if two constraint expressions A and B are equal. */
657 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
659 return a.type == b.type && a.var == b.var && a.offset == b.offset;
662 /* Return true if constraint expression A is less than constraint expression
663 B. This is just arbitrary, but consistent, in order to give them an
/* Total order: compare by type, then var, then offset.
   NOTE(review): the "if (a.var == b.var)" / "else" lines that nest the
   next three returns appear to be dropped from this listing.  */
667 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
669 if (a.type == b.type)
672 return a.offset < b.offset;
674 return a.var < b.var;
677 return a.type < b.type;
680 /* Return true if constraint A is less than constraint B. This is just
681 arbitrary, but consistent, in order to give them an ordering. */
/* Lexicographic order on (lhs, rhs).
   NOTE(review): the "return true;" / "return false;" bodies of the first
   two branches are missing from this listing.  */
684 constraint_less (const constraint_t a, const constraint_t b)
686 if (constraint_expr_less (a->lhs, b->lhs))
688 else if (constraint_expr_less (b->lhs, a->lhs))
691 return constraint_expr_less (a->rhs, b->rhs);
694 /* Return true if two constraints A and B are equal. */
697 constraint_equal (struct constraint a, struct constraint b)
699 return constraint_expr_equal (a.lhs, b.lhs)
700 && constraint_expr_equal (a.rhs, b.rhs);
704 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
/* Binary-search VEC for LOOKFOR; returns the stored constraint or NULL.
   NOTE(review): the declarations of PLACE and FOUND and the NULL/FOUND
   return statements are missing from this listing.  */
707 constraint_vec_find (VEC(constraint_t,heap) *vec,
708 struct constraint lookfor)
716 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
717 if (place >= VEC_length (constraint_t, vec))
719 found = VEC_index (constraint_t, vec, place);
720 if (!constraint_equal (*found, lookfor))
725 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
728 constraint_set_union (VEC(constraint_t,heap) **to,
729 VEC(constraint_t,heap) **from)
/* Insert each element of FROM that TO lacks, keeping TO sorted.  */
734 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
736 if (constraint_vec_find (*to, *c) == NULL)
738 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
740 VEC_safe_insert (constraint_t, heap, *to, place, c);
745 /* Take a solution set SET, add OFFSET to each member of the set, and
746 overwrite SET with the result when done. */
749 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
751 bitmap result = BITMAP_ALLOC (&iteration_obstack);
755 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
757 /* If this is a properly sized variable, only add offset if it's
758 less than end. Otherwise, it is globbed to a single
/* For an in-bounds offset, map the member to the field variable that
   actually covers (offset of I) + OFFSET.  */
761 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
763 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
764 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
767 bitmap_set_bit (result, v->id);
/* Artificial / union / unknown-size variables are single blobs; keep
   them as-is rather than offsetting.  */
769 else if (get_varinfo (i)->is_artificial_var
770 || get_varinfo (i)->has_union
771 || get_varinfo (i)->is_unknown_size_var)
773 bitmap_set_bit (result, i);
777 bitmap_copy (set, result);
778 BITMAP_FREE (result);
781 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
/* NOTE(review): the return type, braces, the "if (inc == 0)" fast path
   guarding the plain bitmap_ior_into, the declarations of TMP/RES, and
   the final BITMAP_FREE/return appear to be missing from this listing.  */
785 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
788 return bitmap_ior_into (to, from);
794 tmp = BITMAP_ALLOC (&iteration_obstack);
795 bitmap_copy (tmp, from);
796 solution_set_add (tmp, inc);
797 res = bitmap_ior_into (to, tmp);
803 /* Insert constraint C into the list of complex constraints for graph
/* Keeps the per-node complex vector sorted and duplicate-free.  */
807 insert_into_complex (constraint_graph_t graph,
808 unsigned int var, constraint_t c)
810 VEC (constraint_t, heap) *complex = graph->complex[var];
811 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
814 /* Only insert constraints that do not already exist. */
815 if (place >= VEC_length (constraint_t, complex)
816 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
817 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
821 /* Condense two variable nodes into a single variable node, by moving
822 all associated info from SRC to TO. */
/* NOTE(review): the body lines retargeting each constraint's lhs/rhs var
   to TO (inside the DEREF branches below) appear to be dropped from this
   listing.  */
825 merge_node_constraints (constraint_graph_t graph, unsigned int to,
831 gcc_assert (find (from) == to);
833 /* Move all complex constraints from src node into to node */
834 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
836 /* In complex constraints for node src, we may have either
837 a = *src, and *src = a, or an offseted constraint which are
838 always added to the rhs node's constraints. */
840 if (c->rhs.type == DEREF)
842 else if (c->lhs.type == DEREF)
847 constraint_set_union (&graph->complex[to], &graph->complex[from]);
848 VEC_free (constraint_t, heap, graph->complex[from]);
849 graph->complex[from] = NULL;
853 /* Remove edges involving NODE from GRAPH. */
856 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
858 if (graph->succs[node])
859 BITMAP_FREE (graph->succs[node]);
862 /* Merge GRAPH nodes FROM and TO into node TO. */
865 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
868 if (graph->indirect_cycles[from] != -1)
870 /* If we have indirect cycles with the from node, and we have
871 none on the to node, the to node has indirect cycles from the
872 from node now that they are unified.
873 If indirect cycles exist on both, unify the nodes that they
874 are in a cycle with, since we know they are in a cycle with
876 if (graph->indirect_cycles[to] == -1)
877 graph->indirect_cycles[to] = graph->indirect_cycles[from];
880 /* Merge all the successor edges. */
881 if (graph->succs[from])
883 if (!graph->succs[to])
884 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
/* NOTE(review): the second argument line of this bitmap_ior_into call
   (graph->succs[from]) appears to be dropped from this listing.  */
885 bitmap_ior_into (graph->succs[to],
/* Finally drop FROM's own edges; it is dead after the merge.  */
889 clear_edges_for_node (graph, from);
893 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
894 it doesn't exist in the graph already. */
897 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
/* Implicit predecessor bitmaps are allocated lazily on first use.  */
903 if (!graph->implicit_preds[to])
904 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
906 if (!bitmap_bit_p (graph->implicit_preds[to], from))
908 stats.num_implicit_edges++;
909 bitmap_set_bit (graph->implicit_preds[to], from);
913 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
914 it doesn't exist in the graph already.
915 Return false if the edge already existed, true otherwise. */
918 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
921 if (!graph->preds[to])
922 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
923 if (!bitmap_bit_p (graph->preds[to], from))
924 bitmap_set_bit (graph->preds[to], from);
927 /* Add a graph edge to GRAPH, going from FROM to TO if
928 it doesn't exist in the graph already.
929 Return false if the edge already existed, true otherwise. */
/* NOTE(review): the self-edge fast path, the declaration of R, the
   stats.num_edges update, and the return statements are missing from
   this listing.  */
932 add_graph_edge (constraint_graph_t graph, unsigned int to,
943 if (!graph->succs[from])
944 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
945 if (!bitmap_bit_p (graph->succs[from], to))
/* Only count edges between real (non-REF) nodes in the statistics.  */
948 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
950 bitmap_set_bit (graph->succs[from], to);
957 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
960 valid_graph_edge (constraint_graph_t graph, unsigned int src,
963 return (graph->succs[dest]
964 && bitmap_bit_p (graph->succs[dest], src));
967 /* Initialize the constraint graph structure to contain SIZE nodes. */
/* NOTE(review): the braces, the declaration of J, the assignment of
   graph->size, and the "graph->rep[j] = j;" initialization appear to be
   missing from this listing.  */
970 init_graph (unsigned int size)
974 graph = XCNEW (struct constraint_graph);
976 graph->succs = XCNEWVEC (bitmap, graph->size);
977 graph->indirect_cycles = XNEWVEC (int, graph->size);
978 graph->rep = XNEWVEC (unsigned int, graph->size);
979 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
980 graph->pe = XCNEWVEC (unsigned int, graph->size);
981 graph->pe_rep = XNEWVEC (int, graph->size);
/* -1 means "no representative assigned yet" for both arrays.  */
983 for (j = 0; j < graph->size; j++)
986 graph->pe_rep[j] = -1;
987 graph->indirect_cycles[j] = -1;
991 /* Build the constraint graph, adding only predecessor edges right now. */
/* Walks all constraints once and records, per node: explicit and implicit
   predecessor edges, initial points-to/pointed-by sets for ADDRESSOF
   constraints, and which nodes are "direct" (eligible for variable
   substitution).  NOTE(review): the function header, braces, and local
   declarations are missing from this listing.  */
994 build_pred_graph (void)
1000 graph->implicit_preds = XCNEWVEC (bitmap, graph->size)
1001 graph->preds = XCNEWVEC (bitmap, graph->size);
1002 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1003 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1004 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1005 graph->points_to = XCNEWVEC (bitmap, graph->size);
1006 graph->eq_rep = XNEWVEC (int, graph->size);
1007 graph->direct_nodes = sbitmap_alloc (graph->size);
1008 graph->pt_used = sbitmap_alloc (graph->size);
1009 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1010 graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
1011 sbitmap_zero (graph->direct_nodes);
1012 sbitmap_zero (graph->pt_used);
/* All non-special variables start out as direct nodes.  */
1014 for (j = 0; j < FIRST_REF_NODE; j++)
1016 if (!get_varinfo (j)->is_special_var)
1017 SET_BIT (graph->direct_nodes, j);
1020 for (j = 0; j < graph->size; j++)
1021 graph->eq_rep[j] = -1;
1023 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1024 graph->indirect_cycles[j] = -1;
1026 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1028 struct constraint_expr lhs = c->lhs;
1029 struct constraint_expr rhs = c->rhs;
1030 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1031 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
/* *x = y: edge from y into the REF node for x.  */
1033 if (lhs.type == DEREF)
1036 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1037 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar)
/* x = *y: edge from the REF node for y into x; x is no longer direct.  */
1039 else if (rhs.type == DEREF)
1042 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1043 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1045 RESET_BIT (graph->direct_nodes, lhsvar);
/* x = &y: record the address-taken fact in points_to / pointed_by.  */
1047 else if (rhs.type == ADDRESSOF)
1050 if (graph->points_to[lhsvar] == NULL)
1051 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1052 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1054 if (graph->pointed_by[rhsvar] == NULL)
1055 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1056 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1058 /* Implicitly, *x = y */
1059 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1061 RESET_BIT (graph->direct_nodes, rhsvar);
1062 bitmap_set_bit (graph->address_taken, rhsvar);
/* Plain copy x = y (skipping special vars and self-copies).  */
1064 else if (lhsvar > anything_id
1065 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1068 add_pred_graph_edge (graph, lhsvar, rhsvar);
1069 /* Implicitly, *x = *y */
1070 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1071 FIRST_REF_NODE + rhsvar);
/* Offsetted constraints make the offsetted side non-direct.  */
1073 else if (lhs.offset != 0 || rhs.offset != 0)
1075 if (rhs.offset != 0)
1076 RESET_BIT (graph->direct_nodes, lhs.var);
1077 else if (lhs.offset != 0)
1078 RESET_BIT (graph->direct_nodes, rhs.var);
1083 /* Build the constraint graph, adding successor edges. */
/* Same constraint walk as build_pred_graph, but adds successor edges on
   the (possibly already-unified) representative nodes, and seeds initial
   solution sets for ADDRESSOF constraints.  NOTE(review): the braces,
   local declarations, and the lhs/rhs initializations are missing from
   this listing.  */
1086 build_succ_graph (void)
1091 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1093 struct constraint_expr lhs;
1094 struct constraint_expr rhs;
1095 unsigned int lhsvar;
1096 unsigned int rhsvar;
/* Work on representatives, since nodes may already be unified.  */
1103 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1104 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1106 if (lhs.type == DEREF)
1108 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1109 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1111 else if (rhs.type == DEREF)
1113 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1114 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
/* x = &y: put y directly into x's initial solution set.  */
1116 else if (rhs.type == ADDRESSOF)
1119 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1120 == get_varinfo_fc (rhs.var)->id);
1121 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1123 else if (lhsvar > anything_id
1124 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1126 add_graph_edge (graph, lhsvar, rhsvar);
1132 /* Changed variables on the last iteration. */
1133 static unsigned int changed_count;
1134 static sbitmap changed;
1136 DEF_VEC_I(unsigned);
1137 DEF_VEC_ALLOC_I(unsigned,heap);
1140 /* Strongly Connected Component visitation info. */
/* NOTE(review): the "struct scc_info {" opener and the visited/deleted
   sbitmaps, dfs array, and current_index members appear to be missing
   from this listing.  */
1147 unsigned int *node_mapping;
1149 VEC(unsigned,heap) *scc_stack;
1153 /* Recursive routine to find strongly connected components in GRAPH.
1154 SI is the SCC info to store the information in, and N is the id of current
1155 graph node we are processing.
1157 This is Tarjan's strongly connected component finding algorithm, as
1158 modified by Nuutila to keep only non-root nodes on the stack.
1159 The algorithm can be found in "On finding the strongly connected
1160 connected components in a directed graph" by Esko Nuutila and Eljas
1161 Soisalon-Soininen, in Information Processing Letters volume 49,
1162 number 1, pages 9-14. */
/* NOTE(review): the function header line, braces, the declarations of I
   and BI, and the "w = si->node_mapping[i]" / "break" lines inside the
   successor loop are missing from this listing.  */
1165 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1169 unsigned int my_dfs;
1171 SET_BIT (si->visited, n);
1172 si->dfs[n] = si->current_index ++;
1173 my_dfs = si->dfs[n];
1175 /* Visit all the successors. */
1176 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1180 if (i > LAST_REF_NODE)
1184 if (TEST_BIT (si->deleted, w))
1187 if (!TEST_BIT (si->visited, w))
1188 scc_visit (graph, si, w);
/* Pull N's lowlink down through the successor's component.  */
1190 unsigned int t = find (w);
1191 unsigned int nnode = find (n);
1192 gcc_assert (nnode == n);
1194 if (si->dfs[t] < si->dfs[nnode])
1195 si->dfs[n] = si->dfs[t];
1199 /* See if any components have been identified. */
1200 if (si->dfs[n] == my_dfs)
1202 if (VEC_length (unsigned, si->scc_stack) > 0
1203 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1205 bitmap scc = BITMAP_ALLOC (NULL);
1206 bool have_ref_node = n >= FIRST_REF_NODE;
1207 unsigned int lowest_node;
1210 bitmap_set_bit (scc, n);
/* Pop every stack entry belonging to this component.  */
1212 while (VEC_length (unsigned, si->scc_stack) != 0
1213 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1215 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1217 bitmap_set_bit (scc, w);
1218 if (w >= FIRST_REF_NODE)
1219 have_ref_node = true;
/* Collapse onto the lowest-numbered node, which must be a real
   (non-REF) node.  */
1222 lowest_node = bitmap_first_set_bit (scc);
1223 gcc_assert (lowest_node < FIRST_REF_NODE);
1225 /* Collapse the SCC nodes into a single node, and mark the
1227 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1229 if (i < FIRST_REF_NODE)
1231 if (unite (lowest_node, i))
1232 unify_nodes (graph, lowest_node, i, false);
/* REF nodes record the indirect-cycle representative instead.  */
1236 unite (lowest_node, i);
1237 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1241 SET_BIT (si->deleted, n);
/* N is not a component root (yet); keep it on the stack.  */
1244 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1247 /* Unify node FROM into node TO, updating the changed count if
1248 necessary when UPDATE_CHANGED is true. */
/* NOTE(review): the "static void" line, braces, and the
   "if (update_changed)" condition selecting between the two stats
   counters below are missing from this listing.  */
1251 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1252 bool update_changed)
1255 gcc_assert (to != from && find (to) == to);
1256 if (dump_file && (dump_flags & TDF_DETAILS))
1257 fprintf (dump_file, "Unifying %s to %s\n",
1258 get_varinfo (from)->name,
1259 get_varinfo (to)->name);
1262 stats.unified_vars_dynamic++;
1264 stats.unified_vars_static++;
1266 merge_graph_nodes (graph, to, from);
1267 merge_node_constraints (graph, to, from);
/* no_tbaa_pruning is sticky: if either node had it, the union does.  */
1269 if (get_varinfo (from)->no_tbaa_pruning)
1270 get_varinfo (to)->no_tbaa_pruning = true;
1272 /* Mark TO as changed if FROM was changed. If TO was already marked
1273 as changed, decrease the changed count. */
1275 if (update_changed && TEST_BIT (changed, from))
1277 RESET_BIT (changed, from);
1278 if (!TEST_BIT (changed, to))
1279 SET_BIT (changed, to);
1282 gcc_assert (changed_count > 0);
1286 if (get_varinfo (from)->solution)
1288 /* If the solution changes because of the merging, we need to mark
1289 the variable as changed. */
1290 if (bitmap_ior_into (get_varinfo (to)->solution,
1291 get_varinfo (from)->solution))
1293 if (update_changed && !TEST_BIT (changed, to))
1295 SET_BIT (changed, to);
/* FROM's sets are dead after the merge; release them.  */
1300 BITMAP_FREE (get_varinfo (from)->solution);
1301 BITMAP_FREE (get_varinfo (from)->oldsolution);
/* Restart TO's oldsolution so the next iteration recomputes deltas.  */
1303 if (stats.iterations > 0)
1305 BITMAP_FREE (get_varinfo (to)->oldsolution);
1306 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
/* Drop any self-edge the union may have created on TO.  */
1309 if (valid_graph_edge (graph, to, to))
1311 if (graph->succs[to])
1312 bitmap_clear_bit (graph->succs[to], to);
1316 /* Information needed to compute the topological ordering of a graph. */
/* NOTE(review): struct header/closing lines are not visible in this
   sampled extract.  */
1320 /* sbitmap of visited nodes. */
1322 /* Array that stores the topological order of the graph, *in
1324 VEC(unsigned,heap) *topo_order;
1328 /* Initialize and return a topological info structure. */
1330 static struct topo_info *
1331 init_topo_info (void)
/* Sized by the global constraint graph; visited starts all-clear.  */
1333 size_t size = graph->size;
1334 struct topo_info *ti = XNEW (struct topo_info);
1335 ti->visited = sbitmap_alloc (size);
1336 sbitmap_zero (ti->visited);
1337 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1342 /* Free the topological sort info pointed to by TI. */
1345 free_topo_info (struct topo_info *ti)
1347 sbitmap_free (ti->visited);
1348 VEC_free (unsigned, heap, ti->topo_order);
1352 /* Visit the graph in topological order, and store the order in the
1353 topo_info structure. */
1356 topo_visit (constraint_graph_t graph, struct topo_info *ti,
/* Standard post-order DFS: recurse into unvisited successors first,
   then push N, so topo_order holds a reverse topological order.  */
1362 SET_BIT (ti->visited, n);
1364 if (graph->succs[n])
1365 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1367 if (!TEST_BIT (ti->visited, j))
1368 topo_visit (graph, ti, j);
1371 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1374 /* Return true if variable N + OFFSET is a legal field of N. */
1377 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1379 varinfo_t ninfo = get_varinfo (n);
1381 /* For things we've globbed to single variables, any offset into the
1382 variable acts like the entire variable, so that it becomes offset
1384 if (ninfo->is_special_var
1385 || ninfo->is_artificial_var
1386 || ninfo->is_unknown_size_var)
/* Otherwise the adjusted offset must stay inside the variable.  */
1391 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1394 /* Process a constraint C that represents x = *y, using DELTA as the
1395 starting solution. */
1398 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1401 unsigned int lhs = c->lhs.var;
1403 bitmap sol = get_varinfo (lhs)->solution;
/* If *y can point to ANYTHING, so can x -- short-circuit.  */
1407 if (bitmap_bit_p (delta, anything_id))
1409 flag = !bitmap_bit_p (sol, anything_id);
1411 bitmap_set_bit (sol, anything_id);
1415 /* For each variable j in delta (Sol(y)), add
1416 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1417 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1419 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1420 if (type_safe (j, &roffset))
/* Find the field variable T at J's offset plus the constraint's
   RHS offset.  */
1423 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1426 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1431 /* Adding edges from the special vars is pointless.
1432 They don't have sets that can change. */
1433 if (get_varinfo (t)->is_special_var)
1434 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1435 /* Merging the solution from ESCAPED needlessly increases
1436 the set. Use ESCAPED as representative instead.
1437 Same for CALLUSED. */
1438 else if ((get_varinfo (t)->id == escaped_id
1439 || get_varinfo (t)->id == callused_id)
1440 && !bitmap_bit_p (sol, get_varinfo (t)->id))
1442 bitmap_set_bit (sol, get_varinfo (t)->id);
/* Otherwise add a copy edge t -> lhs; if the edge is new, pull T's
   current solution in immediately.  */
1445 else if (add_graph_edge (graph, lhs, t))
1446 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1451 /* If the LHS solution changed, mark the var as changed. */
1454 get_varinfo (lhs)->solution = sol;
1455 if (!TEST_BIT (changed, lhs))
1457 SET_BIT (changed, lhs);
1463 /* Process a constraint C that represents *x = y. */
1466 do_ds_constraint (constraint_t c, bitmap delta)
1468 unsigned int rhs = c->rhs.var;
1469 bitmap sol = get_varinfo (rhs)->solution;
/* If y's solution contains ANYTHING, every pointed-to field in DELTA
   must absorb ANYTHING as well.  */
1473 if (bitmap_bit_p (sol, anything_id))
1475 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1477 varinfo_t jvi = get_varinfo (j);
1479 unsigned int loff = c->lhs.offset;
1480 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1483 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1488 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1490 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1491 if (!TEST_BIT (changed, t))
1493 SET_BIT (changed, t);
1501 /* For each member j of delta (Sol(x)), add an edge from y to j and
1502 union Sol(y) into Sol(j) */
1503 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1505 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1506 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1510 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1513 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1517 tmp = get_varinfo (t)->solution;
1519 if (set_union_with_increment (tmp, sol, 0))
1521 get_varinfo (t)->solution = tmp;
/* Re-load SOL: T may alias RHS, and the union above can have
   replaced/changed the solution bitmap.  */
1523 sol = get_varinfo (rhs)->solution;
1524 if (!TEST_BIT (changed, t))
1526 SET_BIT (changed, t);
1534 /* Handle a non-simple (simple meaning requires no iteration),
1535 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1538 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
/* Dispatch on the constraint shape: *x = ... / x = *y / x = y + off.  */
1540 if (c->lhs.type == DEREF)
1542 if (c->rhs.type == ADDRESSOF)
1549 do_ds_constraint (c, delta);
1552 else if (c->rhs.type == DEREF)
/* x = *y: pointless for special vars, their sets never change.  */
1555 if (!(get_varinfo (c->lhs.var)->is_special_var)
1556 do_sd_constraint (graph, c, delta);
/* Remaining case: scalar copy with a field offset on the RHS.  */
1564 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1565 solution = get_varinfo (c->rhs.var)->solution;
1566 tmp = get_varinfo (c->lhs.var)->solution;
1568 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1572 get_varinfo (c->lhs.var)->solution = tmp;
1573 if (!TEST_BIT (changed, c->lhs.var))
1575 SET_BIT (changed, c->lhs.var);
1582 /* Initialize and return a new SCC info structure. */
1584 static struct scc_info *
1585 init_scc_info (size_t size)
1587 struct scc_info *si = XNEW (struct scc_info);
1590 si->current_index = 0;
1591 si->visited = sbitmap_alloc (size);
1592 sbitmap_zero (si->visited);
1593 si->deleted = sbitmap_alloc (size);
1594 sbitmap_zero (si->deleted);
/* dfs numbers start zeroed; node_mapping starts as the identity.  */
1595 si->node_mapping = XNEWVEC (unsigned int, size);
1596 si->dfs = XCNEWVEC (unsigned int, size);
1598 for (i = 0; i < size; i++)
1599 si->node_mapping[i] = i;
1601 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1605 /* Free an SCC info structure pointed to by SI */
1608 free_scc_info (struct scc_info *si)
1610 sbitmap_free (si->visited);
1611 sbitmap_free (si->deleted);
1612 free (si->node_mapping);
1614 VEC_free (unsigned, heap, si->scc_stack);
1619 /* Find indirect cycles in GRAPH that occur, using strongly connected
1620 components, and note them in the indirect cycles map.
1622 This technique comes from Ben Hardekopf and Calvin Lin,
1623 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1624 Lines of Code", submitted to PLDI 2007. */
1627 find_indirect_cycles (constraint_graph_t graph)
1630 unsigned int size = graph->size;
1631 struct scc_info *si = init_scc_info (size);
/* Run SCC detection from every unvisited representative node below
   the REF-node range.  */
1633 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1634 if (!TEST_BIT (si->visited, i) && find (i) == i)
1635 scc_visit (graph, si, i);
1640 /* Compute a topological ordering for GRAPH, and store the result in the
1641 topo_info structure TI. */
1644 compute_topo_order (constraint_graph_t graph,
1645 struct topo_info *ti)
1648 unsigned int size = graph->size;
/* Only representative (find (i) == i) nodes participate.  */
1650 for (i = 0; i != size; ++i)
1651 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1652 topo_visit (graph, ti, i);
1655 /* Structure used to for hash value numbering of pointer equivalence
1658 typedef struct equiv_class_label
1660 unsigned int equivalence_class;
1663 } *equiv_class_label_t;
1664 typedef const struct equiv_class_label *const_equiv_class_label_t;
1666 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1668 static htab_t pointer_equiv_class_table;
1670 /* A hashtable for mapping a bitmap of labels->location equivalence
1672 static htab_t location_equiv_class_table;
1674 /* Hash function for a equiv_class_label_t */
1677 equiv_class_label_hash (const void *p)
1679 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
/* The hash is precomputed (from the labels bitmap) at insertion time.  */
1680 return ecl->hashcode;
1683 /* Equality function for two equiv_class_label_t's. */
1686 equiv_class_label_eq (const void *p1, const void *p2)
1688 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1689 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
/* Equal iff the label bitmaps hold exactly the same bits.  */
1690 return bitmap_equal_p (eql1->labels, eql2->labels);
1693 /* Lookup a equivalence class in TABLE by the bitmap of LABELS it
1697 equiv_class_lookup (htab_t table, bitmap labels)
1700 struct equiv_class_label ecl;
/* Probe with a stack-allocated key; NO_INSERT leaves TABLE unchanged.  */
1702 ecl.labels = labels;
1703 ecl.hashcode = bitmap_hash (labels);
1705 slot = htab_find_slot_with_hash (table, &ecl,
1706 ecl.hashcode, NO_INSERT);
1710 return ((equiv_class_label_t) *slot)->equivalence_class;
1714 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1718 equiv_class_add (htab_t table, unsigned int equivalence_class,
/* Caller guarantees LABELS is not already present (assert below);
   the table takes ownership of the heap-allocated entry.  */
1722 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1724 ecl->labels = labels;
1725 ecl->equivalence_class = equivalence_class;
1726 ecl->hashcode = bitmap_hash (labels);
1728 slot = htab_find_slot_with_hash (table, ecl,
1729 ecl->hashcode, INSERT);
1730 gcc_assert (!*slot);
1731 *slot = (void *) ecl;
1734 /* Perform offline variable substitution.
1736 This is a worst case quadratic time way of identifying variables
1737 that must have equivalent points-to sets, including those caused by
1738 static cycles, and single entry subgraphs, in the constraint graph.
1740 The technique is described in "Exploiting Pointer and Location
1741 Equivalence to Optimize Pointer Analysis. In the 14th International
1742 Static Analysis Symposium (SAS), August 2007." It is known as the
1743 "HU" algorithm, and is equivalent to value numbering the collapsed
1744 constraint graph including evaluating unions.
1746 The general method of finding equivalence classes is as follows:
1747 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1748 Initialize all non-REF nodes to be direct nodes.
1749 For each constraint a = a U {b}, we set pts(a) = pts(a) U {fresh
1751 For each constraint containing the dereference, we also do the same
1754 We then compute SCC's in the graph and unify nodes in the same SCC,
1757 For each non-collapsed node x:
1758 Visit all unvisited explicit incoming edges.
1759 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1761 Lookup the equivalence class for pts(x).
1762 If we found one, equivalence_class(x) = found class.
1763 Otherwise, equivalence_class(x) = new class, and new_class is
1764 added to the lookup table.
1766 All direct nodes with the same equivalence class can be replaced
1767 with a single representative node.
1768 All unlabeled nodes (label == 0) are not pointers and all edges
1769 involving them can be eliminated.
1770 We perform these optimizations during rewrite_constraints
1772 In addition to pointer equivalence class finding, we also perform
1773 location equivalence class finding. This is the set of variables
1774 that always appear together in points-to sets. We use this to
1775 compress the size of the points-to sets. */
1777 /* Current maximum pointer equivalence class id. */
1778 static int pointer_equiv_class;
1780 /* Current maximum location equivalence class id. */
1781 static int location_equiv_class;
1783 /* Recursive routine to find strongly connected components in GRAPH,
1784 and label its nodes with DFS numbers. */
1787 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1791 unsigned int my_dfs;
/* Tarjan-style SCC walk over the predecessor graph; node_mapping
   is used in place of find() for representatives here.  */
1793 gcc_assert (si->node_mapping[n] == n);
1794 SET_BIT (si->visited, n);
1795 si->dfs[n] = si->current_index ++;
1796 my_dfs = si->dfs[n];
1798 /* Visit all the successors. */
1799 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1801 unsigned int w = si->node_mapping[i];
1803 if (TEST_BIT (si->deleted, w))
1806 if (!TEST_BIT (si->visited, w))
1807 condense_visit (graph, si, w);
1809 unsigned int t = si->node_mapping[w];
1810 unsigned int nnode = si->node_mapping[n];
1811 gcc_assert (nnode == n);
/* Propagate the lowest reachable DFS number back to N (lowlink).  */
1813 if (si->dfs[t] < si->dfs[nnode])
1814 si->dfs[n] = si->dfs[t];
1818 /* Visit all the implicit predecessors. */
1819 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1821 unsigned int w = si->node_mapping[i];
1823 if (TEST_BIT (si->deleted, w))
1826 if (!TEST_BIT (si->visited, w))
1827 condense_visit (graph, si, w);
1829 unsigned int t = si->node_mapping[w];
1830 unsigned int nnode = si->node_mapping[n];
1831 gcc_assert (nnode == n);
1833 if (si->dfs[t] < si->dfs[nnode])
1834 si->dfs[n] = si->dfs[t];
1838 /* See if any components have been identified. */
1839 if (si->dfs[n] == my_dfs)
/* N is the root of an SCC: pop members off the stack, map them to N,
   and merge their edge/points-to bitmaps into N.  */
1841 while (VEC_length (unsigned, si->scc_stack) != 0
1842 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1844 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1845 si->node_mapping[w] = n;
/* If any member is indirect, the whole SCC becomes indirect.  */
1847 if (!TEST_BIT (graph->direct_nodes, w))
1848 RESET_BIT (graph->direct_nodes, n);
1850 /* Unify our nodes. */
1851 if (graph->preds[w])
1853 if (!graph->preds[n])
1854 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1855 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1857 if (graph->implicit_preds[w])
1859 if (!graph->implicit_preds[n])
1860 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1861 bitmap_ior_into (graph->implicit_preds[n],
1862 graph->implicit_preds[w]);
1864 if (graph->points_to[w])
1866 if (!graph->points_to[n])
1867 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1868 bitmap_ior_into (graph->points_to[n],
1869 graph->points_to[w]);
/* Count incoming edges per representative; used by label_visit to
   know when a points_to bitmap can be freed early.  */
1871 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1873 unsigned int rep = si->node_mapping[i];
1874 graph->number_incoming[rep]++;
1877 SET_BIT (si->deleted, n);
1880 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1883 /* Label pointer equivalences. */
1886 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1890 SET_BIT (si->visited, n);
1892 if (!graph->points_to[n])
1893 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1895 /* Label and union our incoming edges's points to sets. */
1896 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1898 unsigned int w = si->node_mapping[i];
/* Post-order over predecessors so their labels exist before use.  */
1899 if (!TEST_BIT (si->visited, w))
1900 label_visit (graph, si, w);
1902 /* Skip unused edges */
1903 if (w == n || graph->pointer_label[w] == 0)
1905 graph->number_incoming[w]--;
1908 if (graph->points_to[w])
1909 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
1911 /* If all incoming edges to w have been processed and
1912 graph->points_to[w] was not stored in the hash table, we can
1914 graph->number_incoming[w]--;
1915 if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
1917 BITMAP_FREE (graph->points_to[w]);
1920 /* Indirect nodes get fresh variables. */
1921 if (!TEST_BIT (graph->direct_nodes, n))
1922 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
/* Value-number the points-to set: reuse an existing label for an
   identical set, otherwise mint a new pointer equivalence class.
   Nodes left with label 0 (empty set) are non-pointers.  */
1924 if (!bitmap_empty_p (graph->points_to[n]))
1926 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
1927 graph->points_to[n]);
1930 SET_BIT (graph->pt_used, n);
1931 label = pointer_equiv_class++;
1932 equiv_class_add (pointer_equiv_class_table,
1933 label, graph->points_to[n]);
1935 graph->pointer_label[n] = label;
1939 /* Perform offline variable substitution, discovering equivalence
1940 classes, and eliminating non-pointer variables. */
1942 static struct scc_info *
1943 perform_var_substitution (constraint_graph_t graph)
1946 unsigned int size = graph->size;
1947 struct scc_info *si = init_scc_info (size);
/* Class id 0 is reserved to mean "non-pointer", so counters start
   at 1.  The tables own their entries (freed via the htab free fn).  */
1949 bitmap_obstack_initialize (&iteration_obstack);
1950 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
1951 equiv_class_label_eq, free);
1952 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
1953 equiv_class_label_eq, free);
1954 pointer_equiv_class = 1;
1955 location_equiv_class = 1;
1957 /* Condense the nodes, which means to find SCC's, count incoming
1958 predecessors, and unite nodes in SCC's. */
1959 for (i = 0; i < FIRST_REF_NODE; i++)
1960 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1961 condense_visit (graph, si, si->node_mapping[i]);
1963 sbitmap_zero (si->visited);
1964 /* Actually the label the nodes for pointer equivalences */
1965 for (i = 0; i < FIRST_REF_NODE; i++)
1966 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1967 label_visit (graph, si, si->node_mapping[i]);
1969 /* Calculate location equivalence labels. */
1970 for (i = 0; i < FIRST_REF_NODE; i++)
1977 if (!graph->pointed_by[i])
1979 pointed_by = BITMAP_ALLOC (&iteration_obstack)
1981 /* Translate the pointed-by mapping for pointer equivalence
1983 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
1985 bitmap_set_bit (pointed_by,
1986 graph->pointer_label[si->node_mapping[j]]);
1988 /* The original pointed_by is now dead. */
1989 BITMAP_FREE (graph->pointed_by[i]);
1991 /* Look up the location equivalence label if one exists, or make
1993 label = equiv_class_lookup (location_equiv_class_table,
1997 label = location_equiv_class++;
1998 equiv_class_add (location_equiv_class_table,
2003 if (dump_file && (dump_flags & TDF_DETAILS))
2004 fprintf (dump_file, "Found location equivalence for node %s\n",
2005 get_varinfo (i)->name);
2006 BITMAP_FREE (pointed_by);
2008 graph->loc_label[i] = label;
2012 if (dump_file && (dump_flags & TDF_DETAILS))
2013 for (i = 0; i < FIRST_REF_NODE; i++)
2015 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2017 "Equivalence classes for %s node id %d:%s are pointer: %d"
2019 direct_node ? "Direct node" : "Indirect node", i,
2020 get_varinfo (i)->name,
2021 graph->pointer_label[si->node_mapping[i]],
2022 graph->loc_label[si->node_mapping[i]]);
2025 /* Quickly eliminate our non-pointer variables. */
2027 for (i = 0; i < FIRST_REF_NODE; i++)
2029 unsigned int node = si->node_mapping[i];
/* Label 0 means the node's points-to set was empty: not a pointer,
   so all its edges can be dropped.  */
2031 if (graph->pointer_label[node] == 0)
2033 if (dump_file && (dump_flags & TDF_DETAILS))
2035 "%s is a non-pointer variable, eliminating edges.\n",
2036 get_varinfo (node)->name);
2037 stats.nonpointer_vars++;
2038 clear_edges_for_node (graph, node);
2049 free_var_substitution_info (struct scc_info *si)
2052 free (graph->pointer_label);
2053 free (graph->loc_label);
2054 free (graph->pointed_by);
2055 free (graph->points_to);
2056 free (graph->number_incoming);
2057 free (graph->eq_rep);
2058 sbitmap_free (graph->direct_nodes);
2059 sbitmap_free (graph->pt_used);
2060 htab_delete (pointer_equiv_class_table);
2061 htab_delete (location_equiv_class_table);
2062 bitmap_obstack_release (&iteration_obstack);
2065 /* Return an existing node that is equivalent to NODE, which has
2066 equivalence class LABEL, if one exists. Return NODE otherwise. */
2069 find_equivalent_node (constraint_graph_t graph,
2070 unsigned int node, unsigned int label)
2072 /* If the address version of this variable is unused, we can
2073 substitute it for anything else with the same label.
2074 Otherwise, we know the pointers are equivalent, but not the
2075 locations, and we can unite them later. */
2077 if (!bitmap_bit_p (graph->address_taken, node))
2079 gcc_assert (label < graph->size);
/* eq_rep[label] == -1 means no representative chosen yet.  */
2081 if (graph->eq_rep[label] != -1)
2083 /* Unify the two variables since we know they are equivalent. */
2084 if (unite (graph->eq_rep[label], node))
2085 unify_nodes (graph, graph->eq_rep[label], node, false);
2086 return graph->eq_rep[label];
2090 graph->eq_rep[label] = node;
2091 graph->pe_rep[label] = node;
/* Address-taken: record the pointer-equivalence label only; the
   actual uniting is deferred to unite_pointer_equivalences.  */
2096 gcc_assert (label < graph->size);
2097 graph->pe[node] = label;
2098 if (graph->pe_rep[label] == -1)
2099 graph->pe_rep[label] = node;
2105 /* Unite pointer equivalent but not location equivalent nodes in
2106 GRAPH. This may only be performed once variable substitution is
2110 unite_pointer_equivalences (constraint_graph_t graph)
2114 /* Go through the pointer equivalences and unite them to their
2115 representative, if they aren't already. */
2116 for (i = 0; i < FIRST_REF_NODE; i++)
2118 unsigned int label = graph->pe[i]
2121 int label_rep = graph->pe_rep[label];
/* -1: the label never got a representative; nothing to unite with.  */
2123 if (label_rep == -1)
2126 label_rep = find (label_rep);
2127 if (label_rep >= 0 && unite (label_rep, find (i)))
2128 unify_nodes (graph, label_rep, i, false);
2133 /* Move complex constraints to the GRAPH nodes they belong to. */
2136 move_complex_constraints (constraint_graph_t graph)
2141 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2145 struct constraint_expr lhs = c->lhs;
2146 struct constraint_expr rhs = c->rhs;
/* *x = y is stored on x; x = *y on y; offset copies on the RHS var.  */
2148 if (lhs.type == DEREF)
2150 insert_into_complex (graph, lhs.var, c);
2152 else if (rhs.type == DEREF)
2154 if (!(get_varinfo (lhs.var)->is_special_var))
2155 insert_into_complex (graph, rhs.var, c);
2157 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2158 && (lhs.offset != 0 || rhs.offset != 0))
2160 insert_into_complex (graph, rhs.var, c);
2167 /* Optimize and rewrite complex constraints while performing
2168 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2169 result of perform_variable_substitution. */
2172 rewrite_constraints (constraint_graph_t graph,
2173 struct scc_info *si)
/* At this point no online unification has happened yet: every node
   must still be its own representative.  */
2179 for (j = 0; j < graph->size; j++)
2180 gcc_assert (find (j) == j);
2182 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2184 struct constraint_expr lhs = c->lhs;
2185 struct constraint_expr rhs = c->rhs;
2186 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2187 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2188 unsigned int lhsnode, rhsnode;
2189 unsigned int lhslabel, rhslabel;
2191 lhsnode = si->node_mapping[lhsvar];
2192 rhsnode = si->node_mapping[rhsvar];
2193 lhslabel = graph->pointer_label[lhsnode];
2194 rhslabel = graph->pointer_label[rhsnode];
2196 /* See if it is really a non-pointer variable, and if so, ignore
/* A label of 0 on either side means that side is a non-pointer, so
   the constraint is dropped (slot replaced with NULL).  */
2200 if (dump_file && (dump_flags & TDF_DETAILS))
2203 fprintf (dump_file, "%s is a non-pointer variable,"
2204 "ignoring constraint:",
2205 get_varinfo (lhs.var)->name);
2206 dump_constraint (dump_file, c);
2208 VEC_replace (constraint_t, constraints, i, NULL);
2214 if (dump_file && (dump_flags & TDF_DETAILS))
2217 fprintf (dump_file, "%s is a non-pointer variable,"
2218 "ignoring constraint:",
2219 get_varinfo (rhs.var)->name);
2220 dump_constraint (dump_file, c);
2222 VEC_replace (constraint_t, constraints, i, NULL);
/* Collapse both sides onto their equivalence-class representative.  */
2226 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2227 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2228 c->lhs.var = lhsvar;
2229 c->rhs.var = rhsvar;
2234 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2235 part of an SCC, false otherwise. */
2238 eliminate_indirect_cycles (unsigned int node)
2240 if (graph->indirect_cycles[node] != -1
2241 && !bitmap_empty_p (get_varinfo (node)->solution))
2244 VEC(unsigned,heap) *queue = NULL;
2246 unsigned int to = find (graph->indirect_cycles[node]);
2249 /* We can't touch the solution set and call unify_nodes
2250 at the same time, because unify_nodes is going to do
2251 bitmap unions into it. */
/* So first collect the representatives to merge...  */
2253 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2255 if (find (i) == i && i != to)
2258 VEC_safe_push (unsigned, heap, queue, i);
/* ...then unify them into TO outside the bitmap iteration.  */
2263 VEC_iterate (unsigned, queue, queuepos, i);
2266 unify_nodes (graph, to, i, true);
2268 VEC_free (unsigned, heap, queue);
2274 /* Solve the constraint graph GRAPH using our worklist solver.
2275 This is based on the PW* family of solvers from the "Efficient Field
2276 Sensitive Pointer Analysis for C" paper.
2277 It works by iterating over all the graph nodes, processing the complex
2278 constraints and propagating the copy constraints, until everything stops
2279 changed. This corresponds to steps 6-8 in the solving list given above. */
2282 solve_graph (constraint_graph_t graph)
2284 unsigned int size = graph->size;
2289 changed = sbitmap_alloc (size);
2290 sbitmap_zero (changed);
2292 /* Mark all initial non-collapsed nodes as changed. */
2293 for (i = 0; i < size; i++)
2295 varinfo_t ivi = get_varinfo (i);
/* Only representatives with a non-empty solution and either outgoing
   edges or complex constraints need initial processing.  */
2296 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2297 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2298 || VEC_length (constraint_t, graph->complex[i]) > 0))
2300 SET_BIT (changed, i);
2305 /* Allocate a bitmap to be used to store the changed bits. */
2306 pts = BITMAP_ALLOC (&pta_obstack);
/* Outer fixpoint loop: each pass walks nodes in topological order
   until no node remains marked changed.  */
2308 while (changed_count > 0)
2311 struct topo_info *ti = init_topo_info ();
2314 bitmap_obstack_initialize (&iteration_obstack);
2316 compute_topo_order (graph, ti);
2318 while (VEC_length (unsigned, ti->topo_order) != 0)
2321 i = VEC_pop (unsigned, ti->topo_order);
2323 /* If this variable is not a representative, skip it. */
2327 /* In certain indirect cycle cases, we may merge this
2328 variable to another. */
2329 if (eliminate_indirect_cycles (i) && find (i) != i)
2332 /* If the node has changed, we need to process the
2333 complex constraints and outgoing edges again. */
2334 if (TEST_BIT (changed, i))
2339 VEC(constraint_t,heap) *complex = graph->complex[i];
2340 bool solution_empty;
2342 RESET_BIT (changed, i);
2345 /* Compute the changed set of solution bits. */
/* PTS = solution - oldsolution: only the delta is propagated.  */
2346 bitmap_and_compl (pts, get_varinfo (i)->solution,
2347 get_varinfo (i)->oldsolution);
2349 if (bitmap_empty_p (pts))
2352 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2354 solution = get_varinfo (i)->solution;
2355 solution_empty = bitmap_empty_p (solution);
2357 /* Process the complex constraints */
2358 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2360 /* XXX: This is going to unsort the constraints in
2361 some cases, which will occasionally add duplicate
2362 constraints during unification. This does not
2363 affect correctness. */
2364 c->lhs.var = find (c->lhs.var);
2365 c->rhs.var = find (c->rhs.var);
2367 /* The only complex constraint that can change our
2368 solution to non-empty, given an empty solution,
2369 is a constraint where the lhs side is receiving
2370 some set from elsewhere. */
2371 if (!solution_empty || c->lhs.type != DEREF)
2372 do_complex_constraint (graph, c, pts);
2375 solution_empty = bitmap_empty_p (solution);
2378 /* Do not propagate the ESCAPED/CALLUSED solutions. */
2380 && i != callused_id)
2384 /* Propagate solution to all successors. */
2385 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2391 unsigned int to = find (j);
2392 tmp = get_varinfo (to)->solution;
2395 /* Don't try to propagate to ourselves. */
2399 flag = set_union_with_increment (tmp, pts, 0);
2403 get_varinfo (to)->solution = tmp;
2404 if (!TEST_BIT (changed, to))
2406 SET_BIT (changed, to);
2414 free_topo_info (ti);
2415 bitmap_obstack_release (&iteration_obstack);
2419 sbitmap_free (changed);
2420 bitmap_obstack_release (&oldpta_obstack);
2423 /* Map from trees to variable infos. */
/* Populated by insert_vi_for_tree; queried by lookup/get_vi_for_tree.  */
2424 static struct pointer_map_t *vi_for_tree;
2427 /* Insert VI as the variable info for tree T in the vi_for_tree map.
   T must not already have an entry (asserted below). */
2430 insert_vi_for_tree (tree t, varinfo_t vi)
2432 void **slot = pointer_map_insert (vi_for_tree, t);
2434 gcc_assert (*slot == NULL);
2438 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2439 exist in the map, return NULL, otherwise, return the varinfo we found. */
2442 lookup_vi_for_tree (tree t)
2444 void **slot = pointer_map_contains (vi_for_tree, t);
2448 return (varinfo_t) *slot;
2451 /* Return a printable name for DECL */
2454 alias_get_name (tree decl)
2456 const char *res = get_name (decl);
2458 int num_printed = 0;
/* SSA names render as "base_version"; anonymous decls as "D.uid".  */
2467 if (TREE_CODE (decl) == SSA_NAME)
2469 num_printed = asprintf (&temp, "%s_%u",
2470 alias_get_name (SSA_NAME_VAR (decl)),
2471 SSA_NAME_VERSION (decl));
2473 else if (DECL_P (decl))
2475 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
/* Copy into GC memory so the asprintf buffer need not outlive us.  */
2477 if (num_printed > 0)
2479 res = ggc_strdup (temp);
2485 /* Find the variable id for tree T in the map.
2486 If T doesn't exist in the map, create an entry for it and return it. */
2489 get_vi_for_tree (tree t)
2491 void **slot = pointer_map_contains (vi_for_tree, t);
/* Miss: lazily create a variable info for T.  */
2493 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2495 return (varinfo_t) *slot;
2498 /* Get a constraint expression from an SSA_VAR_P node. */
2500 static struct constraint_expr
2501 get_constraint_exp_from_ssa_var (tree t)
2503 struct constraint_expr cexpr;
2505 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2507 /* For parameters, get at the points-to set for the actual parm
2509 if (TREE_CODE (t) == SSA_NAME
2510 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2511 && SSA_NAME_IS_DEFAULT_DEF (t))
2512 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2514 cexpr.type = SCALAR;
2516 cexpr.var = get_vi_for_tree (t)->id;
2517 /* If we determine the result is "anything", and we know this is readonly,
2518 say it points to readonly memory instead. */
2519 if (cexpr.var == anything_id && TREE_READONLY (t))
2521 cexpr.type = ADDRESSOF;
2522 cexpr.var = readonly_id;
2529 /* Process a completed constraint T, and add it to the constraint
2530 list. FROM_CALL is true if this is a constraint coming from a
2531 call, which means any DEREFs we see are "may-deref's", not
2535 process_constraint_1 (constraint_t t, bool from_call)
2537 struct constraint_expr rhs = t->rhs;
2538 struct constraint_expr lhs = t->lhs;
2540 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2541 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2543 if (!use_field_sensitive)
2549 /* ANYTHING == ANYTHING is pointless. */
2550 if (lhs.var == anything_id && rhs.var == anything_id)
2553 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
2554 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
/* Sides swapped above (not visible here); re-process the flipped
   constraint.  */
2559 process_constraint_1 (t, from_call);
2561 /* This can happen in our IR with things like n->a = *p */
2562 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2564 /* Split into tmp = *rhs, *lhs = tmp */
2565 tree rhsdecl = get_varinfo (rhs.var)->decl;
2566 tree pointertype = TREE_TYPE (rhsdecl);
2567 tree pointedtotype = TREE_TYPE (pointertype);
2568 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2569 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2571 /* If this is an aggregate of known size, we should have passed
2572 this off to do_structure_copy, and it should have broken it
2574 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2575 || get_varinfo (rhs.var)->is_unknown_size_var);
2577 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2578 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2580 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2582 /* Split into tmp = &rhs, *lhs = tmp */
2583 tree rhsdecl = get_varinfo (rhs.var)->decl;
2584 tree pointertype = TREE_TYPE (rhsdecl);
2585 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2586 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2588 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2589 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
/* Base case: constraint needs no rewriting; append to the list.  */
2593 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2594 VEC_safe_push (constraint_t, heap, constraints, t);
2599 /* Process constraint T, performing various simplifications and then
2600 adding it to our list of overall constraints. */
2603 process_constraint (constraint_t t)
/* Thin wrapper: delegate with FROM_CALL == false (not a call site).  */
2605 process_constraint_1 (t, false);
2608 /* Return true if T is a variable of a type that could contain
2612 could_have_pointers (tree t)
2614 tree type = TREE_TYPE (t);
/* Pointers, aggregates (which may embed pointers), and complex
   types all qualify; the return statements are not visible in
   this extract.  */
2616 if (POINTER_TYPE_P (type)
2617 || AGGREGATE_TYPE_P (type)
2618 || TREE_CODE (type) == COMPLEX_TYPE)
2624 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2627 static unsigned HOST_WIDE_INT
2628 bitpos_of_field (const tree fdecl)
/* Variable-position fields cannot be given a constant bit offset;
   the early-exit for that case is not visible in this extract.  */
2631 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2632 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
/* Byte offset scaled to bits, plus the residual bit offset.
   NOTE(review): uses a literal 8 rather than BITS_PER_UNIT.  */
2635 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2636 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2640 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
2643 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2646 HOST_WIDE_INT bitsize = -1;
2647 HOST_WIDE_INT bitmaxsize = -1;
2648 HOST_WIDE_INT bitpos;
2650 struct constraint_expr *result;
/* Remember the incoming vector length so we can assert exactly one
   element was appended below.  */
2651 unsigned int beforelength = VEC_length (ce_s, *results);
2653 /* Some people like to do cute things like take the address of
/* Strip down to the base object to detect &0->a.b-style references.  */
2656 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2657 forzero = TREE_OPERAND (forzero, 0);
2659 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2661 struct constraint_expr temp;
/* Offsets from the zero pointer are treated as plain integers.  */
2664 temp.var = integer_id;
2666 VEC_safe_push (ce_s, heap, *results, &temp);
/* Decompose the reference into base + bit position/extent.  */
2670 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2672 get_constraint_for (t, results);
2673 result = VEC_last (ce_s, *results);
2674 result->offset = bitpos;
2676 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2678 /* This can also happen due to weird offsetof type macros. */
2679 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2680 result->type = SCALAR;
2682 if (result->type == SCALAR)
2684 /* In languages like C, you can access one past the end of an
2685 array. You aren't allowed to dereference it, so we can
2686 ignore this constraint. When we handle pointer subtraction,
2687 we may have to do something cute here. */
2689 if (result->offset < get_varinfo (result->var)->fullsize
2692 /* It's also not true that the constraint will actually start at the
2693 right offset, it may start in some padding. We only care about
2694 setting the constraint to the first actual field it touches, so
/* Walk the field chain for the first sub-variable overlapping the
   accessed bit range.  */
2697 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2699 if (ranges_overlap_p (curr->offset, curr->size,
2700 result->offset, bitmaxsize))
2702 result->var = curr->id;
2706 /* assert that we found *some* field there. The user couldn't be
2707 accessing *only* padding. */
2708 /* Still the user could access one past the end of an array
2709 embedded in a struct resulting in accessing *only* padding. */
2710 gcc_assert (curr || ref_contains_array_ref (orig_t));
2712 else if (bitmaxsize == 0)
2714 if (dump_file && (dump_flags & TDF_DETAILS))
2715 fprintf (dump_file, "Access to zero-sized part of variable,"
2719 if (dump_file && (dump_flags & TDF_DETAILS))
2720 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2724 else if (bitmaxsize == -1)
2726 /* We can't handle DEREF constraints with unknown size, we'll
2727 get the wrong answer. Punt and return anything. */
2728 result->var = anything_id;
2734 /* Dereference the constraint expression CONS, and return the result.
2735 DEREF (ADDRESSOF) = SCALAR
2736 DEREF (SCALAR) = DEREF
2737 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2738 This is needed so that we can handle dereferencing DEREF constraints. */
2741 do_deref (VEC (ce_s, heap) **constraints)
2743 struct constraint_expr *c;
/* Apply the dereference rule above to each expression in place.  */
2746 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2748 if (c->type == SCALAR)
2750 else if (c->type == ADDRESSOF)
2752 else if (c->type == DEREF)
/* Double deref: introduce a temporary, constrain tmp = *c, then
   the result is a DEREF of the temporary.  */
2754 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2755 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2756 process_constraint (new_constraint (tmplhs, *c));
2757 c->var = tmplhs.var;
2764 /* Given a tree T, return the constraint expression for it. */
2767 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2769 struct constraint_expr temp;
2771 /* x = integer is all glommed to a single variable, which doesn't
2772 point to anything by itself. That is, of course, unless it is an
2773 integer constant being treated as a pointer, in which case, we
2774 will return that this is really the addressof anything. This
2775 happens below, since it will fall into the default case. The only
2776 case we know something about an integer treated like a pointer is
2777 when it is the NULL pointer, and then we just say it points to
2779 if (TREE_CODE (t) == INTEGER_CST
2780 && integer_zerop (t))
/* The NULL pointer points to the special NOTHING variable.  */
2782 temp.var = nothing_id;
2783 temp.type = ADDRESSOF;
2785 VEC_safe_push (ce_s, heap, *results, &temp);
2789 /* String constants are read-only. */
2790 if (TREE_CODE (t) == STRING_CST)
2792 temp.var = readonly_id;
2795 VEC_safe_push (ce_s, heap, *results, &temp);
/* Dispatch on the tree code class; several case labels and braces
   are not visible in this extract.  */
2799 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2801 case tcc_expression:
2804 switch (TREE_CODE (t))
/* ADDR_EXPR handling (the case label itself is elided): take the
   constraint of the operand and flip DEREF/SCALAR to ADDRESSOF.  */
2808 struct constraint_expr *c;
2810 tree exp = TREE_OPERAND (t, 0);
2811 tree pttype = TREE_TYPE (TREE_TYPE (t));
2813 get_constraint_for (exp, results);
2816 /* Complex types are special. Taking the address of one
2817 allows you to access either part of it through that
2819 if (VEC_length (ce_s, *results) == 1 &&
2820 TREE_CODE (pttype) == COMPLEX_TYPE)
2822 struct constraint_expr *origrhs;
2824 struct constraint_expr tmp;
2826 gcc_assert (VEC_length (ce_s, *results) == 1);
2827 origrhs = VEC_last (ce_s, *results);
2829 VEC_pop (ce_s, *results);
/* Expand to every sub-variable (both parts of the complex).  */
2830 origvar = get_varinfo (origrhs->var);
2831 for (; origvar; origvar = origvar->next)
2833 tmp.var = origvar->id;
2834 VEC_safe_push (ce_s, heap, *results, &tmp);
2838 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2840 if (c->type == DEREF)
2843 c->type = ADDRESSOF;
2849 /* XXX: In interprocedural mode, if we didn't have the
2850 body, we would need to do *each pointer argument =
/* CALL_EXPR handling (case label elided): a malloc/alloca-like
   call yields the address of a synthetic heap variable.  */
2852 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2855 tree heapvar = heapvar_lookup (t);
2857 if (heapvar == NULL)
2859 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2860 DECL_EXTERNAL (heapvar) = 1;
2861 get_var_ann (heapvar)->is_heapvar = 1;
2862 if (gimple_referenced_vars (cfun))
2863 add_referenced_var (heapvar);
/* Cache the heap var so repeated queries for T reuse it.  */
2864 heapvar_insert (t, heapvar);
2867 temp.var = create_variable_info_for (heapvar,
2868 alias_get_name (heapvar));
2870 vi = get_varinfo (temp.var);
2871 vi->is_artificial_var = 1;
2872 vi->is_heap_var = 1;
2873 temp.type = ADDRESSOF;
2875 VEC_safe_push (ce_s, heap, *results, &temp);
/* Other calls: conservatively &ANYTHING.  */
2880 temp.var = anything_id;
2883 VEC_safe_push (ce_s, heap, *results, &temp);
/* Default for unhandled expressions: &ANYTHING.  */
2889 temp.type = ADDRESSOF;
2890 temp.var = anything_id;
2892 VEC_safe_push (ce_s, heap, *results, &temp);
/* tcc_reference class (label elided): INDIRECT_REF recurses then
   applies do_deref — presumably; that call is not visible here.  */
2899 switch (TREE_CODE (t))
2903 get_constraint_for (TREE_OPERAND (t, 0), results);
2908 case ARRAY_RANGE_REF:
2910 get_constraint_for_component_ref (t, results);
2914 temp.type = ADDRESSOF;
2915 temp.var = anything_id;
2917 VEC_safe_push (ce_s, heap, *results, &temp);
/* tcc_unary class (label elided): look through pointer-preserving
   conversions only.  */
2924 switch (TREE_CODE (t))
2928 tree op = TREE_OPERAND (t, 0);
2930 /* Cast from non-pointer to pointers are bad news for us.
2931 Anything else, we see through */
2932 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2933 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2935 get_constraint_for (op, results);
2943 temp.type = ADDRESSOF;
2944 temp.var = anything_id;
2946 VEC_safe_push (ce_s, heap, *results, &temp);
2951 case tcc_exceptional:
2953 switch (TREE_CODE (t))
/* PHI nodes are represented by their result.  */
2957 get_constraint_for (PHI_RESULT (t), results);
/* SSA_NAME (label elided): direct lookup of its variable info.  */
2963 struct constraint_expr temp;
2964 temp = get_constraint_exp_from_ssa_var (t);
2965 VEC_safe_push (ce_s, heap, *results, &temp);
2971 temp.type = ADDRESSOF;
2972 temp.var = anything_id;
2974 VEC_safe_push (ce_s, heap, *results, &temp);
2979 case tcc_declaration:
2981 struct constraint_expr temp;
2982 temp = get_constraint_exp_from_ssa_var (t);
2983 VEC_safe_push (ce_s, heap, *results, &temp);
/* Anything else: conservatively &ANYTHING.  */
2988 temp.type = ADDRESSOF;
2989 temp.var = anything_id;
2991 VEC_safe_push (ce_s, heap, *results, &temp);
2998 /* Handle the structure copy case where we have a simple structure copy
2999 between LHS and RHS that is of SIZE (in bits)
3001 For each field of the lhs variable (lhsfield)
3002 For each field of the rhs variable at lhsfield.offset (rhsfield)
3003 add the constraint lhsfield = rhsfield
3005 If we fail due to some kind of type unsafety or other thing we
3006 can't handle, return false. We expect the caller to collapse the
3007 variable in that case. */
3010 do_simple_structure_copy (const struct constraint_expr lhs,
3011 const struct constraint_expr rhs,
3012 const unsigned HOST_WIDE_INT size)
3014 varinfo_t p = get_varinfo (lhs.var);
/* PSTART is presumably initialized to P's starting offset on an
   elided line — TODO confirm; it is read before any visible write.  */
3015 unsigned HOST_WIDE_INT pstart, last;
3017 last = p->offset + size;
/* Walk each lhs field within the copied range.  */
3018 for (; p && p->offset < last; p = p->next)
3021 struct constraint_expr templhs = lhs;
3022 struct constraint_expr temprhs = rhs;
3023 unsigned HOST_WIDE_INT fieldoffset;
3025 templhs.var = p->id;
/* Find the rhs sub-variable at the matching relative offset.  */
3026 q = get_varinfo (temprhs.var);
3027 fieldoffset = p->offset - pstart;
3028 q = first_vi_for_offset (q, q->offset + fieldoffset);
3031 temprhs.var = q->id;
3032 process_constraint (new_constraint (templhs, temprhs));
3038 /* Handle the structure copy case where we have a structure copy between a
3039 aggregate on the LHS and a dereference of a pointer on the RHS
3040 that is of SIZE (in bits)
3042 For each field of the lhs variable (lhsfield)
3043 rhs.offset = lhsfield->offset
3044 add the constraint lhsfield = rhs
3048 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3049 const struct constraint_expr rhs,
3050 const unsigned HOST_WIDE_INT size)
3052 varinfo_t p = get_varinfo (lhs.var);
/* PSTART is presumably initialized on an elided line — TODO confirm.  */
3053 unsigned HOST_WIDE_INT pstart,last;
3055 last = p->offset + size;
3057 for (; p && p->offset < last; p = p->next)
3060 struct constraint_expr templhs = lhs;
3061 struct constraint_expr temprhs = rhs;
3062 unsigned HOST_WIDE_INT fieldoffset;
/* SCALAR lhs can name the field directly; otherwise encode the
   field as an offset on the (deref) expression.  */
3065 if (templhs.type == SCALAR)
3066 templhs.var = p->id;
3068 templhs.offset = p->offset;
3070 q = get_varinfo (temprhs.var);
3071 fieldoffset = p->offset - pstart;
3072 temprhs.offset += fieldoffset;
3073 process_constraint (new_constraint (templhs, temprhs));
3077 /* Handle the structure copy case where we have a structure copy
3078 between an aggregate on the RHS and a dereference of a pointer on
3079 the LHS that is of SIZE (in bits)
3081 For each field of the rhs variable (rhsfield)
3082 lhs.offset = rhsfield->offset
3083 add the constraint lhs = rhsfield
3087 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3088 const struct constraint_expr rhs,
3089 const unsigned HOST_WIDE_INT size)
3091 varinfo_t p = get_varinfo (rhs.var);
/* PSTART is presumably initialized on an elided line — TODO confirm.  */
3092 unsigned HOST_WIDE_INT pstart,last;
3094 last = p->offset + size;
/* Mirror image of do_rhs_deref_structure_copy: walk rhs fields and
   offset the deref on the lhs.  */
3096 for (; p && p->offset < last; p = p->next)
3099 struct constraint_expr templhs = lhs;
3100 struct constraint_expr temprhs = rhs;
3101 unsigned HOST_WIDE_INT fieldoffset;
3104 if (temprhs.type == SCALAR)
3105 temprhs.var = p->id;
3107 temprhs.offset = p->offset;
3109 q = get_varinfo (templhs.var);
3110 fieldoffset = p->offset - pstart;
3111 templhs.offset += fieldoffset;
3112 process_constraint (new_constraint (templhs, temprhs));
3116 /* Sometimes, frontends like to give us bad type information. This
3117 function will collapse all the fields from VAR to the end of VAR,
3118 into VAR, so that we treat those fields as a single variable.
3119 We return the variable they were collapsed into. */
3122 collapse_rest_of_var (unsigned int var)
3124 varinfo_t currvar = get_varinfo (var);
/* Redirect every trailing field to CURRVAR (dump the action when
   detailed dumping is on).  */
3127 for (field = currvar->next; field; field = field->next)
3130 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3131 field->name, currvar->name);
3133 gcc_assert (!field->collapsed_to);
3134 field->collapsed_to = currvar;
/* Truncate the field list and grow CURRVAR to cover the remainder.  */
3137 currvar->next = NULL;
3138 currvar->size = currvar->fullsize - currvar->offset;
3143 /* Handle aggregate copies by expanding into copies of the respective
3144 fields of the structures. */
3147 do_structure_copy (tree lhsop, tree rhsop)
3149 struct constraint_expr lhs, rhs, tmp;
3150 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3152 unsigned HOST_WIDE_INT lhssize;
3153 unsigned HOST_WIDE_INT rhssize;
/* Each side of an aggregate copy must yield exactly one constraint.  */
3155 get_constraint_for (lhsop, &lhsc);
3156 get_constraint_for (rhsop, &rhsc);
3157 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3158 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3159 lhs = *(VEC_last (ce_s, lhsc));
3160 rhs = *(VEC_last (ce_s, rhsc));
3162 VEC_free (ce_s, heap, lhsc);
3163 VEC_free (ce_s, heap, rhsc);
3165 /* If we have special var = x, swap it around. */
3166 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3173 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3174 possible it's something we could handle. However, most cases falling
3175 into this are dealing with transparent unions, which are slightly
3177 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3179 rhs.type = ADDRESSOF;
3180 rhs.var = anything_id;
3183 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3184 that special var. */
3185 if (rhs.var <= integer_id)
3187 for (p = get_varinfo (lhs.var); p; p = p->next)
3189 struct constraint_expr templhs = lhs;
3190 struct constraint_expr temprhs = rhs;
3192 if (templhs.type == SCALAR )
3193 templhs.var = p->id;
3195 templhs.offset += p->offset;
3196 process_constraint (new_constraint (templhs, temprhs));
/* General case (an else branch whose brace is elided here).  */
3201 tree rhstype = TREE_TYPE (rhsop);
3202 tree lhstype = TREE_TYPE (lhsop);
3206 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3207 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3209 /* If we have a variably sized types on the rhs or lhs, and a deref
3210 constraint, add the constraint, lhsconstraint = &ANYTHING.
3211 This is conservatively correct because either the lhs is an unknown
3212 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3213 constraint, and every variable it can point to must be unknown sized
3214 anyway, so we don't need to worry about fields at all. */
3215 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3216 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3218 rhs.var = anything_id;
3219 rhs.type = ADDRESSOF;
3221 process_constraint (new_constraint (lhs, rhs));
3225 /* The size only really matters insofar as we don't set more or less of
3226 the variable. If we hit an unknown size var, the size should be the
3227 whole darn thing. */
3228 if (get_varinfo (rhs.var)->is_unknown_size_var)
3231 rhssize = TREE_INT_CST_LOW (rhstypesize);
3233 if (get_varinfo (lhs.var)->is_unknown_size_var)
3236 lhssize = TREE_INT_CST_LOW (lhstypesize);
/* Dispatch on the SCALAR/DEREF shape of each side.  */
3239 if (rhs.type == SCALAR && lhs.type == SCALAR)
3241 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
/* Field-wise copy failed: collapse both sides and retry as a
   single whole-variable constraint.  */
3243 lhs.var = collapse_rest_of_var (lhs.var);
3244 rhs.var = collapse_rest_of_var (rhs.var);
3249 process_constraint (new_constraint (lhs, rhs));
3252 else if (lhs.type != DEREF && rhs.type == DEREF)
3253 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3254 else if (lhs.type == DEREF && rhs.type != DEREF)
3255 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
/* Both sides DEREF: split through a temporary aggregate.  */
3258 tree pointedtotype = lhstype;
3261 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3262 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3263 do_structure_copy (tmpvar, rhsop);
3264 do_structure_copy (lhsop, tmpvar);
3270 /* Update related alias information kept in AI. This is used when
3271 building name tags, alias sets and deciding grouping heuristics.
3272 STMT is the statement to process. This function also updates
3273 ADDRESSABLE_VARS. */
3276 update_alias_info (tree stmt, struct alias_info *ai)
3279 use_operand_p use_p;
3281 bool stmt_dereferences_ptr_p;
3282 enum escape_type stmt_escape_type = is_escape_site (stmt);
3283 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
3285 stmt_dereferences_ptr_p = false;
/* Bump per-function memory-reference statistics for call/asm sites.  */
3287 if (stmt_escape_type == ESCAPE_TO_CALL
3288 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
3290 mem_ref_stats->num_call_sites++;
3291 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
3292 mem_ref_stats->num_pure_const_call_sites++;
3294 else if (stmt_escape_type == ESCAPE_TO_ASM)
3295 mem_ref_stats->num_asm_sites++;
3297 /* Mark all the variables whose address are taken by the statement. */
3298 addr_taken = addresses_taken (stmt);
3300 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
3302 /* Process each operand use. For pointers, determine whether they
3303 are dereferenced by the statement, or whether their value
3305 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
3309 struct ptr_info_def *pi;
3310 unsigned num_uses, num_loads, num_stores;
3312 op = USE_FROM_PTR (use_p);
3314 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3315 to the set of addressable variables. */
3316 if (TREE_CODE (op) == ADDR_EXPR)
3318 bitmap addressable_vars = gimple_addressable_vars (cfun);
3320 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
3321 gcc_assert (addressable_vars);
3323 /* PHI nodes don't have annotations for pinning the set
3324 of addresses taken, so we collect them here.
3326 FIXME, should we allow PHI nodes to have annotations
3327 so that they can be treated like regular statements?
3328 Currently, they are treated as second-class
3330 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
3334 /* Ignore constants (they may occur in PHI node arguments). */
3335 if (TREE_CODE (op) != SSA_NAME)
3338 var = SSA_NAME_VAR (op);
3339 v_ann = var_ann (var);
3341 /* The base variable of an SSA name must be a GIMPLE register, and thus
3342 it cannot be aliased. */
3343 gcc_assert (!may_be_aliased (var));
3345 /* We are only interested in pointers. */
3346 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3349 pi = get_ptr_info (op);
3351 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3352 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3354 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3355 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
3358 /* If STMT is a PHI node, then it will not have pointer
3359 dereferences and it will not be an escape point. */
3360 if (TREE_CODE (stmt) == PHI_NODE)
3363 /* Determine whether OP is a dereferenced pointer, and if STMT
3364 is an escape point, whether OP escapes. */
3365 count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
3367 /* For directly dereferenced pointers we can apply
3368 TBAA-pruning to their points-to set. We may not count the
3369 implicit dereferences &PTR->FLD here. */
3370 if (num_loads + num_stores > 0)
3371 pi->is_dereferenced = 1;
3373 /* Handle a corner case involving address expressions of the
3374 form '&PTR->FLD'. The problem with these expressions is that
3375 they do not represent a dereference of PTR. However, if some
3376 other transformation propagates them into an INDIRECT_REF
3377 expression, we end up with '*(&PTR->FLD)' which is folded
3380 So, if the original code had no other dereferences of PTR,
3381 the aliaser will not create memory tags for it, and when
3382 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3383 memory operations will receive no VDEF/VUSE operands.
3385 One solution would be to have count_uses_and_derefs consider
3386 &PTR->FLD a dereference of PTR. But that is wrong, since it
3387 is not really a dereference but an offset calculation.
3389 What we do here is to recognize these special ADDR_EXPR
3390 nodes. Since these expressions are never GIMPLE values (they
3391 are not GIMPLE invariants), they can only appear on the RHS
3392 of an assignment and their base address is always an
3393 INDIRECT_REF expression. */
3394 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3395 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
3396 && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
3398 /* If the RHS if of the form &PTR->FLD and PTR == OP, then
3399 this represents a potential dereference of PTR. */
3400 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3401 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3402 if (TREE_CODE (base) == INDIRECT_REF
3403 && TREE_OPERAND (base, 0) == op)
3407 if (num_loads + num_stores > 0)
3409 /* Mark OP as dereferenced. In a subsequent pass,
3410 dereferenced pointers that point to a set of
3411 variables will be assigned a name tag to alias
3412 all the variables OP points to. */
3413 pi->memory_tag_needed = 1;
3415 /* ??? For always executed direct dereferences we can
3416 apply TBAA-pruning to their escape set. */
3418 /* If this is a store operation, mark OP as being
3419 dereferenced to store, otherwise mark it as being
3420 dereferenced to load. */
3422 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3424 pointer_set_insert (ai->dereferenced_ptrs_load, var);
3426 /* Update the frequency estimate for all the dereferences of
3428 update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);
3430 /* Indicate that STMT contains pointer dereferences. */
3431 stmt_dereferences_ptr_p = true;
3434 if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
3436 /* If STMT is an escape point and STMT contains at
3437 least one direct use of OP, then the value of OP
3438 escapes and so the pointed-to variables need to
3439 be marked call-clobbered. */
3440 pi->value_escapes_p = 1;
3441 pi->escape_mask |= stmt_escape_type;
3443 /* If the statement makes a function call, assume
3444 that pointer OP will be dereferenced in a store
3445 operation inside the called function. */
3446 if (get_call_expr_in (stmt)
3447 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3449 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3450 pi->memory_tag_needed = 1;
/* PHI nodes carry no memory operands; nothing more to record.  */
3455 if (TREE_CODE (stmt) == PHI_NODE)
3458 /* Mark stored variables in STMT as being written to and update the
3459 memory reference stats for all memory symbols referenced by STMT. */
3460 if (stmt_references_memory_p (stmt))
3465 mem_ref_stats->num_mem_stmts++;
3467 /* Notice that we only update memory reference stats for symbols
3468 loaded and stored by the statement if the statement does not
3469 contain pointer dereferences and it is not a call/asm site.
3470 This is to avoid double accounting problems when creating
3471 memory partitions. After computing points-to information,
3472 pointer dereference statistics are used to update the
3473 reference stats of the pointed-to variables, so here we
3474 should only update direct references to symbols.
3476 Indirect references are not updated here for two reasons: (1)
3477 The first time we compute alias information, the sets
3478 LOADED/STORED are empty for pointer dereferences, (2) After
3479 partitioning, LOADED/STORED may have references to
3480 partitions, not the original pointed-to variables. So, if we
3481 always counted LOADED/STORED here and during partitioning, we
3482 would count many symbols more than once.
3484 This does cause some imprecision when a statement has a
3485 combination of direct symbol references and pointer
3486 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
3487 memory symbols in its argument list, but these cases do not
3488 occur so frequently as to constitute a serious problem. */
3489 if (STORED_SYMS (stmt))
3490 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3492 tree sym = referenced_var (i);
3493 pointer_set_insert (ai->written_vars, sym);
3494 if (!stmt_dereferences_ptr_p
3495 && stmt_escape_type != ESCAPE_TO_CALL
3496 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3497 && stmt_escape_type != ESCAPE_TO_ASM)
3498 update_mem_sym_stats_from_stmt (sym, stmt, 0, 1);
/* Count direct loads under the same double-accounting guard.  */
3501 if (!stmt_dereferences_ptr_p
3502 && LOADED_SYMS (stmt)
3503 && stmt_escape_type != ESCAPE_TO_CALL
3504 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3505 && stmt_escape_type != ESCAPE_TO_ASM)
3506 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
3507 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
3512 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3513 Expressions of the type PTR + CST can be handled in two ways:
3515 1- If the constraint for PTR is ADDRESSOF for a non-structure
3516 variable, then we can use it directly because adding or
3517 subtracting a constant may not alter the original ADDRESSOF
3518 constraint (i.e., pointer arithmetic may not legally go outside
3519 an object's boundaries).
3521 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3522 then if CST is a compile-time constant that can be used as an
3523 offset, we can determine which sub-variable will be pointed-to
3526 Return true if the expression is handled. For any other kind of
3527 expression, return false so that each operand can be added as a
3528 separate constraint by the caller. */
3531 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3534 struct constraint_expr *c, *c2;
3537 VEC (ce_s, heap) *temp = NULL;
3538 unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
/* Only POINTER_PLUS_EXPR is handled; everything else punts.  */
3540 if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
3543 op0 = TREE_OPERAND (expr, 0);
3544 op1 = TREE_OPERAND (expr, 1);
3545 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
3547 /* If the offset is not a non-negative integer constant that fits
3548 in a HOST_WIDE_INT, we cannot handle it here. */
3549 if (!host_integerp (op1, 1))
3552 /* Make sure the bit-offset also fits. */
3553 rhsunitoffset = TREE_INT_CST_LOW (op1);
3554 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
/* Overflow check: scaling to bits and back must round-trip.  */
3555 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3558 get_constraint_for (op0, &temp);
/* Cross product: constrain every lhs expression by every offset-
   adjusted rhs expression.  */
3560 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3561 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3563 if (c2->type == ADDRESSOF && rhsoffset != 0)
3565 varinfo_t temp = get_varinfo (c2->var);
3567 /* An access one after the end of an array is valid,
3568 so simply punt on accesses we cannot resolve. */
3569 temp = first_vi_for_offset (temp, rhsoffset);
3576 c2->offset = rhsoffset;
3577 process_constraint (new_constraint (*c, *c2));
3580 VEC_free (ce_s, heap, temp);
3585 /* Create a constraint ID = OP. */
3588 make_constraint_to (unsigned id, tree op)
3590 VEC(ce_s, heap) *rhsc = NULL;
3591 struct constraint_expr *c;
3592 struct constraint_expr includes;
/* Build the lhs expression for variable ID (the .var assignment is
   on an elided line).  */
3596 includes.offset = 0;
3597 includes.type = SCALAR;
/* One constraint per rhs expression for OP; FROM_CALL is true since
   this is used for call-side effects.  */
3599 get_constraint_for (op, &rhsc);
3600 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3601 process_constraint_1 (new_constraint (includes, *c), true);
3602 VEC_free (ce_s, heap, rhsc);
3605 /* Make constraints necessary to make OP escape. */
3608 make_escape_constraint (tree op)
/* Escaping is modeled as flowing into the special ESCAPED variable.  */
3610 make_constraint_to (escaped_id, op);
3613 /* For non-IPA mode, generate constraints necessary for a call on the
3617 handle_rhs_call (tree rhs)
3620 call_expr_arg_iterator iter;
3622 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
3623 /* Find those pointers being passed, and make sure they end up
3624 pointing to anything. */
3625 if (could_have_pointers (arg))
3626 make_escape_constraint (arg);
3628 /* The static chain escapes as well. */
3629 if (CALL_EXPR_STATIC_CHAIN (rhs))
3630 make_escape_constraint (CALL_EXPR_STATIC_CHAIN (rhs));
3633 /* For non-IPA mode, generate constraints necessary for a call
3634 that returns a pointer and assigns it to LHS. This simply makes
3635 the LHS point to global and escaped variables. */
3638 handle_lhs_call (tree lhs)
3640 VEC(ce_s, heap) *lhsc = NULL;
3641 struct constraint_expr rhsc;
3643 struct constraint_expr *lhsp;
3645 get_constraint_for (lhs, &lhsc);
/* LHS may point to nonlocal (global) memory...  */
3646 rhsc.var = nonlocal_id;
3648 rhsc.type = ADDRESSOF;
3649 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3650 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
/* ...and to anything that has escaped.  */
3651 rhsc.var = escaped_id;
3653 rhsc.type = ADDRESSOF;
3654 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3655 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3656 VEC_free (ce_s, heap, lhsc);
3659 /* For non-IPA mode, generate constraints necessary for a call of a
3660 const function that returns a pointer in the statement STMT. */
3663 handle_const_call (tree stmt)
3665 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3666 tree call = get_call_expr_in (stmt);
3667 VEC(ce_s, heap) *lhsc = NULL;
3668 struct constraint_expr rhsc;
3670 struct constraint_expr *lhsp;
3672 call_expr_arg_iterator iter;
3674 get_constraint_for (lhs, &lhsc);
3676 /* If this is a nested function then it can return anything. */
3677 if (CALL_EXPR_STATIC_CHAIN (call))
3679 rhsc.var = anything_id;
3681 rhsc.type = ADDRESSOF;
3682 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3683 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
/* Early exit for the nested-function case (return is elided).  */
3684 VEC_free (ce_s, heap, lhsc);
3688 /* May return addresses of globals. */
3689 rhsc.var = nonlocal_id;
3691 rhsc.type = ADDRESSOF;
3692 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3693 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3695 /* May return arguments. */
3696 FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
3697 if (could_have_pointers (arg))
3699 VEC(ce_s, heap) *argc = NULL;
3700 struct constraint_expr *argp;
3702 get_constraint_for (arg, &argc);
/* Cross product: every lhs expression includes every argument
   expression.  */
3703 for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
3704 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3705 process_constraint_1 (new_constraint (*lhsp, *argp), true);
3706 VEC_free (ce_s, heap, argc);
3709 VEC_free (ce_s, heap, lhsc);
3712 /* For non-IPA mode, generate constraints necessary for a call to a
3713 pure function in statement STMT. */
3716 handle_pure_call (tree stmt)
3718 tree call = get_call_expr_in (stmt);
3720 call_expr_arg_iterator iter;
3722 /* Memory reached from pointer arguments is call-used. */
3723 FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
3724 if (could_have_pointers (arg))
3725 make_constraint_to (callused_id, arg);
3727 /* The static chain is used as well. */
3728 if (CALL_EXPR_STATIC_CHAIN (call))
3729 make_constraint_to (callused_id, CALL_EXPR_STATIC_CHAIN (call));
3731 /* If the call returns a pointer it may point to reachable memory
3732 from the arguments. */
3733 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3734 && could_have_pointers (GIMPLE_STMT_OPERAND (stmt, 0)))
3736 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3737 VEC(ce_s, heap) *lhsc = NULL;
3738 struct constraint_expr rhsc;
3739 struct constraint_expr *lhsp;
3742 get_constraint_for (lhs, &lhsc);
3744 /* If this is a nested function then it can return anything. */
3745 if (CALL_EXPR_STATIC_CHAIN (call))
3747 rhsc.var = anything_id;
3749 rhsc.type = ADDRESSOF;
3750 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3751 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
/* Early exit for the nested-function case (return is elided).  */
3752 VEC_free (ce_s, heap, lhsc);
3756 /* Else just add the call-used memory here. Escaped variables
3757 and globals will be dealt with in handle_lhs_call. */
3758 rhsc.var = callused_id;
3760 rhsc.type = ADDRESSOF;
3761 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3762 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3763 VEC_free (ce_s, heap, lhsc);
3767 /* Walk statement ORIGT setting up aliasing constraints according to the
3768 references found in it.  This function is the main part of the
3769 constraint builder.  It handles PHI nodes, call expressions,
3770 assignments, CHANGE_DYNAMIC_TYPE_EXPRs and escape sites.  */
3773 find_func_aliases (tree origt)
3775 tree call, t = origt;
3776 VEC(ce_s, heap) *lhsc = NULL;
3777 VEC(ce_s, heap) *rhsc = NULL;
3778 struct constraint_expr *c;
3779 enum escape_type stmt_escape_type;
3782 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3783 t = TREE_OPERAND (t, 0);
3785 /* Now build constraints expressions.  */
3786 if (TREE_CODE (t) == PHI_NODE)
3788 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3790 /* Only care about pointers and structures containing pointers.  */
3792 if (could_have_pointers (PHI_RESULT (t)))
3797 /* For a phi node, assign all the arguments to the result.  */
3799 get_constraint_for (PHI_RESULT (t), &lhsc);
3800 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3803 tree strippedrhs = PHI_ARG_DEF (t, i);
3805 STRIP_NOPS (strippedrhs);
3806 rhstype = TREE_TYPE (strippedrhs);
3807 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3809 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3811 struct constraint_expr *c2;
3812 while (VEC_length (ce_s, rhsc) > 0)
3814 c2 = VEC_last (ce_s, rhsc);
3815 process_constraint (new_constraint (*c, *c2));
3816 VEC_pop (ce_s, rhsc);
3822 /* In IPA mode, we need to generate constraints to pass call
3823 arguments through their calls.  There are two cases, either a
3824 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3825 CALL_EXPR when we are not.
3827 In non-ipa mode, we need to generate constraints for each
3828 pointer passed by address.  */
3829 else if ((call = get_call_expr_in (t)) != NULL_TREE
3830 && !((flags = call_expr_flags (call))
3831 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3835 /* Const functions can return their arguments and addresses
3836 of global memory but not of escaped memory.  */
3837 if (flags & ECF_CONST)
3839 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
3840 && could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3841 handle_const_call (t);
3843 else if (flags & ECF_PURE)
3845 handle_pure_call (t);
3846 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
3847 && could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3848 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0));
3850 /* Pure functions can return addresses in and of memory
3851 reachable from their arguments, but they are not an escape
3852 point for reachable memory of their arguments.  But as we
3853 do not compute call-used memory separately we cannot do
3854 something special here.  */
3855 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3857 handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
3858 if (could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3859 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0));
3862 handle_rhs_call (t);
3869 call_expr_arg_iterator iter;
3873 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3875 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3876 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3883 decl = get_callee_fndecl (rhsop);
3885 /* If we can directly resolve the function being called, do so.
3886 Otherwise, it must be some sort of indirect expression that
3887 we should still be able to handle.  */
3890 fi = get_vi_for_tree (decl);
3894 decl = CALL_EXPR_FN (rhsop);
3895 fi = get_vi_for_tree (decl);
3898 /* Assign all the passed arguments to the appropriate incoming
3899 parameters of the function.  */
3901 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3903 struct constraint_expr lhs ;
3904 struct constraint_expr *rhsp;
3906 get_constraint_for (arg, &rhsc);
3907 if (TREE_CODE (decl) != FUNCTION_DECL)
3916 lhs.var = first_vi_for_offset (fi, i)->id;
3919 while (VEC_length (ce_s, rhsc) != 0)
3921 rhsp = VEC_last (ce_s, rhsc);
3922 process_constraint (new_constraint (lhs, *rhsp));
3923 VEC_pop (ce_s, rhsc);
3928 /* If we are returning a value, assign it to the result.  */
3931 struct constraint_expr rhs;
3932 struct constraint_expr *lhsp;
3935 get_constraint_for (lhsop, &lhsc);
3936 if (TREE_CODE (decl) != FUNCTION_DECL)
3945 rhs.var = first_vi_for_offset (fi, i)->id;
3948 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3949 process_constraint (new_constraint (*lhsp, rhs));
3953 /* Otherwise, just a regular assignment statement.  */
3954 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3956 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3957 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
3960 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3961 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
3962 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
3963 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
3965 do_structure_copy (lhsop, rhsop);
3969 /* Only care about operations with pointers, structures
3970 containing pointers, dereferences, and call expressions.  */
3971 if (could_have_pointers (lhsop)
3972 || TREE_CODE (rhsop) == CALL_EXPR)
3974 get_constraint_for (lhsop, &lhsc);
3975 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3977 /* RHS that consist of unary operations,
3978 exceptional types, or bare decls/constants, get
3979 handled directly by get_constraint_for.  */
3981 case tcc_declaration:
3983 case tcc_exceptional:
3984 case tcc_expression:
3990 get_constraint_for (rhsop, &rhsc);
3991 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3993 struct constraint_expr *c2;
3996 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3997 process_constraint (new_constraint (*c, *c2));
4005 /* For pointer arithmetic of the form
4006 PTR + CST, we can simply use PTR's
4007 constraint because pointer arithmetic is
4008 not allowed to go out of bounds.  */
4009 if (handle_ptr_arith (lhsc, rhsop))
4014 /* Otherwise, walk each operand.  Notice that we
4015 can't use the operand interface because we need
4016 to process expressions other than simple operands
4017 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR).  */
4019 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
4021 tree op = TREE_OPERAND (rhsop, i);
4024 gcc_assert (VEC_length (ce_s, rhsc) == 0);
4025 get_constraint_for (op, &rhsc);
4026 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
4028 struct constraint_expr *c2;
4029 while (VEC_length (ce_s, rhsc) > 0)
4031 c2 = VEC_last (ce_s, rhsc);
4032 process_constraint (new_constraint (*c, *c2));
4033 VEC_pop (ce_s, rhsc);
4041 else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
4045 get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
4046 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
4047 get_varinfo (c->var)->no_tbaa_pruning = true;
4050 stmt_escape_type = is_escape_site (t);
4051 if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
4054 gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
4055 rhs = GIMPLE_STMT_OPERAND (t, 1);
4056 if (TREE_CODE (rhs) == ADDR_EXPR)
4058 tree base = get_base_address (TREE_OPERAND (rhs, 0));
4061 || !is_global_var (base)))
4062 make_escape_constraint (rhs);
4064 else if (TREE_CODE (rhs) == SSA_NAME
4065 && POINTER_TYPE_P (TREE_TYPE (rhs)))
4066 make_escape_constraint (rhs);
4067 else if (could_have_pointers (rhs))
4068 make_escape_constraint (rhs);
4070 else if (stmt_escape_type == ESCAPE_BAD_CAST)
4073 gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
4074 rhs = GIMPLE_STMT_OPERAND (t, 1);
4075 gcc_assert (CONVERT_EXPR_P (rhs)
4076 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR);
4077 rhs = TREE_OPERAND (rhs, 0);
4078 make_escape_constraint (rhs);
4080 else if (stmt_escape_type == ESCAPE_TO_ASM)
4084 for (i = 0, link = ASM_OUTPUTS (t); link; i++, link = TREE_CHAIN (link))
4086 tree op = TREE_VALUE (link);
4087 if (op && could_have_pointers (op))
4088 /* Strictly we'd only need the constraints from ESCAPED and
4089 NONLOCAL.  */
4090 make_escape_constraint (op);
4092 for (i = 0, link = ASM_INPUTS (t); link; i++, link = TREE_CHAIN (link))
4094 tree op = TREE_VALUE (link);
4095 if (op && could_have_pointers (op))
4096 /* Strictly we'd only need the constraint to ESCAPED.  */
4097 make_escape_constraint (op);
4101 /* After promoting variables and computing aliasing we will
4102 need to re-scan most statements.  FIXME: Try to minimize the
4103 number of statements re-scanned.  It's not really necessary to
4104 re-scan *all* statements.  */
4106 mark_stmt_modified (origt);
4107 VEC_free (ce_s, heap, rhsc);
4108 VEC_free (ce_s, heap, lhsc);
4112 /* Find the first varinfo in the same variable as START that overlaps
4114 with OFFSET.  Effectively, walk the chain of fields for the variable
4115 START to find the first field that overlaps with OFFSET.
4116 Return NULL if we can't find one.  */
4119 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
4121 varinfo_t curr = start;
4124 /* We may not find a variable in the field list with the actual
4125 offset when we have glommed a structure to a variable.
4126 In that case, however, offset should still be within the size
4127 of the variable.  */
4128 if (offset >= curr->offset && offset < (curr->offset + curr->size))
4136 /* Insert the varinfo FIELD into the field list for BASE, at the front
4137 of the list.  */
4140 insert_into_field_list (varinfo_t base, varinfo_t field)
4142 varinfo_t prev = base;
4143 varinfo_t curr = base->next;
4149 /* Insert the varinfo FIELD into the field list for BASE, ordered by
4150 field offset.  */
4153 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
4155 varinfo_t prev = base;
4156 varinfo_t curr = base->next;
4167 if (field->offset <= curr->offset)
4172 field->next = prev->next;
4177 /* This structure is used during pushing fields onto the fieldstack
4178 to track the offset of the field, since bitpos_of_field gives it
4179 relative to its immediate containing type, and we want it relative
4180 to the ultimate containing object.  */
4184 /* Type of the field.  */
4187 /* Size, in bits, of the field.  */
4193 /* Offset from the base of the base containing object to this field.  */
4194 HOST_WIDE_INT offset;
4196 typedef struct fieldoff fieldoff_s;
4198 DEF_VEC_O(fieldoff_s);
4199 DEF_VEC_ALLOC_O(fieldoff_s,heap);
4201 /* qsort comparison function for two fieldoff's PA and PB.
4202 Compares primarily by bit offset, secondarily by size.  */
4204 fieldoff_compare (const void *pa, const void *pb)
4206 const fieldoff_s *foa = (const fieldoff_s *)pa;
4207 const fieldoff_s *fob = (const fieldoff_s *)pb;
4208 unsigned HOST_WIDE_INT foasize, fobsize;
4210 if (foa->offset < fob->offset)
4212 else if (foa->offset > fob->offset)
4215 foasize = TREE_INT_CST_LOW (foa->size);
4216 fobsize = TREE_INT_CST_LOW (fob->size);
4217 if (foasize < fobsize)
4219 else if (foasize > fobsize)
4224 /* Sort a fieldstack according to the field offset and sizes,
4225 using fieldoff_compare as the qsort comparator.  */
4226 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4228 qsort (VEC_address (fieldoff_s, fieldstack),
4229 VEC_length (fieldoff_s, fieldstack),
4230 sizeof (fieldoff_s),
4234 /* Return true if V is a tree that we can have subvars for.
4235 Normally, this is any aggregate type.  Also complex
4236 types which are not gimple registers can have subvars.  */
4239 var_can_have_subvars (const_tree v)
4241 /* Volatile variables should never have subvars.  */
4242 if (TREE_THIS_VOLATILE (v))
4245 /* Non decls or memory tags can never have subvars.  */
4246 if (!DECL_P (v) || MTAG_P (v))
4249 /* Aggregates without overlapping fields can have subvars.  */
4250 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4256 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4257 the fields of TYPE onto fieldstack, recording their offsets along
4258 the way.
4260 OFFSET is used to keep track of the offset in this entire
4261 structure, rather than just the immediately containing structure.
4262 Returns the number of fields pushed.
4264 HAS_UNION is set to true if we find a union type as a field of
4265 TYPE.  */
4268 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4269 HOST_WIDE_INT offset, bool *has_union)
4274 if (TREE_CODE (type) != RECORD_TYPE)
4277 /* If the vector of fields is growing too big, bail out early.
4278 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
4279 sure this fails.  */
4280 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4283 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4284 if (TREE_CODE (field) == FIELD_DECL)
4290 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4291 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
4294 if (!var_can_have_subvars (field))
4296 else if (!(pushed = push_fields_onto_fieldstack
4299 offset + bitpos_of_field (field),
4301 && (DECL_SIZE (field)
4302 && !integer_zerop (DECL_SIZE (field))))
4303 /* Empty structures may have actual size, like in C++.  So
4304 see if we didn't push any subfields and the size is
4305 nonzero, push the field onto the stack.  */
4312 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4313 pair->type = TREE_TYPE (field);
4314 pair->size = DECL_SIZE (field);
4316 pair->offset = offset + bitpos_of_field (field);
4326 /* Create a constraint ID = &FROM.  */
4329 make_constraint_from (varinfo_t vi, int from)
4331 struct constraint_expr lhs, rhs;
4339 rhs.type = ADDRESSOF;
4340 process_constraint (new_constraint (lhs, rhs));
4343 /* Create a constraint from the ANYTHING variable to VI.  */
4345 make_constraint_from_anything (varinfo_t vi)
4347 make_constraint_from (vi, anything_id);
4350 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4351 if it is a varargs function.  */
4354 count_num_arguments (tree decl, bool *is_varargs)
4359 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4363 if (TREE_VALUE (t) == void_type_node)
4373 /* Create the function info node for DECL, using NAME, and return the index
4374 of the variable we've created for the function.  */
4377 create_function_info_for (tree decl, const char *name)
4379 unsigned int index = VEC_length (varinfo_t, varmap);
4383 bool is_varargs = false;
4385 /* Create the variable info.  */
4387 vi = new_var_info (decl, index, name);
4392 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4393 insert_vi_for_tree (vi->decl, vi);
4394 VEC_safe_push (varinfo_t, heap, varmap, vi);
4398 /* If it's varargs, we don't know how many arguments it has, so we
4399 can't do much.  */
4405 vi->is_unknown_size_var = true;
4410 arg = DECL_ARGUMENTS (decl);
4412 /* Set up variables for each argument.  */
4413 for (i = 1; i < vi->fullsize; i++)
4416 const char *newname;
4418 unsigned int newindex;
4419 tree argdecl = decl;
4424 newindex = VEC_length (varinfo_t, varmap);
4425 asprintf (&tempname, "%s.arg%d", name, i-1);
4426 newname = ggc_strdup (tempname);
4429 argvi = new_var_info (argdecl, newindex, newname);
4430 argvi->decl = argdecl;
4431 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4434 argvi->fullsize = vi->fullsize;
4435 argvi->has_union = false;
4436 insert_into_field_list_sorted (vi, argvi);
4437 stats.total_vars ++;
4440 insert_vi_for_tree (arg, argvi);
4441 arg = TREE_CHAIN (arg);
4445 /* Create a variable for the return var.  */
4446 if (DECL_RESULT (decl) != NULL
4447 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4450 const char *newname;
4452 unsigned int newindex;
4453 tree resultdecl = decl;
4457 if (DECL_RESULT (decl))
4458 resultdecl = DECL_RESULT (decl);
4460 newindex = VEC_length (varinfo_t, varmap);
4461 asprintf (&tempname, "%s.result", name);
4462 newname = ggc_strdup (tempname);
4465 resultvi = new_var_info (resultdecl, newindex, newname);
4466 resultvi->decl = resultdecl;
4467 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4468 resultvi->offset = i;
4470 resultvi->fullsize = vi->fullsize;
4471 resultvi->has_union = false;
4472 insert_into_field_list_sorted (vi, resultvi);
4473 stats.total_vars ++;
4474 if (DECL_RESULT (decl))
4475 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4481 /* Return true if FIELDSTACK contains fields that overlap.
4482 FIELDSTACK is assumed to be sorted by offset.  */
4485 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4487 fieldoff_s *fo = NULL;
4489 HOST_WIDE_INT lastoffset = -1;
4491 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4493 if (fo->offset == lastoffset)
4495 lastoffset = fo->offset;
4500 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4501 This will also create any varinfo structures necessary for fields
4502 of DECL.  */
4505 create_variable_info_for (tree decl, const char *name)
4507 unsigned int index = VEC_length (varinfo_t, varmap);
4509 tree decltype = TREE_TYPE (decl);
4510 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
4511 bool notokay = false;
4513 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4514 VEC (fieldoff_s,heap) *fieldstack = NULL;
4516 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4517 return create_function_info_for (decl, name);
4519 hasunion = TREE_CODE (decltype) == UNION_TYPE
4520 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
4521 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
4523 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
4526 VEC_free (fieldoff_s, heap, fieldstack);
4531 /* If the variable doesn't have subvars, we may end up needing to
4532 sort the field list and create fake variables for all the
4533 fields.  */
4534 vi = new_var_info (decl, index, name);
4537 vi->has_union = hasunion;
4539 || TREE_CODE (declsize) != INTEGER_CST
4540 || TREE_CODE (decltype) == UNION_TYPE
4541 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
4543 vi->is_unknown_size_var = true;
4549 vi->fullsize = TREE_INT_CST_LOW (declsize);
4550 vi->size = vi->fullsize;
4553 insert_vi_for_tree (vi->decl, vi);
4554 VEC_safe_push (varinfo_t, heap, varmap, vi);
4555 if (is_global && (!flag_whole_program || !in_ipa_mode))
4556 make_constraint_from_anything (vi);
4559 if (use_field_sensitive
4561 && !vi->is_unknown_size_var
4562 && var_can_have_subvars (decl)
4563 && VEC_length (fieldoff_s, fieldstack) > 1
4564 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4566 unsigned int newindex = VEC_length (varinfo_t, varmap);
4567 fieldoff_s *fo = NULL;
4570 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4573 || TREE_CODE (fo->size) != INTEGER_CST
4581 /* We can't sort them if we have a field with a variable sized type,
4582 which will make notokay = true.  In that case, we are going to return
4583 without creating varinfos for the fields anyway, so sorting them is a
4584 waste to boot.  */
4587 sort_fieldstack (fieldstack);
4588 /* Due to some C++ FE issues, like PR 22488, we might end up
4589 what appear to be overlapping fields even though they,
4590 in reality, do not overlap.  Until the C++ FE is fixed,
4591 we will simply disable field-sensitivity for these cases.  */
4592 notokay = check_for_overlaps (fieldstack);
4596 if (VEC_length (fieldoff_s, fieldstack) != 0)
4597 fo = VEC_index (fieldoff_s, fieldstack, 0);
4599 if (fo == NULL || notokay)
4601 vi->is_unknown_size_var = 1;
4604 VEC_free (fieldoff_s, heap, fieldstack);
4608 vi->size = TREE_INT_CST_LOW (fo->size);
4609 vi->offset = fo->offset;
4610 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4611 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4615 const char *newname = "NULL";
4618 newindex = VEC_length (varinfo_t, varmap);
4622 asprintf (&tempname, "%s.%s",
4623 vi->name, alias_get_name (fo->decl));
4625 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4626 vi->name, fo->offset);
4627 newname = ggc_strdup (tempname);
4630 newvi = new_var_info (decl, newindex, newname);
4631 newvi->offset = fo->offset;
4632 newvi->size = TREE_INT_CST_LOW (fo->size);
4633 newvi->fullsize = vi->fullsize;
4634 insert_into_field_list (vi, newvi);
4635 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4636 if (is_global && (!flag_whole_program || !in_ipa_mode))
4637 make_constraint_from_anything (newvi);
4643 VEC_free (fieldoff_s, heap, fieldstack);
4648 /* Print out the points-to solution for VAR to FILE.  */
4651 dump_solution_for_var (FILE *file, unsigned int var)
4653 varinfo_t vi = get_varinfo (var);
4657 if (find (var) != var)
4659 varinfo_t vipt = get_varinfo (find (var));
4660 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4664 fprintf (file, "%s = { ", vi->name);
4665 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4667 fprintf (file, "%s ", get_varinfo (i)->name);
4669 fprintf (file, "}");
4670 if (vi->no_tbaa_pruning)
4671 fprintf (file, " no-tbaa-pruning");
4672 fprintf (file, "\n");
4676 /* Print the points-to solution for VAR to stdout.  */
4679 debug_solution_for_var (unsigned int var)
4681 dump_solution_for_var (stdout, var);
4684 /* Create varinfo structures for all of the variables in the
4685 function for intraprocedural mode.  */
4688 intra_create_variable_infos (void)
4691 struct constraint_expr lhs, rhs;
4693 /* For each incoming pointer argument arg, create the constraint ARG
4694 = NONLOCAL or a dummy variable if flag_argument_noalias is set.  */
4695 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4699 if (!could_have_pointers (t))
4702 /* If flag_argument_noalias is set, then function pointer
4703 arguments are guaranteed not to point to each other.  In that
4704 case, create an artificial variable PARM_NOALIAS and the
4705 constraint ARG = &PARM_NOALIAS.  */
4706 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4709 tree heapvar = heapvar_lookup (t);
4713 lhs.var = get_vi_for_tree (t)->id;
4715 if (heapvar == NULL_TREE)
4718 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4720 DECL_EXTERNAL (heapvar) = 1;
4721 if (gimple_referenced_vars (cfun))
4722 add_referenced_var (heapvar);
4724 heapvar_insert (t, heapvar);
4726 ann = get_var_ann (heapvar);
4727 if (flag_argument_noalias == 1)
4728 ann->noalias_state = NO_ALIAS;
4729 else if (flag_argument_noalias == 2)
4730 ann->noalias_state = NO_ALIAS_GLOBAL;
4731 else if (flag_argument_noalias == 3)
4732 ann->noalias_state = NO_ALIAS_ANYTHING;
4737 vi = get_vi_for_tree (heapvar);
4738 vi->is_artificial_var = 1;
4739 vi->is_heap_var = 1;
4741 rhs.type = ADDRESSOF;
4743 for (p = get_varinfo (lhs.var); p; p = p->next)
4745 struct constraint_expr temp = lhs;
4747 process_constraint (new_constraint (temp, rhs));
4752 varinfo_t arg_vi = get_vi_for_tree (t);
4754 for (p = arg_vi; p; p = p->next)
4755 make_constraint_from (p, nonlocal_id);
4760 /* Structure used to put solution bitmaps in a hashtable so they can
4761 be shared among variables with the same points-to set.  */
4763 typedef struct shared_bitmap_info
4767 } *shared_bitmap_info_t;
4768 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4770 static htab_t shared_bitmap_table;
4772 /* Hash function for a shared_bitmap_info_t; returns the cached hashcode.  */
4775 shared_bitmap_hash (const void *p)
4777 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4778 return bi->hashcode;
4781 /* Equality function for two shared_bitmap_info_t's; compares the
4782 underlying bitmaps.  */
4784 shared_bitmap_eq (const void *p1, const void *p2)
4786 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4787 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4788 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4791 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4792 existing instance if there is one, NULL otherwise.  */
4795 shared_bitmap_lookup (bitmap pt_vars)
4798 struct shared_bitmap_info sbi;
4800 sbi.pt_vars = pt_vars;
4801 sbi.hashcode = bitmap_hash (pt_vars);
4803 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4804 sbi.hashcode, NO_INSERT);
4808 return ((shared_bitmap_info_t) *slot)->pt_vars;
4812 /* Add a bitmap to the shared bitmap hashtable.  Asserts that it is
4813 not already present.  */
4815 shared_bitmap_add (bitmap pt_vars)
4818 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4820 sbi->pt_vars = pt_vars;
4821 sbi->hashcode = bitmap_hash (pt_vars);
4823 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4824 sbi->hashcode, INSERT);
4825 gcc_assert (!*slot);
4826 *slot = (void *) sbi;
4830 /* Set bits in INTO corresponding to the variable uids in solution set
4831 FROM, which came from variable PTR.
4832 For variables that are actually dereferenced, we also use type
4833 based alias analysis to prune the points-to sets.
4834 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4835 help determine whether we are allowed to prune using TBAA.
4836 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4837 the from set.  */
4840 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4841 bool no_tbaa_pruning)
4846 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
4848 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4850 varinfo_t vi = get_varinfo (i);
4852 /* The only artificial variables that are allowed in a may-alias
4853 set are heap variables.  */
4854 if (vi->is_artificial_var && !vi->is_heap_var)
4857 if (TREE_CODE (vi->decl) == VAR_DECL
4858 || TREE_CODE (vi->decl) == PARM_DECL
4859 || TREE_CODE (vi->decl) == RESULT_DECL)
4861 /* Just add VI->DECL to the alias set.
4862 Don't type prune artificial vars or points-to sets
4863 for pointers that have not been dereferenced or with
4864 type-based pruning disabled.  */
4865 if (vi->is_artificial_var
4868 bitmap_set_bit (into, DECL_UID (vi->decl));
4871 alias_set_type var_alias_set, mem_alias_set;
4872 var_alias_set = get_alias_set (vi->decl);
4873 mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
4874 if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set,
4875 vi->decl, var_alias_set, true))
4876 bitmap_set_bit (into, DECL_UID (vi->decl));
4883 static bool have_alias_info = false;
4885 /* The list of SMT's that are in use by our pointer variables.  This
4886 is the set of SMT's for all pointers that can point to anything.  */
4887 static bitmap used_smts;
4889 /* Due to the ordering of points-to set calculation and SMT
4890 calculation being a bit co-dependent, we can't just calculate SMT
4891 used info whenever we want, we have to calculate it around the time
4892 that find_what_p_points_to is called.  */
4894 /* Mark which SMT's are in use by points-to anything variables.  */
4897 set_used_smts (void)
4901 used_smts = BITMAP_ALLOC (&pta_obstack);
4903 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4905 tree var = vi->decl;
4906 varinfo_t withsolution = get_varinfo (find (i));
4909 struct ptr_info_def *pi = NULL;
4911 /* For parm decls, the pointer info may be under the default
4912 def.  */
4913 if (TREE_CODE (vi->decl) == PARM_DECL
4914 && gimple_default_def (cfun, var))
4915 pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4916 else if (TREE_CODE (var) == SSA_NAME)
4917 pi = SSA_NAME_PTR_INFO (var);
4919 /* Skip the special variables and those that can't be aliased.  */
4920 if (vi->is_special_var
4922 || (pi && !pi->memory_tag_needed)
4923 || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4924 || !POINTER_TYPE_P (TREE_TYPE (var)))
4927 if (TREE_CODE (var) == SSA_NAME)
4928 var = SSA_NAME_VAR (var);
4934 smt = va->symbol_mem_tag;
4935 if (smt && bitmap_bit_p (withsolution->solution, anything_id))
4936 bitmap_set_bit (used_smts, DECL_UID (smt));
4940 /* Given a pointer variable P, fill in its points-to set, or return
4941 false if we can't.
4942 Rather than return false for variables that point-to anything, we
4943 instead find the corresponding SMT, and merge in its aliases.  In
4944 addition to these aliases, we also set the bits for the SMT's
4945 themselves and their subsets, as SMT's are still in use by
4946 non-SSA_NAME's, and pruning may eliminate every one of their
4947 aliases.  In such a case, if we did not include the right set of
4948 SMT's in the points-to set of the variable, we'd end up with
4949 statements that do not conflict but should.  */
4952 find_what_p_points_to (tree p)
4957 if (!have_alias_info)
4960 /* For parameters, get at the points-to set for the actual parm
4961 decl.  */
4962 if (TREE_CODE (p) == SSA_NAME
4963 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4964 && SSA_NAME_IS_DEFAULT_DEF (p))
4965 lookup_p = SSA_NAME_VAR (p);
4967 vi = lookup_vi_for_tree (lookup_p);
4970 if (vi->is_artificial_var)
4973 /* See if this is a field or a structure.  */
4974 if (vi->size != vi->fullsize)
4976 /* Nothing currently asks about structure fields directly,
4977 but when they do, we need code here to hand back the
4978 points-to set.  */
4983 struct ptr_info_def *pi = get_ptr_info (p);
4986 bool was_pt_anything = false;
4987 bitmap finished_solution;
4990 if (!pi->memory_tag_needed)
4993 /* This variable may have been collapsed, let's get the real
4994 variable.  */
4995 vi = get_varinfo (find (vi->id));
4997 /* Translate artificial variables into SSA_NAME_PTR_INFO
4998 attributes.  */
4999 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5001 varinfo_t vi = get_varinfo (i);
5003 if (vi->is_artificial_var)
5005 /* FIXME.  READONLY should be handled better so that
5006 flow insensitive aliasing can disregard writable
5007 aliases.  */
5008 if (vi->id == nothing_id)
5010 else if (vi->id == anything_id
5011 || vi->id == nonlocal_id
5012 || vi->id == escaped_id
5013 || vi->id == callused_id)
5014 was_pt_anything = 1;
5015 else if (vi->id == readonly_id)
5016 was_pt_anything = 1;
5017 else if (vi->id == integer_id)
5018 was_pt_anything = 1;
5019 else if (vi->is_heap_var)
5020 pi->pt_global_mem = 1;
5024 /* Instead of doing extra work, simply do not create
5025 points-to information for pt_anything pointers.  This
5026 will cause the operand scanner to fall back to the
5027 type-based SMT and its aliases.  Which is the best
5028 we could do here for the points-to set as well.  */
5029 if (was_pt_anything)
5032 /* Share the final set of variables when possible.  */
5033 finished_solution = BITMAP_GGC_ALLOC ();
5034 stats.points_to_sets_created++;
5036 set_uids_in_ptset (p, finished_solution, vi->solution,
5037 pi->is_dereferenced,
5038 vi->no_tbaa_pruning);
5039 result = shared_bitmap_lookup (finished_solution);
5043 shared_bitmap_add (finished_solution);
5044 pi->pt_vars = finished_solution;
5048 pi->pt_vars = result;
5049 bitmap_clear (finished_solution);
5052 if (bitmap_empty_p (pi->pt_vars))
5062 /* Mark the ESCAPED solution as call clobbered.  Returns false if
5063 pt_anything escaped which needs all locals that have their address
5064 taken marked call clobbered as well.  */
5067 clobber_what_escaped (void)
5073 if (!have_alias_info)
5076 /* This variable may have been collapsed, let's get the real
5077 variable for escaped_id.  */
5078 vi = get_varinfo (find (escaped_id));
5080 /* If call-used memory escapes we need to include it in the
5081 set of escaped variables.  This can happen if a pure
5082 function returns a pointer and this pointer escapes.  */
5083 if (bitmap_bit_p (vi->solution, callused_id))
5085 varinfo_t cu_vi = get_varinfo (find (callused_id));
5086 bitmap_ior_into (vi->solution, cu_vi->solution);
5089 /* Mark variables in the solution call-clobbered.  */
5090 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5092 varinfo_t vi = get_varinfo (i);
5094 if (vi->is_artificial_var)
5096 /* nothing_id and readonly_id do not cause any
5097 call clobber ops.  For anything_id and integer_id
5098 we need to clobber all addressable vars.  */
5099 if (vi->id == anything_id
5100 || vi->id == integer_id)
5104 /* Only artificial heap-vars are further interesting.  */
5105 if (vi->is_artificial_var && !vi->is_heap_var)
5108 if ((TREE_CODE (vi->decl) == VAR_DECL
5109 || TREE_CODE (vi->decl) == PARM_DECL
5110 || TREE_CODE (vi->decl) == RESULT_DECL)
5111 && !unmodifiable_var_p (vi->decl))
5112 mark_call_clobbered (vi->decl, ESCAPE_TO_CALL);
5118 /* Compute the call-used variables.  */
5121 compute_call_used_vars (void)
5126 bool has_anything_id = false;
5128 if (!have_alias_info)
5131 /* This variable may have been collapsed, let's get the real
5132 variable for callused_id.  */
5133 vi = get_varinfo (find (callused_id));
5135 /* Mark variables in the solution call-used.  */
5136 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5138 varinfo_t vi = get_varinfo (i);
5140 if (vi->is_artificial_var)
5142 /* For anything_id and integer_id we need to make
5143 all local addressable vars call-used.  */
5144 if (vi->id == anything_id
5145 || vi->id == integer_id)
5146 has_anything_id = true;
5149 /* Only artificial heap-vars are further interesting.  */
5150 if (vi->is_artificial_var && !vi->is_heap_var)
5153 if ((TREE_CODE (vi->decl) == VAR_DECL
5154 || TREE_CODE (vi->decl) == PARM_DECL
5155 || TREE_CODE (vi->decl) == RESULT_DECL)
5156 && !unmodifiable_var_p (vi->decl))
5157 bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl));
5160 /* If anything is call-used, add all addressable locals to the set.  */
5161 if (has_anything_id)
5162 bitmap_ior_into (gimple_call_used_vars (cfun),
5163 gimple_addressable_vars (cfun));
5167 /* Dump points-to information to OUTFILE.  */
5170 dump_sa_points_to_info (FILE *outfile)
5174 fprintf (outfile, "\nPoints-to sets\n\n");
5176 if (dump_flags & TDF_STATS)
5178 fprintf (outfile, "Stats:\n");
5179 fprintf (outfile, "Total vars:               %d\n", stats.total_vars);
5180 fprintf (outfile, "Non-pointer vars:          %d\n",
5181 stats.nonpointer_vars);
5182 fprintf (outfile, "Statically unified vars:  %d\n",
5183 stats.unified_vars_static);
5184 fprintf (outfile, "Dynamically unified vars: %d\n",
5185 stats.unified_vars_dynamic);
5186 fprintf (outfile, "Iterations:               %d\n", stats.iterations);
5187 fprintf (outfile, "Number of edges:          %d\n", stats.num_edges);
5188 fprintf (outfile, "Number of implicit edges: %d\n",
5189 stats.num_implicit_edges);
5192 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5193 dump_solution_for_var (outfile, i);
5197 /* Debug points-to information to stderr.  */
/* Convenience wrapper around dump_sa_points_to_info for use from a
   debugger.  */
5200 debug_sa_points_to_info (void)
5202 dump_sa_points_to_info (stderr);
5206 /* Initialize the always-existing constraint variables for NULL
5207 ANYTHING, READONLY, and INTEGER */
/* Also creates the ESCAPED, NONLOCAL and CALLUSED special variables and
   seeds the constraint system with the baseline constraints that relate
   them.  Each special variable is pushed into varmap so that its id
   (nothing_id, anything_id, ...) matches its varmap index.  */
5210 init_base_vars (void)
5212 struct constraint_expr lhs, rhs;
5214 /* Create the NULL variable, used to represent that a variable points
5216 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5217 var_nothing = new_var_info (nothing_tree, nothing_id, "NULL");
5218 insert_vi_for_tree (nothing_tree, var_nothing);
5219 var_nothing->is_artificial_var = 1;
5220 var_nothing->offset = 0;
/* ~0 size/fullsize means "covers everything" for artificial vars.  */
5221 var_nothing->size = ~0;
5222 var_nothing->fullsize = ~0;
5223 var_nothing->is_special_var = 1;
5224 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5226 /* Create the ANYTHING variable, used to represent that a variable
5227 points to some unknown piece of memory.  */
5228 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5229 var_anything = new_var_info (anything_tree, anything_id, "ANYTHING");
5230 insert_vi_for_tree (anything_tree, var_anything);
5231 var_anything->is_artificial_var = 1;
5232 var_anything->size = ~0;
5233 var_anything->offset = 0;
5234 var_anything->next = NULL;
5235 var_anything->fullsize = ~0;
5236 var_anything->is_special_var = 1;
5238 /* Anything points to anything.  This makes deref constraints just
5239 work in the presence of linked list and other p = *p type loops,
5240 by saying that *ANYTHING = ANYTHING.  */
5241 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5243 lhs.var = anything_id;
5245 rhs.type = ADDRESSOF;
5246 rhs.var = anything_id;
5249 /* This specifically does not use process_constraint because
5250 process_constraint ignores all anything = anything constraints, since all
5251 but this one are redundant.  */
5252 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5254 /* Create the READONLY variable, used to represent that a variable
5255 points to readonly memory.  */
5256 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5257 var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY");
5258 var_readonly->is_artificial_var = 1;
5259 var_readonly->offset = 0;
5260 var_readonly->size = ~0;
5261 var_readonly->fullsize = ~0;
5262 var_readonly->next = NULL;
5263 var_readonly->is_special_var = 1;
5264 insert_vi_for_tree (readonly_tree, var_readonly);
5265 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5267 /* readonly memory points to anything, in order to make deref
5268 easier.  In reality, it points to anything the particular
5269 readonly variable can point to, but we don't track this
5272 lhs.var = readonly_id;
5274 rhs.type = ADDRESSOF;
5275 rhs.var = readonly_id;  /* FIXME */
5277 process_constraint (new_constraint (lhs, rhs));
5279 /* Create the ESCAPED variable, used to represent the set of escaped
5281 escaped_tree = create_tmp_var_raw (void_type_node, "ESCAPED");
5282 var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED");
5283 insert_vi_for_tree (escaped_tree, var_escaped);
5284 var_escaped->is_artificial_var = 1;
5285 var_escaped->offset = 0;
5286 var_escaped->size = ~0;
5287 var_escaped->fullsize = ~0;
/* ESCAPED is not "special": its solution is a real points-to set that
   clients inspect, so it must participate fully in solving.  */
5288 var_escaped->is_special_var = 0;
5289 VEC_safe_push (varinfo_t, heap, varmap, var_escaped);
/* escaped_id is expected to be 3; verify the varmap index matches.  */
5290 gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped);
5292 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
5294 lhs.var = escaped_id;
5297 rhs.var = escaped_id;
5299 process_constraint_1 (new_constraint (lhs, rhs), true);
5301 /* Create the NONLOCAL variable, used to represent the set of nonlocal
5303 nonlocal_tree = create_tmp_var_raw (void_type_node, "NONLOCAL");
5304 var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL");
5305 insert_vi_for_tree (nonlocal_tree, var_nonlocal);
5306 var_nonlocal->is_artificial_var = 1;
5307 var_nonlocal->offset = 0;
5308 var_nonlocal->size = ~0;
5309 var_nonlocal->fullsize = ~0;
5310 var_nonlocal->is_special_var = 1;
5311 VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal);
5313 /* Nonlocal memory points to escaped (which includes nonlocal),
5314 in order to make deref easier.  */
5316 lhs.var = nonlocal_id;
5318 rhs.type = ADDRESSOF;
5319 rhs.var = escaped_id;
5321 process_constraint (new_constraint (lhs, rhs));
5323 /* Create the CALLUSED variable, used to represent the set of call-used
5325 callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED");
5326 var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
5327 insert_vi_for_tree (callused_tree, var_callused);
5328 var_callused->is_artificial_var = 1;
5329 var_callused->offset = 0;
5330 var_callused->size = ~0;
5331 var_callused->fullsize = ~0;
/* Like ESCAPED, CALLUSED has a real solution clients read.  */
5332 var_callused->is_special_var = 0;
5333 VEC_safe_push (varinfo_t, heap, varmap, var_callused);
5335 /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc.  */
5337 lhs.var = callused_id;
5340 rhs.var = callused_id;
5342 process_constraint_1 (new_constraint (lhs, rhs), true);
5344 /* Create the INTEGER variable, used to represent that a variable points
5346 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5347 var_integer = new_var_info (integer_tree, integer_id, "INTEGER");
5348 insert_vi_for_tree (integer_tree, var_integer);
5349 var_integer->is_artificial_var = 1;
5350 var_integer->size = ~0;
5351 var_integer->fullsize = ~0;
5352 var_integer->offset = 0;
5353 var_integer->next = NULL;
5354 var_integer->is_special_var = 1;
5355 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5357 /* INTEGER = ANYTHING, because we don't know where a dereference of
5358 a random integer will point to.  */
5360 lhs.var = integer_id;
5362 rhs.type = ADDRESSOF;
5363 rhs.var = anything_id;
5365 process_constraint (new_constraint (lhs, rhs));
5367 /* *ESCAPED = &ESCAPED.  This is true because we have to assume
5368 everything pointed to by escaped can also point to escaped.  */
5370 lhs.var = escaped_id;
5372 rhs.type = ADDRESSOF;
5373 rhs.var = escaped_id;
5375 process_constraint_1 (new_constraint (lhs, rhs), true);
5377 /* *ESCAPED = &NONLOCAL.  This is true because we have to assume
5378 everything pointed to by escaped can also point to nonlocal.  */
5380 lhs.var = escaped_id;
5382 rhs.type = ADDRESSOF;
5383 rhs.var = nonlocal_id;
5385 process_constraint_1 (new_constraint (lhs, rhs), true);
5388 /* Initialize things necessary to perform PTA */
/* Set up the bitmap obstacks, allocation pools, the constraint and
   varinfo vectors, the tree -> varinfo map, the stats struct and the
   shared-bitmap hash table used by the solver.  */
5391 init_alias_vars (void)
5393 bitmap_obstack_initialize (&pta_obstack);
5394 bitmap_obstack_initialize (&oldpta_obstack);
5395 bitmap_obstack_initialize (&predbitmap_obstack);
5397 constraint_pool = create_alloc_pool ("Constraint pool",
5398 sizeof (struct constraint), 30);
5399 variable_info_pool = create_alloc_pool ("Variable info pool",
5400 sizeof (struct variable_info), 30);
5401 constraints = VEC_alloc (constraint_t, heap, 8);
5402 varmap = VEC_alloc (varinfo_t, heap, 8);
5403 vi_for_tree = pointer_map_create ();
5405 memset (&stats, 0, sizeof (stats));
/* Shared bitmaps let identical points-to solutions share storage;
   511 is just the initial table size hint.  */
5406 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5407 shared_bitmap_eq, free);
5411 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5412 predecessor edges.  */
/* After variable substitution the REF/ADDR pseudo-nodes (ids >=
   FIRST_REF_NODE) and the predecessor graph are no longer needed, so
   release their storage and shrink the successor array to the number of
   real variables.  */
5415 remove_preds_and_fake_succs (constraint_graph_t graph)
5419 /* Clear the implicit ref and address nodes from the successor
5421 for (i = 0; i < FIRST_REF_NODE; i++)
5423 if (graph->succs[i])
5424 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5425 FIRST_REF_NODE * 2);
5428 /* Free the successor list for the non-ref nodes.  */
5429 for (i = FIRST_REF_NODE; i < graph->size; i++)
5431 if (graph->succs[i])
5432 BITMAP_FREE (graph->succs[i]);
5435 /* Now reallocate the size of the successor list as, and blow away
5436 the predecessor bitmaps.  */
5437 graph->size = VEC_length (varinfo_t, varmap);
5438 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5440 free (graph->implicit_preds);
5441 graph->implicit_preds = NULL;
5442 free (graph->preds);
5443 graph->preds = NULL;
/* All predecessor bitmaps live on this obstack; release it wholesale.  */
5444 bitmap_obstack_release (&predbitmap_obstack);
5447 /* Compute the set of variables we can't TBAA prune.  */
/* Fixed-point propagation: starting from representatives already marked
   no_tbaa_pruning, the flag is spread along SCALAR = SCALAR complex
   constraints and along successor edges until nothing changes.  Pointers
   ending up marked get DECL_NO_TBAA_P set and their RTL alias set
   cleared, telling later passes they may alias anything.  */
5450 compute_tbaa_pruning (void)
5452 unsigned int size = VEC_length (varinfo_t, varmap);
5457 changed = sbitmap_alloc (size);
5458 sbitmap_zero (changed);
5460 /* Mark all initial no_tbaa_pruning nodes as changed.  */
5462 for (i = 0; i < size; ++i)
5464 varinfo_t ivi = get_varinfo (i);
/* Only representatives (find (i) == i) carry solver state.  */
5466 if (find (i) == i && ivi->no_tbaa_pruning)
5469 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5470 || VEC_length (constraint_t, graph->complex[i]) > 0)
5472 SET_BIT (changed, i);
/* Iterate in topological order until the changed worklist drains.  */
5478 while (changed_count > 0)
5480 struct topo_info *ti = init_topo_info ();
5483 compute_topo_order (graph, ti);
5485 while (VEC_length (unsigned, ti->topo_order) != 0)
5489 i = VEC_pop (unsigned, ti->topo_order);
5491 /* If this variable is not a representative, skip it.  */
5495 /* If the node has changed, we need to process the complex
5496 constraints and outgoing edges again.  */
5497 if (TEST_BIT (changed, i))
5501 VEC(constraint_t,heap) *complex = graph->complex[i];
5503 RESET_BIT (changed, i);
5506 /* Process the complex copy constraints.  */
5507 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5509 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5511 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5513 if (!lhsvi->no_tbaa_pruning)
5515 lhsvi->no_tbaa_pruning = true;
5516 if (!TEST_BIT (changed, lhsvi->id))
5518 SET_BIT (changed, lhsvi->id);
5525 /* Propagate to all successors.  */
5526 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5528 unsigned int to = find (j);
5529 varinfo_t tovi = get_varinfo (to);
5531 /* Don't propagate to ourselves.  */
5535 if (!tovi->no_tbaa_pruning)
5537 tovi->no_tbaa_pruning = true;
5538 if (!TEST_BIT (changed, to))
5540 SET_BIT (changed, to);
5548 free_topo_info (ti);
5551 sbitmap_free (changed);
/* Apply the results: each variable inherits the flag from its
   representative.  */
5555 for (i = 0; i < size; ++i)
5557 varinfo_t ivi = get_varinfo (i);
5558 varinfo_t ivip = get_varinfo (find (i));
5560 if (ivip->no_tbaa_pruning)
5562 tree var = ivi->decl;
5564 if (TREE_CODE (var) == SSA_NAME)
5565 var = SSA_NAME_VAR (var);
5567 if (POINTER_TYPE_P (TREE_TYPE (var)))
5569 DECL_NO_TBAA_P (var) = 1;
5571 /* Tell the RTL layer that this pointer can alias
5573 DECL_POINTER_ALIAS_SET (var) = 0;
5580 /* Create points-to sets for the current function.  See the comments
5581 at the start of the file for an algorithmic overview.  */
/* Intraprocedural driver: builds constraints from every PHI and
   statement, runs offline variable substitution, solves the constraint
   graph, computes TBAA pruning, and sets have_alias_info.  Timed under
   TV_TREE_PTA.  */
5584 compute_points_to_sets (struct alias_info *ai)
5586 struct scc_info *si;
5589 timevar_push (TV_TREE_PTA);
5592 init_alias_heapvars ();
5594 intra_create_variable_infos ();
5596 /* Now walk all statements and derive aliases.  */
5599 block_stmt_iterator bsi;
5602 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5604 if (is_gimple_reg (PHI_RESULT (phi)))
5606 find_func_aliases (phi);
5608 /* Update various related attributes like escaped
5609 addresses, pointer dereferences for loads and stores.
5610 This is used when creating name tags and alias
5612 update_alias_info (phi, ai);
5616 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
5618 tree stmt = bsi_stmt (bsi);
5620 find_func_aliases (stmt);
5622 /* Update various related attributes like escaped
5623 addresses, pointer dereferences for loads and stores.
5624 This is used when creating name tags and alias
5626 update_alias_info (stmt, ai);
5628 /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
5629 been captured, and we can remove them.  */
5630 if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
5631 bsi_remove (&bsi, true);
5640 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5641 dump_constraints (dump_file);
5646 "\nCollapsing static cycles and doing variable "
/* Graph is sized 2x varmap: the upper half holds REF pseudo-nodes.  */
5649 init_graph (VEC_length (varinfo_t, varmap) * 2);
5652 fprintf (dump_file, "Building predecessor graph\n");
5653 build_pred_graph ();
5656 fprintf (dump_file, "Detecting pointer and location "
5658 si = perform_var_substitution (graph);
5661 fprintf (dump_file, "Rewriting constraints and unifying "
5663 rewrite_constraints (graph, si);
5664 free_var_substitution_info (si);
5666 build_succ_graph ();
5667 move_complex_constraints (graph);
5670 fprintf (dump_file, "Uniting pointer but not location equivalent "
5672 unite_pointer_equivalences (graph);
5675 fprintf (dump_file, "Finding indirect cycles\n");
5676 find_indirect_cycles (graph);
5678 /* Implicit nodes and predecessors are no longer necessary at this
5680 remove_preds_and_fake_succs (graph);
5683 fprintf (dump_file, "Solving graph\n");
5685 solve_graph (graph);
5687 compute_tbaa_pruning ();
5690 dump_sa_points_to_info (dump_file);
5692 have_alias_info = true;
5694 timevar_pop (TV_TREE_PTA);
5698 /* Delete created points-to sets.  */
/* Tear down everything init_alias_vars and the solver built: the
   shared-bitmap table, the tree -> varinfo map, the PTA bitmap obstack,
   the constraint vector, the graph arrays, varmap and both allocation
   pools.  Clears have_alias_info.  */
5701 delete_points_to_sets (void)
5705 htab_delete (shared_bitmap_table);
5706 if (dump_file && (dump_flags & TDF_STATS))
5707 fprintf (dump_file, "Points to sets created:%d\n",
5708 stats.points_to_sets_created);
5710 pointer_map_destroy (vi_for_tree);
5711 bitmap_obstack_release (&pta_obstack);
5712 VEC_free (constraint_t, heap, constraints);
5714 for (i = 0; i < graph->size; i++)
5715 VEC_free (constraint_t, heap, graph->complex[i]);
5716 free (graph->complex);
5719 free (graph->succs);
5721 free (graph->pe_rep);
5722 free (graph->indirect_cycles);
5725 VEC_free (varinfo_t, heap, varmap);
5726 free_alloc_pool (variable_info_pool);
5727 free_alloc_pool (constraint_pool);
5728 have_alias_info = false;
5731 /* Return true if we should execute IPA PTA.  */
/* Gate: IPA PTA requires whole-unit compilation and a program without
   errors.  NOTE(review): the flag test for -fipa-pta itself appears to
   be on a line missing from this listing — confirm against the full
   source.  */
5735 return (flag_unit_at_a_time != 0
5737 /* Don't bother doing anything if the program has errors.  */
5738 && !(errorcount || sorrycount));
5741 /* Execute the driver for IPA PTA.  */
/* Interprocedural variant of compute_points_to_sets: creates function
   infos for every analyzed cgraph node (externally visible functions
   point to anything), generates constraints from each function body,
   then runs the same substitution/solve pipeline, and finally tears all
   the data down again.  */
5743 ipa_pta_execute (void)
5745 struct cgraph_node *node;
5746 struct scc_info *si;
5749 init_alias_heapvars ();
/* First pass: create a varinfo chain for every function.  */
5752 for (node = cgraph_nodes; node; node = node->next)
5754 if (!node->analyzed || cgraph_is_master_clone (node))
5758 varid = create_function_info_for (node->decl,
5759 cgraph_node_name (node));
/* Externally visible functions can be called with anything, so all
   their parts may point to anything.  */
5760 if (node->local.externally_visible)
5762 varinfo_t fi = get_varinfo (varid);
5763 for (; fi; fi = fi->next)
5764 make_constraint_from_anything (fi);
/* Second pass: generate constraints from each function body.  */
5768 for (node = cgraph_nodes; node; node = node->next)
5770 if (node->analyzed && cgraph_is_master_clone (node))
5772 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
5774 tree old_func_decl = current_function_decl;
5777 "Generating constraints for %s\n",
5778 cgraph_node_name (node));
/* find_func_aliases consults current_function_decl; switch context
   and restore it afterwards.  */
5780 current_function_decl = node->decl;
5782 FOR_EACH_BB_FN (bb, func)
5784 block_stmt_iterator bsi;
5787 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5789 if (is_gimple_reg (PHI_RESULT (phi)))
5791 find_func_aliases (phi);
5795 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5797 tree stmt = bsi_stmt (bsi);
5798 find_func_aliases (stmt);
5801 current_function_decl = old_func_decl;
5806 /* Make point to anything.  */
5812 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5813 dump_constraints (dump_file);
5818 "\nCollapsing static cycles and doing variable "
5821 init_graph (VEC_length (varinfo_t, varmap) * 2);
5822 build_pred_graph ();
5823 si = perform_var_substitution (graph);
5824 rewrite_constraints (graph, si);
5825 free_var_substitution_info (si);
5827 build_succ_graph ();
5828 move_complex_constraints (graph);
5829 unite_pointer_equivalences (graph);
5830 find_indirect_cycles (graph);
5832 /* Implicit nodes and predecessors are no longer necessary at this
5834 remove_preds_and_fake_succs (graph);
5837 fprintf (dump_file, "\nSolving graph\n");
5839 solve_graph (graph);
5842 dump_sa_points_to_info (dump_file);
5845 delete_alias_heapvars ();
5846 delete_points_to_sets ();
/* Pass descriptor for the interprocedural points-to analysis pass.  */
5850 struct simple_ipa_opt_pass pass_ipa_pta =
5855 gate_ipa_pta, /* gate */
5856 ipa_pta_execute, /* execute */
5859 0, /* static_pass_number */
5860 TV_IPA_PTA, /* tv_id */
5861 0, /* properties_required */
5862 0, /* properties_provided */
5863 0, /* properties_destroyed */
5864 0, /* todo_flags_start */
5865 TODO_update_ssa /* todo_flags_finish */
5869 /* Initialize the heapvar for statement mapping.  */
/* Lazily creates the GC-managed hash table mapping statements to the
   heap variables created for them; idempotent across calls.  */
5871 init_alias_heapvars (void)
5873 if (!heapvar_for_stmt)
5874 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
/* Free the statement -> heap-variable map; init_alias_heapvars will
   recreate it on the next run.  */
5879 delete_alias_heapvars (void)
5881 htab_delete (heapvar_for_stmt);
5882 heapvar_for_stmt = NULL;
5886 #include "gt-tree-ssa-structalias.h"