1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
36 #include "tree-flow.h"
37 #include "tree-inline.h"
40 #include "diagnostic.h"
46 #include "tree-pass.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
51 #include "tree-ssa-structalias.h"
54 #include "pointer-set.h"
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
68 See "Efficient Field-sensitive pointer analysis for C" by "David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
73 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
93 Each variable for a structure field has
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize, that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
115 In order to solve the system of set constraints, the following is
118 1. Each constraint variable x has a solution set associated with it,
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
146 8. The process of walking the graph is iterated until no solution
149 Prior to walking the graph in steps 6 and 7, We perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
195 static struct constraint_stats
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
209 /* ID of this variable */
212 /* True if this is a variable created by the constraint analysis, such as
213 heap variables and constraints we had to break up. */
214 unsigned int is_artificial_var:1;
216 /* True if this is a special variable whose solution set should not be
218 unsigned int is_special_var:1;
220 /* True for variables whose size is not known or variable. */
221 unsigned int is_unknown_size_var:1;
223 /* True for (sub-)fields that represent a whole variable. */
224 unsigned int is_full_var : 1;
226 /* True if this is a heap variable. */
227 unsigned int is_heap_var:1;
229 /* True if we may not use TBAA to prune references to this
230 variable. This is used for C++ placement new. */
231 unsigned int no_tbaa_pruning : 1;
233 /* Variable id this was collapsed to due to type unsafety. Zero if
234 this variable was not collapsed. This should be unused completely
235 after build_succ_graph, or something is broken. */
236 unsigned int collapsed_to;
238 /* A link to the variable for the next field in this structure. */
239 struct variable_info *next;
241 /* Offset of this variable, in bits, from the base variable */
242 unsigned HOST_WIDE_INT offset;
244 /* Size of the variable, in bits. */
245 unsigned HOST_WIDE_INT size;
247 /* Full size of the base variable, in bits. */
248 unsigned HOST_WIDE_INT fullsize;
250 /* Name of this variable */
253 /* Tree that this variable is associated with. */
256 /* Points-to set for this variable. */
259 /* Old points-to set for this variable. */
262 typedef struct variable_info *varinfo_t;
264 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
265 static varinfo_t lookup_vi_for_tree (tree);
267 /* Pool of variable info structures. */
268 static alloc_pool variable_info_pool;
270 DEF_VEC_P(varinfo_t);
272 DEF_VEC_ALLOC_P(varinfo_t, heap);
274 /* Table of variable info structures for constraint variables.
275 Indexed directly by variable info id. */
276 static VEC(varinfo_t,heap) *varmap;
278 /* Return the varmap element N */
280 static inline varinfo_t
281 get_varinfo (unsigned int n)
283 return VEC_index (varinfo_t, varmap, n);
286 /* Return the varmap element N, following the collapsed_to link. */
288 static inline varinfo_t
289 get_varinfo_fc (unsigned int n)
291 varinfo_t v = VEC_index (varinfo_t, varmap, n);
293 if (v->collapsed_to != 0)
294 return get_varinfo (v->collapsed_to);
298 /* Static IDs for the special variables. */
299 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
300 escaped_id = 3, nonlocal_id = 4, callused_id = 5, integer_id = 6 };
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
306 /* Variable that represents the NULL pointer. */
307 static varinfo_t var_nothing;
308 static tree nothing_tree;
310 /* Variable that represents read only memory. */
311 static varinfo_t var_readonly;
312 static tree readonly_tree;
314 /* Variable that represents escaped memory. */
315 static varinfo_t var_escaped;
316 static tree escaped_tree;
318 /* Variable that represents nonlocal memory. */
319 static varinfo_t var_nonlocal;
320 static tree nonlocal_tree;
322 /* Variable that represents call-used memory. */
323 static varinfo_t var_callused;
324 static tree callused_tree;
326 /* Variable that represents integers. This is used for when people do things
328 static varinfo_t var_integer;
329 static tree integer_tree;
331 /* Lookup a heap var for FROM, and return it if we find one. */
334 heapvar_lookup (tree from)
336 struct tree_map *h, in;
339 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
340 htab_hash_pointer (from));
346 /* Insert a mapping FROM->TO in the heap var for statement
350 heapvar_insert (tree from, tree to)
355 h = GGC_NEW (struct tree_map);
356 h->hash = htab_hash_pointer (from);
359 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
360 *(struct tree_map **) loc = h;
363 /* Return a new variable info structure consisting for a variable
364 named NAME, and using constraint graph node NODE. */
367 new_var_info (tree t, unsigned int id, const char *name)
369 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
375 ret->is_artificial_var = false;
376 ret->is_heap_var = false;
377 ret->is_special_var = false;
378 ret->is_unknown_size_var = false;
379 ret->is_full_var = false;
381 if (TREE_CODE (var) == SSA_NAME)
382 var = SSA_NAME_VAR (var);
383 ret->no_tbaa_pruning = (DECL_P (var)
384 && POINTER_TYPE_P (TREE_TYPE (var))
385 && DECL_NO_TBAA_P (var));
386 ret->solution = BITMAP_ALLOC (&pta_obstack);
387 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
389 ret->collapsed_to = 0;
393 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
395 /* An expression that appears in a constraint. */
397 struct constraint_expr
399 /* Constraint type. */
400 constraint_expr_type type;
402 /* Variable we are referring to in the constraint. */
405 /* Offset, in bits, of this constraint from the beginning of
406 variables it ends up referring to.
408 IOW, in a deref constraint, we would deref, get the result set,
409 then add OFFSET to each member. */
410 unsigned HOST_WIDE_INT offset;
413 typedef struct constraint_expr ce_s;
415 DEF_VEC_ALLOC_O(ce_s, heap);
416 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
417 static void get_constraint_for (tree, VEC(ce_s, heap) **);
418 static void do_deref (VEC (ce_s, heap) **);
420 /* Our set constraints are made up of two constraint expressions, one
423 As described in the introduction, our set constraints each represent an
424 operation between set valued variables.
428 struct constraint_expr lhs;
429 struct constraint_expr rhs;
432 /* List of constraints that we use to build the constraint graph from. */
434 static VEC(constraint_t,heap) *constraints;
435 static alloc_pool constraint_pool;
439 DEF_VEC_ALLOC_I(int, heap);
441 /* The constraint graph is represented as an array of bitmaps
442 containing successor nodes. */
444 struct constraint_graph
446 /* Size of this graph, which may be different than the number of
447 nodes in the variable map. */
450 /* Explicit successors of each node. */
453 /* Implicit predecessors of each node (Used for variable
455 bitmap *implicit_preds;
457 /* Explicit predecessors of each node (Used for variable substitution). */
460 /* Indirect cycle representatives, or -1 if the node has no indirect
462 int *indirect_cycles;
464 /* Representative node for a node. rep[a] == a unless the node has
468 /* Equivalence class representative for a label. This is used for
469 variable substitution. */
472 /* Pointer equivalence label for a node. All nodes with the same
473 pointer equivalence label can be unified together at some point
474 (either during constraint optimization or after the constraint
478 /* Pointer equivalence representative for a label. This is used to
479 handle nodes that are pointer equivalent but not location
480 equivalent. We can unite these once the addressof constraints
481 are transformed into initial points-to sets. */
484 /* Pointer equivalence label for each node, used during variable
486 unsigned int *pointer_label;
488 /* Location equivalence label for each node, used during location
489 equivalence finding. */
490 unsigned int *loc_label;
492 /* Pointed-by set for each node, used during location equivalence
493 finding. This is pointed-by rather than pointed-to, because it
494 is constructed using the predecessor graph. */
497 /* Points to sets for pointer equivalence. This is *not* the actual
498 points-to sets for nodes. */
501 /* Bitmap of nodes where the bit is set if the node is a direct
502 node. Used for variable substitution. */
503 sbitmap direct_nodes;
505 /* Bitmap of nodes where the bit is set if the node is address
506 taken. Used for variable substitution. */
507 bitmap address_taken;
509 /* Vector of complex constraints for each graph node. Complex
510 constraints are those involving dereferences or offsets that are
512 VEC(constraint_t,heap) **complex;
515 static constraint_graph_t graph;
517 /* During variable substitution and the offline version of indirect
518 cycle finding, we create nodes to represent dereferences and
519 address taken constraints. These represent where these start and
521 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
522 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
524 /* Return the representative node for NODE, if NODE has been unioned
526 This function performs path compression along the way to finding
527 the representative. */
530 find (unsigned int node)
532 gcc_assert (node < graph->size);
533 if (graph->rep[node] != node)
534 return graph->rep[node] = find (graph->rep[node]);
538 /* Union the TO and FROM nodes to the TO nodes.
539 Note that at some point in the future, we may want to do
540 union-by-rank, in which case we are going to have to return the
541 node we unified to. */
544 unite (unsigned int to, unsigned int from)
546 gcc_assert (to < graph->size && from < graph->size);
547 if (to != from && graph->rep[from] != to)
549 graph->rep[from] = to;
555 /* Create a new constraint consisting of LHS and RHS expressions. */
558 new_constraint (const struct constraint_expr lhs,
559 const struct constraint_expr rhs)
561 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
567 /* Print out constraint C to FILE. */
570 dump_constraint (FILE *file, constraint_t c)
572 if (c->lhs.type == ADDRESSOF)
574 else if (c->lhs.type == DEREF)
576 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
577 if (c->lhs.offset != 0)
578 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
579 fprintf (file, " = ");
580 if (c->rhs.type == ADDRESSOF)
582 else if (c->rhs.type == DEREF)
584 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
585 if (c->rhs.offset != 0)
586 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
587 fprintf (file, "\n");
590 /* Print out constraint C to stderr. */
593 debug_constraint (constraint_t c)
595 dump_constraint (stderr, c);
598 /* Print out all constraints to FILE */
601 dump_constraints (FILE *file)
605 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
606 dump_constraint (file, c);
609 /* Print out all constraints to stderr. */
612 debug_constraints (void)
614 dump_constraints (stderr);
617 /* Print out to FILE the edge in the constraint graph that is created by
618 constraint c. The edge may have a label, depending on the type of
619 constraint that it represents. If complex1, e.g: a = *b, then the label
620 is "=*", if complex2, e.g: *a = b, then the label is "*=", if
621 complex with an offset, e.g: a = b + 8, then the label is "+".
622 Otherwise the edge has no label. */
625 dump_constraint_edge (FILE *file, constraint_t c)
627 if (c->rhs.type != ADDRESSOF)
629 const char *src = get_varinfo_fc (c->rhs.var)->name;
630 const char *dst = get_varinfo_fc (c->lhs.var)->name;
631 fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
632 /* Due to preprocessing of constraints, instructions like *a = *b are
633 illegal; thus, we do not have to handle such cases. */
634 if (c->lhs.type == DEREF)
635 fprintf (file, " [ label=\"*=\" ] ;\n");
636 else if (c->rhs.type == DEREF)
637 fprintf (file, " [ label=\"=*\" ] ;\n");
640 /* We must check the case where the constraint is an offset.
641 In this case, it is treated as a complex constraint. */
642 if (c->rhs.offset != c->lhs.offset)
643 fprintf (file, " [ label=\"+\" ] ;\n");
645 fprintf (file, " ;\n");
650 /* Print the constraint graph in dot format. */
653 dump_constraint_graph (FILE *file)
655 unsigned int i=0, size;
658 /* Only print the graph if it has already been initialized: */
662 /* Print the constraints used to produce the constraint graph. The
663 constraints will be printed as comments in the dot file: */
664 fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
665 dump_constraints (file);
666 fprintf (file, "*/\n");
668 /* Prints the header of the dot file: */
669 fprintf (file, "\n\n// The constraint graph in dot format:\n");
670 fprintf (file, "strict digraph {\n");
671 fprintf (file, " node [\n shape = box\n ]\n");
672 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
673 fprintf (file, "\n // List of nodes in the constraint graph:\n");
675 /* The next lines print the nodes in the graph. In order to get the
676 number of nodes in the graph, we must choose the minimum between the
677 vector VEC (varinfo_t, varmap) and graph->size. If the graph has not
678 yet been initialized, then graph->size == 0, otherwise we must only
679 read nodes that have an entry in VEC (varinfo_t, varmap). */
680 size = VEC_length (varinfo_t, varmap);
681 size = size < graph->size ? size : graph->size;
682 for (i = 0; i < size; i++)
684 const char *name = get_varinfo_fc (graph->rep[i])->name;
685 fprintf (file, " \"%s\" ;\n", name);
688 /* Go over the list of constraints printing the edges in the constraint
690 fprintf (file, "\n // The constraint edges:\n");
691 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
693 dump_constraint_edge (file, c);
695 /* Prints the tail of the dot file. By now, only the closing bracket. */
696 fprintf (file, "}\n\n\n");
699 /* Print out the constraint graph to stderr. */
702 debug_constraint_graph (void)
704 dump_constraint_graph (stderr);
709 The solver is a simple worklist solver, that works on the following
712 sbitmap changed_nodes = all zeroes;
714 For each node that is not already collapsed:
716 set bit in changed nodes
718 while (changed_count > 0)
720 compute topological ordering for constraint graph
722 find and collapse cycles in the constraint graph (updating
723 changed if necessary)
725 for each node (n) in the graph in topological order:
728 Process each complex constraint associated with the node,
729 updating changed if necessary.
731 For each outgoing edge from n, propagate the solution from n to
732 the destination of the edge, updating changed as necessary.
736 /* Return true if two constraint expressions A and B are equal. */
739 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
741 return a.type == b.type && a.var == b.var && a.offset == b.offset;
744 /* Return true if constraint expression A is less than constraint expression
745 B. This is just arbitrary, but consistent, in order to give them an
749 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
751 if (a.type == b.type)
754 return a.offset < b.offset;
756 return a.var < b.var;
759 return a.type < b.type;
762 /* Return true if constraint A is less than constraint B. This is just
763 arbitrary, but consistent, in order to give them an ordering. */
766 constraint_less (const constraint_t a, const constraint_t b)
768 if (constraint_expr_less (a->lhs, b->lhs))
770 else if (constraint_expr_less (b->lhs, a->lhs))
773 return constraint_expr_less (a->rhs, b->rhs);
776 /* Return true if two constraints A and B are equal. */
779 constraint_equal (struct constraint a, struct constraint b)
781 return constraint_expr_equal (a.lhs, b.lhs)
782 && constraint_expr_equal (a.rhs, b.rhs);
786 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
789 constraint_vec_find (VEC(constraint_t,heap) *vec,
790 struct constraint lookfor)
798 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
799 if (place >= VEC_length (constraint_t, vec))
801 found = VEC_index (constraint_t, vec, place);
802 if (!constraint_equal (*found, lookfor))
807 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
810 constraint_set_union (VEC(constraint_t,heap) **to,
811 VEC(constraint_t,heap) **from)
816 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
818 if (constraint_vec_find (*to, *c) == NULL)
820 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
822 VEC_safe_insert (constraint_t, heap, *to, place, c);
827 /* Take a solution set SET, add OFFSET to each member of the set, and
828 overwrite SET with the result when done. */
831 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
833 bitmap result = BITMAP_ALLOC (&iteration_obstack);
837 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
839 varinfo_t vi = get_varinfo (i);
841 /* If this is a variable with just one field just set its bit
843 if (vi->is_artificial_var
844 || vi->is_unknown_size_var
846 bitmap_set_bit (result, i);
849 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
850 varinfo_t v = first_vi_for_offset (vi, fieldoffset);
851 /* If the result is outside of the variable use the last field. */
855 while (v->next != NULL)
858 bitmap_set_bit (result, v->id);
859 /* If the result is not exactly at fieldoffset include the next
860 field as well. See get_constraint_for_ptr_offset for more
862 if (v->offset != fieldoffset
864 bitmap_set_bit (result, v->next->id);
868 bitmap_copy (set, result);
869 BITMAP_FREE (result);
872 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
876 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
879 return bitmap_ior_into (to, from);
885 tmp = BITMAP_ALLOC (&iteration_obstack);
886 bitmap_copy (tmp, from);
887 solution_set_add (tmp, inc);
888 res = bitmap_ior_into (to, tmp);
894 /* Insert constraint C into the list of complex constraints for graph
898 insert_into_complex (constraint_graph_t graph,
899 unsigned int var, constraint_t c)
901 VEC (constraint_t, heap) *complex = graph->complex[var];
902 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
905 /* Only insert constraints that do not already exist. */
906 if (place >= VEC_length (constraint_t, complex)
907 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
908 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
912 /* Condense two variable nodes into a single variable node, by moving
913 all associated info from SRC to TO. */
916 merge_node_constraints (constraint_graph_t graph, unsigned int to,
922 gcc_assert (find (from) == to);
924 /* Move all complex constraints from src node into to node */
925 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
927 /* In complex constraints for node src, we may have either
928 a = *src, and *src = a, or an offseted constraint which are
929 always added to the rhs node's constraints. */
931 if (c->rhs.type == DEREF)
933 else if (c->lhs.type == DEREF)
938 constraint_set_union (&graph->complex[to], &graph->complex[from]);
939 VEC_free (constraint_t, heap, graph->complex[from]);
940 graph->complex[from] = NULL;
944 /* Remove edges involving NODE from GRAPH. */
947 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
949 if (graph->succs[node])
950 BITMAP_FREE (graph->succs[node]);
953 /* Merge GRAPH nodes FROM and TO into node TO. */
956 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
959 if (graph->indirect_cycles[from] != -1)
961 /* If we have indirect cycles with the from node, and we have
962 none on the to node, the to node has indirect cycles from the
963 from node now that they are unified.
964 If indirect cycles exist on both, unify the nodes that they
965 are in a cycle with, since we know they are in a cycle with
967 if (graph->indirect_cycles[to] == -1)
968 graph->indirect_cycles[to] = graph->indirect_cycles[from];
971 /* Merge all the successor edges. */
972 if (graph->succs[from])
974 if (!graph->succs[to])
975 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
976 bitmap_ior_into (graph->succs[to],
980 clear_edges_for_node (graph, from);
984 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
985 it doesn't exist in the graph already. */
988 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
994 if (!graph->implicit_preds[to])
995 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
997 if (bitmap_set_bit (graph->implicit_preds[to], from))
998 stats.num_implicit_edges++;
1001 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1002 it doesn't exist in the graph already.
1003 Return false if the edge already existed, true otherwise. */
1006 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1009 if (!graph->preds[to])
1010 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1011 bitmap_set_bit (graph->preds[to], from);
1014 /* Add a graph edge to GRAPH, going from FROM to TO if
1015 it doesn't exist in the graph already.
1016 Return false if the edge already existed, true otherwise. */
1019 add_graph_edge (constraint_graph_t graph, unsigned int to,
1030 if (!graph->succs[from])
1031 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1032 if (bitmap_set_bit (graph->succs[from], to))
1035 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1043 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
1046 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1049 return (graph->succs[dest]
1050 && bitmap_bit_p (graph->succs[dest], src));
1053 /* Initialize the constraint graph structure to contain SIZE nodes. */
1056 init_graph (unsigned int size)
1060 graph = XCNEW (struct constraint_graph);
1062 graph->succs = XCNEWVEC (bitmap, graph->size);
1063 graph->indirect_cycles = XNEWVEC (int, graph->size);
1064 graph->rep = XNEWVEC (unsigned int, graph->size);
1065 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
1066 graph->pe = XCNEWVEC (unsigned int, graph->size);
1067 graph->pe_rep = XNEWVEC (int, graph->size);
1069 for (j = 0; j < graph->size; j++)
1072 graph->pe_rep[j] = -1;
1073 graph->indirect_cycles[j] = -1;
1077 /* Build the constraint graph, adding only predecessor edges right now. */
1080 build_pred_graph (void)
1086 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1087 graph->preds = XCNEWVEC (bitmap, graph->size);
1088 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1089 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1090 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1091 graph->points_to = XCNEWVEC (bitmap, graph->size);
1092 graph->eq_rep = XNEWVEC (int, graph->size);
1093 graph->direct_nodes = sbitmap_alloc (graph->size);
1094 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1095 sbitmap_zero (graph->direct_nodes);
1097 for (j = 0; j < FIRST_REF_NODE; j++)
1099 if (!get_varinfo (j)->is_special_var)
1100 SET_BIT (graph->direct_nodes, j);
1103 for (j = 0; j < graph->size; j++)
1104 graph->eq_rep[j] = -1;
1106 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1107 graph->indirect_cycles[j] = -1;
1109 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1111 struct constraint_expr lhs = c->lhs;
1112 struct constraint_expr rhs = c->rhs;
1113 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1114 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1116 if (lhs.type == DEREF)
1119 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1120 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1122 else if (rhs.type == DEREF)
1125 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1126 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1128 RESET_BIT (graph->direct_nodes, lhsvar);
1130 else if (rhs.type == ADDRESSOF)
1135 if (graph->points_to[lhsvar] == NULL)
1136 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1137 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1139 if (graph->pointed_by[rhsvar] == NULL)
1140 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1141 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1143 /* Implicitly, *x = y */
1144 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1146 /* All related variables are no longer direct nodes. */
1147 RESET_BIT (graph->direct_nodes, rhsvar);
1148 v = get_varinfo (rhsvar);
1149 if (!v->is_full_var)
1151 v = lookup_vi_for_tree (v->decl);
1154 RESET_BIT (graph->direct_nodes, v->id);
1159 bitmap_set_bit (graph->address_taken, rhsvar);
1161 else if (lhsvar > anything_id
1162 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1165 add_pred_graph_edge (graph, lhsvar, rhsvar);
1166 /* Implicitly, *x = *y */
1167 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1168 FIRST_REF_NODE + rhsvar);
1170 else if (lhs.offset != 0 || rhs.offset != 0)
1172 if (rhs.offset != 0)
1173 RESET_BIT (graph->direct_nodes, lhs.var);
1174 else if (lhs.offset != 0)
1175 RESET_BIT (graph->direct_nodes, rhs.var);
1180 /* Build the constraint graph, adding successor edges. */
1183 build_succ_graph (void)
1188 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1190 struct constraint_expr lhs;
1191 struct constraint_expr rhs;
1192 unsigned int lhsvar;
1193 unsigned int rhsvar;
1200 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1201 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1203 if (lhs.type == DEREF)
1205 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1206 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1208 else if (rhs.type == DEREF)
1210 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1211 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1213 else if (rhs.type == ADDRESSOF)
1216 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1217 == get_varinfo_fc (rhs.var)->id);
1218 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1220 else if (lhsvar > anything_id
1221 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1223 add_graph_edge (graph, lhsvar, rhsvar);
1229 /* Changed variables on the last iteration. */
1230 static unsigned int changed_count;
1231 static sbitmap changed;
1233 DEF_VEC_I(unsigned);
1234 DEF_VEC_ALLOC_I(unsigned,heap);
1237 /* Strongly Connected Component visitation info. */
1244 unsigned int *node_mapping;
1246 VEC(unsigned,heap) *scc_stack;
1250 /* Recursive routine to find strongly connected components in GRAPH.
1251 SI is the SCC info to store the information in, and N is the id of current
1252 graph node we are processing.
1254 This is Tarjan's strongly connected component finding algorithm, as
1255 modified by Nuutila to keep only non-root nodes on the stack.
1256 The algorithm can be found in "On finding the strongly connected
1257 connected components in a directed graph" by Esko Nuutila and Eljas
1258 Soisalon-Soininen, in Information Processing Letters volume 49,
1259 number 1, pages 9-14. */
1262 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1266 unsigned int my_dfs;
1268 SET_BIT (si->visited, n);
1269 si->dfs[n] = si->current_index ++;
1270 my_dfs = si->dfs[n];
1272 /* Visit all the successors. */
1273 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1277 if (i > LAST_REF_NODE)
1281 if (TEST_BIT (si->deleted, w))
1284 if (!TEST_BIT (si->visited, w))
1285 scc_visit (graph, si, w);
1287 unsigned int t = find (w);
1288 unsigned int nnode = find (n);
1289 gcc_assert (nnode == n);
1291 if (si->dfs[t] < si->dfs[nnode])
1292 si->dfs[n] = si->dfs[t];
1296 /* See if any components have been identified. */
1297 if (si->dfs[n] == my_dfs)
1299 if (VEC_length (unsigned, si->scc_stack) > 0
1300 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1302 bitmap scc = BITMAP_ALLOC (NULL);
1303 bool have_ref_node = n >= FIRST_REF_NODE;
1304 unsigned int lowest_node;
1307 bitmap_set_bit (scc, n);
1309 while (VEC_length (unsigned, si->scc_stack) != 0
1310 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1312 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1314 bitmap_set_bit (scc, w);
1315 if (w >= FIRST_REF_NODE)
1316 have_ref_node = true;
1319 lowest_node = bitmap_first_set_bit (scc);
1320 gcc_assert (lowest_node < FIRST_REF_NODE);
1322 /* Collapse the SCC nodes into a single node, and mark the
1324 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1326 if (i < FIRST_REF_NODE)
1328 if (unite (lowest_node, i))
1329 unify_nodes (graph, lowest_node, i, false);
1333 unite (lowest_node, i);
1334 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1338 SET_BIT (si->deleted, n);
1341 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1344 /* Unify node FROM into node TO, updating the changed count if
1345 necessary when UPDATE_CHANGED is true. */
1348 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1349 bool update_changed)
1352 gcc_assert (to != from && find (to) == to);
1353 if (dump_file && (dump_flags & TDF_DETAILS))
1354 fprintf (dump_file, "Unifying %s to %s\n",
1355 get_varinfo (from)->name,
1356 get_varinfo (to)->name);
1359 stats.unified_vars_dynamic++;
1361 stats.unified_vars_static++;
1363 merge_graph_nodes (graph, to, from);
1364 merge_node_constraints (graph, to, from);
1366 if (get_varinfo (from)->no_tbaa_pruning)
1367 get_varinfo (to)->no_tbaa_pruning = true;
1369 /* Mark TO as changed if FROM was changed. If TO was already marked
1370 as changed, decrease the changed count. */
1372 if (update_changed && TEST_BIT (changed, from))
1374 RESET_BIT (changed, from);
1375 if (!TEST_BIT (changed, to))
1376 SET_BIT (changed, to);
1379 gcc_assert (changed_count > 0);
1383 if (get_varinfo (from)->solution)
1385 /* If the solution changes because of the merging, we need to mark
1386 the variable as changed. */
1387 if (bitmap_ior_into (get_varinfo (to)->solution,
1388 get_varinfo (from)->solution))
1390 if (update_changed && !TEST_BIT (changed, to))
1392 SET_BIT (changed, to);
1397 BITMAP_FREE (get_varinfo (from)->solution);
1398 BITMAP_FREE (get_varinfo (from)->oldsolution);
1400 if (stats.iterations > 0)
1402 BITMAP_FREE (get_varinfo (to)->oldsolution);
1403 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1406 if (valid_graph_edge (graph, to, to))
1408 if (graph->succs[to])
1409 bitmap_clear_bit (graph->succs[to], to);
1413 /* Information needed to compute the topological ordering of a graph. */
1417 /* sbitmap of visited nodes. */
1419 /* Array that stores the topological order of the graph, *in
1421 VEC(unsigned,heap) *topo_order;
1425 /* Initialize and return a topological info structure. */
1427 static struct topo_info *
1428 init_topo_info (void)
1430 size_t size = graph->size;
1431 struct topo_info *ti = XNEW (struct topo_info);
1432 ti->visited = sbitmap_alloc (size);
1433 sbitmap_zero (ti->visited);
1434 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1439 /* Free the topological sort info pointed to by TI. */
1442 free_topo_info (struct topo_info *ti)
1444 sbitmap_free (ti->visited);
1445 VEC_free (unsigned, heap, ti->topo_order);
1449 /* Visit the graph in topological order, and store the order in the
1450 topo_info structure. */
1453 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1459 SET_BIT (ti->visited, n);
1461 if (graph->succs[n])
1462 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1464 if (!TEST_BIT (ti->visited, j))
1465 topo_visit (graph, ti, j);
1468 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1471 /* Return true if variable N + OFFSET is a legal field of N. */
1474 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1476 varinfo_t ninfo = get_varinfo (n);
1478 /* For things we've globbed to single variables, any offset into the
1479 variable acts like the entire variable, so that it becomes offset
1481 if (ninfo->is_special_var
1482 || ninfo->is_artificial_var
1483 || ninfo->is_unknown_size_var
1484 || ninfo->is_full_var)
1489 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1492 /* Process a constraint C that represents x = *y, using DELTA as the
1493 starting solution. */
1496 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1499 unsigned int lhs = c->lhs.var;
1501 bitmap sol = get_varinfo (lhs)->solution;
1505 if (bitmap_bit_p (delta, anything_id))
1507 flag |= bitmap_set_bit (sol, anything_id);
1511 /* For x = *ESCAPED and x = *CALLUSED we want to compute the
1512 reachability set of the rhs var. As a pointer to a sub-field
1513 of a variable can also reach all other fields of the variable
1514 we simply have to expand the solution to contain all sub-fields
1515 if one sub-field is contained. */
1516 if (c->rhs.var == escaped_id
1517 || c->rhs.var == callused_id)
1520 /* In a first pass record all variables we need to add all
1521 sub-fields off. This avoids quadratic behavior. */
1522 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1524 varinfo_t v = get_varinfo (j);
1528 v = lookup_vi_for_tree (v->decl);
1529 if (v->next != NULL)
1532 vars = BITMAP_ALLOC (NULL);
1533 bitmap_set_bit (vars, v->id);
1536 /* In the second pass now do the addition to the solution and
1537 to speed up solving add it to the delta as well. */
1540 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
1542 varinfo_t v = get_varinfo (j);
1543 for (; v != NULL; v = v->next)
1545 if (bitmap_set_bit (sol, v->id))
1548 bitmap_set_bit (delta, v->id);
1556 /* For each variable j in delta (Sol(y)), add
1557 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1558 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1560 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1561 if (type_safe (j, &roffset))
1564 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1567 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1568 /* If the access is outside of the variable we can ignore it. */
1573 /* Adding edges from the special vars is pointless.
1574 They don't have sets that can change. */
1575 if (get_varinfo (t)->is_special_var)
1576 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1577 /* Merging the solution from ESCAPED needlessly increases
1578 the set. Use ESCAPED as representative instead.
1579 Same for CALLUSED. */
1580 else if (get_varinfo (t)->id == escaped_id
1581 || get_varinfo (t)->id == callused_id)
1582 flag |= bitmap_set_bit (sol, get_varinfo (t)->id);
1583 else if (add_graph_edge (graph, lhs, t))
1584 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1589 /* If the LHS solution changed, mark the var as changed. */
1592 get_varinfo (lhs)->solution = sol;
1593 if (!TEST_BIT (changed, lhs))
1595 SET_BIT (changed, lhs);
1601 /* Process a constraint C that represents *x = y. */
1604 do_ds_constraint (constraint_t c, bitmap delta)
1606 unsigned int rhs = c->rhs.var;
1607 bitmap sol = get_varinfo (rhs)->solution;
1611 if (bitmap_bit_p (sol, anything_id))
1613 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1615 varinfo_t jvi = get_varinfo (j);
1617 unsigned int loff = c->lhs.offset;
1618 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1621 v = get_varinfo (j);
1622 if (!v->is_full_var)
1624 v = first_vi_for_offset (v, fieldoffset);
1625 /* If the access is outside of the variable we can ignore it. */
1631 if (bitmap_set_bit (get_varinfo (t)->solution, anything_id)
1632 && !TEST_BIT (changed, t))
1634 SET_BIT (changed, t);
1641 /* For each member j of delta (Sol(x)), add an edge from y to j and
1642 union Sol(y) into Sol(j) */
1643 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1645 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1646 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1650 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1653 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1654 /* If the access is outside of the variable we can ignore it. */
1658 tmp = get_varinfo (t)->solution;
1660 if (set_union_with_increment (tmp, sol, 0))
1662 get_varinfo (t)->solution = tmp;
1664 sol = get_varinfo (rhs)->solution;
1665 if (!TEST_BIT (changed, t))
1667 SET_BIT (changed, t);
1675 /* Handle a non-simple (simple meaning requires no iteration),
1676 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1679 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1681 if (c->lhs.type == DEREF)
1683 if (c->rhs.type == ADDRESSOF)
1690 do_ds_constraint (c, delta);
1693 else if (c->rhs.type == DEREF)
1696 if (!(get_varinfo (c->lhs.var)->is_special_var))
1697 do_sd_constraint (graph, c, delta);
1705 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1706 solution = get_varinfo (c->rhs.var)->solution;
1707 tmp = get_varinfo (c->lhs.var)->solution;
1709 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1713 get_varinfo (c->lhs.var)->solution = tmp;
1714 if (!TEST_BIT (changed, c->lhs.var))
1716 SET_BIT (changed, c->lhs.var);
1723 /* Initialize and return a new SCC info structure. */
1725 static struct scc_info *
1726 init_scc_info (size_t size)
1728 struct scc_info *si = XNEW (struct scc_info);
1731 si->current_index = 0;
1732 si->visited = sbitmap_alloc (size);
1733 sbitmap_zero (si->visited);
1734 si->deleted = sbitmap_alloc (size);
1735 sbitmap_zero (si->deleted);
1736 si->node_mapping = XNEWVEC (unsigned int, size);
1737 si->dfs = XCNEWVEC (unsigned int, size);
1739 for (i = 0; i < size; i++)
1740 si->node_mapping[i] = i;
1742 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1746 /* Free an SCC info structure pointed to by SI */
1749 free_scc_info (struct scc_info *si)
1751 sbitmap_free (si->visited);
1752 sbitmap_free (si->deleted);
1753 free (si->node_mapping);
1755 VEC_free (unsigned, heap, si->scc_stack);
1760 /* Find indirect cycles in GRAPH that occur, using strongly connected
1761 components, and note them in the indirect cycles map.
1763 This technique comes from Ben Hardekopf and Calvin Lin,
1764 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1765 Lines of Code", submitted to PLDI 2007. */
1768 find_indirect_cycles (constraint_graph_t graph)
1771 unsigned int size = graph->size;
1772 struct scc_info *si = init_scc_info (size);
1774 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1775 if (!TEST_BIT (si->visited, i) && find (i) == i)
1776 scc_visit (graph, si, i);
1781 /* Compute a topological ordering for GRAPH, and store the result in the
1782 topo_info structure TI. */
1785 compute_topo_order (constraint_graph_t graph,
1786 struct topo_info *ti)
1789 unsigned int size = graph->size;
1791 for (i = 0; i != size; ++i)
1792 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1793 topo_visit (graph, ti, i);
1796 /* Structure used to for hash value numbering of pointer equivalence
1799 typedef struct equiv_class_label
1801 unsigned int equivalence_class;
1804 } *equiv_class_label_t;
1805 typedef const struct equiv_class_label *const_equiv_class_label_t;
1807 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1809 static htab_t pointer_equiv_class_table;
1811 /* A hashtable for mapping a bitmap of labels->location equivalence
1813 static htab_t location_equiv_class_table;
1815 /* Hash function for a equiv_class_label_t */
1818 equiv_class_label_hash (const void *p)
1820 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1821 return ecl->hashcode;
1824 /* Equality function for two equiv_class_label_t's. */
1827 equiv_class_label_eq (const void *p1, const void *p2)
1829 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1830 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1831 return bitmap_equal_p (eql1->labels, eql2->labels);
1834 /* Lookup a equivalence class in TABLE by the bitmap of LABELS it
1838 equiv_class_lookup (htab_t table, bitmap labels)
1841 struct equiv_class_label ecl;
1843 ecl.labels = labels;
1844 ecl.hashcode = bitmap_hash (labels);
1846 slot = htab_find_slot_with_hash (table, &ecl,
1847 ecl.hashcode, NO_INSERT);
1851 return ((equiv_class_label_t) *slot)->equivalence_class;
1855 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1859 equiv_class_add (htab_t table, unsigned int equivalence_class,
1863 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1865 ecl->labels = labels;
1866 ecl->equivalence_class = equivalence_class;
1867 ecl->hashcode = bitmap_hash (labels);
1869 slot = htab_find_slot_with_hash (table, ecl,
1870 ecl->hashcode, INSERT);
1871 gcc_assert (!*slot);
1872 *slot = (void *) ecl;
1875 /* Perform offline variable substitution.
1877 This is a worst case quadratic time way of identifying variables
1878 that must have equivalent points-to sets, including those caused by
1879 static cycles, and single entry subgraphs, in the constraint graph.
1881 The technique is described in "Exploiting Pointer and Location
1882 Equivalence to Optimize Pointer Analysis. In the 14th International
1883 Static Analysis Symposium (SAS), August 2007." It is known as the
1884 "HU" algorithm, and is equivalent to value numbering the collapsed
1885 constraint graph including evaluating unions.
1887 The general method of finding equivalence classes is as follows:
1888 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1889 Initialize all non-REF nodes to be direct nodes.
1890 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1892 For each constraint containing the dereference, we also do the same
1895 We then compute SCC's in the graph and unify nodes in the same SCC,
1898 For each non-collapsed node x:
1899 Visit all unvisited explicit incoming edges.
1900 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1902 Lookup the equivalence class for pts(x).
1903 If we found one, equivalence_class(x) = found class.
1904 Otherwise, equivalence_class(x) = new class, and new_class is
1905 added to the lookup table.
1907 All direct nodes with the same equivalence class can be replaced
1908 with a single representative node.
1909 All unlabeled nodes (label == 0) are not pointers and all edges
1910 involving them can be eliminated.
1911 We perform these optimizations during rewrite_constraints
1913 In addition to pointer equivalence class finding, we also perform
1914 location equivalence class finding. This is the set of variables
1915 that always appear together in points-to sets. We use this to
1916 compress the size of the points-to sets. */
1918 /* Current maximum pointer equivalence class id. */
1919 static int pointer_equiv_class;
1921 /* Current maximum location equivalence class id. */
1922 static int location_equiv_class;
1924 /* Recursive routine to find strongly connected components in GRAPH,
1925 and label it's nodes with DFS numbers. */
1928 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1932 unsigned int my_dfs;
1934 gcc_assert (si->node_mapping[n] == n);
1935 SET_BIT (si->visited, n);
1936 si->dfs[n] = si->current_index ++;
1937 my_dfs = si->dfs[n];
1939 /* Visit all the successors. */
1940 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1942 unsigned int w = si->node_mapping[i];
1944 if (TEST_BIT (si->deleted, w))
1947 if (!TEST_BIT (si->visited, w))
1948 condense_visit (graph, si, w);
1950 unsigned int t = si->node_mapping[w];
1951 unsigned int nnode = si->node_mapping[n];
1952 gcc_assert (nnode == n);
1954 if (si->dfs[t] < si->dfs[nnode])
1955 si->dfs[n] = si->dfs[t];
1959 /* Visit all the implicit predecessors. */
1960 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1962 unsigned int w = si->node_mapping[i];
1964 if (TEST_BIT (si->deleted, w))
1967 if (!TEST_BIT (si->visited, w))
1968 condense_visit (graph, si, w);
1970 unsigned int t = si->node_mapping[w];
1971 unsigned int nnode = si->node_mapping[n];
1972 gcc_assert (nnode == n);
1974 if (si->dfs[t] < si->dfs[nnode])
1975 si->dfs[n] = si->dfs[t];
1979 /* See if any components have been identified. */
1980 if (si->dfs[n] == my_dfs)
1982 while (VEC_length (unsigned, si->scc_stack) != 0
1983 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1985 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1986 si->node_mapping[w] = n;
1988 if (!TEST_BIT (graph->direct_nodes, w))
1989 RESET_BIT (graph->direct_nodes, n);
1991 /* Unify our nodes. */
1992 if (graph->preds[w])
1994 if (!graph->preds[n])
1995 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1996 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1998 if (graph->implicit_preds[w])
2000 if (!graph->implicit_preds[n])
2001 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2002 bitmap_ior_into (graph->implicit_preds[n],
2003 graph->implicit_preds[w]);
2005 if (graph->points_to[w])
2007 if (!graph->points_to[n])
2008 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2009 bitmap_ior_into (graph->points_to[n],
2010 graph->points_to[w]);
2013 SET_BIT (si->deleted, n);
2016 VEC_safe_push (unsigned, heap, si->scc_stack, n);
2019 /* Label pointer equivalences. */
2022 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2026 SET_BIT (si->visited, n);
2028 if (!graph->points_to[n])
2029 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2031 /* Label and union our incoming edges's points to sets. */
2032 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2034 unsigned int w = si->node_mapping[i];
2035 if (!TEST_BIT (si->visited, w))
2036 label_visit (graph, si, w);
2038 /* Skip unused edges */
2039 if (w == n || graph->pointer_label[w] == 0)
2042 if (graph->points_to[w])
2043 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
2045 /* Indirect nodes get fresh variables. */
2046 if (!TEST_BIT (graph->direct_nodes, n))
2047 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2049 if (!bitmap_empty_p (graph->points_to[n]))
2051 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
2052 graph->points_to[n]);
2055 label = pointer_equiv_class++;
2056 equiv_class_add (pointer_equiv_class_table,
2057 label, graph->points_to[n]);
2059 graph->pointer_label[n] = label;
2063 /* Perform offline variable substitution, discovering equivalence
2064 classes, and eliminating non-pointer variables. */
2066 static struct scc_info *
2067 perform_var_substitution (constraint_graph_t graph)
2070 unsigned int size = graph->size;
2071 struct scc_info *si = init_scc_info (size);
2073 bitmap_obstack_initialize (&iteration_obstack);
2074 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2075 equiv_class_label_eq, free);
2076 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2077 equiv_class_label_eq, free);
2078 pointer_equiv_class = 1;
2079 location_equiv_class = 1;
2081 /* Condense the nodes, which means to find SCC's, count incoming
2082 predecessors, and unite nodes in SCC's. */
2083 for (i = 0; i < FIRST_REF_NODE; i++)
2084 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2085 condense_visit (graph, si, si->node_mapping[i]);
2087 sbitmap_zero (si->visited);
2088 /* Actually the label the nodes for pointer equivalences */
2089 for (i = 0; i < FIRST_REF_NODE; i++)
2090 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2091 label_visit (graph, si, si->node_mapping[i]);
2093 /* Calculate location equivalence labels. */
2094 for (i = 0; i < FIRST_REF_NODE; i++)
2101 if (!graph->pointed_by[i])
2103 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2105 /* Translate the pointed-by mapping for pointer equivalence
2107 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2109 bitmap_set_bit (pointed_by,
2110 graph->pointer_label[si->node_mapping[j]]);
2112 /* The original pointed_by is now dead. */
2113 BITMAP_FREE (graph->pointed_by[i]);
2115 /* Look up the location equivalence label if one exists, or make
2117 label = equiv_class_lookup (location_equiv_class_table,
2121 label = location_equiv_class++;
2122 equiv_class_add (location_equiv_class_table,
2127 if (dump_file && (dump_flags & TDF_DETAILS))
2128 fprintf (dump_file, "Found location equivalence for node %s\n",
2129 get_varinfo (i)->name);
2130 BITMAP_FREE (pointed_by);
2132 graph->loc_label[i] = label;
2136 if (dump_file && (dump_flags & TDF_DETAILS))
2137 for (i = 0; i < FIRST_REF_NODE; i++)
2139 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2141 "Equivalence classes for %s node id %d:%s are pointer: %d"
2143 direct_node ? "Direct node" : "Indirect node", i,
2144 get_varinfo (i)->name,
2145 graph->pointer_label[si->node_mapping[i]],
2146 graph->loc_label[si->node_mapping[i]]);
2149 /* Quickly eliminate our non-pointer variables. */
2151 for (i = 0; i < FIRST_REF_NODE; i++)
2153 unsigned int node = si->node_mapping[i];
2155 if (graph->pointer_label[node] == 0)
2157 if (dump_file && (dump_flags & TDF_DETAILS))
2159 "%s is a non-pointer variable, eliminating edges.\n",
2160 get_varinfo (node)->name);
2161 stats.nonpointer_vars++;
2162 clear_edges_for_node (graph, node);
2169 /* Free information that was only necessary for variable
2173 free_var_substitution_info (struct scc_info *si)
2176 free (graph->pointer_label);
2177 free (graph->loc_label);
2178 free (graph->pointed_by);
2179 free (graph->points_to);
2180 free (graph->eq_rep);
2181 sbitmap_free (graph->direct_nodes);
2182 htab_delete (pointer_equiv_class_table);
2183 htab_delete (location_equiv_class_table);
2184 bitmap_obstack_release (&iteration_obstack);
2187 /* Return an existing node that is equivalent to NODE, which has
2188 equivalence class LABEL, if one exists. Return NODE otherwise. */
2191 find_equivalent_node (constraint_graph_t graph,
2192 unsigned int node, unsigned int label)
2194 /* If the address version of this variable is unused, we can
2195 substitute it for anything else with the same label.
2196 Otherwise, we know the pointers are equivalent, but not the
2197 locations, and we can unite them later. */
2199 if (!bitmap_bit_p (graph->address_taken, node))
2201 gcc_assert (label < graph->size);
2203 if (graph->eq_rep[label] != -1)
2205 /* Unify the two variables since we know they are equivalent. */
2206 if (unite (graph->eq_rep[label], node))
2207 unify_nodes (graph, graph->eq_rep[label], node, false);
2208 return graph->eq_rep[label];
2212 graph->eq_rep[label] = node;
2213 graph->pe_rep[label] = node;
2218 gcc_assert (label < graph->size);
2219 graph->pe[node] = label;
2220 if (graph->pe_rep[label] == -1)
2221 graph->pe_rep[label] = node;
2227 /* Unite pointer equivalent but not location equivalent nodes in
2228 GRAPH. This may only be performed once variable substitution is
2232 unite_pointer_equivalences (constraint_graph_t graph)
2236 /* Go through the pointer equivalences and unite them to their
2237 representative, if they aren't already. */
2238 for (i = 0; i < FIRST_REF_NODE; i++)
2240 unsigned int label = graph->pe[i];
2243 int label_rep = graph->pe_rep[label];
2245 if (label_rep == -1)
2248 label_rep = find (label_rep);
2249 if (label_rep >= 0 && unite (label_rep, find (i)))
2250 unify_nodes (graph, label_rep, i, false);
2255 /* Move complex constraints to the GRAPH nodes they belong to. */
2258 move_complex_constraints (constraint_graph_t graph)
2263 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2267 struct constraint_expr lhs = c->lhs;
2268 struct constraint_expr rhs = c->rhs;
2270 if (lhs.type == DEREF)
2272 insert_into_complex (graph, lhs.var, c);
2274 else if (rhs.type == DEREF)
2276 if (!(get_varinfo (lhs.var)->is_special_var))
2277 insert_into_complex (graph, rhs.var, c);
2279 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2280 && (lhs.offset != 0 || rhs.offset != 0))
2282 insert_into_complex (graph, rhs.var, c);
2289 /* Optimize and rewrite complex constraints while performing
2290 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2291 result of perform_variable_substitution. */
2294 rewrite_constraints (constraint_graph_t graph,
2295 struct scc_info *si)
2301 for (j = 0; j < graph->size; j++)
2302 gcc_assert (find (j) == j);
2304 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2306 struct constraint_expr lhs = c->lhs;
2307 struct constraint_expr rhs = c->rhs;
2308 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2309 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2310 unsigned int lhsnode, rhsnode;
2311 unsigned int lhslabel, rhslabel;
2313 lhsnode = si->node_mapping[lhsvar];
2314 rhsnode = si->node_mapping[rhsvar];
2315 lhslabel = graph->pointer_label[lhsnode];
2316 rhslabel = graph->pointer_label[rhsnode];
2318 /* See if it is really a non-pointer variable, and if so, ignore
2322 if (dump_file && (dump_flags & TDF_DETAILS))
2325 fprintf (dump_file, "%s is a non-pointer variable,"
2326 "ignoring constraint:",
2327 get_varinfo (lhs.var)->name);
2328 dump_constraint (dump_file, c);
2330 VEC_replace (constraint_t, constraints, i, NULL);
2336 if (dump_file && (dump_flags & TDF_DETAILS))
2339 fprintf (dump_file, "%s is a non-pointer variable,"
2340 "ignoring constraint:",
2341 get_varinfo (rhs.var)->name);
2342 dump_constraint (dump_file, c);
2344 VEC_replace (constraint_t, constraints, i, NULL);
2348 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2349 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2350 c->lhs.var = lhsvar;
2351 c->rhs.var = rhsvar;
2356 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2357 part of an SCC, false otherwise. */
2360 eliminate_indirect_cycles (unsigned int node)
2362 if (graph->indirect_cycles[node] != -1
2363 && !bitmap_empty_p (get_varinfo (node)->solution))
2366 VEC(unsigned,heap) *queue = NULL;
2368 unsigned int to = find (graph->indirect_cycles[node]);
2371 /* We can't touch the solution set and call unify_nodes
2372 at the same time, because unify_nodes is going to do
2373 bitmap unions into it. */
2375 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2377 if (find (i) == i && i != to)
2380 VEC_safe_push (unsigned, heap, queue, i);
2385 VEC_iterate (unsigned, queue, queuepos, i);
2388 unify_nodes (graph, to, i, true);
2390 VEC_free (unsigned, heap, queue);
2396 /* Solve the constraint graph GRAPH using our worklist solver.
2397 This is based on the PW* family of solvers from the "Efficient Field
2398 Sensitive Pointer Analysis for C" paper.
2399 It works by iterating over all the graph nodes, processing the complex
2400 constraints and propagating the copy constraints, until everything stops
2401 changed. This corresponds to steps 6-8 in the solving list given above. */
2404 solve_graph (constraint_graph_t graph)
2406 unsigned int size = graph->size;
2411 changed = sbitmap_alloc (size);
2412 sbitmap_zero (changed);
2414 /* Mark all initial non-collapsed nodes as changed. */
2415 for (i = 0; i < size; i++)
2417 varinfo_t ivi = get_varinfo (i);
2418 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2419 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2420 || VEC_length (constraint_t, graph->complex[i]) > 0))
2422 SET_BIT (changed, i);
2427 /* Allocate a bitmap to be used to store the changed bits. */
2428 pts = BITMAP_ALLOC (&pta_obstack);
2430 while (changed_count > 0)
2433 struct topo_info *ti = init_topo_info ();
2436 bitmap_obstack_initialize (&iteration_obstack);
2438 compute_topo_order (graph, ti);
2440 while (VEC_length (unsigned, ti->topo_order) != 0)
2443 i = VEC_pop (unsigned, ti->topo_order);
2445 /* If this variable is not a representative, skip it. */
2449 /* In certain indirect cycle cases, we may merge this
2450 variable to another. */
2451 if (eliminate_indirect_cycles (i) && find (i) != i)
2454 /* If the node has changed, we need to process the
2455 complex constraints and outgoing edges again. */
2456 if (TEST_BIT (changed, i))
2461 VEC(constraint_t,heap) *complex = graph->complex[i];
2462 bool solution_empty;
2464 RESET_BIT (changed, i);
2467 /* Compute the changed set of solution bits. */
2468 bitmap_and_compl (pts, get_varinfo (i)->solution,
2469 get_varinfo (i)->oldsolution);
2471 if (bitmap_empty_p (pts))
2474 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2476 solution = get_varinfo (i)->solution;
2477 solution_empty = bitmap_empty_p (solution);
2479 /* Process the complex constraints */
2480 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2482 /* XXX: This is going to unsort the constraints in
2483 some cases, which will occasionally add duplicate
2484 constraints during unification. This does not
2485 affect correctness. */
2486 c->lhs.var = find (c->lhs.var);
2487 c->rhs.var = find (c->rhs.var);
2489 /* The only complex constraint that can change our
2490 solution to non-empty, given an empty solution,
2491 is a constraint where the lhs side is receiving
2492 some set from elsewhere. */
2493 if (!solution_empty || c->lhs.type != DEREF)
2494 do_complex_constraint (graph, c, pts);
2497 solution_empty = bitmap_empty_p (solution);
2500 /* Do not propagate the ESCAPED/CALLUSED solutions. */
2502 && i != callused_id)
2506 /* Propagate solution to all successors. */
2507 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2513 unsigned int to = find (j);
2514 tmp = get_varinfo (to)->solution;
2517 /* Don't try to propagate to ourselves. */
2521 flag = set_union_with_increment (tmp, pts, 0);
2525 get_varinfo (to)->solution = tmp;
2526 if (!TEST_BIT (changed, to))
2528 SET_BIT (changed, to);
2536 free_topo_info (ti);
2537 bitmap_obstack_release (&iteration_obstack);
2541 sbitmap_free (changed);
2542 bitmap_obstack_release (&oldpta_obstack);
2545 /* Map from trees to variable infos. */
2546 static struct pointer_map_t *vi_for_tree;
2549 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2552 insert_vi_for_tree (tree t, varinfo_t vi)
2554 void **slot = pointer_map_insert (vi_for_tree, t);
2556 gcc_assert (*slot == NULL);
2560 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2561 exist in the map, return NULL, otherwise, return the varinfo we found. */
2564 lookup_vi_for_tree (tree t)
2566 void **slot = pointer_map_contains (vi_for_tree, t);
2570 return (varinfo_t) *slot;
2573 /* Return a printable name for DECL */
2576 alias_get_name (tree decl)
2578 const char *res = get_name (decl);
2580 int num_printed = 0;
2589 if (TREE_CODE (decl) == SSA_NAME)
2591 num_printed = asprintf (&temp, "%s_%u",
2592 alias_get_name (SSA_NAME_VAR (decl)),
2593 SSA_NAME_VERSION (decl));
2595 else if (DECL_P (decl))
2597 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2599 if (num_printed > 0)
2601 res = ggc_strdup (temp);
2607 /* Find the variable id for tree T in the map.
2608 If T doesn't exist in the map, create an entry for it and return it. */
2611 get_vi_for_tree (tree t)
2613 void **slot = pointer_map_contains (vi_for_tree, t);
2615 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2617 return (varinfo_t) *slot;
2620 /* Get a constraint expression for a new temporary variable. */
2622 static struct constraint_expr
2623 get_constraint_exp_for_temp (tree t)
2625 struct constraint_expr cexpr;
2627 gcc_assert (SSA_VAR_P (t));
2629 cexpr.type = SCALAR;
2630 cexpr.var = get_vi_for_tree (t)->id;
2636 /* Get a constraint expression vector from an SSA_VAR_P node.
2637 If address_p is true, the result will be taken its address of. */
2640 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2642 struct constraint_expr cexpr;
2645 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2646 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2648 /* For parameters, get at the points-to set for the actual parm
2650 if (TREE_CODE (t) == SSA_NAME
2651 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2652 && SSA_NAME_IS_DEFAULT_DEF (t))
2654 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2658 vi = get_vi_for_tree (t);
2660 cexpr.type = SCALAR;
2662 /* If we determine the result is "anything", and we know this is readonly,
2663 say it points to readonly memory instead. */
2664 if (cexpr.var == anything_id && TREE_READONLY (t))
2667 cexpr.type = ADDRESSOF;
2668 cexpr.var = readonly_id;
2671 /* If we are not taking the address of the constraint expr, add all
2672 sub-fiels of the variable as well. */
2675 for (; vi; vi = vi->next)
2678 VEC_safe_push (ce_s, heap, *results, &cexpr);
2683 VEC_safe_push (ce_s, heap, *results, &cexpr);
2686 /* Process constraint T, performing various simplifications and then
2687 adding it to our list of overall constraints. */
2690 process_constraint (constraint_t t)
2692 struct constraint_expr rhs = t->rhs;
2693 struct constraint_expr lhs = t->lhs;
2695 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2696 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2698 /* ANYTHING == ANYTHING is pointless. */
2699 if (lhs.var == anything_id && rhs.var == anything_id)
2702 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
2703 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2708 process_constraint (t);
2710 /* This can happen in our IR with things like n->a = *p */
2711 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2713 /* Split into tmp = *rhs, *lhs = tmp */
2714 tree rhsdecl = get_varinfo (rhs.var)->decl;
2715 tree pointertype = TREE_TYPE (rhsdecl);
2716 tree pointedtotype = TREE_TYPE (pointertype);
2717 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2718 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2720 process_constraint (new_constraint (tmplhs, rhs));
2721 process_constraint (new_constraint (lhs, tmplhs));
2723 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2725 /* Split into tmp = &rhs, *lhs = tmp */
2726 tree rhsdecl = get_varinfo (rhs.var)->decl;
2727 tree pointertype = TREE_TYPE (rhsdecl);
2728 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2729 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2731 process_constraint (new_constraint (tmplhs, rhs));
2732 process_constraint (new_constraint (lhs, tmplhs));
2736 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2737 VEC_safe_push (constraint_t, heap, constraints, t);
2741 /* Return true if T is a variable of a type that could contain
2745 could_have_pointers (tree t)
2747 tree type = TREE_TYPE (t);
2749 if (POINTER_TYPE_P (type)
2750 || AGGREGATE_TYPE_P (type))
2756 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2759 static HOST_WIDE_INT
2760 bitpos_of_field (const tree fdecl)
2763 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2764 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2767 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
2768 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2772 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2773 resulting constraint expressions in *RESULTS. */
2776 get_constraint_for_ptr_offset (tree ptr, tree offset,
2777 VEC (ce_s, heap) **results)
2779 struct constraint_expr *c;
2781 unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
2783 /* If we do not do field-sensitive PTA adding offsets to pointers
2784 does not change the points-to solution. */
2785 if (!use_field_sensitive)
2787 get_constraint_for (ptr, results);
2791 /* If the offset is not a non-negative integer constant that fits
2792 in a HOST_WIDE_INT, we have to fall back to a conservative
2793 solution which includes all sub-fields of all pointed-to
2795 ??? As we do not have the ability to express this, fall back
2797 if (!host_integerp (offset, 1))
2799 struct constraint_expr temp;
2800 temp.var = anything_id;
2803 VEC_safe_push (ce_s, heap, *results, &temp);
2807 /* Make sure the bit-offset also fits. */
2808 rhsunitoffset = TREE_INT_CST_LOW (offset);
2809 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
2810 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
2812 struct constraint_expr temp;
2813 temp.var = anything_id;
2816 VEC_safe_push (ce_s, heap, *results, &temp);
2820 get_constraint_for (ptr, results);
2824 /* As we are eventually appending to the solution do not use
2825 VEC_iterate here. */
2826 n = VEC_length (ce_s, *results);
2827 for (j = 0; j < n; j++)
2830 c = VEC_index (ce_s, *results, j);
2831 curr = get_varinfo (c->var);
2833 if (c->type == ADDRESSOF
2834 && !curr->is_full_var)
2836 varinfo_t temp, curr = get_varinfo (c->var);
2838 /* Search the sub-field which overlaps with the
2839 pointed-to offset. As we deal with positive offsets
2840 only, we can start the search from the current variable. */
2841 temp = first_vi_for_offset (curr, curr->offset + rhsoffset);
2843 /* If the result is outside of the variable we have to provide
2844 a conservative result, as the variable is still reachable
2845 from the resulting pointer (even though it technically
2846 cannot point to anything). The last sub-field is such
2847 a conservative result.
2848 ??? If we always had a sub-field for &object + 1 then
2849 we could represent this in a more precise way. */
2853 while (temp->next != NULL)
2858 /* If the found variable is not exactly at the pointed to
2859 result, we have to include the next variable in the
2860 solution as well. Otherwise two increments by offset / 2
2861 do not result in the same or a conservative superset
2863 if (temp->offset != curr->offset + rhsoffset
2864 && temp->next != NULL)
2866 struct constraint_expr c2;
2867 c2.var = temp->next->id;
2868 c2.type = ADDRESSOF;
2870 VEC_safe_push (ce_s, heap, *results, &c2);
2875 else if (c->type == ADDRESSOF
2876 /* If this varinfo represents a full variable just use it. */
2877 && curr->is_full_var)
2880 c->offset = rhsoffset;
2885 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
2886 If address_p is true the result will be taken its address of. */
2889 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
2893 HOST_WIDE_INT bitsize = -1;
2894 HOST_WIDE_INT bitmaxsize = -1;
2895 HOST_WIDE_INT bitpos;
2897 struct constraint_expr *result;
2899 /* Some people like to do cute things like take the address of
2902 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2903 forzero = TREE_OPERAND (forzero, 0);
2905 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2907 struct constraint_expr temp;
2910 temp.var = integer_id;
2912 VEC_safe_push (ce_s, heap, *results, &temp);
2916 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2918 /* Pretend to take the address of the base, we'll take care of
2919 adding the required subset of sub-fields below. */
2920 get_constraint_for_1 (t, results, true);
2921 gcc_assert (VEC_length (ce_s, *results) == 1);
2922 result = VEC_last (ce_s, *results);
2924 /* This can also happen due to weird offsetof type macros. */
2925 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2926 result->type = SCALAR;
2928 if (result->type == SCALAR
2929 && get_varinfo (result->var)->is_full_var)
2930 /* For single-field vars do not bother about the offset. */
2932 else if (result->type == SCALAR)
2934 /* In languages like C, you can access one past the end of an
2935 array. You aren't allowed to dereference it, so we can
2936 ignore this constraint. When we handle pointer subtraction,
2937 we may have to do something cute here. */
2939 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
2942 /* It's also not true that the constraint will actually start at the
2943 right offset, it may start in some padding. We only care about
2944 setting the constraint to the first actual field it touches, so
2946 struct constraint_expr cexpr = *result;
2948 VEC_pop (ce_s, *results);
2950 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
2952 if (ranges_overlap_p (curr->offset, curr->size,
2953 bitpos, bitmaxsize))
2955 cexpr.var = curr->id;
2956 VEC_safe_push (ce_s, heap, *results, &cexpr);
2961 /* If we are going to take the address of this field then
2962 to be able to compute reachability correctly add at least
2963 the last field of the variable. */
2965 && VEC_length (ce_s, *results) == 0)
2967 curr = get_varinfo (cexpr.var);
2968 while (curr->next != NULL)
2970 cexpr.var = curr->id;
2971 VEC_safe_push (ce_s, heap, *results, &cexpr);
2974 /* Assert that we found *some* field there. The user couldn't be
2975 accessing *only* padding. */
2976 /* Still the user could access one past the end of an array
2977 embedded in a struct resulting in accessing *only* padding. */
2978 gcc_assert (VEC_length (ce_s, *results) >= 1
2979 || ref_contains_array_ref (orig_t));
2981 else if (bitmaxsize == 0)
2983 if (dump_file && (dump_flags & TDF_DETAILS))
2984 fprintf (dump_file, "Access to zero-sized part of variable,"
2988 if (dump_file && (dump_flags & TDF_DETAILS))
2989 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2991 else if (bitmaxsize == -1)
2993 /* We can't handle DEREF constraints with unknown size, we'll
2994 get the wrong answer. Punt and return anything. */
2995 result->var = anything_id;
2999 result->offset = bitpos;
3003 /* Dereference the constraint expression CONS, and return the result.
3004 DEREF (ADDRESSOF) = SCALAR
3005 DEREF (SCALAR) = DEREF
3006 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3007 This is needed so that we can handle dereferencing DEREF constraints. */
3010 do_deref (VEC (ce_s, heap) **constraints)
3012 struct constraint_expr *c;
3015 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
3017 if (c->type == SCALAR)
3019 else if (c->type == ADDRESSOF)
3021 else if (c->type == DEREF)
3023 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
3024 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
3025 process_constraint (new_constraint (tmplhs, *c));
3026 c->var = tmplhs.var;
3033 /* Given a tree T, return the constraint expression for it. */
3036 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
3038 struct constraint_expr temp;
3040 /* x = integer is all glommed to a single variable, which doesn't
3041 point to anything by itself. That is, of course, unless it is an
3042 integer constant being treated as a pointer, in which case, we
3043 will return that this is really the addressof anything. This
3044 happens below, since it will fall into the default case. The only
3045 case we know something about an integer treated like a pointer is
3046 when it is the NULL pointer, and then we just say it points to
3048 if (TREE_CODE (t) == INTEGER_CST
3049 && integer_zerop (t))
3051 temp.var = nothing_id;
3052 temp.type = ADDRESSOF;
3054 VEC_safe_push (ce_s, heap, *results, &temp);
3058 /* String constants are read-only. */
3059 if (TREE_CODE (t) == STRING_CST)
3061 temp.var = readonly_id;
3064 VEC_safe_push (ce_s, heap, *results, &temp);
3068 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3070 case tcc_expression:
3072 switch (TREE_CODE (t))
3076 struct constraint_expr *c;
3078 tree exp = TREE_OPERAND (t, 0);
3080 get_constraint_for_1 (exp, results, true);
3082 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
3084 if (c->type == DEREF)
3087 c->type = ADDRESSOF;
3098 switch (TREE_CODE (t))
3102 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3107 case ARRAY_RANGE_REF:
3109 get_constraint_for_component_ref (t, results, address_p);
3115 case tcc_exceptional:
3117 switch (TREE_CODE (t))
3121 get_constraint_for_ssa_var (t, results, address_p);
3128 case tcc_declaration:
3130 get_constraint_for_ssa_var (t, results, address_p);
3136 /* The default fallback is a constraint from anything. */
3137 temp.type = ADDRESSOF;
3138 temp.var = anything_id;
3140 VEC_safe_push (ce_s, heap, *results, &temp);
3143 /* Given a gimple tree T, return the constraint expression vector for it. */
3146 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3148 gcc_assert (VEC_length (ce_s, *results) == 0);
3150 get_constraint_for_1 (t, results, false);
3153 /* Handle the structure copy case where we have a simple structure copy
3154 between LHS and RHS that is of SIZE (in bits)
3156 For each field of the lhs variable (lhsfield)
3157 For each field of the rhs variable at lhsfield.offset (rhsfield)
3158 add the constraint lhsfield = rhsfield
3160 If we fail due to some kind of type unsafety or other thing we
3161 can't handle, return false. We expect the caller to collapse the
3162 variable in that case. */
3165 do_simple_structure_copy (const struct constraint_expr lhs,
3166 const struct constraint_expr rhs,
3167 const unsigned HOST_WIDE_INT size)
3169 varinfo_t p = get_varinfo (lhs.var);
3170 unsigned HOST_WIDE_INT pstart, last;
3172 last = p->offset + size;
3173 for (; p && p->offset < last; p = p->next)
3176 struct constraint_expr templhs = lhs;
3177 struct constraint_expr temprhs = rhs;
3178 unsigned HOST_WIDE_INT fieldoffset;
3180 templhs.var = p->id;
3181 q = get_varinfo (temprhs.var);
3182 fieldoffset = p->offset - pstart;
3183 q = first_vi_for_offset (q, q->offset + fieldoffset);
3186 temprhs.var = q->id;
3187 process_constraint (new_constraint (templhs, temprhs));
3193 /* Handle the structure copy case where we have a structure copy between a
3194 aggregate on the LHS and a dereference of a pointer on the RHS
3195 that is of SIZE (in bits)
3197 For each field of the lhs variable (lhsfield)
3198 rhs.offset = lhsfield->offset
3199 add the constraint lhsfield = rhs
3203 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3204 const struct constraint_expr rhs,
3205 const unsigned HOST_WIDE_INT size)
3207 varinfo_t p = get_varinfo (lhs.var);
3208 unsigned HOST_WIDE_INT pstart,last;
3210 last = p->offset + size;
3212 for (; p && p->offset < last; p = p->next)
3215 struct constraint_expr templhs = lhs;
3216 struct constraint_expr temprhs = rhs;
3217 unsigned HOST_WIDE_INT fieldoffset;
3220 if (templhs.type == SCALAR)
3221 templhs.var = p->id;
3223 templhs.offset = p->offset;
3225 q = get_varinfo (temprhs.var);
3226 fieldoffset = p->offset - pstart;
3227 temprhs.offset += fieldoffset;
3228 process_constraint (new_constraint (templhs, temprhs));
3232 /* Handle the structure copy case where we have a structure copy
3233 between an aggregate on the RHS and a dereference of a pointer on
3234 the LHS that is of SIZE (in bits)
3236 For each field of the rhs variable (rhsfield)
3237 lhs.offset = rhsfield->offset
3238 add the constraint lhs = rhsfield
3242 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3243 const struct constraint_expr rhs,
3244 const unsigned HOST_WIDE_INT size)
3246 varinfo_t p = get_varinfo (rhs.var);
3247 unsigned HOST_WIDE_INT pstart,last;
3249 last = p->offset + size;
3251 for (; p && p->offset < last; p = p->next)
3254 struct constraint_expr templhs = lhs;
3255 struct constraint_expr temprhs = rhs;
3256 unsigned HOST_WIDE_INT fieldoffset;
3259 if (temprhs.type == SCALAR)
3260 temprhs.var = p->id;
3262 temprhs.offset = p->offset;
3264 q = get_varinfo (templhs.var);
3265 fieldoffset = p->offset - pstart;
3266 templhs.offset += fieldoffset;
3267 process_constraint (new_constraint (templhs, temprhs));
3271 /* Sometimes, frontends like to give us bad type information. This
3272 function will collapse all the fields from VAR to the end of VAR,
3273 into VAR, so that we treat those fields as a single variable.
3274 We return the variable they were collapsed into. */
3277 collapse_rest_of_var (unsigned int var)
3279 varinfo_t currvar = get_varinfo (var);
3282 for (field = currvar->next; field; field = field->next)
3285 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3286 field->name, currvar->name);
3288 gcc_assert (field->collapsed_to == 0);
3289 field->collapsed_to = currvar->id;
3292 currvar->next = NULL;
3293 currvar->size = currvar->fullsize - currvar->offset;
3298 /* Handle aggregate copies by expanding into copies of the respective
3299 fields of the structures. */
3302 do_structure_copy (tree lhsop, tree rhsop)
3304 struct constraint_expr lhs, rhs, tmp;
3305 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3307 unsigned HOST_WIDE_INT lhssize;
3308 unsigned HOST_WIDE_INT rhssize;
3310 /* Pretend we are taking the address of the constraint exprs.
3311 We deal with walking the sub-fields ourselves. */
3312 get_constraint_for_1 (lhsop, &lhsc, true);
3313 get_constraint_for_1 (rhsop, &rhsc, true);
3314 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3315 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3316 lhs = *(VEC_last (ce_s, lhsc));
3317 rhs = *(VEC_last (ce_s, rhsc));
3319 VEC_free (ce_s, heap, lhsc);
3320 VEC_free (ce_s, heap, rhsc);
3322 /* If we have special var = x, swap it around. */
3323 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3330 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3331 possible it's something we could handle. However, most cases falling
3332 into this are dealing with transparent unions, which are slightly
3334 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3336 rhs.type = ADDRESSOF;
3337 rhs.var = anything_id;
3340 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3341 that special var. */
3342 if (rhs.var <= integer_id)
3344 for (p = get_varinfo (lhs.var); p; p = p->next)
3346 struct constraint_expr templhs = lhs;
3347 struct constraint_expr temprhs = rhs;
3349 if (templhs.type == SCALAR )
3350 templhs.var = p->id;
3352 templhs.offset += p->offset;
3353 process_constraint (new_constraint (templhs, temprhs));
3358 tree rhstype = TREE_TYPE (rhsop);
3359 tree lhstype = TREE_TYPE (lhsop);
3363 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3364 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3366 /* If we have a variably sized types on the rhs or lhs, and a deref
3367 constraint, add the constraint, lhsconstraint = &ANYTHING.
3368 This is conservatively correct because either the lhs is an unknown
3369 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3370 constraint, and every variable it can point to must be unknown sized
3371 anyway, so we don't need to worry about fields at all. */
3372 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3373 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3375 rhs.var = anything_id;
3376 rhs.type = ADDRESSOF;
3378 process_constraint (new_constraint (lhs, rhs));
3382 /* The size only really matters insofar as we don't set more or less of
3383 the variable. If we hit an unknown size var, the size should be the
3384 whole darn thing. */
3385 if (get_varinfo (rhs.var)->is_unknown_size_var)
3388 rhssize = TREE_INT_CST_LOW (rhstypesize);
3390 if (get_varinfo (lhs.var)->is_unknown_size_var)
3393 lhssize = TREE_INT_CST_LOW (lhstypesize);
3396 if (rhs.type == SCALAR && lhs.type == SCALAR)
3398 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
3400 lhs.var = collapse_rest_of_var (lhs.var);
3401 rhs.var = collapse_rest_of_var (rhs.var);
3406 process_constraint (new_constraint (lhs, rhs));
3409 else if (lhs.type != DEREF && rhs.type == DEREF)
3410 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3411 else if (lhs.type == DEREF && rhs.type != DEREF)
3412 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3415 tree pointedtotype = lhstype;
3418 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3419 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3420 do_structure_copy (tmpvar, rhsop);
3421 do_structure_copy (lhsop, tmpvar);
3426 /* Create a constraint ID = OP. */
3429 make_constraint_to (unsigned id, tree op)
3431 VEC(ce_s, heap) *rhsc = NULL;
3432 struct constraint_expr *c;
3433 struct constraint_expr includes;
3437 includes.offset = 0;
3438 includes.type = SCALAR;
3440 get_constraint_for (op, &rhsc);
3441 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3442 process_constraint (new_constraint (includes, *c));
3443 VEC_free (ce_s, heap, rhsc);
3446 /* Make constraints necessary to make OP escape. */
3449 make_escape_constraint (tree op)
3451 make_constraint_to (escaped_id, op);
3454 /* For non-IPA mode, generate constraints necessary for a call on the
3458 handle_rhs_call (gimple stmt)
3462 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3464 tree arg = gimple_call_arg (stmt, i);
3466 /* Find those pointers being passed, and make sure they end up
3467 pointing to anything. */
3468 if (could_have_pointers (arg))
3469 make_escape_constraint (arg);
3472 /* The static chain escapes as well. */
3473 if (gimple_call_chain (stmt))
3474 make_escape_constraint (gimple_call_chain (stmt));
3477 /* For non-IPA mode, generate constraints necessary for a call
3478 that returns a pointer and assigns it to LHS. This simply makes
3479 the LHS point to global and escaped variables. */
3482 handle_lhs_call (tree lhs, int flags)
3484 VEC(ce_s, heap) *lhsc = NULL;
3485 struct constraint_expr rhsc;
3487 struct constraint_expr *lhsp;
3489 get_constraint_for (lhs, &lhsc);
3491 if (flags & ECF_MALLOC)
3493 tree heapvar = heapvar_lookup (lhs);
3496 if (heapvar == NULL)
3498 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
3499 DECL_EXTERNAL (heapvar) = 1;
3500 get_var_ann (heapvar)->is_heapvar = 1;
3501 if (gimple_referenced_vars (cfun))
3502 add_referenced_var (heapvar);
3503 heapvar_insert (lhs, heapvar);
3506 rhsc.var = create_variable_info_for (heapvar,
3507 alias_get_name (heapvar));
3508 vi = get_varinfo (rhsc.var);
3509 vi->is_artificial_var = 1;
3510 vi->is_heap_var = 1;
3511 rhsc.type = ADDRESSOF;
3516 rhsc.var = escaped_id;
3518 rhsc.type = ADDRESSOF;
3520 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3521 process_constraint (new_constraint (*lhsp, rhsc));
3522 VEC_free (ce_s, heap, lhsc);
3525 /* For non-IPA mode, generate constraints necessary for a call of a
3526 const function that returns a pointer in the statement STMT. */
3529 handle_const_call (gimple stmt)
3531 tree lhs = gimple_call_lhs (stmt);
3532 VEC(ce_s, heap) *lhsc = NULL;
3533 struct constraint_expr rhsc;
3535 struct constraint_expr *lhsp;
3537 struct constraint_expr tmpc;
3539 get_constraint_for (lhs, &lhsc);
3541 /* If this is a nested function then it can return anything. */
3542 if (gimple_call_chain (stmt))
3544 rhsc.var = anything_id;
3546 rhsc.type = ADDRESSOF;
3547 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3548 process_constraint (new_constraint (*lhsp, rhsc));
3549 VEC_free (ce_s, heap, lhsc);
3553 /* We always use a temporary here, otherwise we end up with a quadratic
3554 amount of constraints for
3555 large_struct = const_call (large_struct);
3556 in field-sensitive PTA. */
3557 tmpvar = create_tmp_var_raw (ptr_type_node, "consttmp");
3558 tmpc = get_constraint_exp_for_temp (tmpvar);
3560 /* May return addresses of globals. */
3561 rhsc.var = nonlocal_id;
3563 rhsc.type = ADDRESSOF;
3564 process_constraint (new_constraint (tmpc, rhsc));
3566 /* May return arguments. */
3567 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3569 tree arg = gimple_call_arg (stmt, k);
3571 if (could_have_pointers (arg))
3573 VEC(ce_s, heap) *argc = NULL;
3574 struct constraint_expr *argp;
3577 get_constraint_for (arg, &argc);
3578 for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
3579 process_constraint (new_constraint (tmpc, *argp));
3580 VEC_free (ce_s, heap, argc);
3584 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3585 process_constraint (new_constraint (*lhsp, tmpc));
3587 VEC_free (ce_s, heap, lhsc);
3590 /* For non-IPA mode, generate constraints necessary for a call to a
3591 pure function in statement STMT. */
3594 handle_pure_call (gimple stmt)
3598 /* Memory reached from pointer arguments is call-used. */
3599 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3601 tree arg = gimple_call_arg (stmt, i);
3603 if (could_have_pointers (arg))
3604 make_constraint_to (callused_id, arg);
3607 /* The static chain is used as well. */
3608 if (gimple_call_chain (stmt))
3609 make_constraint_to (callused_id, gimple_call_chain (stmt));
3611 /* If the call returns a pointer it may point to reachable memory
3612 from the arguments. Not so for malloc functions though. */
3613 if (gimple_call_lhs (stmt)
3614 && could_have_pointers (gimple_call_lhs (stmt))
3615 && !(gimple_call_flags (stmt) & ECF_MALLOC))
3617 tree lhs = gimple_call_lhs (stmt);
3618 VEC(ce_s, heap) *lhsc = NULL;
3619 struct constraint_expr rhsc;
3620 struct constraint_expr *lhsp;
3623 get_constraint_for (lhs, &lhsc);
3625 /* If this is a nested function then it can return anything. */
3626 if (gimple_call_chain (stmt))
3628 rhsc.var = anything_id;
3630 rhsc.type = ADDRESSOF;
3631 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3632 process_constraint (new_constraint (*lhsp, rhsc));
3633 VEC_free (ce_s, heap, lhsc);
3637 /* Else just add the call-used memory here. Escaped variables
3638 and globals will be dealt with in handle_lhs_call. */
3639 rhsc.var = callused_id;
3641 rhsc.type = ADDRESSOF;
3642 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3643 process_constraint (new_constraint (*lhsp, rhsc));
3644 VEC_free (ce_s, heap, lhsc);
3648 /* Walk statement T setting up aliasing constraints according to the
3649 references found in T. This function is the main part of the
3650 constraint builder. AI points to auxiliary alias information used
3651 when building alias sets and computing alias grouping heuristics. */
3654 find_func_aliases (gimple origt)
3657 VEC(ce_s, heap) *lhsc = NULL;
3658 VEC(ce_s, heap) *rhsc = NULL;
3659 struct constraint_expr *c;
3660 enum escape_type stmt_escape_type;
3662 /* Now build constraints expressions. */
3663 if (gimple_code (t) == GIMPLE_PHI)
3665 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
3667 /* Only care about pointers and structures containing
3669 if (could_have_pointers (gimple_phi_result (t)))
3674 /* For a phi node, assign all the arguments to
3676 get_constraint_for (gimple_phi_result (t), &lhsc);
3677 for (i = 0; i < gimple_phi_num_args (t); i++)
3680 tree strippedrhs = PHI_ARG_DEF (t, i);
3682 STRIP_NOPS (strippedrhs);
3683 rhstype = TREE_TYPE (strippedrhs);
3684 get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
3686 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3688 struct constraint_expr *c2;
3689 while (VEC_length (ce_s, rhsc) > 0)
3691 c2 = VEC_last (ce_s, rhsc);
3692 process_constraint (new_constraint (*c, *c2));
3693 VEC_pop (ce_s, rhsc);
3699 /* In IPA mode, we need to generate constraints to pass call
3700 arguments through their calls. There are two cases,
3701 either a GIMPLE_CALL returning a value, or just a plain
3702 GIMPLE_CALL when we are not.
3704 In non-ipa mode, we need to generate constraints for each
3705 pointer passed by address. */
3706 else if (is_gimple_call (t))
3710 int flags = gimple_call_flags (t);
3712 /* Const functions can return their arguments and addresses
3713 of global memory but not of escaped memory. */
3714 if (flags & ECF_CONST)
3716 if (gimple_call_lhs (t)
3717 && could_have_pointers (gimple_call_lhs (t)))
3718 handle_const_call (t);
3720 /* Pure functions can return addresses in and of memory
3721 reachable from their arguments, but they are not an escape
3722 point for reachable memory of their arguments. */
3723 else if (flags & ECF_PURE)
3725 handle_pure_call (t);
3726 if (gimple_call_lhs (t)
3727 && could_have_pointers (gimple_call_lhs (t)))
3728 handle_lhs_call (gimple_call_lhs (t), flags);
3732 handle_rhs_call (t);
3733 if (gimple_call_lhs (t)
3734 && could_have_pointers (gimple_call_lhs (t)))
3735 handle_lhs_call (gimple_call_lhs (t), flags);
3746 lhsop = gimple_call_lhs (t);
3747 decl = gimple_call_fndecl (t);
3749 /* If we can directly resolve the function being called, do so.
3750 Otherwise, it must be some sort of indirect expression that
3751 we should still be able to handle. */
3753 fi = get_vi_for_tree (decl);
3756 decl = gimple_call_fn (t);
3757 fi = get_vi_for_tree (decl);
3760 /* Assign all the passed arguments to the appropriate incoming
3761 parameters of the function. */
3762 for (j = 0; j < gimple_call_num_args (t); j++)
3764 struct constraint_expr lhs ;
3765 struct constraint_expr *rhsp;
3766 tree arg = gimple_call_arg (t, j);
3768 get_constraint_for (arg, &rhsc);
3769 if (TREE_CODE (decl) != FUNCTION_DECL)
3778 lhs.var = first_vi_for_offset (fi, i)->id;
3781 while (VEC_length (ce_s, rhsc) != 0)
3783 rhsp = VEC_last (ce_s, rhsc);
3784 process_constraint (new_constraint (lhs, *rhsp));
3785 VEC_pop (ce_s, rhsc);
3790 /* If we are returning a value, assign it to the result. */
3793 struct constraint_expr rhs;
3794 struct constraint_expr *lhsp;
3797 get_constraint_for (lhsop, &lhsc);
3798 if (TREE_CODE (decl) != FUNCTION_DECL)
3807 rhs.var = first_vi_for_offset (fi, i)->id;
3810 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3811 process_constraint (new_constraint (*lhsp, rhs));
3815 /* Otherwise, just a regular assignment statement. Only care about
3816 operations with pointer result, others are dealt with as escape
3817 points if they have pointer operands. */
3818 else if (is_gimple_assign (t)
3819 && could_have_pointers (gimple_assign_lhs (t)))
3821 /* Otherwise, just a regular assignment statement. */
3822 tree lhsop = gimple_assign_lhs (t);
3823 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
3825 if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
3826 do_structure_copy (lhsop, rhsop);
3830 struct constraint_expr temp;
3831 get_constraint_for (lhsop, &lhsc);
3833 if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
3834 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
3835 gimple_assign_rhs2 (t), &rhsc);
3836 else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
3837 && !(POINTER_TYPE_P (gimple_expr_type (t))
3838 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
3839 || gimple_assign_single_p (t))
3840 get_constraint_for (rhsop, &rhsc);
3843 temp.type = ADDRESSOF;
3844 temp.var = anything_id;
3846 VEC_safe_push (ce_s, heap, rhsc, &temp);
3848 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3850 struct constraint_expr *c2;
3853 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3854 process_constraint (new_constraint (*c, *c2));
3858 else if (gimple_code (t) == GIMPLE_CHANGE_DYNAMIC_TYPE)
3862 get_constraint_for (gimple_cdt_location (t), &lhsc);
3863 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3864 get_varinfo (c->var)->no_tbaa_pruning = true;
3867 stmt_escape_type = is_escape_site (t);
3868 if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3870 gcc_assert (is_gimple_assign (t));
3871 if (gimple_assign_rhs_code (t) == ADDR_EXPR)
3873 tree rhs = gimple_assign_rhs1 (t);
3874 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3877 || !is_global_var (base)))
3878 make_escape_constraint (rhs);
3880 else if (get_gimple_rhs_class (gimple_assign_rhs_code (t))
3881 == GIMPLE_SINGLE_RHS)
3883 if (could_have_pointers (gimple_assign_rhs1 (t)))
3884 make_escape_constraint (gimple_assign_rhs1 (t));
3889 else if (stmt_escape_type == ESCAPE_BAD_CAST)
3891 gcc_assert (is_gimple_assign (t));
3892 gcc_assert (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
3893 || gimple_assign_rhs_code (t) == VIEW_CONVERT_EXPR);
3894 make_escape_constraint (gimple_assign_rhs1 (t));
3896 else if (stmt_escape_type == ESCAPE_TO_ASM)
3899 for (i = 0; i < gimple_asm_noutputs (t); ++i)
3901 tree op = TREE_VALUE (gimple_asm_output_op (t, i));
3902 if (op && could_have_pointers (op))
3903 /* Strictly we'd only need the constraints from ESCAPED and
3905 make_escape_constraint (op);
3907 for (i = 0; i < gimple_asm_ninputs (t); ++i)
3909 tree op = TREE_VALUE (gimple_asm_input_op (t, i));
3910 if (op && could_have_pointers (op))
3911 /* Strictly we'd only need the constraint to ESCAPED. */
3912 make_escape_constraint (op);
3916 /* After promoting variables and computing aliasing we will
3917 need to re-scan most statements. FIXME: Try to minimize the
3918 number of statements re-scanned. It's not really necessary to
3919 re-scan *all* statements. */
3921 gimple_set_modified (origt, true);
3922 VEC_free (ce_s, heap, rhsc);
3923 VEC_free (ce_s, heap, lhsc);
3927 /* Find the first varinfo in the same variable as START that overlaps with
3929 Effectively, walk the chain of fields for the variable START to find the
3930 first field that overlaps with OFFSET.
3931 Return NULL if we can't find one. */
3934 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3936 varinfo_t curr = start;
3939 /* We may not find a variable in the field list with the actual
3940 offset when when we have glommed a structure to a variable.
3941 In that case, however, offset should still be within the size
3943 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3951 /* Insert the varinfo FIELD into the field list for BASE, at the front
3955 insert_into_field_list (varinfo_t base, varinfo_t field)
3957 varinfo_t prev = base;
3958 varinfo_t curr = base->next;
3964 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3968 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3970 varinfo_t prev = base;
3971 varinfo_t curr = base->next;
3982 if (field->offset <= curr->offset)
3987 field->next = prev->next;
3992 /* This structure is used during pushing fields onto the fieldstack
3993 to track the offset of the field, since bitpos_of_field gives it
3994 relative to its immediate containing type, and we want it relative
3995 to the ultimate containing object. */
3999 /* Offset from the base of the base containing object to this field. */
4000 HOST_WIDE_INT offset;
4002 /* Size, in bits, of the field. */
4003 unsigned HOST_WIDE_INT size;
4005 unsigned has_unknown_size : 1;
4007 unsigned may_have_pointers : 1;
4009 typedef struct fieldoff fieldoff_s;
4011 DEF_VEC_O(fieldoff_s);
4012 DEF_VEC_ALLOC_O(fieldoff_s,heap);
4014 /* qsort comparison function for two fieldoff's PA and PB */
4017 fieldoff_compare (const void *pa, const void *pb)
4019 const fieldoff_s *foa = (const fieldoff_s *)pa;
4020 const fieldoff_s *fob = (const fieldoff_s *)pb;
4021 unsigned HOST_WIDE_INT foasize, fobsize;
4023 if (foa->offset < fob->offset)
4025 else if (foa->offset > fob->offset)
4028 foasize = foa->size;
4029 fobsize = fob->size;
4030 if (foasize < fobsize)
4032 else if (foasize > fobsize)
4037 /* Sort a fieldstack according to the field offset and sizes. */
4039 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4041 qsort (VEC_address (fieldoff_s, fieldstack),
4042 VEC_length (fieldoff_s, fieldstack),
4043 sizeof (fieldoff_s),
4047 /* Return true if V is a tree that we can have subvars for.
4048 Normally, this is any aggregate type. Also complex
4049 types which are not gimple registers can have subvars. */
4052 var_can_have_subvars (const_tree v)
4054 /* Volatile variables should never have subvars. */
4055 if (TREE_THIS_VOLATILE (v))
4058 /* Non decls or memory tags can never have subvars. */
4059 if (!DECL_P (v) || MTAG_P (v))
4062 /* Aggregates without overlapping fields can have subvars. */
4063 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4069 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4070 the fields of TYPE onto fieldstack, recording their offsets along
4073 OFFSET is used to keep track of the offset in this entire
4074 structure, rather than just the immediately containing structure.
4075 Returns the number of fields pushed. */
4078 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4079 HOST_WIDE_INT offset)
4084 if (TREE_CODE (type) != RECORD_TYPE)
4087 /* If the vector of fields is growing too big, bail out early.
4088 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
4090 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4093 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4094 if (TREE_CODE (field) == FIELD_DECL)
4098 HOST_WIDE_INT foff = bitpos_of_field (field);
4100 if (!var_can_have_subvars (field)
4101 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4102 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
4104 else if (!(pushed = push_fields_onto_fieldstack
4105 (TREE_TYPE (field), fieldstack, offset + foff))
4106 && (DECL_SIZE (field)
4107 && !integer_zerop (DECL_SIZE (field))))
4108 /* Empty structures may have actual size, like in C++. So
4109 see if we didn't push any subfields and the size is
4110 nonzero, push the field onto the stack. */
4115 fieldoff_s *pair = NULL;
4116 bool has_unknown_size = false;
4118 if (!VEC_empty (fieldoff_s, *fieldstack))
4119 pair = VEC_last (fieldoff_s, *fieldstack);
4121 if (!DECL_SIZE (field)
4122 || !host_integerp (DECL_SIZE (field), 1))
4123 has_unknown_size = true;
4125 /* If adjacent fields do not contain pointers merge them. */
4127 && !pair->may_have_pointers
4128 && !could_have_pointers (field)
4129 && !pair->has_unknown_size
4130 && !has_unknown_size
4131 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
4133 pair = VEC_last (fieldoff_s, *fieldstack);
4134 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
4138 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4139 pair->offset = offset + foff;
4140 pair->has_unknown_size = has_unknown_size;
4141 if (!has_unknown_size)
4142 pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
4145 pair->may_have_pointers = could_have_pointers (field);
4156 /* Create a constraint ID = &FROM. */
4159 make_constraint_from (varinfo_t vi, int from)
4161 struct constraint_expr lhs, rhs;
4169 rhs.type = ADDRESSOF;
4170 process_constraint (new_constraint (lhs, rhs));
4173 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4174 if it is a varargs function. */
4177 count_num_arguments (tree decl, bool *is_varargs)
4182 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4186 if (TREE_VALUE (t) == void_type_node)
4196 /* Creation function node for DECL, using NAME, and return the index
4197 of the variable we've created for the function. */
4200 create_function_info_for (tree decl, const char *name)
4202 unsigned int index = VEC_length (varinfo_t, varmap);
4206 bool is_varargs = false;
4208 /* Create the variable info. */
4210 vi = new_var_info (decl, index, name);
4214 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4215 insert_vi_for_tree (vi->decl, vi);
4216 VEC_safe_push (varinfo_t, heap, varmap, vi);
4220 /* If it's varargs, we don't know how many arguments it has, so we
4226 vi->is_unknown_size_var = true;
4231 arg = DECL_ARGUMENTS (decl);
4233 /* Set up variables for each argument. */
4234 for (i = 1; i < vi->fullsize; i++)
4237 const char *newname;
4239 unsigned int newindex;
4240 tree argdecl = decl;
4245 newindex = VEC_length (varinfo_t, varmap);
4246 asprintf (&tempname, "%s.arg%d", name, i-1);
4247 newname = ggc_strdup (tempname);
4250 argvi = new_var_info (argdecl, newindex, newname);
4251 argvi->decl = argdecl;
4252 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4255 argvi->is_full_var = true;
4256 argvi->fullsize = vi->fullsize;
4257 insert_into_field_list_sorted (vi, argvi);
4258 stats.total_vars ++;
4261 insert_vi_for_tree (arg, argvi);
4262 arg = TREE_CHAIN (arg);
4266 /* Create a variable for the return var. */
4267 if (DECL_RESULT (decl) != NULL
4268 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4271 const char *newname;
4273 unsigned int newindex;
4274 tree resultdecl = decl;
4278 if (DECL_RESULT (decl))
4279 resultdecl = DECL_RESULT (decl);
4281 newindex = VEC_length (varinfo_t, varmap);
4282 asprintf (&tempname, "%s.result", name);
4283 newname = ggc_strdup (tempname);
4286 resultvi = new_var_info (resultdecl, newindex, newname);
4287 resultvi->decl = resultdecl;
4288 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4289 resultvi->offset = i;
4291 resultvi->fullsize = vi->fullsize;
4292 resultvi->is_full_var = true;
4293 insert_into_field_list_sorted (vi, resultvi);
4294 stats.total_vars ++;
4295 if (DECL_RESULT (decl))
4296 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4302 /* Return true if FIELDSTACK contains fields that overlap.
4303 FIELDSTACK is assumed to be sorted by offset. */
4306 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4308 fieldoff_s *fo = NULL;
4310 HOST_WIDE_INT lastoffset = -1;
4312 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4314 if (fo->offset == lastoffset)
4316 lastoffset = fo->offset;
4321 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4322 This will also create any varinfo structures necessary for fields
4326 create_variable_info_for (tree decl, const char *name)
4328 unsigned int index = VEC_length (varinfo_t, varmap);
4330 tree decl_type = TREE_TYPE (decl);
4331 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
4332 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4333 VEC (fieldoff_s,heap) *fieldstack = NULL;
4335 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4336 return create_function_info_for (decl, name);
4338 if (var_can_have_subvars (decl) && use_field_sensitive
4340 || var_ann (decl)->noalias_state == 0)
4342 || !var_ann (decl)->is_heapvar))
4343 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
4345 /* If the variable doesn't have subvars, we may end up needing to
4346 sort the field list and create fake variables for all the
4348 vi = new_var_info (decl, index, name);
4352 || !host_integerp (declsize, 1))
4354 vi->is_unknown_size_var = true;
4360 vi->fullsize = TREE_INT_CST_LOW (declsize);
4361 vi->size = vi->fullsize;
4364 insert_vi_for_tree (vi->decl, vi);
4365 VEC_safe_push (varinfo_t, heap, varmap, vi);
4366 if (is_global && (!flag_whole_program || !in_ipa_mode)
4367 && could_have_pointers (decl))
4370 && var_ann (decl)->noalias_state == NO_ALIAS_ANYTHING)
4371 make_constraint_from (vi, vi->id);
4373 make_constraint_from (vi, escaped_id);
4377 if (use_field_sensitive
4378 && !vi->is_unknown_size_var
4379 && var_can_have_subvars (decl)
4380 && VEC_length (fieldoff_s, fieldstack) > 1
4381 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4383 unsigned int newindex = VEC_length (varinfo_t, varmap);
4384 fieldoff_s *fo = NULL;
4385 bool notokay = false;
4388 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4390 if (fo->has_unknown_size
4398 /* We can't sort them if we have a field with a variable sized type,
4399 which will make notokay = true. In that case, we are going to return
4400 without creating varinfos for the fields anyway, so sorting them is a
4404 sort_fieldstack (fieldstack);
4405 /* Due to some C++ FE issues, like PR 22488, we might end up
4406 what appear to be overlapping fields even though they,
4407 in reality, do not overlap. Until the C++ FE is fixed,
4408 we will simply disable field-sensitivity for these cases. */
4409 notokay = check_for_overlaps (fieldstack);
4413 if (VEC_length (fieldoff_s, fieldstack) != 0)
4414 fo = VEC_index (fieldoff_s, fieldstack, 0);
4416 if (fo == NULL || notokay)
4418 vi->is_unknown_size_var = 1;
4421 vi->is_full_var = true;
4422 VEC_free (fieldoff_s, heap, fieldstack);
4426 vi->size = fo->size;
4427 vi->offset = fo->offset;
4428 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4429 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4433 const char *newname = "NULL";
4436 newindex = VEC_length (varinfo_t, varmap);
4439 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
4440 "+" HOST_WIDE_INT_PRINT_DEC,
4441 vi->name, fo->offset, fo->size);
4442 newname = ggc_strdup (tempname);
4445 newvi = new_var_info (decl, newindex, newname);
4446 newvi->offset = fo->offset;
4447 newvi->size = fo->size;
4448 newvi->fullsize = vi->fullsize;
4449 insert_into_field_list (vi, newvi);
4450 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4451 if (is_global && (!flag_whole_program || !in_ipa_mode)
4452 && fo->may_have_pointers)
4453 make_constraint_from (newvi, escaped_id);
4459 vi->is_full_var = true;
4461 VEC_free (fieldoff_s, heap, fieldstack);
4466 /* Print out the points-to solution for VAR to FILE. */
4469 dump_solution_for_var (FILE *file, unsigned int var)
4471 varinfo_t vi = get_varinfo (var);
4475 if (find (var) != var)
4477 varinfo_t vipt = get_varinfo (find (var));
4478 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4482 fprintf (file, "%s = { ", vi->name);
4483 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4485 fprintf (file, "%s ", get_varinfo (i)->name);
4487 fprintf (file, "}");
4488 if (vi->no_tbaa_pruning)
4489 fprintf (file, " no-tbaa-pruning");
4490 fprintf (file, "\n");
4494 /* Print the points-to solution for VAR to stdout. */
4497 debug_solution_for_var (unsigned int var)
4499 dump_solution_for_var (stdout, var);
4502 /* Create varinfo structures for all of the variables in the
4503 function for intraprocedural mode. */
4506 intra_create_variable_infos (void)
4509 struct constraint_expr lhs, rhs;
4511 /* For each incoming pointer argument arg, create the constraint ARG
4512 = NONLOCAL or a dummy variable if flag_argument_noalias is set. */
4513 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4517 if (!could_have_pointers (t))
4520 /* If flag_argument_noalias is set, then function pointer
4521 arguments are guaranteed not to point to each other. In that
4522 case, create an artificial variable PARM_NOALIAS and the
4523 constraint ARG = &PARM_NOALIAS. */
4524 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4527 tree heapvar = heapvar_lookup (t);
4531 lhs.var = get_vi_for_tree (t)->id;
4533 if (heapvar == NULL_TREE)
4536 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4538 DECL_EXTERNAL (heapvar) = 1;
4539 if (gimple_referenced_vars (cfun))
4540 add_referenced_var (heapvar);
4542 heapvar_insert (t, heapvar);
4544 ann = get_var_ann (heapvar);
4545 ann->is_heapvar = 1;
4546 if (flag_argument_noalias == 1)
4547 ann->noalias_state = NO_ALIAS;
4548 else if (flag_argument_noalias == 2)
4549 ann->noalias_state = NO_ALIAS_GLOBAL;
4550 else if (flag_argument_noalias == 3)
4551 ann->noalias_state = NO_ALIAS_ANYTHING;
4556 vi = get_vi_for_tree (heapvar);
4557 vi->is_artificial_var = 1;
4558 vi->is_heap_var = 1;
4560 rhs.type = ADDRESSOF;
4562 for (p = get_varinfo (lhs.var); p; p = p->next)
4564 struct constraint_expr temp = lhs;
4566 process_constraint (new_constraint (temp, rhs));
4571 varinfo_t arg_vi = get_vi_for_tree (t);
4573 for (p = arg_vi; p; p = p->next)
4574 make_constraint_from (p, nonlocal_id);
4578 /* Add a constraint for a result decl that is passed by reference. */
4579 if (DECL_RESULT (cfun->decl)
4580 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
4582 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
4584 for (p = result_vi; p; p = p->next)
4585 make_constraint_from (p, nonlocal_id);
4588 /* Add a constraint for the incoming static chain parameter. */
4589 if (cfun->static_chain_decl != NULL_TREE)
4591 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
4593 for (p = chain_vi; p; p = p->next)
4594 make_constraint_from (p, nonlocal_id);
4598 /* Structure used to put solution bitmaps in a hashtable so they can
4599 be shared among variables with the same points-to set. */
4601 typedef struct shared_bitmap_info
4605 } *shared_bitmap_info_t;
4606 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4608 static htab_t shared_bitmap_table;
4610 /* Hash function for a shared_bitmap_info_t */
4613 shared_bitmap_hash (const void *p)
4615 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4616 return bi->hashcode;
4619 /* Equality function for two shared_bitmap_info_t's. */
4622 shared_bitmap_eq (const void *p1, const void *p2)
4624 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4625 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4626 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4629 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4630 existing instance if there is one, NULL otherwise. */
4633 shared_bitmap_lookup (bitmap pt_vars)
4636 struct shared_bitmap_info sbi;
4638 sbi.pt_vars = pt_vars;
4639 sbi.hashcode = bitmap_hash (pt_vars);
4641 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4642 sbi.hashcode, NO_INSERT);
4646 return ((shared_bitmap_info_t) *slot)->pt_vars;
4650 /* Add a bitmap to the shared bitmap hashtable. */
4653 shared_bitmap_add (bitmap pt_vars)
4656 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4658 sbi->pt_vars = pt_vars;
4659 sbi->hashcode = bitmap_hash (pt_vars);
4661 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4662 sbi->hashcode, INSERT);
4663 gcc_assert (!*slot);
4664 *slot = (void *) sbi;
4668 /* Set bits in INTO corresponding to the variable uids in solution set
4669 FROM, which came from variable PTR.
4670 For variables that are actually dereferenced, we also use type
4671 based alias analysis to prune the points-to sets.
4672 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4673 help determine whether we are we are allowed to prune using TBAA.
4674 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4675 the from set. Returns the number of pruned variables. */
4678 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4679 bool no_tbaa_pruning)
4683 unsigned pruned = 0;
4685 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
4687 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4689 varinfo_t vi = get_varinfo (i);
4691 /* The only artificial variables that are allowed in a may-alias
4692 set are heap variables. */
4693 if (vi->is_artificial_var && !vi->is_heap_var)
4696 if (TREE_CODE (vi->decl) == VAR_DECL
4697 || TREE_CODE (vi->decl) == PARM_DECL
4698 || TREE_CODE (vi->decl) == RESULT_DECL)
4700 /* Just add VI->DECL to the alias set.
4701 Don't type prune artificial vars or points-to sets
4702 for pointers that have not been dereferenced or with
4703 type-based pruning disabled. */
4704 if (vi->is_artificial_var
4707 bitmap_set_bit (into, DECL_UID (vi->decl));
4710 alias_set_type var_alias_set, mem_alias_set;
4711 var_alias_set = get_alias_set (vi->decl);
4712 mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
4713 if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set,
4714 vi->decl, var_alias_set, true))
4715 bitmap_set_bit (into, DECL_UID (vi->decl));
4726 static bool have_alias_info = false;
4728 /* Emit a note for the pointer initialization point DEF. */
4731 emit_pointer_definition (tree ptr, bitmap visited)
4733 gimple def = SSA_NAME_DEF_STMT (ptr);
4734 if (gimple_code (def) == GIMPLE_PHI)
4739 FOR_EACH_PHI_ARG (argp, def, oi, SSA_OP_USE)
4741 tree arg = USE_FROM_PTR (argp);
4742 if (TREE_CODE (arg) == SSA_NAME)
4744 if (bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
4745 emit_pointer_definition (arg, visited);
4748 inform (0, "initialized from %qE", arg);
4751 else if (!gimple_nop_p (def))
4752 inform (gimple_location (def), "initialized from here");
4755 /* Emit a strict aliasing warning for dereferencing the pointer PTR. */
4758 emit_alias_warning (tree ptr)
4761 imm_use_iterator ui;
4762 bool warned = false;
4764 FOR_EACH_IMM_USE_STMT (use, ui, ptr)
4766 tree deref = NULL_TREE;
4768 if (gimple_has_lhs (use))
4770 tree lhs = get_base_address (gimple_get_lhs (use));
4772 && INDIRECT_REF_P (lhs)
4773 && TREE_OPERAND (lhs, 0) == ptr)
4776 if (gimple_assign_single_p (use))
4778 tree rhs = get_base_address (gimple_assign_rhs1 (use));
4780 && INDIRECT_REF_P (rhs)
4781 && TREE_OPERAND (rhs, 0) == ptr)
4784 else if (is_gimple_call (use))
4787 for (i = 0; i < gimple_call_num_args (use); ++i)
4789 tree op = get_base_address (gimple_call_arg (use, i));
4791 && INDIRECT_REF_P (op)
4792 && TREE_OPERAND (op, 0) == ptr)
4797 && !TREE_NO_WARNING (deref))
4799 TREE_NO_WARNING (deref) = 1;
4800 warned |= warning_at (gimple_location (use), OPT_Wstrict_aliasing,
4801 "dereferencing pointer %qD does break "
4802 "strict-aliasing rules", SSA_NAME_VAR (ptr));
4807 bitmap visited = BITMAP_ALLOC (NULL);
4808 emit_pointer_definition (ptr, visited);
4809 BITMAP_FREE (visited);
4813 /* Given a pointer variable P, fill in its points-to set, or return
4815 Rather than return false for variables that point-to anything, we
4816 instead find the corresponding SMT, and merge in its aliases. In
4817 addition to these aliases, we also set the bits for the SMT's
4818 themselves and their subsets, as SMT's are still in use by
4819 non-SSA_NAME's, and pruning may eliminate every one of their
4820 aliases. In such a case, if we did not include the right set of
4821 SMT's in the points-to set of the variable, we'd end up with
4822 statements that do not conflict but should. */
4825 find_what_p_points_to (tree p)
4830 if (!have_alias_info)
4833 /* For parameters, get at the points-to set for the actual parm
4835 if (TREE_CODE (p) == SSA_NAME
4836 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4837 && SSA_NAME_IS_DEFAULT_DEF (p))
4838 lookup_p = SSA_NAME_VAR (p);
4840 vi = lookup_vi_for_tree (lookup_p);
4843 if (vi->is_artificial_var)
4846 /* See if this is a field or a structure. */
4847 if (vi->size != vi->fullsize)
4849 /* Nothing currently asks about structure fields directly,
4850 but when they do, we need code here to hand back the
4856 struct ptr_info_def *pi = get_ptr_info (p);
4857 unsigned int i, pruned;
4859 bool was_pt_anything = false;
4860 bitmap finished_solution;
4863 if (!pi->memory_tag_needed)
4866 /* This variable may have been collapsed, let's get the real
4868 vi = get_varinfo (find (vi->id));
4870 /* Translate artificial variables into SSA_NAME_PTR_INFO
4872 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4874 varinfo_t vi = get_varinfo (i);
4876 if (vi->is_artificial_var)
4878 /* FIXME. READONLY should be handled better so that
4879 flow insensitive aliasing can disregard writable
4881 if (vi->id == nothing_id)
4883 else if (vi->id == anything_id
4884 || vi->id == nonlocal_id
4885 || vi->id == escaped_id
4886 || vi->id == callused_id)
4887 was_pt_anything = 1;
4888 else if (vi->id == readonly_id)
4889 was_pt_anything = 1;
4890 else if (vi->id == integer_id)
4891 was_pt_anything = 1;
4892 else if (vi->is_heap_var)
4893 pi->pt_global_mem = 1;
4897 /* Instead of doing extra work, simply do not create
4898 points-to information for pt_anything pointers. This
4899 will cause the operand scanner to fall back to the
4900 type-based SMT and its aliases. Which is the best
4901 we could do here for the points-to set as well. */
4902 if (was_pt_anything)
4905 /* Share the final set of variables when possible. */
4906 finished_solution = BITMAP_GGC_ALLOC ();
4907 stats.points_to_sets_created++;
4909 pruned = set_uids_in_ptset (p, finished_solution, vi->solution,
4910 pi->is_dereferenced,
4911 vi->no_tbaa_pruning);
4912 result = shared_bitmap_lookup (finished_solution);
4916 shared_bitmap_add (finished_solution);
4917 pi->pt_vars = finished_solution;
4921 pi->pt_vars = result;
4922 bitmap_clear (finished_solution);
4925 if (bitmap_empty_p (pi->pt_vars))
4929 && pi->is_dereferenced
4930 && warn_strict_aliasing > 0
4931 && !SSA_NAME_IS_DEFAULT_DEF (p))
4933 if (dump_file && dump_flags & TDF_DETAILS)
4935 fprintf (dump_file, "alias warning for ");
4936 print_generic_expr (dump_file, p, 0);
4937 fprintf (dump_file, "\n");
4939 emit_alias_warning (p);
4950 /* Mark the ESCAPED solution as call clobbered. Returns false if
4951 pt_anything escaped which needs all locals that have their address
4952 taken marked call clobbered as well. */
4955 clobber_what_escaped (void)
4961 if (!have_alias_info)
4964 /* This variable may have been collapsed, let's get the real
4965 variable for escaped_id. */
4966 vi = get_varinfo (find (escaped_id));
4968 /* If call-used memory escapes we need to include it in the
4969 set of escaped variables. This can happen if a pure
4970 function returns a pointer and this pointer escapes. */
4971 if (bitmap_bit_p (vi->solution, callused_id))
4973 varinfo_t cu_vi = get_varinfo (find (callused_id));
4974 bitmap_ior_into (vi->solution, cu_vi->solution);
4977 /* Mark variables in the solution call-clobbered. */
4978 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4980 varinfo_t vi = get_varinfo (i);
4982 if (vi->is_artificial_var)
4984 /* nothing_id and readonly_id do not cause any
4985 call clobber ops. For anything_id and integer_id
4986 we need to clobber all addressable vars. */
4987 if (vi->id == anything_id
4988 || vi->id == integer_id)
4992 /* Only artificial heap-vars are further interesting. */
4993 if (vi->is_artificial_var && !vi->is_heap_var)
4996 if ((TREE_CODE (vi->decl) == VAR_DECL
4997 || TREE_CODE (vi->decl) == PARM_DECL
4998 || TREE_CODE (vi->decl) == RESULT_DECL)
4999 && !unmodifiable_var_p (vi->decl))
5000 mark_call_clobbered (vi->decl, ESCAPE_TO_CALL);
5006 /* Compute the call-used variables. */
5009 compute_call_used_vars (void)
5014 bool has_anything_id = false;
5016 if (!have_alias_info)
5019 /* This variable may have been collapsed, let's get the real
5020 variable for escaped_id. */
5021 vi = get_varinfo (find (callused_id));
5023 /* Mark variables in the solution call-clobbered. */
5024 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5026 varinfo_t vi = get_varinfo (i);
5028 if (vi->is_artificial_var)
5030 /* For anything_id and integer_id we need to make
5031 all local addressable vars call-used. */
5032 if (vi->id == anything_id
5033 || vi->id == integer_id)
5034 has_anything_id = true;
5037 /* Only artificial heap-vars are further interesting. */
5038 if (vi->is_artificial_var && !vi->is_heap_var)
5041 if ((TREE_CODE (vi->decl) == VAR_DECL
5042 || TREE_CODE (vi->decl) == PARM_DECL
5043 || TREE_CODE (vi->decl) == RESULT_DECL)
5044 && !unmodifiable_var_p (vi->decl))
5045 bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl));
5048 /* If anything is call-used, add all addressable locals to the set. */
5049 if (has_anything_id)
5050 bitmap_ior_into (gimple_call_used_vars (cfun),
5051 gimple_addressable_vars (cfun));
5055 /* Dump points-to information to OUTFILE. */
5058 dump_sa_points_to_info (FILE *outfile)
5062 fprintf (outfile, "\nPoints-to sets\n\n");
5064 if (dump_flags & TDF_STATS)
5066 fprintf (outfile, "Stats:\n");
5067 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
5068 fprintf (outfile, "Non-pointer vars: %d\n",
5069 stats.nonpointer_vars);
5070 fprintf (outfile, "Statically unified vars: %d\n",
5071 stats.unified_vars_static);
5072 fprintf (outfile, "Dynamically unified vars: %d\n",
5073 stats.unified_vars_dynamic);
5074 fprintf (outfile, "Iterations: %d\n", stats.iterations);
5075 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
5076 fprintf (outfile, "Number of implicit edges: %d\n",
5077 stats.num_implicit_edges);
5080 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5081 dump_solution_for_var (outfile, i);
5085 /* Debug points-to information to stderr. */
5088 debug_sa_points_to_info (void)
5090 dump_sa_points_to_info (stderr);
5094 /* Initialize the always-existing constraint variables for NULL
5095 ANYTHING, READONLY, and INTEGER */
5098 init_base_vars (void)
5100 struct constraint_expr lhs, rhs;
5102 /* Create the NULL variable, used to represent that a variable points
5104 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5105 var_nothing = new_var_info (nothing_tree, nothing_id, "NULL");
5106 insert_vi_for_tree (nothing_tree, var_nothing);
5107 var_nothing->is_artificial_var = 1;
5108 var_nothing->offset = 0;
5109 var_nothing->size = ~0;
5110 var_nothing->fullsize = ~0;
5111 var_nothing->is_special_var = 1;
5112 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5114 /* Create the ANYTHING variable, used to represent that a variable
5115 points to some unknown piece of memory. */
5116 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5117 var_anything = new_var_info (anything_tree, anything_id, "ANYTHING");
5118 insert_vi_for_tree (anything_tree, var_anything);
5119 var_anything->is_artificial_var = 1;
5120 var_anything->size = ~0;
5121 var_anything->offset = 0;
5122 var_anything->next = NULL;
5123 var_anything->fullsize = ~0;
5124 var_anything->is_special_var = 1;
5126 /* Anything points to anything. This makes deref constraints just
5127 work in the presence of linked list and other p = *p type loops,
5128 by saying that *ANYTHING = ANYTHING. */
5129 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5131 lhs.var = anything_id;
5133 rhs.type = ADDRESSOF;
5134 rhs.var = anything_id;
5137 /* This specifically does not use process_constraint because
5138 process_constraint ignores all anything = anything constraints, since all
5139 but this one are redundant. */
5140 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5142 /* Create the READONLY variable, used to represent that a variable
5143 points to readonly memory. */
5144 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5145 var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY");
5146 var_readonly->is_artificial_var = 1;
5147 var_readonly->offset = 0;
5148 var_readonly->size = ~0;
5149 var_readonly->fullsize = ~0;
5150 var_readonly->next = NULL;
5151 var_readonly->is_special_var = 1;
5152 insert_vi_for_tree (readonly_tree, var_readonly);
5153 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5155 /* readonly memory points to anything, in order to make deref
5156 easier. In reality, it points to anything the particular
5157 readonly variable can point to, but we don't track this
5160 lhs.var = readonly_id;
5162 rhs.type = ADDRESSOF;
5163 rhs.var = readonly_id; /* FIXME */
5165 process_constraint (new_constraint (lhs, rhs));
5167 /* Create the ESCAPED variable, used to represent the set of escaped
5169 escaped_tree = create_tmp_var_raw (void_type_node, "ESCAPED");
5170 var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED");
5171 insert_vi_for_tree (escaped_tree, var_escaped);
5172 var_escaped->is_artificial_var = 1;
5173 var_escaped->offset = 0;
5174 var_escaped->size = ~0;
5175 var_escaped->fullsize = ~0;
5176 var_escaped->is_special_var = 0;
5177 VEC_safe_push (varinfo_t, heap, varmap, var_escaped);
5178 gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped);
5180 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
5182 lhs.var = escaped_id;
5185 rhs.var = escaped_id;
5187 process_constraint (new_constraint (lhs, rhs));
5189 /* Create the NONLOCAL variable, used to represent the set of nonlocal
5191 nonlocal_tree = create_tmp_var_raw (void_type_node, "NONLOCAL");
5192 var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL");
5193 insert_vi_for_tree (nonlocal_tree, var_nonlocal);
5194 var_nonlocal->is_artificial_var = 1;
5195 var_nonlocal->offset = 0;
5196 var_nonlocal->size = ~0;
5197 var_nonlocal->fullsize = ~0;
5198 var_nonlocal->is_special_var = 1;
5199 VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal);
5201 /* Nonlocal memory points to escaped (which includes nonlocal),
5202 in order to make deref easier. */
5204 lhs.var = nonlocal_id;
5206 rhs.type = ADDRESSOF;
5207 rhs.var = escaped_id;
5209 process_constraint (new_constraint (lhs, rhs));
5211 /* Create the CALLUSED variable, used to represent the set of call-used
5213 callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED");
5214 var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
5215 insert_vi_for_tree (callused_tree, var_callused);
5216 var_callused->is_artificial_var = 1;
5217 var_callused->offset = 0;
5218 var_callused->size = ~0;
5219 var_callused->fullsize = ~0;
5220 var_callused->is_special_var = 0;
5221 VEC_safe_push (varinfo_t, heap, varmap, var_callused);
5223 /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc. */
5225 lhs.var = callused_id;
5228 rhs.var = callused_id;
5230 process_constraint (new_constraint (lhs, rhs));
5232 /* Create the INTEGER variable, used to represent that a variable points
5234 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5235 var_integer = new_var_info (integer_tree, integer_id, "INTEGER");
5236 insert_vi_for_tree (integer_tree, var_integer);
5237 var_integer->is_artificial_var = 1;
5238 var_integer->size = ~0;
5239 var_integer->fullsize = ~0;
5240 var_integer->offset = 0;
5241 var_integer->next = NULL;
5242 var_integer->is_special_var = 1;
5243 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5245 /* INTEGER = ANYTHING, because we don't know where a dereference of
5246 a random integer will point to. */
5248 lhs.var = integer_id;
5250 rhs.type = ADDRESSOF;
5251 rhs.var = anything_id;
5253 process_constraint (new_constraint (lhs, rhs));
5255 /* *ESCAPED = &ESCAPED. This is true because we have to assume
5256 everything pointed to by escaped can also point to escaped. */
5258 lhs.var = escaped_id;
5260 rhs.type = ADDRESSOF;
5261 rhs.var = escaped_id;
5263 process_constraint (new_constraint (lhs, rhs));
5265 /* *ESCAPED = &NONLOCAL. This is true because we have to assume
5266 everything pointed to by escaped can also point to nonlocal. */
5268 lhs.var = escaped_id;
5270 rhs.type = ADDRESSOF;
5271 rhs.var = nonlocal_id;
5273 process_constraint (new_constraint (lhs, rhs));
5276 /* Initialize things necessary to perform PTA */
5279 init_alias_vars (void)
5281 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
5283 bitmap_obstack_initialize (&pta_obstack);
5284 bitmap_obstack_initialize (&oldpta_obstack);
5285 bitmap_obstack_initialize (&predbitmap_obstack);
5287 constraint_pool = create_alloc_pool ("Constraint pool",
5288 sizeof (struct constraint), 30);
5289 variable_info_pool = create_alloc_pool ("Variable info pool",
5290 sizeof (struct variable_info), 30);
5291 constraints = VEC_alloc (constraint_t, heap, 8);
5292 varmap = VEC_alloc (varinfo_t, heap, 8);
5293 vi_for_tree = pointer_map_create ();
5295 memset (&stats, 0, sizeof (stats));
5296 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5297 shared_bitmap_eq, free);
5301 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5302 predecessor edges. */
5305 remove_preds_and_fake_succs (constraint_graph_t graph)
5309 /* Clear the implicit ref and address nodes from the successor
5311 for (i = 0; i < FIRST_REF_NODE; i++)
5313 if (graph->succs[i])
5314 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5315 FIRST_REF_NODE * 2);
5318 /* Free the successor list for the non-ref nodes. */
5319 for (i = FIRST_REF_NODE; i < graph->size; i++)
5321 if (graph->succs[i])
5322 BITMAP_FREE (graph->succs[i]);
5325 /* Now reallocate the size of the successor list as, and blow away
5326 the predecessor bitmaps. */
5327 graph->size = VEC_length (varinfo_t, varmap);
5328 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5330 free (graph->implicit_preds);
5331 graph->implicit_preds = NULL;
5332 free (graph->preds);
5333 graph->preds = NULL;
5334 bitmap_obstack_release (&predbitmap_obstack);
5337 /* Compute the set of variables we can't TBAA prune. */
5340 compute_tbaa_pruning (void)
5342 unsigned int size = VEC_length (varinfo_t, varmap);
5347 changed = sbitmap_alloc (size);
5348 sbitmap_zero (changed);
5350 /* Mark all initial no_tbaa_pruning nodes as changed. */
5352 for (i = 0; i < size; ++i)
5354 varinfo_t ivi = get_varinfo (i);
5356 if (find (i) == i && ivi->no_tbaa_pruning)
5359 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5360 || VEC_length (constraint_t, graph->complex[i]) > 0)
5362 SET_BIT (changed, i);
5368 while (changed_count > 0)
5370 struct topo_info *ti = init_topo_info ();
5373 compute_topo_order (graph, ti);
5375 while (VEC_length (unsigned, ti->topo_order) != 0)
5379 i = VEC_pop (unsigned, ti->topo_order);
5381 /* If this variable is not a representative, skip it. */
5385 /* If the node has changed, we need to process the complex
5386 constraints and outgoing edges again. */
5387 if (TEST_BIT (changed, i))
5391 VEC(constraint_t,heap) *complex = graph->complex[i];
5393 RESET_BIT (changed, i);
5396 /* Process the complex copy constraints. */
5397 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5399 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5401 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5403 if (!lhsvi->no_tbaa_pruning)
5405 lhsvi->no_tbaa_pruning = true;
5406 if (!TEST_BIT (changed, lhsvi->id))
5408 SET_BIT (changed, lhsvi->id);
5415 /* Propagate to all successors. */
5416 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5418 unsigned int to = find (j);
5419 varinfo_t tovi = get_varinfo (to);
5421 /* Don't propagate to ourselves. */
5425 if (!tovi->no_tbaa_pruning)
5427 tovi->no_tbaa_pruning = true;
5428 if (!TEST_BIT (changed, to))
5430 SET_BIT (changed, to);
5438 free_topo_info (ti);
5441 sbitmap_free (changed);
5445 for (i = 0; i < size; ++i)
5447 varinfo_t ivi = get_varinfo (i);
5448 varinfo_t ivip = get_varinfo (find (i));
5450 if (ivip->no_tbaa_pruning)
5452 tree var = ivi->decl;
5454 if (TREE_CODE (var) == SSA_NAME)
5455 var = SSA_NAME_VAR (var);
5457 if (POINTER_TYPE_P (TREE_TYPE (var)))
5459 DECL_NO_TBAA_P (var) = 1;
5461 /* Tell the RTL layer that this pointer can alias
5463 DECL_POINTER_ALIAS_SET (var) = 0;
5470 /* Create points-to sets for the current function. See the comments
5471 at the start of the file for an algorithmic overview. */
5474 compute_points_to_sets (void)
5476 struct scc_info *si;
5479 timevar_push (TV_TREE_PTA);
5482 init_alias_heapvars ();
5484 intra_create_variable_infos ();
5486 /* Now walk all statements and derive aliases. */
5489 gimple_stmt_iterator gsi;
5491 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5493 gimple phi = gsi_stmt (gsi);
5495 if (is_gimple_reg (gimple_phi_result (phi)))
5496 find_func_aliases (phi);
5499 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
5501 gimple stmt = gsi_stmt (gsi);
5503 find_func_aliases (stmt);
5505 /* The information in GIMPLE_CHANGE_DYNAMIC_TYPE statements
5506 has now been captured, and we can remove them. */
5507 if (gimple_code (stmt) == GIMPLE_CHANGE_DYNAMIC_TYPE)
5508 gsi_remove (&gsi, true);
5517 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5518 dump_constraints (dump_file);
5523 "\nCollapsing static cycles and doing variable "
5526 init_graph (VEC_length (varinfo_t, varmap) * 2);
5529 fprintf (dump_file, "Building predecessor graph\n");
5530 build_pred_graph ();
5533 fprintf (dump_file, "Detecting pointer and location "
5535 si = perform_var_substitution (graph);
5538 fprintf (dump_file, "Rewriting constraints and unifying "
5540 rewrite_constraints (graph, si);
5541 free_var_substitution_info (si);
5543 build_succ_graph ();
5545 if (dump_file && (dump_flags & TDF_GRAPH))
5546 dump_constraint_graph (dump_file);
5548 move_complex_constraints (graph);
5551 fprintf (dump_file, "Uniting pointer but not location equivalent "
5553 unite_pointer_equivalences (graph);
5556 fprintf (dump_file, "Finding indirect cycles\n");
5557 find_indirect_cycles (graph);
5559 /* Implicit nodes and predecessors are no longer necessary at this
5561 remove_preds_and_fake_succs (graph);
5564 fprintf (dump_file, "Solving graph\n");
5566 solve_graph (graph);
5568 compute_tbaa_pruning ();
5571 dump_sa_points_to_info (dump_file);
5573 have_alias_info = true;
5575 timevar_pop (TV_TREE_PTA);
5579 /* Delete created points-to sets. */
5582 delete_points_to_sets (void)
5586 htab_delete (shared_bitmap_table);
5587 if (dump_file && (dump_flags & TDF_STATS))
5588 fprintf (dump_file, "Points to sets created:%d\n",
5589 stats.points_to_sets_created);
5591 pointer_map_destroy (vi_for_tree);
5592 bitmap_obstack_release (&pta_obstack);
5593 VEC_free (constraint_t, heap, constraints);
5595 for (i = 0; i < graph->size; i++)
5596 VEC_free (constraint_t, heap, graph->complex[i]);
5597 free (graph->complex);
5600 free (graph->succs);
5602 free (graph->pe_rep);
5603 free (graph->indirect_cycles);
5606 VEC_free (varinfo_t, heap, varmap);
5607 free_alloc_pool (variable_info_pool);
5608 free_alloc_pool (constraint_pool);
5609 have_alias_info = false;
5612 /* Return true if we should execute IPA PTA. */
5616 return (flag_ipa_pta
5617 /* Don't bother doing anything if the program has errors. */
5618 && !(errorcount || sorrycount));
5621 /* Execute the driver for IPA PTA. */
5623 ipa_pta_execute (void)
5625 struct cgraph_node *node;
5626 struct scc_info *si;
5629 init_alias_heapvars ();
5632 for (node = cgraph_nodes; node; node = node->next)
5634 if (!node->analyzed || cgraph_is_master_clone (node))
5638 varid = create_function_info_for (node->decl,
5639 cgraph_node_name (node));
5640 if (node->local.externally_visible)
5642 varinfo_t fi = get_varinfo (varid);
5643 for (; fi; fi = fi->next)
5644 make_constraint_from (fi, anything_id);
5648 for (node = cgraph_nodes; node; node = node->next)
5650 if (node->analyzed && cgraph_is_master_clone (node))
5652 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
5654 tree old_func_decl = current_function_decl;
5657 "Generating constraints for %s\n",
5658 cgraph_node_name (node));
5660 current_function_decl = node->decl;
5662 FOR_EACH_BB_FN (bb, func)
5664 gimple_stmt_iterator gsi;
5666 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
5669 gimple phi = gsi_stmt (gsi);
5671 if (is_gimple_reg (gimple_phi_result (phi)))
5672 find_func_aliases (phi);
5675 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5676 find_func_aliases (gsi_stmt (gsi));
5678 current_function_decl = old_func_decl;
5683 /* Make point to anything. */
5689 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5690 dump_constraints (dump_file);
5695 "\nCollapsing static cycles and doing variable "
5698 init_graph (VEC_length (varinfo_t, varmap) * 2);
5699 build_pred_graph ();
5700 si = perform_var_substitution (graph);
5701 rewrite_constraints (graph, si);
5702 free_var_substitution_info (si);
5704 build_succ_graph ();
5705 move_complex_constraints (graph);
5706 unite_pointer_equivalences (graph);
5707 find_indirect_cycles (graph);
5709 /* Implicit nodes and predecessors are no longer necessary at this
5711 remove_preds_and_fake_succs (graph);
5714 fprintf (dump_file, "\nSolving graph\n");
5716 solve_graph (graph);
5719 dump_sa_points_to_info (dump_file);
5722 delete_alias_heapvars ();
5723 delete_points_to_sets ();
5727 struct simple_ipa_opt_pass pass_ipa_pta =
5732 gate_ipa_pta, /* gate */
5733 ipa_pta_execute, /* execute */
5736 0, /* static_pass_number */
5737 TV_IPA_PTA, /* tv_id */
5738 0, /* properties_required */
5739 0, /* properties_provided */
5740 0, /* properties_destroyed */
5741 0, /* todo_flags_start */
5742 TODO_update_ssa /* todo_flags_finish */
5746 /* Initialize the heapvar for statement mapping. */
5748 init_alias_heapvars (void)
5750 if (!heapvar_for_stmt)
5751 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5756 delete_alias_heapvars (void)
5758 htab_delete (heapvar_for_stmt);
5759 heapvar_for_stmt = NULL;
5762 #include "gt-tree-ssa-structalias.h"