1 /* Tree based points-to analysis
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
36 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
43 #include "tree-gimple.h"
47 #include "tree-pass.h"
49 #include "alloc-pool.h"
50 #include "splay-tree.h"
51 #include "tree-ssa-structalias.h"
54 /* The idea behind this analyzer is to generate set constraints from the
55 program, then solve the resulting constraints in order to generate the points-to sets.
58 Set constraints are a way of modeling program analysis problems that
59 involve sets. They consist of an inclusion constraint language,
60 describing the variables (each variable is a set) and operations that
61 are performed on the variables, and a set of rules that derive facts
62 from these operations. To solve a system of set constraints, you derive
63 all possible facts under the rules, which gives you the correct sets of variables.
66 See "Efficient Field-sensitive pointer analysis for C" by "David
67 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
68 http://citeseer.ist.psu.edu/pearce04efficient.html
70 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
71 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
72 http://citeseer.ist.psu.edu/heintze01ultrafast.html
74 There are three types of constraint expressions, DEREF, ADDRESSOF, and
75 SCALAR. Each constraint expression consists of a constraint type,
76 a variable, and an offset.
78 SCALAR is a constraint expression type used to represent x, whether
79 it appears on the LHS or the RHS of a statement.
80 DEREF is a constraint expression type used to represent *x, whether
81 it appears on the LHS or the RHS of a statement.
82 ADDRESSOF is a constraint expression used to represent &x, whether
83 it appears on the LHS or the RHS of a statement.
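For instance, in a statement like x = y->f, where the field f is
assumed to sit at bit offset 32, the LHS is the SCALAR expression
{SCALAR, x, 0} and the RHS is the DEREF expression {DEREF, y, 32};
in x = &y the RHS is the ADDRESSOF expression {ADDRESSOF, y, 0}.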
85 Each pointer variable in the program is assigned an integer id, and
86 each field of a structure variable is assigned an integer id as well.
88 Structure variables are linked to their list of fields through a "next
89 field" in each variable that points to the next field in offset
91 Each variable for a structure field has
93 1. "size", that tells the size in bits of that field.
94 2. "fullsize, that tells the size in bits of the entire structure.
95 3. "offset", that tells the offset in bits from the beginning of the
96 structure to this field.
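Thus, for declarations along the lines of

struct f
{
  int a;
  int b;
} foo;
int *bar;

(assuming 32-bit ints and pointers) the variables created look like: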
108 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
109 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
110 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
113 In order to solve the system of set constraints, the following is
done (a small worked example appears below):

116 1. Each constraint variable x has a solution set associated with it, Sol(x).
119 2. Constraints are separated into direct, copy, and complex.
120 Direct constraints are ADDRESSOF constraints that require no extra
121 processing, such as P = &Q
122 Copy constraints are those of the form P = Q.
123 Complex constraints are all the constraints involving dereferences.
125 3. All direct constraints of the form P = &Q are processed, such
126 that Q is added to Sol(P)
128 4. All complex constraints for a given constraint variable are stored in a
129 linked list attached to that variable's node.
131 5. A directed graph is built out of the copy constraints. Each
132 constraint variable is a node in the graph, and an edge from
133 Q to P is added for each copy constraint of the form P = Q
135 6. The graph is then walked, and solution sets are
136 propagated along the copy edges, such that an edge from Q to P
137 causes Sol(P) <- Sol(P) union Sol(Q).
139 7. As we visit each node, all complex constraints associated with
140 that node are processed by adding appropriate copy edges to the graph, or the
141 appropriate variables to the solution set.
143 8. The process of walking the graph is iterated until no solution sets change.
146 Prior to walking the graph in steps 6 and 7, we perform static
147 cycle elimination on the constraint graph, as well
148 as off-line variable substitution.
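As a small worked example (purely illustrative), consider the fragment

  p = &a;
  q = p;
  *q = &b;
  r = *q;

Step 2 classifies p = &a as direct, q = p as copy, and the two
statements involving *q as complex.  Step 3 gives Sol(p) = {a}.  Step 5
adds a copy edge from p to q, and propagation in step 6 gives
Sol(q) = {a}.  Processing the complex constraints in step 7 then adds b
to Sol(a) because of *q = &b, and adds a copy edge from a to r because
of r = *q, so a later iteration (step 8) ends with Sol(r) = {b}.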
150 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
151 on and turned into anything), but isn't. You can just see what offset
152 inside the pointed-to struct it's going to access.
154 TODO: Constant bounded arrays can be handled as if they were structs of the
155 same number of elements.
157 TODO: Modeling heap and incoming pointers becomes much better if we
158 add fields to them as we discover them, which we could do.
160 TODO: We could handle unions, but to be honest, it's probably not
161 worth the pain or slowdown. */
163 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
164 htab_t heapvar_for_stmt;
165 static bool use_field_sensitive = true;
166 static int in_ipa_mode = 0;
167 static bitmap_obstack predbitmap_obstack;
168 static bitmap_obstack ptabitmap_obstack;
169 static bitmap_obstack iteration_obstack;
171 static unsigned int create_variable_info_for (tree, const char *);
172 static void build_constraint_graph (void);
174 DEF_VEC_P(constraint_t);
175 DEF_VEC_ALLOC_P(constraint_t,heap);
177 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
178 if (a) \
179 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
181 static struct constraint_stats
183 unsigned int total_vars;
184 unsigned int collapsed_vars;
185 unsigned int unified_vars_static;
186 unsigned int unified_vars_dynamic;
187 unsigned int iterations;
188 unsigned int num_edges;
193 /* ID of this variable */
196 /* Name of this variable */
199 /* Tree that this variable is associated with. */
202 /* Offset of this variable, in bits, from the base variable */
203 unsigned HOST_WIDE_INT offset;
205 /* Size of the variable, in bits. */
206 unsigned HOST_WIDE_INT size;
208 /* Full size of the base variable, in bits. */
209 unsigned HOST_WIDE_INT fullsize;
211 /* A link to the variable for the next field in this structure. */
212 struct variable_info *next;
214 /* Node in the graph that represents the constraints and points-to
215 solution for the variable. */
218 /* True if the address of this variable is taken. Needed for
219 variable substitution. */
220 unsigned int address_taken:1;
222 /* True if this variable is the target of a dereference. Needed for
223 variable substitution. */
224 unsigned int indirect_target:1;
226 /* True if this is a variable created by the constraint analysis, such as
227 heap variables and constraints we had to break up. */
228 unsigned int is_artificial_var:1;
230 /* True if this is a special variable whose solution set should not be changed. */
232 unsigned int is_special_var:1;
234 /* True for variables whose size is not known or variable. */
235 unsigned int is_unknown_size_var:1;
237 /* True for variables that have unions somewhere in them. */
238 unsigned int has_union:1;
240 /* True if this is a heap variable. */
241 unsigned int is_heap_var:1;
243 /* Points-to set for this variable. */
246 /* Variable ids represented by this node. */
249 /* Vector of complex constraints for this node. Complex
250 constraints are those involving dereferences. */
251 VEC(constraint_t,heap) *complex;
253 /* Variable id this was collapsed to due to type unsafety.
254 This should be unused completely after build_constraint_graph, or
255 something is broken. */
256 struct variable_info *collapsed_to;
258 typedef struct variable_info *varinfo_t;
260 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
262 /* Pool of variable info structures. */
263 static alloc_pool variable_info_pool;
265 DEF_VEC_P(varinfo_t);
267 DEF_VEC_ALLOC_P(varinfo_t, heap);
269 /* Table of variable info structures for constraint variables. Indexed directly
270 by variable info id. */
271 static VEC(varinfo_t,heap) *varmap;
273 /* Return the varmap element N */
275 static inline varinfo_t
276 get_varinfo (unsigned int n)
278 return VEC_index(varinfo_t, varmap, n);
281 /* Return the varmap element N, following the collapsed_to link. */
283 static inline varinfo_t
284 get_varinfo_fc (unsigned int n)
286 varinfo_t v = VEC_index(varinfo_t, varmap, n);
289 return v->collapsed_to;
293 /* Variable that represents the unknown pointer. */
294 static varinfo_t var_anything;
295 static tree anything_tree;
296 static unsigned int anything_id;
298 /* Variable that represents the NULL pointer. */
299 static varinfo_t var_nothing;
300 static tree nothing_tree;
301 static unsigned int nothing_id;
303 /* Variable that represents read only memory. */
304 static varinfo_t var_readonly;
305 static tree readonly_tree;
306 static unsigned int readonly_id;
308 /* Variable that represents integers. This is used for when people do things like &0->a.b. */
310 static varinfo_t var_integer;
311 static tree integer_tree;
312 static unsigned int integer_id;
315 /* Lookup a heap var for FROM, and return it if we find one. */
318 heapvar_lookup (tree from)
320 struct tree_map *h, in;
323 h = htab_find_with_hash (heapvar_for_stmt, &in, htab_hash_pointer (from));
329 /* Insert a mapping FROM->TO in the heap var for statement hashtable. */
333 heapvar_insert (tree from, tree to)
338 h = ggc_alloc (sizeof (struct tree_map));
339 h->hash = htab_hash_pointer (from);
342 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
343 *(struct tree_map **) loc = h;
346 /* Return a new variable info structure for a variable
347 named NAME, using constraint graph node NODE. */
350 new_var_info (tree t, unsigned int id, const char *name, unsigned int node)
352 varinfo_t ret = pool_alloc (variable_info_pool);
358 ret->address_taken = false;
359 ret->indirect_target = false;
360 ret->is_artificial_var = false;
361 ret->is_heap_var = false;
362 ret->is_special_var = false;
363 ret->is_unknown_size_var = false;
364 ret->has_union = false;
365 ret->solution = BITMAP_ALLOC (&ptabitmap_obstack);
366 ret->variables = BITMAP_ALLOC (&ptabitmap_obstack);
369 ret->collapsed_to = NULL;
373 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
375 /* An expression that appears in a constraint. */
377 struct constraint_expr
379 /* Constraint type. */
380 constraint_expr_type type;
382 /* Variable we are referring to in the constraint. */
385 /* Offset, in bits, of this constraint from the beginning of
386 variables it ends up referring to.
388 IOW, in a deref constraint, we would deref, get the result set,
389 then add OFFSET to each member. */
390 unsigned HOST_WIDE_INT offset;
393 typedef struct constraint_expr ce_s;
395 DEF_VEC_ALLOC_O(ce_s, heap);
396 static void get_constraint_for (tree, VEC(ce_s, heap) **);
397 static void do_deref (VEC (ce_s, heap) **);
399 /* Our set constraints are made up of two constraint expressions, one LHS and one RHS.
402 As described in the introduction, our set constraints each represent an
403 operation between set valued variables.
407 struct constraint_expr lhs;
408 struct constraint_expr rhs;
411 /* List of constraints that we use to build the constraint graph from. */
413 static VEC(constraint_t,heap) *constraints;
414 static alloc_pool constraint_pool;
416 /* An edge in the weighted constraint graph. The edges are weighted,
417 with a bit set in weights meaning there is an edge with that weight.
419 We don't keep the src in the edge, because we always know what it is. */
422 struct constraint_edge
428 typedef struct constraint_edge *constraint_edge_t;
429 static alloc_pool constraint_edge_pool;
431 /* Return a new constraint edge from SRC to DEST. */
433 static constraint_edge_t
434 new_constraint_edge (unsigned int dest)
436 constraint_edge_t ret = pool_alloc (constraint_edge_pool);
442 DEF_VEC_P(constraint_edge_t);
443 DEF_VEC_ALLOC_P(constraint_edge_t,heap);
446 /* The constraint graph is represented internally in two different
447 ways. The overwhelming majority of edges in the constraint graph
448 are zero weight edges, and thus, using a vector of constraint_edge_t
449 is a waste of time and memory, since they have no weights. We
450 simply use a bitmap to store the preds and succs for each node.
451 The weighted edges are stored as a set of adjacency vectors, one
452 per variable. succs[x] is the vector of successors for variable x,
453 and preds[x] is the vector of predecessors for variable x. IOW,
454 all edges are "forward" edges, which is not like our CFG. So
455 remember that preds[x]->src == x, and succs[x]->src == x. */
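/* For instance (a sketch of the bookkeeping described above): a zero
   weighted copy edge for p = q sets bit p in zero_weight_succs[q] and
   bit q in zero_weight_preds[p], while a copy with a field offset,
   say p = q + 32, is stored as a constraint_edge in both preds[p] and
   succs[q] whose shared weights bitmap contains the bit 32.  */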
457 struct constraint_graph
459 bitmap *zero_weight_succs;
460 bitmap *zero_weight_preds;
461 VEC(constraint_edge_t,heap) **succs;
462 VEC(constraint_edge_t,heap) **preds;
465 typedef struct constraint_graph *constraint_graph_t;
467 static constraint_graph_t graph;
469 /* Create a new constraint consisting of LHS and RHS expressions. */
472 new_constraint (const struct constraint_expr lhs,
473 const struct constraint_expr rhs)
475 constraint_t ret = pool_alloc (constraint_pool);
481 /* Print out constraint C to FILE. */
484 dump_constraint (FILE *file, constraint_t c)
486 if (c->lhs.type == ADDRESSOF)
488 else if (c->lhs.type == DEREF)
490 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
491 if (c->lhs.offset != 0)
492 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
493 fprintf (file, " = ");
494 if (c->rhs.type == ADDRESSOF)
496 else if (c->rhs.type == DEREF)
498 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
499 if (c->rhs.offset != 0)
500 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
501 fprintf (file, "\n");
504 /* Print out constraint C to stderr. */
507 debug_constraint (constraint_t c)
509 dump_constraint (stderr, c);
512 /* Print out all constraints to FILE */
515 dump_constraints (FILE *file)
519 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
520 dump_constraint (file, c);
523 /* Print out all constraints to stderr. */
526 debug_constraints (void)
528 dump_constraints (stderr);
533 The solver is a simple worklist solver that works on the following algorithm:
536 sbitmap changed_nodes = all ones;
537 changed_count = number of nodes;
538 For each node that was already collapsed:
    changed_count--;
541 while (changed_count > 0)
543 compute topological ordering for constraint graph
545 find and collapse cycles in the constraint graph (updating
546 changed if necessary)
548 for each node (n) in the graph in topological order:
551 Process each complex constraint associated with the node,
552 updating changed if necessary.
554 For each outgoing edge from n, propagate the solution from n to
555 the destination of the edge, updating changed as necessary.
559 /* Return true if two constraint expressions A and B are equal. */
562 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
564 return a.type == b.type && a.var == b.var && a.offset == b.offset;
567 /* Return true if constraint expression A is less than constraint expression
568 B. This is just arbitrary, but consistent, in order to give them an ordering.
572 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
574 if (a.type == b.type)
577 return a.offset < b.offset;
579 return a.var < b.var;
582 return a.type < b.type;
585 /* Return true if constraint A is less than constraint B. This is just
586 arbitrary, but consistent, in order to give them an ordering. */
589 constraint_less (const constraint_t a, const constraint_t b)
591 if (constraint_expr_less (a->lhs, b->lhs))
593 else if (constraint_expr_less (b->lhs, a->lhs))
596 return constraint_expr_less (a->rhs, b->rhs);
599 /* Return true if two constraints A and B are equal. */
602 constraint_equal (struct constraint a, struct constraint b)
604 return constraint_expr_equal (a.lhs, b.lhs)
605 && constraint_expr_equal (a.rhs, b.rhs);
609 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
612 constraint_vec_find (VEC(constraint_t,heap) *vec,
613 struct constraint lookfor)
621 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
622 if (place >= VEC_length (constraint_t, vec))
624 found = VEC_index (constraint_t, vec, place);
625 if (!constraint_equal (*found, lookfor))
630 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
633 constraint_set_union (VEC(constraint_t,heap) **to,
634 VEC(constraint_t,heap) **from)
639 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
641 if (constraint_vec_find (*to, *c) == NULL)
643 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
645 VEC_safe_insert (constraint_t, heap, *to, place, c);
650 /* Take a solution set SET, add OFFSET to each member of the set, and
651 overwrite SET with the result when done. */
654 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
656 bitmap result = BITMAP_ALLOC (&iteration_obstack);
660 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
662 /* If this is a properly sized variable, only add offset if it's
663 less than end. Otherwise, it is globbed to a single variable. */
666 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
668 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
669 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
672 bitmap_set_bit (result, v->id);
674 else if (get_varinfo (i)->is_artificial_var
675 || get_varinfo (i)->has_union
676 || get_varinfo (i)->is_unknown_size_var)
678 bitmap_set_bit (result, i);
682 bitmap_copy (set, result);
683 BITMAP_FREE (result);
686 /* Union solution sets TO and FROM, and add INC to each member of FROM in the process. */
690 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
693 return bitmap_ior_into (to, from);
699 tmp = BITMAP_ALLOC (&iteration_obstack);
700 bitmap_copy (tmp, from);
701 solution_set_add (tmp, inc);
702 res = bitmap_ior_into (to, tmp);
708 /* Insert constraint C into the list of complex constraints for VAR. */
711 insert_into_complex (unsigned int var, constraint_t c)
713 varinfo_t vi = get_varinfo (var);
714 unsigned int place = VEC_lower_bound (constraint_t, vi->complex, c,
716 VEC_safe_insert (constraint_t, heap, vi->complex, place, c);
720 /* Compare two constraint edges A and B, return true if they are equal. */
723 constraint_edge_equal (struct constraint_edge a, struct constraint_edge b)
725 return a.dest == b.dest;
728 /* Compare two constraint edges, return true if A is less than B */
731 constraint_edge_less (const constraint_edge_t a, const constraint_edge_t b)
733 if (a->dest < b->dest)
738 /* Find the constraint edge that matches LOOKFOR, in VEC.
739 Return the edge, if found, NULL otherwise. */
741 static constraint_edge_t
742 constraint_edge_vec_find (VEC(constraint_edge_t,heap) *vec,
743 struct constraint_edge lookfor)
746 constraint_edge_t edge = NULL;
748 place = VEC_lower_bound (constraint_edge_t, vec, &lookfor,
749 constraint_edge_less);
750 if (place >= VEC_length (constraint_edge_t, vec))
752 edge = VEC_index (constraint_edge_t, vec, place);
753 if (!constraint_edge_equal (*edge, lookfor))
758 /* Condense two variable nodes into a single variable node, by moving
759 all associated info from SRC to TO. */
762 condense_varmap_nodes (unsigned int to, unsigned int src)
764 varinfo_t tovi = get_varinfo (to);
765 varinfo_t srcvi = get_varinfo (src);
770 /* the src node, and all its variables, are now the to node. */
772 EXECUTE_IF_SET_IN_BITMAP (srcvi->variables, 0, i, bi)
773 get_varinfo (i)->node = to;
775 /* Merge the src node variables and the to node variables. */
776 bitmap_set_bit (tovi->variables, src);
777 bitmap_ior_into (tovi->variables, srcvi->variables);
778 bitmap_clear (srcvi->variables);
780 /* Move all complex constraints from src node into to node */
781 for (i = 0; VEC_iterate (constraint_t, srcvi->complex, i, c); i++)
783 /* In complex constraints for node src, we may have either
784 a = *src or *src = a. */
786 if (c->rhs.type == DEREF)
791 constraint_set_union (&tovi->complex, &srcvi->complex);
792 VEC_free (constraint_t, heap, srcvi->complex);
793 srcvi->complex = NULL;
796 /* Erase an edge from SRC to SRC from GRAPH. This routine only
797 handles self-edges (e.g. an edge from a to a). */
800 erase_graph_self_edge (constraint_graph_t graph, unsigned int src)
802 VEC(constraint_edge_t,heap) *predvec = graph->preds[src];
803 VEC(constraint_edge_t,heap) *succvec = graph->succs[src];
804 struct constraint_edge edge;
809 /* Remove from the successors. */
810 place = VEC_lower_bound (constraint_edge_t, succvec, &edge,
811 constraint_edge_less);
813 /* Make sure we found the edge. */
814 #ifdef ENABLE_CHECKING
816 constraint_edge_t tmp = VEC_index (constraint_edge_t, succvec, place);
817 gcc_assert (constraint_edge_equal (*tmp, edge));
820 VEC_ordered_remove (constraint_edge_t, succvec, place);
822 /* Remove from the predecessors. */
823 place = VEC_lower_bound (constraint_edge_t, predvec, &edge,
824 constraint_edge_less);
826 /* Make sure we found the edge. */
827 #ifdef ENABLE_CHECKING
829 constraint_edge_t tmp = VEC_index (constraint_edge_t, predvec, place);
830 gcc_assert (constraint_edge_equal (*tmp, edge));
833 VEC_ordered_remove (constraint_edge_t, predvec, place);
836 /* Remove edges involving NODE from GRAPH. */
839 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
841 VEC(constraint_edge_t,heap) *succvec = graph->succs[node];
842 VEC(constraint_edge_t,heap) *predvec = graph->preds[node];
845 constraint_edge_t c = NULL;
848 /* Walk the successors, erase the associated preds. */
850 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_succs[node], 0, j, bi)
852 bitmap_clear_bit (graph->zero_weight_preds[j], node);
854 for (i = 0; VEC_iterate (constraint_edge_t, succvec, i, c); i++)
858 struct constraint_edge lookfor;
859 constraint_edge_t result;
862 place = VEC_lower_bound (constraint_edge_t, graph->preds[c->dest],
863 &lookfor, constraint_edge_less);
864 result = VEC_ordered_remove (constraint_edge_t,
865 graph->preds[c->dest], place);
866 pool_free (constraint_edge_pool, result);
869 /* Walk the preds, erase the associated succs. */
871 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_preds[node], 0, j, bi)
873 bitmap_clear_bit (graph->zero_weight_succs[j], node);
875 for (i =0; VEC_iterate (constraint_edge_t, predvec, i, c); i++)
879 struct constraint_edge lookfor;
880 constraint_edge_t result;
883 place = VEC_lower_bound (constraint_edge_t, graph->succs[c->dest],
884 &lookfor, constraint_edge_less);
885 result = VEC_ordered_remove (constraint_edge_t,
886 graph->succs[c->dest], place);
887 pool_free (constraint_edge_pool, result);
891 if (graph->zero_weight_preds[node])
893 BITMAP_FREE (graph->zero_weight_preds[node]);
894 graph->zero_weight_preds[node] = NULL;
897 if (graph->zero_weight_succs[node])
899 BITMAP_FREE (graph->zero_weight_succs[node]);
900 graph->zero_weight_succs[node] = NULL;
903 VEC_free (constraint_edge_t, heap, graph->preds[node]);
904 VEC_free (constraint_edge_t, heap, graph->succs[node]);
905 graph->preds[node] = NULL;
906 graph->succs[node] = NULL;
909 static bool edge_added = false;
911 /* Add edge (src, dest) to the graph. */
914 add_graph_edge (constraint_graph_t graph, unsigned int src, unsigned int dest)
917 VEC(constraint_edge_t,heap) *vec;
918 struct constraint_edge newe;
921 vec = graph->preds[src];
922 place = VEC_lower_bound (constraint_edge_t, vec, &newe,
923 constraint_edge_less);
924 if (place == VEC_length (constraint_edge_t, vec)
925 || VEC_index (constraint_edge_t, vec, place)->dest != dest)
927 constraint_edge_t edge = new_constraint_edge (dest);
929 VEC_safe_insert (constraint_edge_t, heap, graph->preds[src],
931 edge = new_constraint_edge (src);
933 place = VEC_lower_bound (constraint_edge_t, graph->succs[dest],
934 edge, constraint_edge_less);
935 VEC_safe_insert (constraint_edge_t, heap, graph->succs[dest],
946 /* Return the bitmap representing the weights of edge (SRC, DEST). */
949 get_graph_weights (constraint_graph_t graph, unsigned int src,
952 constraint_edge_t edge;
953 VEC(constraint_edge_t,heap) *vec;
954 struct constraint_edge lookfor;
958 vec = graph->preds[src];
959 edge = constraint_edge_vec_find (vec, lookfor);
960 gcc_assert (edge != NULL);
961 return &edge->weights;
964 /* Allocate graph weight bitmap for the edges associated with SRC and
965 DEST in GRAPH. Both the pred and the succ edges share a single
966 bitmap, so we need to set both edges to that bitmap. */
969 allocate_graph_weights (constraint_graph_t graph, unsigned int src,
973 constraint_edge_t edge;
974 VEC(constraint_edge_t,heap) *vec;
975 struct constraint_edge lookfor;
977 result = BITMAP_ALLOC (&ptabitmap_obstack);
979 /* Set the pred weight. */
981 vec = graph->preds[src];
982 edge = constraint_edge_vec_find (vec, lookfor);
983 gcc_assert (edge != NULL);
984 edge->weights = result;
986 /* Set the succ weight. */
988 vec = graph->succs[dest];
989 edge = constraint_edge_vec_find (vec, lookfor);
990 gcc_assert (edge != NULL);
991 edge->weights = result;
997 /* Merge GRAPH nodes FROM and TO into node TO. */
1000 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1003 VEC(constraint_edge_t,heap) *succvec = graph->succs[from];
1004 VEC(constraint_edge_t,heap) *predvec = graph->preds[from];
1006 constraint_edge_t c;
1010 /* Merge all the zero weighted predecessor edges. */
1011 if (graph->zero_weight_preds[from])
1013 if (!graph->zero_weight_preds[to])
1014 graph->zero_weight_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1016 EXECUTE_IF_SET_IN_BITMAP (graph->zero_weight_preds[from], 0, j, bi)
1020 bitmap_clear_bit (graph->zero_weight_succs[j], from);
1021 bitmap_set_bit (graph->zero_weight_succs[j], to);
1024 bitmap_ior_into (graph->zero_weight_preds[to],
1025 graph->zero_weight_preds[from]);
1028 /* Merge all the zero weighted successor edges. */
1029 if (graph->zero_weight_succs[from])
1031 if (!graph->zero_weight_succs[to])
1032 graph->zero_weight_succs[to] = BITMAP_ALLOC (&ptabitmap_obstack);
1033 EXECUTE_IF_SET_IN_BITMAP (graph->zero_weight_succs[from], 0, j, bi)
1035 bitmap_clear_bit (graph->zero_weight_preds[j], from);
1036 bitmap_set_bit (graph->zero_weight_preds[j], to);
1038 bitmap_ior_into (graph->zero_weight_succs[to],
1039 graph->zero_weight_succs[from]);
1042 /* Merge all the non-zero weighted predecessor edges. */
1043 for (i = 0; VEC_iterate (constraint_edge_t, predvec, i, c); i++)
1045 unsigned int d = c->dest;
1049 if (c->dest == from)
1052 add_graph_edge (graph, to, d);
1054 temp = *(get_graph_weights (graph, from, c->dest));
1057 weights = get_graph_weights (graph, to, d);
1059 *weights = allocate_graph_weights (graph, to, d);
1061 bitmap_ior_into (*weights, temp);
1066 /* Merge all the non-zero weighted successor edges. */
1067 for (i = 0; VEC_iterate (constraint_edge_t, succvec, i, c); i++)
1069 unsigned int d = c->dest;
1073 if (c->dest == from)
1076 add_graph_edge (graph, d, to);
1078 temp = *(get_graph_weights (graph, c->dest, from));
1081 weights = get_graph_weights (graph, d, to);
1083 *weights = allocate_graph_weights (graph, d, to);
1084 bitmap_ior_into (*weights, temp);
1087 clear_edges_for_node (graph, from);
1090 /* Add a graph edge to GRAPH, going from TO to FROM, with WEIGHT, if
1091 it doesn't exist in the graph already.
1092 Return false if the edge already existed, true otherwise. */
1095 int_add_graph_edge (constraint_graph_t graph, unsigned int to,
1096 unsigned int from, unsigned HOST_WIDE_INT weight)
1098 if (to == from && weight == 0)
1108 if (!graph->zero_weight_preds[to])
1109 graph->zero_weight_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1110 if (!graph->zero_weight_succs[from])
1111 graph->zero_weight_succs[from] = BITMAP_ALLOC (&ptabitmap_obstack);
1112 if (!bitmap_bit_p (graph->zero_weight_succs[from], to))
1117 bitmap_set_bit (graph->zero_weight_preds[to], from);
1118 bitmap_set_bit (graph->zero_weight_succs[from], to);
1125 r = add_graph_edge (graph, to, from);
1126 weights = get_graph_weights (graph, to, from);
1131 *weights = allocate_graph_weights (graph, to, from);
1132 bitmap_set_bit (*weights, weight);
1136 r |= !bitmap_bit_p (*weights, weight);
1137 bitmap_set_bit (*weights, weight);
1146 /* Return true if {DEST, SRC} is an existing graph edge in GRAPH. */
1149 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1152 struct constraint_edge lookfor;
1155 return (graph->zero_weight_succs[dest]
1156 && bitmap_bit_p (graph->zero_weight_succs[dest], src))
1157 || constraint_edge_vec_find (graph->succs[dest], lookfor) != NULL;
1160 /* Return true if {DEST, SRC} is an existing weighted graph edge (IE has
1161 a weight other than 0) in GRAPH. */
1163 valid_weighted_graph_edge (constraint_graph_t graph, unsigned int src,
1166 struct constraint_edge lookfor;
1169 return graph->preds[src]
1170 && constraint_edge_vec_find (graph->succs[dest], lookfor) != NULL;
1174 /* Build the constraint graph. */
1177 build_constraint_graph (void)
1182 graph = xmalloc (sizeof (struct constraint_graph));
1183 graph->succs = xcalloc (VEC_length (varinfo_t, varmap) + 1,
1184 sizeof (*graph->succs));
1185 graph->preds = xcalloc (VEC_length (varinfo_t, varmap) + 1,
1186 sizeof (*graph->preds));
1187 graph->zero_weight_succs = xcalloc (VEC_length (varinfo_t, varmap) + 1,
1188 sizeof (*graph->zero_weight_succs));
1189 graph->zero_weight_preds = xcalloc (VEC_length (varinfo_t, varmap) + 1,
1190 sizeof (*graph->zero_weight_preds));
1192 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1194 struct constraint_expr lhs = c->lhs;
1195 struct constraint_expr rhs = c->rhs;
1196 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1197 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1199 if (lhs.type == DEREF)
1201 /* *x = y or *x = &y (complex) */
1202 if (rhs.type == ADDRESSOF || rhsvar > anything_id)
1203 insert_into_complex (lhsvar, c);
1205 else if (rhs.type == DEREF)
1207 /* x = *y (complex), where x is not a special var */
1208 if (!(get_varinfo (lhsvar)->is_special_var))
1209 insert_into_complex (rhsvar, c);
1211 else if (rhs.type == ADDRESSOF)
1214 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1216 else if (lhsvar > anything_id)
1218 /* Ignore 0 weighted self edges, as they can't possibly contribute anything. */
1220 if (lhsvar != rhsvar || rhs.offset != 0 || lhs.offset != 0)
1222 /* x = y (simple) */
1223 int_add_graph_edge (graph, lhs.var, rhs.var, rhs.offset);
1231 /* Changed variables on the last iteration. */
1232 static unsigned int changed_count;
1233 static sbitmap changed;
1235 DEF_VEC_I(unsigned);
1236 DEF_VEC_ALLOC_I(unsigned,heap);
1239 /* Strongly Connected Component visitation info. */
1244 sbitmap in_component;
1246 unsigned int *visited_index;
1247 VEC(unsigned,heap) *scc_stack;
1248 VEC(unsigned,heap) *unification_queue;
1252 /* Recursive routine to find strongly connected components in GRAPH.
1253 SI is the SCC info to store the information in, and N is the id of current
1254 graph node we are processing.
1256 This is Tarjan's strongly connected component finding algorithm, as
1257 modified by Nuutila to keep only non-root nodes on the stack.
1258 The algorithm can be found in "On finding the strongly connected
1259 components in a directed graph" by Esko Nuutila and Eljas
1260 Soisalon-Soininen, in Information Processing Letters volume 49,
1261 number 1, pages 9-14. */
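/* Informally: copy constraints such as a = b, b = c and c = a form a
   cycle in the constraint graph; the SCC walk detects it, and the
   unification queue later collapses a, b and c into one representative
   node, since their solution sets must be identical.  */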
1264 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1269 gcc_assert (get_varinfo (n)->node == n);
1270 SET_BIT (si->visited, n);
1271 RESET_BIT (si->in_component, n);
1272 si->visited_index[n] = si->current_index ++;
1274 /* Visit all the successors. */
1275 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_succs[n], 0, i, bi)
1278 if (!TEST_BIT (si->visited, w))
1279 scc_visit (graph, si, w);
1280 if (!TEST_BIT (si->in_component, w))
1282 unsigned int t = get_varinfo (w)->node;
1283 unsigned int nnode = get_varinfo (n)->node;
1284 if (si->visited_index[t] < si->visited_index[nnode])
1285 get_varinfo (n)->node = t;
1289 /* See if any components have been identified. */
1290 if (get_varinfo (n)->node == n)
1292 unsigned int t = si->visited_index[n];
1293 SET_BIT (si->in_component, n);
1294 while (VEC_length (unsigned, si->scc_stack) != 0
1295 && t < si->visited_index[VEC_last (unsigned, si->scc_stack)])
1297 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1298 get_varinfo (w)->node = n;
1299 SET_BIT (si->in_component, w);
1300 /* Mark this node for collapsing. */
1301 VEC_safe_push (unsigned, heap, si->unification_queue, w);
1305 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1309 /* Collapse two variables into one variable. */
1312 collapse_nodes (constraint_graph_t graph, unsigned int to, unsigned int from)
1314 bitmap tosol, fromsol;
1316 condense_varmap_nodes (to, from);
1317 tosol = get_varinfo (to)->solution;
1318 fromsol = get_varinfo (from)->solution;
1319 bitmap_ior_into (tosol, fromsol);
1320 merge_graph_nodes (graph, to, from);
1322 if (valid_graph_edge (graph, to, to))
1324 if (graph->zero_weight_preds[to])
1326 bitmap_clear_bit (graph->zero_weight_preds[to], to);
1327 bitmap_clear_bit (graph->zero_weight_succs[to], to);
1329 if (valid_weighted_graph_edge (graph, to, to))
1331 bitmap weights = *(get_graph_weights (graph, to, to));
1332 if (!weights || bitmap_empty_p (weights))
1333 erase_graph_self_edge (graph, to);
1336 BITMAP_FREE (fromsol);
1337 get_varinfo (to)->address_taken |= get_varinfo (from)->address_taken;
1338 get_varinfo (to)->indirect_target |= get_varinfo (from)->indirect_target;
1342 /* Unify nodes in GRAPH that we have found to be part of a cycle.
1343 SI is the Strongly Connected Components information structure that tells us
1344 what components to unify.
1345 UPDATE_CHANGED should be set to true if the changed sbitmap and changed
1346 count should be updated to reflect the unification. */
1349 process_unification_queue (constraint_graph_t graph, struct scc_info *si,
1350 bool update_changed)
1353 bitmap tmp = BITMAP_ALLOC (update_changed ? &iteration_obstack : NULL);
1356 /* We proceed as follows:
1358 For each component in the queue (components are delineated by
1359 when current_queue_element->node != next_queue_element->node):
1361 rep = representative node for component
1363 For each node (tounify) to be unified in the component,
1364 merge the solution for tounify into tmp bitmap
1366 clear solution for tounify
1368 merge edges from tounify into rep
1370 merge complex constraints from tounify into rep
1372 update changed count to note that tounify will never change
1375 Merge tmp into solution for rep, marking rep changed if this
1376 changed rep's solution.
1378 Delete any 0 weighted self-edges we now have for rep. */
1379 while (i != VEC_length (unsigned, si->unification_queue))
1381 unsigned int tounify = VEC_index (unsigned, si->unification_queue, i);
1382 unsigned int n = get_varinfo (tounify)->node;
1384 if (dump_file && (dump_flags & TDF_DETAILS))
1385 fprintf (dump_file, "Unifying %s to %s\n",
1386 get_varinfo (tounify)->name,
1387 get_varinfo (n)->name);
1389 stats.unified_vars_dynamic++;
1391 stats.unified_vars_static++;
1392 bitmap_ior_into (tmp, get_varinfo (tounify)->solution);
1393 merge_graph_nodes (graph, n, tounify);
1394 condense_varmap_nodes (n, tounify);
1396 if (update_changed && TEST_BIT (changed, tounify))
1398 RESET_BIT (changed, tounify);
1399 if (!TEST_BIT (changed, n))
1400 SET_BIT (changed, n);
1403 gcc_assert (changed_count > 0);
1408 bitmap_clear (get_varinfo (tounify)->solution);
1411 /* If we've either finished processing the entire queue, or
1412 finished processing all nodes for component n, update the solution for n. */
1414 if (i == VEC_length (unsigned, si->unification_queue)
1415 || get_varinfo (VEC_index (unsigned, si->unification_queue, i))->node != n)
1417 /* If the solution changes because of the merging, we need to mark
1418 the variable as changed. */
1419 if (bitmap_ior_into (get_varinfo (n)->solution, tmp))
1421 if (update_changed && !TEST_BIT (changed, n))
1423 SET_BIT (changed, n);
1429 if (valid_graph_edge (graph, n, n))
1431 if (graph->zero_weight_succs[n])
1433 if (graph->zero_weight_preds[n])
1434 bitmap_clear_bit (graph->zero_weight_preds[n], n);
1435 bitmap_clear_bit (graph->zero_weight_succs[n], n);
1437 if (valid_weighted_graph_edge (graph, n, n))
1439 bitmap weights = *(get_graph_weights (graph, n, n));
1440 if (!weights || bitmap_empty_p (weights))
1441 erase_graph_self_edge (graph, n);
1450 /* Information needed to compute the topological ordering of a graph. */
1454 /* sbitmap of visited nodes. */
1456 /* Array that stores the topological order of the graph, *in reverse*. */
1458 VEC(unsigned,heap) *topo_order;
1462 /* Initialize and return a topological info structure. */
1464 static struct topo_info *
1465 init_topo_info (void)
1467 size_t size = VEC_length (varinfo_t, varmap);
1468 struct topo_info *ti = xmalloc (sizeof (struct topo_info));
1469 ti->visited = sbitmap_alloc (size);
1470 sbitmap_zero (ti->visited);
1471 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1476 /* Free the topological sort info pointed to by TI. */
1479 free_topo_info (struct topo_info *ti)
1481 sbitmap_free (ti->visited);
1482 VEC_free (unsigned, heap, ti->topo_order);
1486 /* Visit the graph in topological order, and store the order in the
1487 topo_info structure. */
1490 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1493 VEC(constraint_edge_t,heap) *succs = graph->succs[n];
1496 constraint_edge_t c;
1500 SET_BIT (ti->visited, n);
1501 if (VEC_length (constraint_edge_t, succs) != 0)
1503 temp = BITMAP_ALLOC (&iteration_obstack);
1504 if (graph->zero_weight_succs[n])
1505 bitmap_ior_into (temp, graph->zero_weight_succs[n]);
1506 for (i = 0; VEC_iterate (constraint_edge_t, succs, i, c); i++)
1507 bitmap_set_bit (temp, c->dest);
1510 temp = graph->zero_weight_succs[n];
1513 EXECUTE_IF_SET_IN_BITMAP (temp, 0, j, bi)
1515 if (!TEST_BIT (ti->visited, j))
1516 topo_visit (graph, ti, j);
1518 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1521 /* Return true if variable N + OFFSET is a legal field of N. */
1524 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1526 varinfo_t ninfo = get_varinfo (n);
1528 /* For things we've globbed to single variables, any offset into the
1529 variable acts like the entire variable, so that it becomes offset 0. */
1531 if (ninfo->is_special_var
1532 || ninfo->is_artificial_var
1533 || ninfo->is_unknown_size_var)
1538 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1541 #define DONT_PROPAGATE_WITH_ANYTHING 0
1543 /* Process a constraint C that represents *x = &y. */
1546 do_da_constraint (constraint_graph_t graph ATTRIBUTE_UNUSED,
1547 constraint_t c, bitmap delta)
1549 unsigned int rhs = c->rhs.var;
1553 /* For each member j of Delta (Sol(x)), add y (the RHS of *x = &y) to Sol(j). */
1554 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1556 unsigned HOST_WIDE_INT offset = c->lhs.offset;
1557 if (type_safe (j, &offset) && !(get_varinfo (j)->is_special_var))
1559 /* *x != NULL && *x != ANYTHING*/
1563 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + offset;
1565 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1569 sol = get_varinfo (t)->solution;
1570 if (!bitmap_bit_p (sol, rhs))
1572 bitmap_set_bit (sol, rhs);
1573 if (!TEST_BIT (changed, t))
1575 SET_BIT (changed, t);
1580 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1581 fprintf (dump_file, "Untypesafe usage in do_da_constraint.\n");
1586 /* Process a constraint C that represents x = *y, using DELTA as the
1587 starting solution. */
1590 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1593 unsigned int lhs = get_varinfo (c->lhs.var)->node;
1595 bitmap sol = get_varinfo (lhs)->solution;
1599 #if DONT_PROPAGATE_WITH_ANYTHING
1600 if (bitmap_bit_p (delta, anything_id))
1602 flag = !bitmap_bit_p (sol, anything_id);
1604 bitmap_set_bit (sol, anything_id);
1608 /* For each variable j in delta (Sol(y)), add
1609 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1610 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1612 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1613 if (type_safe (j, &roffset))
1616 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1619 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1624 /* Adding edges from the special vars is pointless.
1625 They don't have sets that can change. */
1626 if (get_varinfo (t) ->is_special_var)
1627 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1628 else if (int_add_graph_edge (graph, lhs, t, 0))
1629 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1631 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1632 fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");
1635 #if DONT_PROPAGATE_WITH_ANYTHING
1638 /* If the LHS solution changed, mark the var as changed. */
1641 get_varinfo (lhs)->solution = sol;
1642 if (!TEST_BIT (changed, lhs))
1644 SET_BIT (changed, lhs);
1650 /* Process a constraint C that represents *x = y. */
1653 do_ds_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1655 unsigned int rhs = get_varinfo (c->rhs.var)->node;
1656 unsigned HOST_WIDE_INT roff = c->rhs.offset;
1657 bitmap sol = get_varinfo (rhs)->solution;
1661 #if DONT_PROPAGATE_WITH_ANYTHING
1662 if (bitmap_bit_p (sol, anything_id))
1664 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1666 varinfo_t jvi = get_varinfo (j);
1668 unsigned int loff = c->lhs.offset;
1669 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1672 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1677 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1679 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1680 if (!TEST_BIT (changed, t))
1682 SET_BIT (changed, t);
1691 /* For each member j of delta (Sol(x)), add an edge from y to j and
1692 union Sol(y) into Sol(j) */
1693 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1695 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1696 if (type_safe (j, &loff) && !(get_varinfo(j)->is_special_var))
1700 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1702 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1706 if (int_add_graph_edge (graph, t, rhs, roff))
1708 bitmap tmp = get_varinfo (t)->solution;
1709 if (set_union_with_increment (tmp, sol, roff))
1711 get_varinfo (t)->solution = tmp;
1713 sol = get_varinfo (rhs)->solution;
1714 if (!TEST_BIT (changed, t))
1716 SET_BIT (changed, t);
1722 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1723 fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
1727 /* Handle a non-simple (simple meaning requires no iteration), non-copy
1728 constraint (IE *x = &y, x = *y, and *x = y). */
1731 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1733 if (c->lhs.type == DEREF)
1735 if (c->rhs.type == ADDRESSOF)
1738 do_da_constraint (graph, c, delta);
1743 do_ds_constraint (graph, c, delta);
1749 if (!(get_varinfo (c->lhs.var)->is_special_var))
1750 do_sd_constraint (graph, c, delta);
1754 /* Initialize and return a new SCC info structure. */
1756 static struct scc_info *
1757 init_scc_info (void)
1759 struct scc_info *si = xmalloc (sizeof (struct scc_info));
1760 size_t size = VEC_length (varinfo_t, varmap);
1762 si->current_index = 0;
1763 si->visited = sbitmap_alloc (size);
1764 sbitmap_zero (si->visited);
1765 si->in_component = sbitmap_alloc (size);
1766 sbitmap_ones (si->in_component);
1767 si->visited_index = xcalloc (sizeof (unsigned int), size + 1);
1768 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1769 si->unification_queue = VEC_alloc (unsigned, heap, 1);
1773 /* Free an SCC info structure pointed to by SI */
1776 free_scc_info (struct scc_info *si)
1778 sbitmap_free (si->visited);
1779 sbitmap_free (si->in_component);
1780 free (si->visited_index);
1781 VEC_free (unsigned, heap, si->scc_stack);
1782 VEC_free (unsigned, heap, si->unification_queue);
1787 /* Find cycles in GRAPH, using strongly connected components, and
1788 collapse each cycle into a single representative node. If UPDATE_CHANGED
1789 is true, then update the changed sbitmap to note those nodes whose
1790 solutions have changed as a result of collapsing. */
1793 find_and_collapse_graph_cycles (constraint_graph_t graph, bool update_changed)
1796 unsigned int size = VEC_length (varinfo_t, varmap);
1797 struct scc_info *si = init_scc_info ();
1799 for (i = 0; i != size; ++i)
1800 if (!TEST_BIT (si->visited, i) && get_varinfo (i)->node == i)
1801 scc_visit (graph, si, i);
1803 process_unification_queue (graph, si, update_changed);
1807 /* Compute a topological ordering for GRAPH, and store the result in the
1808 topo_info structure TI. */
1811 compute_topo_order (constraint_graph_t graph,
1812 struct topo_info *ti)
1815 unsigned int size = VEC_length (varinfo_t, varmap);
1817 for (i = 0; i != size; ++i)
1818 if (!TEST_BIT (ti->visited, i) && get_varinfo (i)->node == i)
1819 topo_visit (graph, ti, i);
1822 /* Return true if a bit other than bit 0 is set in bitmap B. */
1825 bitmap_other_than_zero_bit_set (bitmap b)
1830 if (bitmap_empty_p (b))
1832 EXECUTE_IF_SET_IN_BITMAP (b, 1, i, bi)
1837 /* Perform offline variable substitution.
1839 This is a linear time way of identifying variables that must have
1840 equivalent points-to sets, including those caused by static cycles,
1841 and single entry subgraphs, in the constraint graph.
1843 The technique is described in "Off-line variable substitution for
1844 scaling points-to analysis" by Atanas Rountev and Satish Chandra,
1845 in "ACM SIGPLAN Notices" volume 35, number 5, pages 47-56. */
1848 perform_var_substitution (constraint_graph_t graph)
1850 struct topo_info *ti = init_topo_info ();
1852 bitmap_obstack_initialize (&iteration_obstack);
1853 /* Compute the topological ordering of the graph, then visit each
1854 node in topological order. */
1855 compute_topo_order (graph, ti);
1857 while (VEC_length (unsigned, ti->topo_order) != 0)
1859 unsigned int i = VEC_pop (unsigned, ti->topo_order);
1861 varinfo_t vi = get_varinfo (i);
1862 bool okay_to_elim = false;
1863 unsigned int root = VEC_length (varinfo_t, varmap);
1864 VEC(constraint_edge_t,heap) *predvec = graph->preds[i];
1865 constraint_edge_t ce = NULL;
1870 /* We can't eliminate things whose address is taken, or which are
1871 the target of a dereference. */
1872 if (vi->address_taken || vi->indirect_target)
1875 /* See if all predecessors of I are ripe for elimination */
1876 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_preds[i], 0, k, bi)
1879 w = get_varinfo (k)->node;
1881 /* We can't eliminate the node if one of the predecessors is
1882 part of a different strongly connected component. */
1886 okay_to_elim = true;
1890 okay_to_elim = false;
1894 /* Theorem 4 in Rountev and Chandra: If i is a direct node,
1895 then Solution(i) is a subset of Solution (w), where w is a
1896 predecessor in the graph.
1897 Corollary: If all predecessors of i have the same
1898 points-to set, then i has that same points-to set as
1899 those predecessors. */
1900 tmp = BITMAP_ALLOC (NULL);
1901 bitmap_and_compl (tmp, get_varinfo (i)->solution,
1902 get_varinfo (w)->solution);
1903 if (!bitmap_empty_p (tmp))
1905 okay_to_elim = false;
1914 VEC_iterate (constraint_edge_t, predvec, pred, ce);
1919 weight = *(get_graph_weights (graph, i, ce->dest));
1921 /* We can't eliminate variables that have nonzero weighted
1922 edges between them. */
1923 if (weight && bitmap_other_than_zero_bit_set (weight))
1925 okay_to_elim = false;
1928 w = get_varinfo (ce->dest)->node;
1930 /* We can't eliminate the node if one of the predecessors is
1931 part of a different strongly connected component. */
1935 okay_to_elim = true;
1939 okay_to_elim = false;
1943 /* Theorem 4 in Rountev and Chandra: If i is a direct node,
1944 then Solution(i) is a subset of Solution (w), where w is a
1945 predecessor in the graph.
1946 Corollary: If all predecessors of i have the same
1947 points-to set, then i has that same points-to set as
1948 those predecessors. */
1949 tmp = BITMAP_ALLOC (NULL);
1950 bitmap_and_compl (tmp, get_varinfo (i)->solution,
1951 get_varinfo (w)->solution);
1952 if (!bitmap_empty_p (tmp))
1954 okay_to_elim = false;
1961 /* See if the root is different from the original node.
1962 If so, we've found an equivalence. */
1963 if (root != get_varinfo (i)->node && okay_to_elim)
1965 /* Found an equivalence */
1966 get_varinfo (i)->node = root;
1967 collapse_nodes (graph, root, i);
1968 if (dump_file && (dump_flags & TDF_DETAILS))
1969 fprintf (dump_file, "Collapsing %s into %s\n",
1970 get_varinfo (i)->name,
1971 get_varinfo (root)->name);
1972 stats.collapsed_vars++;
1976 bitmap_obstack_release (&iteration_obstack);
1977 free_topo_info (ti);
1980 /* Solve the constraint graph GRAPH using our worklist solver.
1981 This is based on the PW* family of solvers from the "Efficient Field
1982 Sensitive Pointer Analysis for C" paper.
1983 It works by iterating over all the graph nodes, processing the complex
1984 constraints and propagating the copy constraints, until everything stops
1985 changing. This corresponds to steps 6-8 in the solving list given above. */
1988 solve_graph (constraint_graph_t graph)
1990 unsigned int size = VEC_length (varinfo_t, varmap);
1993 changed_count = size;
1994 changed = sbitmap_alloc (size);
1995 sbitmap_ones (changed);
1997 /* The already collapsed/unreachable nodes will never change, so we
1998 need to account for them in changed_count. */
1999 for (i = 0; i < size; i++)
2000 if (get_varinfo (i)->node != i)
2003 while (changed_count > 0)
2006 struct topo_info *ti = init_topo_info ();
2009 bitmap_obstack_initialize (&iteration_obstack);
2013 /* We already did cycle elimination once, when we did
2014 variable substitution, so we don't need it again for the first iteration. */
2016 if (stats.iterations > 1)
2017 find_and_collapse_graph_cycles (graph, true);
2022 compute_topo_order (graph, ti);
2024 while (VEC_length (unsigned, ti->topo_order) != 0)
2026 i = VEC_pop (unsigned, ti->topo_order);
2027 gcc_assert (get_varinfo (i)->node == i);
2029 /* If the node has changed, we need to process the
2030 complex constraints and outgoing edges again. */
2031 if (TEST_BIT (changed, i))
2035 constraint_edge_t e = NULL;
2038 VEC(constraint_t,heap) *complex = get_varinfo (i)->complex;
2039 VEC(constraint_edge_t,heap) *succs;
2041 RESET_BIT (changed, i);
2044 /* Process the complex constraints */
2045 solution = get_varinfo (i)->solution;
2046 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2047 do_complex_constraint (graph, c, solution);
2049 /* Propagate solution to all successors. */
2050 succs = graph->succs[i];
2052 EXECUTE_IF_IN_NONNULL_BITMAP (graph->zero_weight_succs[i], 0, j, bi)
2054 bitmap tmp = get_varinfo (j)->solution;
2057 flag = set_union_with_increment (tmp, solution, 0);
2061 get_varinfo (j)->solution = tmp;
2062 if (!TEST_BIT (changed, j))
2064 SET_BIT (changed, j);
2069 for (j = 0; VEC_iterate (constraint_edge_t, succs, j, e); j++)
2071 bitmap tmp = get_varinfo (e->dest)->solution;
2074 bitmap weights = e->weights;
2077 gcc_assert (weights && !bitmap_empty_p (weights));
2078 EXECUTE_IF_SET_IN_BITMAP (weights, 0, k, bi)
2079 flag |= set_union_with_increment (tmp, solution, k);
2083 get_varinfo (e->dest)->solution = tmp;
2084 if (!TEST_BIT (changed, e->dest))
2086 SET_BIT (changed, e->dest);
2093 free_topo_info (ti);
2094 bitmap_obstack_release (&iteration_obstack);
2097 sbitmap_free (changed);
2101 /* CONSTRAINT AND VARIABLE GENERATION FUNCTIONS */
2103 /* Map from trees to variable ids. */
2104 static htab_t id_for_tree;
2106 typedef struct tree_id
2112 /* Hash a tree id structure. */
2115 tree_id_hash (const void *p)
2117 const tree_id_t ta = (tree_id_t) p;
2118 return htab_hash_pointer (ta->t);
2121 /* Return true if the tree in P1 and the tree in P2 are the same. */
2124 tree_id_eq (const void *p1, const void *p2)
2126 const tree_id_t ta1 = (tree_id_t) p1;
2127 const tree_id_t ta2 = (tree_id_t) p2;
2128 return ta1->t == ta2->t;
2131 /* Insert ID as the variable id for tree T in the hashtable. */
2134 insert_id_for_tree (tree t, int id)
2137 struct tree_id finder;
2141 slot = htab_find_slot (id_for_tree, &finder, INSERT);
2142 gcc_assert (*slot == NULL);
2143 new_pair = xmalloc (sizeof (struct tree_id));
2146 *slot = (void *)new_pair;
2149 /* Find the variable id for tree T in ID_FOR_TREE. If T does not
2150 exist in the hash table, return false, otherwise, return true and
2151 set *ID to the id we found. */
2154 lookup_id_for_tree (tree t, unsigned int *id)
2157 struct tree_id finder;
2160 pair = htab_find (id_for_tree, &finder);
2167 /* Return a printable name for DECL */
2170 alias_get_name (tree decl)
2172 const char *res = get_name (decl);
2174 int num_printed = 0;
2180 if (TREE_CODE (decl) == SSA_NAME)
2182 num_printed = asprintf (&temp, "%s_%u",
2183 alias_get_name (SSA_NAME_VAR (decl)),
2184 SSA_NAME_VERSION (decl));
2186 else if (DECL_P (decl))
2188 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2190 if (num_printed > 0)
2192 res = ggc_strdup (temp);
2198 /* Find the variable id for tree T in the hashtable.
2199 If T doesn't exist in the hash table, create an entry for it. */
2202 get_id_for_tree (tree t)
2205 struct tree_id finder;
2208 pair = htab_find (id_for_tree, &finder);
2210 return create_variable_info_for (t, alias_get_name (t));
2215 /* Get a constraint expression from an SSA_VAR_P node. */
2217 static struct constraint_expr
2218 get_constraint_exp_from_ssa_var (tree t)
2220 struct constraint_expr cexpr;
2222 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2224 /* For parameters, get at the points-to set for the actual parm decl. */
2226 if (TREE_CODE (t) == SSA_NAME
2227 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2228 && default_def (SSA_NAME_VAR (t)) == t)
2229 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2231 cexpr.type = SCALAR;
2233 cexpr.var = get_id_for_tree (t);
2234 /* If we determine the result is "anything", and we know this is readonly,
2235 say it points to readonly memory instead. */
2236 if (cexpr.var == anything_id && TREE_READONLY (t))
2238 cexpr.type = ADDRESSOF;
2239 cexpr.var = readonly_id;
2246 /* Process a completed constraint T, and add it to the constraint list. */
2250 process_constraint (constraint_t t)
2252 struct constraint_expr rhs = t->rhs;
2253 struct constraint_expr lhs = t->lhs;
2255 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2256 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2258 /* ANYTHING == ANYTHING is pointless. */
2259 if (lhs.var == anything_id && rhs.var == anything_id)
2262 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING. */
2263 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2268 process_constraint (t);
2270 /* This can happen in our IR with things like n->a = *p */
2271 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2273 /* Split into tmp = *rhs, *lhs = tmp */
2274 tree rhsdecl = get_varinfo (rhs.var)->decl;
2275 tree pointertype = TREE_TYPE (rhsdecl);
2276 tree pointedtotype = TREE_TYPE (pointertype);
2277 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2278 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2280 /* If this is an aggregate of known size, we should have passed
2281 this off to do_structure_copy, and it should have broken it up. */
2283 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2284 || get_varinfo (rhs.var)->is_unknown_size_var);
2286 process_constraint (new_constraint (tmplhs, rhs));
2287 process_constraint (new_constraint (lhs, tmplhs));
2289 else if (rhs.type == ADDRESSOF)
2292 gcc_assert (rhs.offset == 0);
2294 for (vi = get_varinfo (rhs.var); vi != NULL; vi = vi->next)
2295 vi->address_taken = true;
2297 VEC_safe_push (constraint_t, heap, constraints, t);
2301 if (lhs.type != DEREF && rhs.type == DEREF)
2302 get_varinfo (lhs.var)->indirect_target = true;
2303 VEC_safe_push (constraint_t, heap, constraints, t);
2308 /* Return the position, in bits, of FIELD_DECL from the beginning of its structure. */
2311 static unsigned HOST_WIDE_INT
2312 bitpos_of_field (const tree fdecl)
2315 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2316 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2319 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2320 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2324 /* Return true if an access at offset ACCESSPOS with size ACCESSSIZE
2325 overlaps with a field at offset FIELDPOS with size FIELDSIZE. */
2328 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2329 const unsigned HOST_WIDE_INT fieldsize,
2330 const unsigned HOST_WIDE_INT accesspos,
2331 const unsigned HOST_WIDE_INT accesssize)
2333 if (fieldpos == accesspos && fieldsize == accesssize)
2335 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2337 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2343 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
2346 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2349 HOST_WIDE_INT bitsize = -1;
2350 HOST_WIDE_INT bitmaxsize = -1;
2351 HOST_WIDE_INT bitpos;
2353 struct constraint_expr *result;
2354 unsigned int beforelength = VEC_length (ce_s, *results);
2356 /* Some people like to do cute things like take the address of
2359 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2360 forzero = TREE_OPERAND (forzero, 0);
2362 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2364 struct constraint_expr temp;
2367 temp.var = integer_id;
2369 VEC_safe_push (ce_s, heap, *results, &temp);
2373 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2374 get_constraint_for (t, results);
2375 result = VEC_last (ce_s, *results);
2376 result->offset = bitpos;
2378 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2380 /* This can also happen due to weird offsetof type macros. */
2381 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2382 result->type = SCALAR;
2384 if (result->type == SCALAR)
2386 /* In languages like C, you can access one past the end of an
2387 array. You aren't allowed to dereference it, so we can
2388 ignore this constraint. When we handle pointer subtraction,
2389 we may have to do something cute here. */
2391 if (result->offset < get_varinfo (result->var)->fullsize)
2393 /* It's also not true that the constraint will actually start at the
2394 right offset; it may start in some padding. We only care about
2395 setting the constraint to the first actual field it touches, so
2398 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2400 if (offset_overlaps_with_access (curr->offset, curr->size,
2401 result->offset, bitmaxsize))
2403 result->var = curr->id;
2407 /* Assert that we found *some* field there. The user couldn't be
2408 accessing *only* padding. */
2409 /* Still, the user could access one past the end of an array
2410 embedded in a struct, resulting in accessing *only* padding. */
2411 gcc_assert (curr || ref_contains_array_ref (orig_t));
2414 if (dump_file && (dump_flags & TDF_DETAILS))
2415 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
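/* A rough illustration of the field walk above (layout and names are only
   examples): for

       struct { int a; int b; } x;
       ... = x.b;

   get_ref_base_and_extent reports a bit position of 32, the walk over x's
   field chain finds the subvariable created for x.b at that offset, and the
   resulting constraint expression refers to x.b rather than to x as a
   whole.  */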
2422 /* Dereference the constraint expression CONS, and return the result.
2423 DEREF (ADDRESSOF) = SCALAR
2424 DEREF (SCALAR) = DEREF
2425 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2426 This is needed so that we can handle dereferencing DEREF constraints. */
2429 do_deref (VEC (ce_s, heap) **constraints)
2431 struct constraint_expr *c;
2433 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2435 if (c->type == SCALAR)
2437 else if (c->type == ADDRESSOF)
2439 else if (c->type == DEREF)
2441 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2442 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2443 process_constraint (new_constraint (tmplhs, *c));
2444 c->var = tmplhs.var;
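/* Concretely (a sketch; "dereftmp" is the temporary created above):
   dereferencing the constraint for &x gives SCALAR x; dereferencing
   SCALAR x gives DEREF x; and dereferencing something that is already a
   DEREF first emits the constraint dereftmp = DEREF x and then returns
   DEREF dereftmp, exactly as described in the comment before this
   function.  */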
2452 /* Given a tree T, return the constraint expression for it. */
2455 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2457 struct constraint_expr temp;
2459 /* x = integer is all glommed to a single variable, which doesn't
2460 point to anything by itself. That is, of course, unless it is an
2461 integer constant being treated as a pointer, in which case, we
2462 will return that this is really the addressof anything. This
2463 happens below, since it will fall into the default case. The only
2464 case we know something about an integer treated like a pointer is
2465 when it is the NULL pointer, and then we just say it points to
2467 if (TREE_CODE (t) == INTEGER_CST
2468 && !POINTER_TYPE_P (TREE_TYPE (t)))
2470 temp.var = integer_id;
2473 VEC_safe_push (ce_s, heap, *results, &temp);
2476 else if (TREE_CODE (t) == INTEGER_CST
2477 && integer_zerop (t))
2479 temp.var = nothing_id;
2480 temp.type = ADDRESSOF;
2482 VEC_safe_push (ce_s, heap, *results, &temp);
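/* A concrete reading of the integer-constant handling above (a sketch, not
   solver output): for p = 0 with p a pointer, the RHS constraint is
   ADDRESSOF NOTHING, i.e. p points to nothing; for p = (int *) 0x1234 the
   nonzero constant falls through to the default handling below and the RHS
   constraint becomes ADDRESSOF ANYTHING.  */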
2486 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2488 case tcc_expression:
2490 switch (TREE_CODE (t))
2494 struct constraint_expr *c;
2497 get_constraint_for (TREE_OPERAND (t, 0), results);
2498 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2500 if (c->type == DEREF)
2503 c->type = ADDRESSOF;
2510 /* XXX: In interprocedural mode, if we didn't have the
2511 body, we would need to do *each pointer argument =
2513 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2516 tree heapvar = heapvar_lookup (t);
2518 if (heapvar == NULL)
2520 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2521 DECL_EXTERNAL (heapvar) = 1;
2522 add_referenced_tmp_var (heapvar);
2523 heapvar_insert (t, heapvar);
2526 temp.var = create_variable_info_for (heapvar,
2527 alias_get_name (heapvar));
2529 vi = get_varinfo (temp.var);
2530 vi->is_artificial_var = 1;
2531 vi->is_heap_var = 1;
2532 temp.type = ADDRESSOF;
2534 VEC_safe_push (ce_s, heap, *results, &temp);
2540 temp.type = ADDRESSOF;
2541 temp.var = anything_id;
2543 VEC_safe_push (ce_s, heap, *results, &temp);
2550 switch (TREE_CODE (t))
2554 get_constraint_for (TREE_OPERAND (t, 0), results);
2559 case ARRAY_RANGE_REF:
2561 get_constraint_for_component_ref (t, results);
2565 temp.type = ADDRESSOF;
2566 temp.var = anything_id;
2568 VEC_safe_push (ce_s, heap, *results, &temp);
2575 switch (TREE_CODE (t))
2579 case NON_LVALUE_EXPR:
2581 tree op = TREE_OPERAND (t, 0);
2583 /* Casts from non-pointers to pointers are bad news for us.
2584 Anything else, we see through */
2585 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2586 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2588 get_constraint_for (op, results);
2596 temp.type = ADDRESSOF;
2597 temp.var = anything_id;
2599 VEC_safe_push (ce_s, heap, *results, &temp);
2604 case tcc_exceptional:
2606 switch (TREE_CODE (t))
2610 get_constraint_for (PHI_RESULT (t), results);
2616 struct constraint_expr temp;
2617 temp = get_constraint_exp_from_ssa_var (t);
2618 VEC_safe_push (ce_s, heap, *results, &temp);
2624 temp.type = ADDRESSOF;
2625 temp.var = anything_id;
2627 VEC_safe_push (ce_s, heap, *results, &temp);
2632 case tcc_declaration:
2634 struct constraint_expr temp;
2635 temp = get_constraint_exp_from_ssa_var (t);
2636 VEC_safe_push (ce_s, heap, *results, &temp);
2641 temp.type = ADDRESSOF;
2642 temp.var = anything_id;
2644 VEC_safe_push (ce_s, heap, *results, &temp);
2651 /* Handle the structure copy case where we have a simple structure copy
2652 between LHS and RHS that is of SIZE (in bits)
2654 For each field of the lhs variable (lhsfield)
2655 For each field of the rhs variable at lhsfield.offset (rhsfield)
2656 add the constraint lhsfield = rhsfield
2658 If we fail due to some kind of type unsafety or other thing we
2659 can't handle, return false. We expect the caller to collapse the
2660 variable in that case. */
2663 do_simple_structure_copy (const struct constraint_expr lhs,
2664 const struct constraint_expr rhs,
2665 const unsigned HOST_WIDE_INT size)
2667 varinfo_t p = get_varinfo (lhs.var);
2668 unsigned HOST_WIDE_INT pstart, last;
2670 last = p->offset + size;
2671 for (; p && p->offset < last; p = p->next)
2674 struct constraint_expr templhs = lhs;
2675 struct constraint_expr temprhs = rhs;
2676 unsigned HOST_WIDE_INT fieldoffset;
2678 templhs.var = p->id;
2679 q = get_varinfo (temprhs.var);
2680 fieldoffset = p->offset - pstart;
2681 q = first_vi_for_offset (q, q->offset + fieldoffset);
2684 temprhs.var = q->id;
2685 process_constraint (new_constraint (templhs, temprhs));
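/* For illustration (field names, types and offsets are only examples):
   for

       struct { int *a; int *b; } x, y;
       x = y;

   the loop above expands the copy field by field into the simple
   constraints x.a = y.a and x.b = y.b, where each rhs field is the one
   found at the same offset as the current lhs field.  */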
2691 /* Handle the structure copy case where we have a structure copy between an
2692 aggregate on the LHS and a dereference of a pointer on the RHS
2693 that is of SIZE (in bits)
2695 For each field of the lhs variable (lhsfield)
2696 rhs.offset = lhsfield->offset
2697 add the constraint lhsfield = rhs
2701 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
2702 const struct constraint_expr rhs,
2703 const unsigned HOST_WIDE_INT size)
2705 varinfo_t p = get_varinfo (lhs.var);
2706 unsigned HOST_WIDE_INT pstart,last;
2708 last = p->offset + size;
2710 for (; p && p->offset < last; p = p->next)
2713 struct constraint_expr templhs = lhs;
2714 struct constraint_expr temprhs = rhs;
2715 unsigned HOST_WIDE_INT fieldoffset;
2718 if (templhs.type == SCALAR)
2719 templhs.var = p->id;
2721 templhs.offset = p->offset;
2723 q = get_varinfo (temprhs.var);
2724 fieldoffset = p->offset - pstart;
2725 temprhs.offset += fieldoffset;
2726 process_constraint (new_constraint (templhs, temprhs));
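/* For illustration (field names and offsets are only examples): for

       struct { int *a; int *b; } x, *p;
       x = *p;

   each lhs field is assigned from the dereference of p at the matching
   field offset, i.e. x.a = *p at offset 0 and x.b = *p at offset 32.
   The mirror-image case, *p = x, is handled by the next function.  */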
2730 /* Handle the structure copy case where we have a structure copy
2731 between an aggregate on the RHS and a dereference of a pointer on
2732 the LHS that is of SIZE (in bits)
2734 For each field of the rhs variable (rhsfield)
2735 lhs.offset = rhsfield->offset
2736 add the constraint lhs = rhsfield
2740 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
2741 const struct constraint_expr rhs,
2742 const unsigned HOST_WIDE_INT size)
2744 varinfo_t p = get_varinfo (rhs.var);
2745 unsigned HOST_WIDE_INT pstart,last;
2747 last = p->offset + size;
2749 for (; p && p->offset < last; p = p->next)
2752 struct constraint_expr templhs = lhs;
2753 struct constraint_expr temprhs = rhs;
2754 unsigned HOST_WIDE_INT fieldoffset;
2757 if (temprhs.type == SCALAR)
2758 temprhs.var = p->id;
2760 temprhs.offset = p->offset;
2762 q = get_varinfo (templhs.var);
2763 fieldoffset = p->offset - pstart;
2764 templhs.offset += fieldoffset;
2765 process_constraint (new_constraint (templhs, temprhs));
2769 /* Sometimes, frontends like to give us bad type information. This
2770 function will collapse all the fields from VAR to the end of VAR,
2771 into VAR, so that we treat those fields as a single variable.
2772 We return the variable they were collapsed into. */
2775 collapse_rest_of_var (unsigned int var)
2777 varinfo_t currvar = get_varinfo (var);
2780 for (field = currvar->next; field; field = field->next)
2783 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
2784 field->name, currvar->name);
2786 gcc_assert (!field->collapsed_to);
2787 field->collapsed_to = currvar;
2790 currvar->next = NULL;
2791 currvar->size = currvar->fullsize - currvar->offset;
2796 /* Handle aggregate copies by expanding into copies of the respective
2797 fields of the structures. */
2800 do_structure_copy (tree lhsop, tree rhsop)
2802 struct constraint_expr lhs, rhs, tmp;
2803 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
2805 unsigned HOST_WIDE_INT lhssize;
2806 unsigned HOST_WIDE_INT rhssize;
2808 get_constraint_for (lhsop, &lhsc);
2809 get_constraint_for (rhsop, &rhsc);
2810 gcc_assert (VEC_length (ce_s, lhsc) == 1);
2811 gcc_assert (VEC_length (ce_s, rhsc) == 1);
2812 lhs = *(VEC_last (ce_s, lhsc));
2813 rhs = *(VEC_last (ce_s, rhsc));
2815 VEC_free (ce_s, heap, lhsc);
2816 VEC_free (ce_s, heap, rhsc);
2818 /* If we have special var = x, swap it around. */
2819 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
2826 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
2827 possible it's something we could handle. However, most cases falling
2828 into this are dealing with transparent unions, which are slightly
2830 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
2832 rhs.type = ADDRESSOF;
2833 rhs.var = anything_id;
2836 /* If the RHS is a special var, or an addressof, set all the LHS fields to
2837 that special var. */
2838 if (rhs.var <= integer_id)
2840 for (p = get_varinfo (lhs.var); p; p = p->next)
2842 struct constraint_expr templhs = lhs;
2843 struct constraint_expr temprhs = rhs;
2845 if (templhs.type == SCALAR )
2846 templhs.var = p->id;
2848 templhs.offset += p->offset;
2849 process_constraint (new_constraint (templhs, temprhs));
2854 tree rhstype = TREE_TYPE (rhsop);
2855 tree lhstype = TREE_TYPE (lhsop);
2859 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
2860 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
2862 /* If we have variably sized types on the rhs or lhs, and a deref
2863 constraint, add the constraint, lhsconstraint = &ANYTHING.
2864 This is conservatively correct because either the lhs is an unknown
2865 sized var (if the constraint is SCALAR), or the lhs is a DEREF
2866 constraint, and every variable it can point to must be unknown sized
2867 anyway, so we don't need to worry about fields at all. */
2868 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
2869 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
2871 rhs.var = anything_id;
2872 rhs.type = ADDRESSOF;
2874 process_constraint (new_constraint (lhs, rhs));
2878 /* The size only really matters insofar as we don't set more or less of
2879 the variable. If we hit an unknown size var, the size should be the
2880 whole darn thing. */
2881 if (get_varinfo (rhs.var)->is_unknown_size_var)
2884 rhssize = TREE_INT_CST_LOW (rhstypesize);
2886 if (get_varinfo (lhs.var)->is_unknown_size_var)
2889 lhssize = TREE_INT_CST_LOW (lhstypesize);
2892 if (rhs.type == SCALAR && lhs.type == SCALAR)
2894 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
2896 lhs.var = collapse_rest_of_var (lhs.var);
2897 rhs.var = collapse_rest_of_var (rhs.var);
2902 process_constraint (new_constraint (lhs, rhs));
2905 else if (lhs.type != DEREF && rhs.type == DEREF)
2906 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2907 else if (lhs.type == DEREF && rhs.type != DEREF)
2908 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
2911 tree pointedtotype = lhstype;
2914 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
2915 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
2916 do_structure_copy (tmpvar, rhsop);
2917 do_structure_copy (lhsop, tmpvar);
2922 /* Update related alias information kept in AI. This is used when
2923 building name tags, alias sets and deciding grouping heuristics.
2924 STMT is the statement to process. This function also updates
2925 ADDRESSABLE_VARS. */
2928 update_alias_info (tree stmt, struct alias_info *ai)
2931 use_operand_p use_p;
2933 bool stmt_escapes_p = is_escape_site (stmt, ai);
2936 /* Mark all the variables whose address are taken by the statement. */
2937 addr_taken = addresses_taken (stmt);
2940 bitmap_ior_into (addressable_vars, addr_taken);
2942 /* If STMT is an escape point, all the addresses taken by it are
2949 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
2950 mark_call_clobbered (referenced_var (i));
2954 /* Process each operand use. If an operand may be aliased, keep
2955 track of how many times it's being used. For pointers, determine
2956 whether they are dereferenced by the statement, or whether their
2957 value escapes, etc. */
2958 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
2962 struct ptr_info_def *pi;
2963 bool is_store, is_potential_deref;
2964 unsigned num_uses, num_derefs;
2966 op = USE_FROM_PTR (use_p);
2968 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
2969 to the set of addressable variables. */
2970 if (TREE_CODE (op) == ADDR_EXPR)
2972 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
2974 /* PHI nodes don't have annotations for pinning the set
2975 of addresses taken, so we collect them here.
2977 FIXME, should we allow PHI nodes to have annotations
2978 so that they can be treated like regular statements?
2979 Currently, they are treated as second-class statements. */
2981 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
2985 /* Ignore constants. */
2986 if (TREE_CODE (op) != SSA_NAME)
2989 var = SSA_NAME_VAR (op);
2990 v_ann = var_ann (var);
2992 /* The base variable of an ssa name must be a GIMPLE register, and thus
2993 it cannot be aliased. */
2994 gcc_assert (!may_be_aliased (var));
2996 /* We are only interested in pointers. */
2997 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3000 pi = get_ptr_info (op);
3002 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3003 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3005 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3006 VARRAY_PUSH_TREE (ai->processed_ptrs, op);
3009 /* If STMT is a PHI node, then it will not have pointer
3010 dereferences and it will not be an escape point. */
3011 if (TREE_CODE (stmt) == PHI_NODE)
3014 /* Determine whether OP is a dereferenced pointer, and if STMT
3015 is an escape point, whether OP escapes. */
3016 count_uses_and_derefs (op, stmt, &num_uses, &num_derefs, &is_store);
3018 /* Handle a corner case involving address expressions of the
3019 form '&PTR->FLD'. The problem with these expressions is that
3020 they do not represent a dereference of PTR. However, if some
3021 other transformation propagates them into an INDIRECT_REF
3022 expression, we end up with '*(&PTR->FLD)' which is folded
3025 So, if the original code had no other dereferences of PTR,
3026 the aliaser will not create memory tags for it, and when
3027 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3028 memory operations will receive no V_MAY_DEF/VUSE operands.
3030 One solution would be to have count_uses_and_derefs consider
3031 &PTR->FLD a dereference of PTR. But that is wrong, since it
3032 is not really a dereference but an offset calculation.
3034 What we do here is to recognize these special ADDR_EXPR
3035 nodes. Since these expressions are never GIMPLE values (they
3036 are not GIMPLE invariants), they can only appear on the RHS
3037 of an assignment and their base address is always an
3038 INDIRECT_REF expression. */
3039 is_potential_deref = false;
3040 if (TREE_CODE (stmt) == MODIFY_EXPR
3041 && TREE_CODE (TREE_OPERAND (stmt, 1)) == ADDR_EXPR
3042 && !is_gimple_val (TREE_OPERAND (stmt, 1)))
3044 /* If the RHS is of the form &PTR->FLD and PTR == OP, then
3045 this represents a potential dereference of PTR. */
3046 tree rhs = TREE_OPERAND (stmt, 1);
3047 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3048 if (TREE_CODE (base) == INDIRECT_REF
3049 && TREE_OPERAND (base, 0) == op)
3050 is_potential_deref = true;
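/* For instance (a sketch; SSA names are only examples): for the statement
   q_1 = &p_2->fld the RHS is an ADDR_EXPR whose base address is the
   INDIRECT_REF *p_2, so p_2 is recorded as potentially dereferenced even
   though the statement itself never loads from it.  */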
3053 if (num_derefs > 0 || is_potential_deref)
3055 /* Mark OP as dereferenced. In a subsequent pass,
3056 dereferenced pointers that point to a set of
3057 variables will be assigned a name tag to alias
3058 all the variables OP points to. */
3059 pi->is_dereferenced = 1;
3061 /* Keep track of how many times we've dereferenced each
3063 NUM_REFERENCES_INC (v_ann);
3065 /* If this is a store operation, mark OP as being
3066 dereferenced to store, otherwise mark it as being
3067 dereferenced to load. */
3069 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
3071 bitmap_set_bit (ai->dereferenced_ptrs_load, DECL_UID (var));
3074 if (stmt_escapes_p && num_derefs < num_uses)
3076 /* If STMT is an escape point and STMT contains at
3077 least one direct use of OP, then the value of OP
3078 escapes and so the pointed-to variables need to
3079 be marked call-clobbered. */
3080 pi->value_escapes_p = 1;
3082 /* If the statement makes a function call, assume
3083 that pointer OP will be dereferenced in a store
3084 operation inside the called function. */
3085 if (get_call_expr_in (stmt))
3087 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
3088 pi->is_dereferenced = 1;
3093 if (TREE_CODE (stmt) == PHI_NODE)
3096 /* Update reference counter for definitions to any
3097 potentially aliased variable. This is used in the alias
3098 grouping heuristics. */
3099 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3101 tree var = SSA_NAME_VAR (op);
3102 var_ann_t ann = var_ann (var);
3103 bitmap_set_bit (ai->written_vars, DECL_UID (var));
3104 if (may_be_aliased (var))
3105 NUM_REFERENCES_INC (ann);
3109 /* Mark variables in V_MAY_DEF operands as being written to. */
3110 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
3112 tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
3113 bitmap_set_bit (ai->written_vars, DECL_UID (var));
3118 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3119 Expressions of the type PTR + CST can be handled in two ways:
3121 1- If the constraint for PTR is ADDRESSOF for a non-structure
3122 variable, then we can use it directly because adding or
3123 subtracting a constant may not alter the original ADDRESSOF
3124 constraint (i.e., pointer arithmetic may not legally go outside
3125 an object's boundaries).
3127 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3128 then if CST is a compile-time constant that can be used as an
3129 offset, we can determine which sub-variable will be pointed-to
3132 Return true if the expression is handled. For any other kind of
3133 expression, return false so that each operand can be added as a
3134 separate constraint by the caller. */
3137 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3140 struct constraint_expr *c, *c2;
3143 VEC (ce_s, heap) *temp = NULL;
3144 unsigned int rhsoffset = 0;
3146 if (TREE_CODE (expr) != PLUS_EXPR)
3149 op0 = TREE_OPERAND (expr, 0);
3150 op1 = TREE_OPERAND (expr, 1);
3152 get_constraint_for (op0, &temp);
3153 if (POINTER_TYPE_P (TREE_TYPE (op0))
3154 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) == RECORD_TYPE
3155 && TREE_CODE (op1) == INTEGER_CST)
3157 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3161 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3162 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3164 if (c2->type == ADDRESSOF && rhsoffset != 0)
3166 varinfo_t temp = get_varinfo (c2->var);
3168 /* An access one after the end of an array is valid,
3169 so simply punt on accesses we cannot resolve. */
3170 temp = first_vi_for_offset (temp, rhsoffset);
3177 c2->offset = rhsoffset;
3178 process_constraint (new_constraint (*c, *c2));
3181 VEC_free (ce_s, heap, temp);
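/* A concrete reading of case 2 above (a sketch; type, names and offsets
   are only examples): for a GIMPLE statement q_1 = &s + 4, where s has
   type struct S { int a; int b; } and the constant is already a byte
   offset, the constraint for &s is ADDRESSOF s, rhsoffset becomes
   4 * BITS_PER_UNIT = 32 bits, and first_vi_for_offset resolves that to
   the subvariable s.b, so the constraint processed is effectively
   q_1 = &s.b.  */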
3187 /* Walk statement T setting up aliasing constraints according to the
3188 references found in T. This function is the main part of the
3189 constraint builder. */
3193 find_func_aliases (tree origt)
3196 VEC(ce_s, heap) *lhsc = NULL;
3197 VEC(ce_s, heap) *rhsc = NULL;
3198 struct constraint_expr *c;
3200 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3201 t = TREE_OPERAND (t, 0);
3203 /* Now build constraint expressions. */
3204 if (TREE_CODE (t) == PHI_NODE)
3206 /* Only care about pointers and structures containing pointers. */
3208 if (POINTER_TYPE_P (TREE_TYPE (PHI_RESULT (t)))
3209 || AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))))
3214 /* For a phi node, assign all the arguments to the result. */
3216 get_constraint_for (PHI_RESULT (t), &lhsc);
3217 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3219 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3220 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3222 struct constraint_expr *c2;
3223 while (VEC_length (ce_s, rhsc) > 0)
3225 c2 = VEC_last (ce_s, rhsc);
3226 process_constraint (new_constraint (*c, *c2));
3227 VEC_pop (ce_s, rhsc);
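/* For illustration (SSA names are only examples): for the PHI node
   p_3 = PHI <p_1, p_2> this loop generates one copy constraint per
   incoming argument, namely p_3 = p_1 and p_3 = p_2.  */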
3233 /* In IPA mode, we need to generate constraints to pass call
3234 arguments through their calls. There are two cases: either a
3235 modify_expr when we are returning a value, or just a plain
3236 call_expr when we are not. */
3237 else if (in_ipa_mode
3238 && ((TREE_CODE (t) == MODIFY_EXPR
3239 && TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR
3240 && !(call_expr_flags (TREE_OPERAND (t, 1))
3241 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3242 || (TREE_CODE (t) == CALL_EXPR
3243 && !(call_expr_flags (t)
3244 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
3254 if (TREE_CODE (t) == MODIFY_EXPR)
3256 lhsop = TREE_OPERAND (t, 0);
3257 rhsop = TREE_OPERAND (t, 1);
3264 decl = get_callee_fndecl (rhsop);
3266 /* If we can directly resolve the function being called, do so.
3267 Otherwise, it must be some sort of indirect expression that
3268 we should still be able to handle. */
3271 found = lookup_id_for_tree (decl, &varid);
3276 decl = TREE_OPERAND (rhsop, 0);
3277 found = lookup_id_for_tree (decl, &varid);
3281 /* Assign all the passed arguments to the appropriate incoming
3282 parameters of the function. */
3283 fi = get_varinfo (varid);
3284 arglist = TREE_OPERAND (rhsop, 1);
3286 for (;arglist; arglist = TREE_CHAIN (arglist))
3288 tree arg = TREE_VALUE (arglist);
3289 struct constraint_expr lhs ;
3290 struct constraint_expr *rhsp;
3292 get_constraint_for (arg, &rhsc);
3293 if (TREE_CODE (decl) != FUNCTION_DECL)
3302 lhs.var = first_vi_for_offset (fi, i)->id;
3305 while (VEC_length (ce_s, rhsc) != 0)
3307 rhsp = VEC_last (ce_s, rhsc);
3308 process_constraint (new_constraint (lhs, *rhsp));
3309 VEC_pop (ce_s, rhsc);
3313 /* If we are returning a value, assign it to the result. */
3316 struct constraint_expr rhs;
3317 struct constraint_expr *lhsp;
3320 get_constraint_for (lhsop, &lhsc);
3321 if (TREE_CODE (decl) != FUNCTION_DECL)
3330 rhs.var = first_vi_for_offset (fi, i)->id;
3333 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3334 process_constraint (new_constraint (*lhsp, rhs));
3337 /* Otherwise, just a regular assignment statement. */
3338 else if (TREE_CODE (t) == MODIFY_EXPR)
3340 tree lhsop = TREE_OPERAND (t, 0);
3341 tree rhsop = TREE_OPERAND (t, 1);
3344 if (AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3345 && AGGREGATE_TYPE_P (TREE_TYPE (rhsop)))
3347 do_structure_copy (lhsop, rhsop);
3351 /* Only care about operations with pointers, structures
3352 containing pointers, dereferences, and call expressions. */
3353 if (POINTER_TYPE_P (TREE_TYPE (lhsop))
3354 || AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3355 || TREE_CODE (rhsop) == CALL_EXPR)
3357 get_constraint_for (lhsop, &lhsc);
3358 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3360 /* An RHS that consists of unary operations,
3361 exceptional types, or bare decls/constants gets
3362 handled directly by get_constraint_for.
3364 case tcc_declaration:
3366 case tcc_exceptional:
3367 case tcc_expression:
3371 tree strippedrhs = rhsop;
3374 /* XXX: Push this back into the ADDR_EXPR
3375 case, and remove anyoffset handling. */
3376 STRIP_NOPS (strippedrhs);
3377 rhstype = TREE_TYPE (strippedrhs);
3379 get_constraint_for (rhsop, &rhsc);
3380 if (TREE_CODE (strippedrhs) == ADDR_EXPR
3381 && AGGREGATE_TYPE_P (TREE_TYPE (rhstype)))
3383 struct constraint_expr *origrhs;
3385 struct constraint_expr tmp;
3387 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3388 origrhs = VEC_last (ce_s, rhsc);
3390 VEC_pop (ce_s, rhsc);
3391 origvar = get_varinfo (origrhs->var);
3392 for (; origvar; origvar = origvar->next)
3394 tmp.var = origvar->id;
3395 VEC_safe_push (ce_s, heap, rhsc, &tmp);
3399 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3401 struct constraint_expr *c2;
3404 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3405 process_constraint (new_constraint (*c, *c2));
3413 /* For pointer arithmetic of the form
3414 PTR + CST, we can simply use PTR's
3415 constraint because pointer arithmetic is
3416 not allowed to go out of bounds. */
3417 if (handle_ptr_arith (lhsc, rhsop))
3422 /* Otherwise, walk each operand. Notice that we
3423 can't use the operand interface because we need
3424 to process expressions other than simple operands
3425 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3427 for (i = 0; i < TREE_CODE_LENGTH (TREE_CODE (rhsop)); i++)
3429 tree op = TREE_OPERAND (rhsop, i);
3432 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3433 get_constraint_for (op, &rhsc);
3434 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3436 struct constraint_expr *c2;
3437 while (VEC_length (ce_s, rhsc) > 0)
3439 c2 = VEC_last (ce_s, rhsc);
3440 process_constraint (new_constraint (*c, *c2));
3441 VEC_pop (ce_s, rhsc);
3450 /* After promoting variables and computing aliasing we will
3451 need to re-scan most statements. FIXME: Try to minimize the
3452 number of statements re-scanned. It's not really necessary to
3453 re-scan *all* statements. */
3454 mark_stmt_modified (origt);
3455 VEC_free (ce_s, heap, rhsc);
3456 VEC_free (ce_s, heap, lhsc);
3460 /* Find the first varinfo in the same variable as START that overlaps with OFFSET.
3462 Effectively, walk the chain of fields for the variable START to find the
3463 first field that overlaps with OFFSET.
3464 Return NULL if we can't find one. */
3467 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3469 varinfo_t curr = start;
3472 /* We may not find a variable in the field list with the actual
3473 offset when we have glommed a structure to a variable.
3474 In that case, however, offset should still be within the size
3476 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3484 /* Insert the varinfo FIELD into the field list for BASE, ordered by offset. */
3488 insert_into_field_list (varinfo_t base, varinfo_t field)
3490 varinfo_t prev = base;
3491 varinfo_t curr = base->next;
3502 if (field->offset <= curr->offset)
3507 field->next = prev->next;
3512 /* qsort comparison function for two fieldoff's PA and PB */
3515 fieldoff_compare (const void *pa, const void *pb)
3517 const fieldoff_s *foa = (const fieldoff_s *)pa;
3518 const fieldoff_s *fob = (const fieldoff_s *)pb;
3519 HOST_WIDE_INT foasize, fobsize;
3521 if (foa->offset != fob->offset)
3522 return foa->offset - fob->offset;
3524 foasize = TREE_INT_CST_LOW (foa->size);
3525 fobsize = TREE_INT_CST_LOW (fob->size);
3526 return foasize - fobsize;
3529 /* Sort a fieldstack according to the field offsets and sizes. */
3530 void sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
3532 qsort (VEC_address (fieldoff_s, fieldstack),
3533 VEC_length (fieldoff_s, fieldstack),
3534 sizeof (fieldoff_s),
3538 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
3539 of TYPE onto fieldstack, recording their offsets along the way.
3540 OFFSET is used to keep track of the offset in this entire structure, rather
3541 than just the immediately containing structure. Returns the number of fields pushed.
3543 HAS_UNION is set to true if we find a union type as a field of TYPE. */
3547 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
3548 HOST_WIDE_INT offset, bool *has_union)
3553 if (TREE_CODE (type) == COMPLEX_TYPE)
3555 fieldoff_s *real_part, *img_part;
3556 real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3557 real_part->type = TREE_TYPE (type);
3558 real_part->size = TYPE_SIZE (TREE_TYPE (type));
3559 real_part->offset = offset;
3560 real_part->decl = NULL_TREE;
3562 img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3563 img_part->type = TREE_TYPE (type);
3564 img_part->size = TYPE_SIZE (TREE_TYPE (type));
3565 img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
3566 img_part->decl = NULL_TREE;
3571 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
3572 if (TREE_CODE (field) == FIELD_DECL)
3578 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
3579 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
3582 if (!var_can_have_subvars (field))
3584 else if (!(pushed = push_fields_onto_fieldstack
3585 (TREE_TYPE (field), fieldstack,
3586 offset + bitpos_of_field (field), has_union))
3587 && DECL_SIZE (field)
3588 && !integer_zerop (DECL_SIZE (field)))
3589 /* Empty structures may have actual size, like in C++. So
3590 if we didn't push any subfields and the size is
3591 nonzero, push the field itself onto the stack. */
3598 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
3599 pair->type = TREE_TYPE (field);
3600 pair->size = DECL_SIZE (field);
3602 pair->offset = offset + bitpos_of_field (field);
3613 make_constraint_to_anything (varinfo_t vi)
3615 struct constraint_expr lhs, rhs;
3621 rhs.var = anything_id;
3623 rhs.type = ADDRESSOF;
3624 process_constraint (new_constraint (lhs, rhs));
3627 /* Count the number of arguments DECL has, and set IS_VARARGS to true
3628 if it is a varargs function. */
3631 count_num_arguments (tree decl, bool *is_varargs)
3636 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
3640 if (TREE_VALUE (t) == void_type_node)
3650 /* Create a variable info structure for the function DECL, using NAME,
3651 and return the index of the variable we've created for the function. */
3654 create_function_info_for (tree decl, const char *name)
3656 unsigned int index = VEC_length (varinfo_t, varmap);
3660 bool is_varargs = false;
3662 /* Create the variable info. */
3664 vi = new_var_info (decl, index, name, index);
3669 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
3670 insert_id_for_tree (vi->decl, index);
3671 VEC_safe_push (varinfo_t, heap, varmap, vi);
3675 /* If it's varargs, we don't know how many arguments it has, so we
3682 vi->is_unknown_size_var = true;
3687 arg = DECL_ARGUMENTS (decl);
3689 /* Set up variables for each argument. */
3690 for (i = 1; i < vi->fullsize; i++)
3693 const char *newname;
3695 unsigned int newindex;
3696 tree argdecl = decl;
3701 newindex = VEC_length (varinfo_t, varmap);
3702 asprintf (&tempname, "%s.arg%d", name, i-1);
3703 newname = ggc_strdup (tempname);
3706 argvi = new_var_info (argdecl, newindex,newname, newindex);
3707 argvi->decl = argdecl;
3708 VEC_safe_push (varinfo_t, heap, varmap, argvi);
3711 argvi->fullsize = vi->fullsize;
3712 argvi->has_union = false;
3713 insert_into_field_list (vi, argvi);
3714 stats.total_vars ++;
3717 insert_id_for_tree (arg, newindex);
3718 arg = TREE_CHAIN (arg);
3722 /* Create a variable for the return var. */
3723 if (DECL_RESULT (decl) != NULL
3724 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
3727 const char *newname;
3729 unsigned int newindex;
3730 tree resultdecl = decl;
3735 if (DECL_RESULT (decl))
3736 resultdecl = DECL_RESULT (decl);
3738 newindex = VEC_length (varinfo_t, varmap);
3739 asprintf (&tempname, "%s.result", name);
3740 newname = ggc_strdup (tempname);
3743 resultvi = new_var_info (resultdecl, newindex, newname, newindex);
3744 resultvi->decl = resultdecl;
3745 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
3746 resultvi->offset = i;
3748 resultvi->fullsize = vi->fullsize;
3749 resultvi->has_union = false;
3750 insert_into_field_list (vi, resultvi);
3751 stats.total_vars ++;
3752 if (DECL_RESULT (decl))
3753 insert_id_for_tree (DECL_RESULT (decl), newindex);
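/* For illustration (a sketch; the exact ids and offsets are only
   examples): in IPA mode a declaration such as int *f (int *a, int *b)
   gets a variable f whose fullsize is the argument count plus one,
   followed by the field variables f.arg0 and f.arg1 for the incoming
   parameters at the following offsets, and f.result as the last field
   for the return value.  */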
3759 /* Return true if FIELDSTACK contains fields that overlap.
3760 FIELDSTACK is assumed to be sorted by offset. */
3763 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
3765 fieldoff_s *fo = NULL;
3767 HOST_WIDE_INT lastoffset = -1;
3769 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3771 if (fo->offset == lastoffset)
3773 lastoffset = fo->offset;
3778 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
3779 This will also create any varinfo structures necessary for fields of DECL. */
3783 create_variable_info_for (tree decl, const char *name)
3785 unsigned int index = VEC_length (varinfo_t, varmap);
3787 tree decltype = TREE_TYPE (decl);
3788 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
3789 bool notokay = false;
3791 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
3792 VEC (fieldoff_s,heap) *fieldstack = NULL;
3794 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
3795 return create_function_info_for (decl, name);
3797 hasunion = TREE_CODE (decltype) == UNION_TYPE
3798 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
3799 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
3801 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
3804 VEC_free (fieldoff_s, heap, fieldstack);
3810 /* If the variable doesn't have subvars, we may end up needing to
3811 sort the field list and create fake variables for all the
3813 vi = new_var_info (decl, index, name, index);
3816 vi->has_union = hasunion;
3818 || TREE_CODE (declsize) != INTEGER_CST
3819 || TREE_CODE (decltype) == UNION_TYPE
3820 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
3822 vi->is_unknown_size_var = true;
3828 vi->fullsize = TREE_INT_CST_LOW (declsize);
3829 vi->size = vi->fullsize;
3832 insert_id_for_tree (vi->decl, index);
3833 VEC_safe_push (varinfo_t, heap, varmap, vi);
3834 if (is_global && (!flag_whole_program || !in_ipa_mode))
3835 make_constraint_to_anything (vi);
3838 if (use_field_sensitive
3840 && !vi->is_unknown_size_var
3841 && var_can_have_subvars (decl))
3843 unsigned int newindex = VEC_length (varinfo_t, varmap);
3844 fieldoff_s *fo = NULL;
3847 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3850 || TREE_CODE (fo->size) != INTEGER_CST
3858 /* We can't sort them if we have a field with a variable sized type,
3859 which will make notokay = true. In that case, we are going to return
3860 without creating varinfos for the fields anyway, so sorting them is a
3864 sort_fieldstack (fieldstack);
3865 /* Due to some C++ FE issues, like PR 22488, we might end up
3866 with what appear to be overlapping fields even though they,
3867 in reality, do not overlap. Until the C++ FE is fixed,
3868 we will simply disable field-sensitivity for these cases. */
3869 notokay = check_for_overlaps (fieldstack);
3873 if (VEC_length (fieldoff_s, fieldstack) != 0)
3874 fo = VEC_index (fieldoff_s, fieldstack, 0);
3876 if (fo == NULL || notokay)
3878 vi->is_unknown_size_var = 1;
3881 VEC_free (fieldoff_s, heap, fieldstack);
3885 vi->size = TREE_INT_CST_LOW (fo->size);
3886 vi->offset = fo->offset;
3887 for (i = 1; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3890 const char *newname;
3893 newindex = VEC_length (varinfo_t, varmap);
3895 asprintf (&tempname, "%s.%s", vi->name, alias_get_name (fo->decl));
3897 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC, vi->name, fo->offset);
3898 newname = ggc_strdup (tempname);
3900 newvi = new_var_info (decl, newindex, newname, newindex);
3901 newvi->offset = fo->offset;
3902 newvi->size = TREE_INT_CST_LOW (fo->size);
3903 newvi->fullsize = vi->fullsize;
3904 insert_into_field_list (vi, newvi);
3905 VEC_safe_push (varinfo_t, heap, varmap, newvi);
3906 if (is_global && (!flag_whole_program || !in_ipa_mode))
3907 make_constraint_to_anything (newvi);
3911 VEC_free (fieldoff_s, heap, fieldstack);
3916 /* Print out the points-to solution for VAR to FILE. */
3919 dump_solution_for_var (FILE *file, unsigned int var)
3921 varinfo_t vi = get_varinfo (var);
3925 fprintf (file, "%s = { ", vi->name);
3926 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (vi->node)->solution, 0, i, bi)
3928 fprintf (file, "%s ", get_varinfo (i)->name);
3930 fprintf (file, "}\n");
3933 /* Print the points-to solution for VAR to stdout. */
3936 debug_solution_for_var (unsigned int var)
3938 dump_solution_for_var (stdout, var);
3942 /* Create varinfo structures for all of the variables in the
3943 function for intraprocedural mode. */
3946 intra_create_variable_infos (void)
3950 /* For each incoming argument arg, ARG = &ANYTHING */
3951 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
3953 struct constraint_expr lhs;
3958 lhs.var = create_variable_info_for (t, alias_get_name (t));
3960 for (p = get_varinfo (lhs.var); p; p = p->next)
3961 make_constraint_to_anything (p);
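/* For example (a sketch): when analyzing void foo (int *p) without IPA
   information we know nothing about the caller, so every field of the
   varinfo created for the parameter p receives the constraint
   p = &ANYTHING, i.e. p may point anywhere.  */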
3966 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
3970 set_uids_in_ptset (bitmap into, bitmap from)
3976 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
3978 varinfo_t vi = get_varinfo (i);
3980 /* The only artificial variables that are allowed in a may-alias
3981 set are heap variables. */
3982 if (vi->is_artificial_var && !vi->is_heap_var)
3985 if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
3987 /* Variables containing unions may need to be converted to
3988 their SFT's, because SFT's can have unions and we cannot. */
3989 for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
3990 bitmap_set_bit (into, DECL_UID (sv->var));
3992 else if (TREE_CODE (vi->decl) == VAR_DECL
3993 || TREE_CODE (vi->decl) == PARM_DECL)
3995 if (var_can_have_subvars (vi->decl)
3996 && get_subvars_for_var (vi->decl))
3998 /* If VI->DECL is an aggregate for which we created
3999 SFTs, add the SFT corresponding to VI->OFFSET. */
4000 tree sft = get_subvar_at (vi->decl, vi->offset);
4002 bitmap_set_bit (into, DECL_UID (sft));
4006 /* Otherwise, just add VI->DECL to the alias set. */
4007 bitmap_set_bit (into, DECL_UID (vi->decl));
4014 static bool have_alias_info = false;
4016 /* Given a pointer variable P, fill in its points-to set, or return
4017 false if we can't. */
4020 find_what_p_points_to (tree p)
4022 unsigned int id = 0;
4024 if (!have_alias_info)