1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
33 #include "langhooks.h"
36 #include "diagnostic.h"
37 #include "tree-dump.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "tree-pass.h"
42 #include "tree-ssa-structalias.h"
45 #include "ipa-type-escape.h"
49 #include "pointer-set.h"
50 #include "alloc-pool.h"
52 /* Broad overview of how aliasing works:
54 First we compute points-to sets, which is done in
55 tree-ssa-structalias.c
57 During points-to set constraint finding, a bunch of little bits of
58 information is collected.
59 This is not done because it is necessary for points-to, but because
60 points-to has to walk every statement anyway. The function performing
61 this collecting is update_alias_info.
63 Bits update_alias_info collects include:
64 1. Directly escaping variables and variables whose value escapes
65 (using is_escape_site). This is the set of variables and values that
66 escape prior to transitive closure of the clobbers.
67 2. The set of variables dereferenced on the LHS (into
68 dereferenced_ptr_stores)
69 3. The set of variables dereferenced on the RHS (into
70 dereferenced_ptr_loads)
71 4. The set of all pointers we saw.
72 5. The number of loads and stores for each variable
73 6. The number of statements touching memory
74 7. The set of address taken variables.
77 #1 is computed by a combination of is_escape_site, and counting the
78 number of uses/deref operators. This function properly accounts for
79 situations like &ptr->field, which is *not* a dereference.
81 After points-to sets are computed, the sets themselves still
82 contain points-to specific variables, such as a variable that says
83 the pointer points to anything, a variable that says the pointer
84 points to readonly memory, etc.
86 These are eliminated in a later phase, as we will see.
88 The rest of the phases are located in tree-ssa-alias.c
90 The next phase after points-to set computation is called
91 "setup_pointers_and_addressables"
93 This pass does 3 main things:
95 1. All variables that can have TREE_ADDRESSABLE removed safely (IE
96 non-globals whose address is not taken), have TREE_ADDRESSABLE
98 2. All variables that may be aliased (which is the set of addressable
99 variables and globals) at all, are marked for renaming, and have
100 symbol memory tags created for them.
101 3. All variables which are stored into have their SMT's added to
105 After this function is run, all variables that will ever have an
106 SMT, have one, though its aliases are not filled in.
108 The next phase is to compute flow-insensitive aliasing, which in
109 our case, is a misnomer. It is really computing aliasing that
110 requires no transitive closure to be correct. In particular, it
111 uses stack vs non-stack, TBAA, etc, to determine whether two
112 symbols could *ever* alias. This phase works by going through all
113 the pointers we collected during update_alias_info, and for every
114 addressable variable in the program, seeing if they alias. If so,
115 the addressable variable is added to the symbol memory tag for the
118 As part of this, we handle symbol memory tags that conflict but
119 have no aliases in common, by forcing them to have a symbol in
120 common (through unioning alias sets or adding one as an alias of
121 the other), or by adding one as an alias of another. The case of
122 conflicts with no aliases in common occurs mainly due to aliasing
123 we cannot see. In particular, it generally means we have a load
124 through a pointer whose value came from outside the function.
125 Without an addressable symbol to point to, they would get the wrong
128 After flow insensitive aliasing is computed, we compute name tags
129 (called compute_flow_sensitive_info). We walk each pointer we
130 collected and see if it has a usable points-to set. If so, we
131 generate a name tag using that pointer, and make an alias bitmap for
132 it. Name tags are shared between all things with the same alias
133 bitmap. The alias bitmap will be translated from what points-to
134 computed. In particular, the "anything" variable in points-to will be
135 transformed into a pruned set of SMT's and their aliases that
136 compute_flow_insensitive_aliasing computed.
137 Note that since 4.3, every pointer that points-to computed a solution for
138 will get a name tag (whereas before 4.3, only those whose set did
139 *not* include the anything variable would). At the point where name
140 tags are all assigned, symbol memory tags are dead, and could be
141 deleted, *except* on global variables. Global variables still use
142 symbol memory tags as of right now.
144 After name tags are computed, the set of clobbered variables is
145 transitively closed. In particular, we compute the set of clobbered
146 variables based on the initial set of clobbers, plus the aliases of
147 pointers which either escape, or have their value escape.
149 After this, maybe_create_global_var is run, which handles a corner
150 case where we have no call clobbered variables, but have pure and
153 Staring at this function, I now remember it is a hack for the fact
154 that we do not mark all globals in the program as call clobbered for a
155 function unless they are actually used in that function. Instead, we
156 only mark the set that is actually clobbered. As a result, you can
157 end up with situations where you have no call clobbered vars set.
159 After maybe_create_global_var, we set pointers with the REF_ALL flag
160 to have alias sets that include all clobbered
161 memory tags and variables.
163 After this, memory partitioning is computed (by the function
164 compute_memory_partitions) and alias sets are reworked accordingly.
166 Lastly, we delete partitions with no symbols, and clean up after
170 /* Alias information used by compute_may_aliases and its helpers. */
173 /* SSA names visited while collecting points-to information. If bit I
174 is set, it means that SSA variable with version I has already been
176 sbitmap ssa_names_visited;
178 /* Array of SSA_NAME pointers processed by the points-to collector. */
179 VEC(tree,heap) *processed_ptrs;
181 /* ADDRESSABLE_VARS contains all the global variables and locals that
182 have had their address taken. */
183 struct alias_map_d **addressable_vars;
184 size_t num_addressable_vars;
186 /* POINTERS contains all the _DECL pointers with unique memory tags
187 that have been referenced in the program. */
188 struct alias_map_d **pointers;
191 /* Pointers that have been used in an indirect load/store operation. */
192 struct pointer_set_t *dereferenced_ptrs;
196 /* Structure to map a variable to its alias set. */
199 /* Variable and its alias set. */
205 /* Counters used to display statistics on alias analysis. */
208 unsigned int alias_queries;
209 unsigned int alias_mayalias;
210 unsigned int alias_noalias;
211 unsigned int simple_queries;
212 unsigned int simple_resolved;
213 unsigned int tbaa_queries;
214 unsigned int tbaa_resolved;
215 unsigned int structnoaddress_queries;
216 unsigned int structnoaddress_resolved;
220 /* Local variables. */
221 static struct alias_stats_d alias_stats;
222 static bitmap_obstack alias_bitmap_obstack;
224 /* Local functions. */
225 static void compute_flow_insensitive_aliasing (struct alias_info *);
226 static void dump_alias_stats (FILE *);
227 static tree create_memory_tag (tree type, bool is_type_tag);
228 static tree get_smt_for (tree, struct alias_info *);
229 static tree get_nmt_for (tree);
230 static void add_may_alias (tree, tree);
231 static struct alias_info *init_alias_info (void);
232 static void delete_alias_info (struct alias_info *);
233 static void compute_flow_sensitive_aliasing (struct alias_info *);
234 static void setup_pointers_and_addressables (struct alias_info *);
235 static void update_alias_info (struct alias_info *);
236 static void create_global_var (void);
237 static void maybe_create_global_var (void);
238 static void set_pt_anything (tree);
240 void debug_mp_info (VEC(mem_sym_stats_t,heap) *);
242 static alloc_pool mem_sym_stats_pool;
244 /* Return memory reference stats for symbol VAR. Create a new slot in
245 cfun->gimple_df->mem_sym_stats if needed. */
247 static struct mem_sym_stats_d *
248 get_mem_sym_stats_for (tree var)
251 struct mem_sym_stats_d *stats;
252 struct pointer_map_t *map = gimple_mem_ref_stats (cfun)->mem_sym_stats;
256 slot = pointer_map_insert (map, var);
259 stats = (struct mem_sym_stats_d *) pool_alloc (mem_sym_stats_pool);
260 memset (stats, 0, sizeof (*stats));
262 *slot = (void *) stats;
265 stats = (struct mem_sym_stats_d *) *slot;
271 /* Return memory reference statistics for variable VAR in function FN.
272 This is computed by alias analysis, but it is not kept
273 incrementally up-to-date. So, these stats are only accurate if
274 pass_may_alias has been run recently. If no alias information
275 exists, this function returns NULL. */
277 static mem_sym_stats_t
278 mem_sym_stats (struct function *fn, tree var)
281 struct pointer_map_t *stats_map = gimple_mem_ref_stats (fn)->mem_sym_stats;
283 if (stats_map == NULL)
286 slot = pointer_map_contains (stats_map, var);
290 return (mem_sym_stats_t) *slot;
294 /* Set MPT to be the memory partition associated with symbol SYM. */
297 set_memory_partition (tree sym, tree mpt)
299 #if defined ENABLE_CHECKING
301 gcc_assert (TREE_CODE (mpt) == MEMORY_PARTITION_TAG
302 && !is_gimple_reg (sym));
305 var_ann (sym)->mpt = mpt;
308 if (MPT_SYMBOLS (mpt) == NULL)
309 MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&alias_bitmap_obstack);
311 bitmap_set_bit (MPT_SYMBOLS (mpt), DECL_UID (sym));
313 /* MPT inherits the call-clobbering attributes from SYM. */
314 if (is_call_clobbered (sym))
316 MTAG_GLOBAL (mpt) = 1;
317 mark_call_clobbered (mpt, ESCAPE_IS_GLOBAL);
323 /* Mark variable VAR as being non-addressable. */
326 mark_non_addressable (tree var)
330 if (!TREE_ADDRESSABLE (var))
333 mpt = memory_partition (var);
335 clear_call_clobbered (var);
336 TREE_ADDRESSABLE (var) = 0;
340 /* Note that it's possible for a symbol to have an associated
341 MPT and the MPT have a NULL empty set. During
342 init_alias_info, all MPTs get their sets cleared out, but the
343 symbols still point to the old MPTs that used to hold them.
344 This is done so that compute_memory_partitions can now which
345 symbols are losing or changing partitions and mark them for
347 if (MPT_SYMBOLS (mpt))
348 bitmap_clear_bit (MPT_SYMBOLS (mpt), DECL_UID (var));
349 set_memory_partition (var, NULL_TREE);
354 /* qsort comparison function to sort type/name tags by DECL_UID. */
357 sort_tags_by_id (const void *pa, const void *pb)
359 const_tree const a = *(const_tree const *)pa;
360 const_tree const b = *(const_tree const *)pb;
362 return DECL_UID (a) - DECL_UID (b);
365 /* Initialize WORKLIST to contain those memory tags that are marked call
366 clobbered. Initialized WORKLIST2 to contain the reasons these
367 memory tags escaped. */
370 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
371 VEC (int, heap) **worklist2,
374 referenced_var_iterator rvi;
377 FOR_EACH_REFERENCED_VAR (curr, rvi)
379 if (MTAG_P (curr) && is_call_clobbered (curr))
381 VEC_safe_push (tree, heap, *worklist, curr);
382 VEC_safe_push (int, heap, *worklist2,
383 var_ann (curr)->escape_mask);
384 bitmap_set_bit (on_worklist, DECL_UID (curr));
389 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
390 ALIAS is not already marked call clobbered, and is a memory
394 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
395 VEC (int, heap) **worklist2, int reason,
398 if (MTAG_P (alias) && !is_call_clobbered (alias)
399 && !bitmap_bit_p (on_worklist, DECL_UID (alias)))
401 VEC_safe_push (tree, heap, *worklist, alias);
402 VEC_safe_push (int, heap, *worklist2, reason);
403 bitmap_set_bit (on_worklist, DECL_UID (alias));
407 /* Mark aliases of TAG as call clobbered, and place any tags on the
408 alias list that were not already call clobbered on WORKLIST. */
411 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
412 VEC (int, heap) **worklist2, bitmap on_worklist)
418 var_ann_t ta = var_ann (tag);
422 aliases = may_aliases (tag);
426 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
428 entry = referenced_var (i);
429 /* If you clobber one part of a structure, you
430 clobber the entire thing. While this does not make
431 the world a particularly nice place, it is necessary
432 in order to allow C/C++ tricks that involve
433 pointer arithmetic to work. */
434 if (!unmodifiable_var_p (entry))
436 add_to_worklist (entry, worklist, worklist2, ta->escape_mask,
438 mark_call_clobbered (entry, ta->escape_mask);
443 /* Tags containing global vars need to be marked as global.
444 Tags containing call clobbered vars need to be marked as call
448 compute_tag_properties (void)
450 referenced_var_iterator rvi;
453 VEC (tree, heap) *taglist = NULL;
/* Collect every memory tag so it can be visited in a stable order.  */
455 FOR_EACH_REFERENCED_VAR (tag, rvi)
459 VEC_safe_push (tree, heap, taglist, tag);
462 /* We sort the taglist by DECL_UID, for two reasons.
463 1. To get a sequential ordering to make the bitmap accesses
465 2. Because of the way we compute aliases, it's more likely that
466 an earlier tag is included in a later tag, and this will reduce
467 the number of iterations.
469 If we had a real tag graph, we would just topo-order it and be
471 qsort (VEC_address (tree, taglist),
472 VEC_length (tree, taglist),
476 /* Go through each tag not marked as global, and if it aliases
477 global vars, mark it global.
479 If the tag contains call clobbered vars, mark it call
482 This loop iterates because tags may appear in the may-aliases
483 list of other tags when we group. */
/* Fixed-point iteration over all tags: marking one tag can make
   another tag whose alias set contains it eligible for marking.  */
490 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
496 bool tagcc = is_call_clobbered (tag);
497 bool tagglobal = MTAG_GLOBAL (tag);
/* Once both properties are set, nothing more can be learned here.  */
499 if (tagcc && tagglobal)
502 ma = may_aliases (tag);
506 EXECUTE_IF_SET_IN_BITMAP (ma, 0, i, bi)
508 entry = referenced_var (i);
509 /* Call clobbered entries cause the tag to be marked
511 if (!tagcc && is_call_clobbered (entry))
513 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
518 /* Global vars cause the tag to be marked global. */
519 if (!tagglobal && is_global_var (entry))
521 MTAG_GLOBAL (tag) = true;
526 /* Early exit once both global and cc are set, since the
527 loop can't do any more than that. */
528 if (tagcc && tagglobal)
533 VEC_free (tree, heap, taglist);
536 /* Set up the initial variable clobbers, call-uses and globalness.
537 When this function completes, only tags whose aliases need to be
538 clobbered will be set clobbered. Tags clobbered because they
539 contain call clobbered vars are handled in compute_tag_properties. */
542 set_initial_properties (struct alias_info *ai)
545 referenced_var_iterator rvi;
548 bool any_pt_anything = false;
549 enum escape_type pt_anything_mask = 0;
/* Pass 1: globals are clobbered by definition; pointer PARM_DECLs
   with a default definition have their value marked as escaping.  */
551 FOR_EACH_REFERENCED_VAR (var, rvi)
553 if (is_global_var (var))
555 if (!unmodifiable_var_p (var))
556 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
558 else if (TREE_CODE (var) == PARM_DECL
559 && gimple_default_def (cfun, var)
560 && POINTER_TYPE_P (TREE_TYPE (var)))
562 tree def = gimple_default_def (cfun, var);
563 get_ptr_info (def)->value_escapes_p = 1;
564 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
/* NOTE(review): clobber_what_escaped apparently returns false when a
   precise solution was unavailable; in that case every addressable
   variable is clobbered at the bottom of this function — confirm
   against its definition in tree-ssa-structalias.c.  */
568 if (!clobber_what_escaped ())
570 any_pt_anything = true;
571 pt_anything_mask |= ESCAPE_TO_CALL;
574 compute_call_used_vars ();
/* Pass 2: propagate escape information from each processed pointer
   to its name tag and the symbol tag of its base variable.  */
576 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
578 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
579 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
581 /* A pointer that only escapes via a function return does not
582 add to the call clobber or call used solution.
583 To exclude ESCAPE_TO_PURE_CONST we would need to track
584 call used variables separately or compute those properly
585 in the operand scanner. */
586 if (pi->value_escapes_p
587 && pi->escape_mask & ~ESCAPE_TO_RETURN
589 /* If PTR escapes then its associated memory tags and
590 pointed-to variables are call-clobbered. */
591 if (pi->name_mem_tag)
592 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
595 mark_call_clobbered (tag, pi->escape_mask);
598 /* If the name tag is call clobbered, so is the symbol tag
599 associated with the base VAR_DECL. */
602 && is_call_clobbered (pi->name_mem_tag))
603 mark_call_clobbered (tag, pi->escape_mask);
605 /* Name tags and symbol tags that we don't know where they point
606 to, might point to global memory, and thus, are clobbered.
608 FIXME: This is not quite right. They should only be
609 clobbered if value_escapes_p is true, regardless of whether
610 they point to global memory or not.
611 So removing this code and fixing all the bugs would be nice.
612 It is the cause of a bunch of clobbering. */
613 if ((pi->pt_global_mem || pi->pt_anything)
614 && pi->memory_tag_needed && pi->name_mem_tag)
616 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
617 MTAG_GLOBAL (pi->name_mem_tag) = true;
620 if ((pi->pt_global_mem || pi->pt_anything)
621 && pi->memory_tag_needed
624 mark_call_clobbered (tag, ESCAPE_IS_GLOBAL);
625 MTAG_GLOBAL (tag) = true;
629 /* If a pt_anything pointer escaped we need to mark all addressable
630 variables call clobbered. */
636 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, j, bi)
638 tree var = referenced_var (j);
639 if (!unmodifiable_var_p (var))
640 mark_call_clobbered (var, pt_anything_mask);
645 /* Compute which variables need to be marked call clobbered because
646 their tag is call clobbered, and which tags need to be marked
647 global because they contain global variables. */
650 compute_call_clobbered (struct alias_info *ai)
652 VEC (tree, heap) *worklist = NULL;
653 VEC (int,heap) *worklist2 = NULL;
656 timevar_push (TV_CALL_CLOBBER);
657 on_worklist = BITMAP_ALLOC (NULL);
659 set_initial_properties (ai);
660 init_transitive_clobber_worklist (&worklist, &worklist2, on_worklist);
661 while (VEC_length (tree, worklist) != 0)
663 tree curr = VEC_pop (tree, worklist);
664 int reason = VEC_pop (int, worklist2);
666 bitmap_clear_bit (on_worklist, DECL_UID (curr));
667 mark_call_clobbered (curr, reason);
668 mark_aliases_call_clobbered (curr, &worklist, &worklist2, on_worklist);
670 VEC_free (tree, heap, worklist);
671 VEC_free (int, heap, worklist2);
672 BITMAP_FREE (on_worklist);
673 compute_tag_properties ();
674 timevar_pop (TV_CALL_CLOBBER);
678 /* Dump memory partition information to FILE. */
681 dump_memory_partitions (FILE *file)
687 fprintf (file, "\nMemory partitions\n\n");
688 for (i = 0, npart = 0, nsyms = 0;
689 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
694 bitmap syms = MPT_SYMBOLS (mpt);
695 unsigned long n = (syms) ? bitmap_count_bits (syms) : 0;
697 fprintf (file, "#%u: ", i);
698 print_generic_expr (file, mpt, 0);
699 fprintf (file, ": %lu elements: ", n);
700 dump_decl_set (file, syms);
706 fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
/* Dump memory partition information to stderr.  */

void
debug_memory_partitions (void)
{
  dump_memory_partitions (stderr);
}
719 /* Return true if memory partitioning is required given the memory
720 reference estimates in STATS. */
723 need_to_partition_p (struct mem_ref_stats_d *stats)
725 long num_vops = stats->num_vuses + stats->num_vdefs;
726 long avg_vops = CEIL (num_vops, stats->num_mem_stmts);
727 return (num_vops > (long) MAX_ALIASED_VOPS
728 && avg_vops > (long) AVG_ALIASED_VOPS);
732 /* Count the actual number of virtual operators in CFUN. Note that
733 this is only meaningful after virtual operands have been populated,
734 so it should be invoked at the end of compute_may_aliases.
736 The number of virtual operators are stored in *NUM_VDEFS_P and
737 *NUM_VUSES_P, the number of partitioned symbols in
738 *NUM_PARTITIONED_P and the number of unpartitioned symbols in
739 *NUM_UNPARTITIONED_P.
741 If any of these pointers is NULL the corresponding count is not
745 count_mem_refs (long *num_vuses_p, long *num_vdefs_p,
746 long *num_partitioned_p, long *num_unpartitioned_p)
748 gimple_stmt_iterator gsi;
750 long num_vdefs, num_vuses, num_partitioned, num_unpartitioned;
751 referenced_var_iterator rvi;
754 num_vuses = num_vdefs = num_partitioned = num_unpartitioned = 0;
756 if (num_vuses_p || num_vdefs_p)
758 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
760 gimple stmt = gsi_stmt (gsi);
761 if (gimple_references_memory_p (stmt))
763 num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
764 num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
768 if (num_partitioned_p || num_unpartitioned_p)
769 FOR_EACH_REFERENCED_VAR (sym, rvi)
771 if (is_gimple_reg (sym))
774 if (memory_partition (sym))
781 *num_vdefs_p = num_vdefs;
784 *num_vuses_p = num_vuses;
786 if (num_partitioned_p)
787 *num_partitioned_p = num_partitioned;
789 if (num_unpartitioned_p)
790 *num_unpartitioned_p = num_unpartitioned;
794 /* The list is sorted by increasing partitioning score (PSCORE).
795 This score is computed such that symbols with high scores are
796 those that are least likely to be partitioned. Given a symbol
797 MP->VAR, PSCORE(S) is the result of the following weighted sum
799 PSCORE(S) = FW * 64 + FR * 32
806 FW Execution frequency of writes to S
807 FR Execution frequency of reads from S
808 DW Number of direct writes to S
809 DR Number of direct reads from S
810 IW Number of indirect writes to S
811 IR Number of indirect reads from S
812 NO_ALIAS State of the NO_ALIAS* flags
814 The basic idea here is that symbols that are frequently
815 written-to in hot paths of the code are the last to be considered
819 mem_sym_score (mem_sym_stats_t mp)
821 return mp->frequency_writes * 64 + mp->frequency_reads * 32
822 + mp->num_direct_writes * 16 + mp->num_direct_reads * 8
823 + mp->num_indirect_writes * 4 + mp->num_indirect_reads * 2
824 + var_ann (mp->var)->noalias_state;
828 /* Dump memory reference stats for function CFUN to FILE. */
831 dump_mem_ref_stats (FILE *file)
833 long actual_num_vuses, actual_num_vdefs;
834 long num_partitioned, num_unpartitioned;
835 struct mem_ref_stats_d *stats;
837 stats = gimple_mem_ref_stats (cfun);
839 count_mem_refs (&actual_num_vuses, &actual_num_vdefs, &num_partitioned,
842 fprintf (file, "\nMemory reference statistics for %s\n\n",
843 lang_hooks.decl_printable_name (current_function_decl, 2));
845 fprintf (file, "Number of memory statements: %ld\n",
846 stats->num_mem_stmts);
847 fprintf (file, "Number of call sites: %ld\n",
848 stats->num_call_sites);
849 fprintf (file, "Number of pure/const call sites: %ld\n",
850 stats->num_pure_const_call_sites);
851 fprintf (file, "Number of asm sites: %ld\n",
852 stats->num_asm_sites);
853 fprintf (file, "Estimated number of loads: %ld (%ld/stmt)\n",
855 (stats->num_mem_stmts)
856 ? CEIL (stats->num_vuses, stats->num_mem_stmts)
858 fprintf (file, "Actual number of loads: %ld (%ld/stmt)\n",
860 (stats->num_mem_stmts)
861 ? CEIL (actual_num_vuses, stats->num_mem_stmts)
864 if (actual_num_vuses > stats->num_vuses + (stats->num_vuses / 25))
865 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
867 fprintf (file, "Estimated number of stores: %ld (%ld/stmt)\n",
869 (stats->num_mem_stmts)
870 ? CEIL (stats->num_vdefs, stats->num_mem_stmts)
872 fprintf (file, "Actual number of stores: %ld (%ld/stmt)\n",
874 (stats->num_mem_stmts)
875 ? CEIL (actual_num_vdefs, stats->num_mem_stmts)
878 if (actual_num_vdefs > stats->num_vdefs + (stats->num_vdefs / 25))
879 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
881 fprintf (file, "Partitioning thresholds: MAX = %d AVG = %d "
882 "(%sNEED TO PARTITION)\n", MAX_ALIASED_VOPS, AVG_ALIASED_VOPS,
883 stats->num_mem_stmts && need_to_partition_p (stats) ? "" : "NO ");
884 fprintf (file, "Number of partitioned symbols: %ld\n", num_partitioned);
885 fprintf (file, "Number of unpartitioned symbols: %ld\n", num_unpartitioned);
/* Dump memory reference stats for function CFUN to stderr.  */

void
debug_mem_ref_stats (void)
{
  dump_mem_ref_stats (stderr);
}
898 /* Dump memory reference stats for variable VAR to FILE. */
901 dump_mem_sym_stats (FILE *file, tree var)
903 mem_sym_stats_t stats = mem_sym_stats (cfun, var);
908 fprintf (file, "read frequency: %6ld, write frequency: %6ld, "
909 "direct reads: %3ld, direct writes: %3ld, "
910 "indirect reads: %4ld, indirect writes: %4ld, symbol: ",
911 stats->frequency_reads, stats->frequency_writes,
912 stats->num_direct_reads, stats->num_direct_writes,
913 stats->num_indirect_reads, stats->num_indirect_writes);
914 print_generic_expr (file, stats->var, 0);
915 fprintf (file, ", tags: ");
916 dump_decl_set (file, stats->parent_tags);
920 /* Dump memory reference stats for variable VAR to stderr. */
923 debug_mem_sym_stats (tree var)
925 dump_mem_sym_stats (stderr, var);
928 /* Dump memory reference stats for variable VAR to FILE. For use
929 of tree-dfa.c:dump_variable. */
932 dump_mem_sym_stats_for_var (FILE *file, tree var)
934 mem_sym_stats_t stats = mem_sym_stats (cfun, var);
939 fprintf (file, ", score: %ld", mem_sym_score (stats));
940 fprintf (file, ", direct reads: %ld", stats->num_direct_reads);
941 fprintf (file, ", direct writes: %ld", stats->num_direct_writes);
942 fprintf (file, ", indirect reads: %ld", stats->num_indirect_reads);
943 fprintf (file, ", indirect writes: %ld", stats->num_indirect_writes);
946 /* Dump memory reference stats for all memory symbols to FILE. */
949 dump_all_mem_sym_stats (FILE *file)
951 referenced_var_iterator rvi;
954 FOR_EACH_REFERENCED_VAR (sym, rvi)
956 if (is_gimple_reg (sym))
959 dump_mem_sym_stats (file, sym);
/* Dump memory reference stats for all memory symbols to stderr.  */

void
debug_all_mem_sym_stats (void)
{
  dump_all_mem_sym_stats (stderr);
}
973 /* Dump the MP_INFO array to FILE. */
976 dump_mp_info (FILE *file, VEC(mem_sym_stats_t,heap) *mp_info)
979 mem_sym_stats_t mp_p;
981 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
982 if (!mp_p->partitioned_p)
983 dump_mem_sym_stats (file, mp_p->var);
987 /* Dump the MP_INFO array to stderr. */
990 debug_mp_info (VEC(mem_sym_stats_t,heap) *mp_info)
992 dump_mp_info (stderr, mp_info);
996 /* Update memory reference stats for symbol VAR in statement STMT.
997 NUM_DIRECT_READS and NUM_DIRECT_WRITES specify the number of times
998 that VAR is read/written in STMT (indirect reads/writes are not
999 recorded by this function, see compute_memory_partitions). */
1002 update_mem_sym_stats_from_stmt (tree var, gimple stmt, long num_direct_reads,
1003 long num_direct_writes)
1005 mem_sym_stats_t stats;
1007 gcc_assert (num_direct_reads >= 0 && num_direct_writes >= 0);
1009 stats = get_mem_sym_stats_for (var);
1011 stats->num_direct_reads += num_direct_reads;
1012 stats->frequency_reads += ((long) gimple_bb (stmt)->frequency
1013 * num_direct_reads);
1015 stats->num_direct_writes += num_direct_writes;
1016 stats->frequency_writes += ((long) gimple_bb (stmt)->frequency
1017 * num_direct_writes);
1021 /* Given two MP_INFO entries MP1 and MP2, return -1 if MP1->VAR should
1022 be partitioned before MP2->VAR, 0 if they are the same or 1 if
1023 MP1->VAR should be partitioned after MP2->VAR. */
1026 compare_mp_info_entries (mem_sym_stats_t mp1, mem_sym_stats_t mp2)
1028 long pscore1 = mem_sym_score (mp1);
1029 long pscore2 = mem_sym_score (mp2);
1031 if (pscore1 < pscore2)
1033 else if (pscore1 > pscore2)
1036 return DECL_UID (mp1->var) - DECL_UID (mp2->var);
1040 /* Comparison routine for qsort. The list is sorted by increasing
1041 partitioning score (PSCORE). This score is computed such that
1042 symbols with high scores are those that are least likely to be
1046 mp_info_cmp (const void *p, const void *q)
1048 mem_sym_stats_t e1 = *((const mem_sym_stats_t *) p);
1049 mem_sym_stats_t e2 = *((const mem_sym_stats_t *) q);
1050 return compare_mp_info_entries (e1, e2);
1054 /* Sort the array of reference counts used to compute memory partitions.
1055 Elements are sorted in ascending order of execution frequency and
1056 descending order of virtual operators needed. */
1059 sort_mp_info (VEC(mem_sym_stats_t,heap) *list)
1061 unsigned num = VEC_length (mem_sym_stats_t, list);
1068 if (compare_mp_info_entries (VEC_index (mem_sym_stats_t, list, 0),
1069 VEC_index (mem_sym_stats_t, list, 1)) > 0)
1071 /* Swap elements if they are in the wrong order. */
1072 mem_sym_stats_t tmp = VEC_index (mem_sym_stats_t, list, 0);
1073 VEC_replace (mem_sym_stats_t, list, 0,
1074 VEC_index (mem_sym_stats_t, list, 1));
1075 VEC_replace (mem_sym_stats_t, list, 1, tmp);
1081 /* There are 3 or more elements, call qsort. */
1082 qsort (VEC_address (mem_sym_stats_t, list),
1083 VEC_length (mem_sym_stats_t, list),
1084 sizeof (mem_sym_stats_t),
1089 /* Return the memory partition tag (MPT) associated with memory
1093 get_mpt_for (tree sym)
1097 /* Don't create a new tag unnecessarily. */
1098 mpt = memory_partition (sym);
1099 if (mpt == NULL_TREE)
1101 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
1102 TREE_ADDRESSABLE (mpt) = 0;
1103 add_referenced_var (mpt);
1104 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
1105 gcc_assert (MPT_SYMBOLS (mpt) == NULL);
1106 set_memory_partition (sym, mpt);
1113 /* Add MP_P->VAR to a memory partition and return the partition. */
1116 find_partition_for (mem_sym_stats_t mp_p)
1119 VEC(tree,heap) *mpt_table;
1122 mpt_table = gimple_ssa_operands (cfun)->mpt_table;
1125 /* Find an existing partition for MP_P->VAR. */
1126 for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
1128 mem_sym_stats_t mpt_stats;
1130 /* If MPT does not have any symbols yet, use it. */
1131 if (MPT_SYMBOLS (mpt) == NULL)
1134 /* Otherwise, see if MPT has common parent tags with MP_P->VAR,
1135 but avoid grouping clobbered variables with non-clobbered
1136 variables (otherwise, this tends to creates a single memory
1137 partition because other call-clobbered variables may have
1138 common parent tags with non-clobbered ones). */
1139 mpt_stats = get_mem_sym_stats_for (mpt);
1140 if (mp_p->parent_tags
1141 && mpt_stats->parent_tags
1142 && is_call_clobbered (mpt) == is_call_clobbered (mp_p->var)
1143 && bitmap_intersect_p (mpt_stats->parent_tags, mp_p->parent_tags))
1146 /* If no common parent tags are found, see if both MPT and
1147 MP_P->VAR are call-clobbered. */
1148 if (is_call_clobbered (mpt) && is_call_clobbered (mp_p->var))
1152 if (mpt == NULL_TREE)
1153 mpt = get_mpt_for (mp_p->var);
1155 set_memory_partition (mp_p->var, mpt);
1157 mp_p->partitioned_p = true;
1159 mark_sym_for_renaming (mp_p->var);
1160 mark_sym_for_renaming (mpt);
1166 /* Rewrite the alias set for TAG to use the newly created partitions.
1167 If TAG is NULL, rewrite the set of call-clobbered variables.
1168 NEW_ALIASES is a scratch bitmap to build the new set of aliases for
1172 rewrite_alias_set_for (tree tag, bitmap new_aliases)
1178 EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, i, bi)
1180 sym = referenced_var (i);
1181 mpt = memory_partition (sym);
1183 bitmap_set_bit (new_aliases, DECL_UID (mpt));
1185 bitmap_set_bit (new_aliases, DECL_UID (sym));
1188 /* Rebuild the may-alias array for TAG. */
1189 bitmap_copy (MTAG_ALIASES (tag), new_aliases);
1193 /* Determine how many virtual operands can be saved by partitioning
1194 MP_P->VAR into MPT. When a symbol S is thrown inside a partition
1195 P, every virtual operand that used to reference S will now
1196 reference P. Whether it reduces the number of virtual operands
1199 1- Direct references to S are never saved. Instead of the virtual
1200 operand to S, we will now have a virtual operand to P.
1202 2- Indirect references to S are reduced only for those memory tags
1203 holding S that already had other symbols partitioned into P.
1204 For instance, if a memory tag T has the alias set { a b S c },
1205 the first time we partition S into P, the alias set will become
1206 { a b P c }, so no virtual operands will be saved. However, if
1207 we now partition symbol 'c' into P, then the alias set for T
1208 will become { a b P }, so we will be saving one virtual operand
1209 for every indirect reference to 'c'.
1211 3- If S is call-clobbered, we save as many virtual operands as
1212 call/asm sites exist in the code, but only if other
1213 call-clobbered symbols have been grouped into P. The first
1214 call-clobbered symbol that we group does not produce any
savings (it merely primes MPT_STATS->HAS_CALL_CLOBBERED_VARS).
1217 MEM_REF_STATS points to CFUN's memory reference information. */
1220 estimate_vop_reduction (struct mem_ref_stats_d *mem_ref_stats,
1221 mem_sym_stats_t mp_p, tree mpt)
1225 mem_sym_stats_t mpt_stats;
1227 /* We should only get symbols with indirect references here. */
1228 gcc_assert (mp_p->num_indirect_reads > 0 || mp_p->num_indirect_writes > 0);
1230 /* Note that the only statistics we keep for MPT is the set of
1231 parent tags to know which memory tags have had alias members
1232 partitioned, and the indicator has_call_clobbered_vars.
1233 Reference counts are not important for MPT. */
1234 mpt_stats = get_mem_sym_stats_for (mpt);
1236 /* Traverse all the parent tags for MP_P->VAR. For every tag T, if
1237 partition P is already grouping aliases of T, then reduce the
1238 number of virtual operands by the number of direct references
to T.  */
1240 if (mp_p->parent_tags)
1242 if (mpt_stats->parent_tags == NULL)
1243 mpt_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
1245 EXECUTE_IF_SET_IN_BITMAP (mp_p->parent_tags, 0, i, bi)
1247 if (bitmap_bit_p (mpt_stats->parent_tags, i))
1249 /* Partition MPT is already partitioning symbols in the
1250 alias set for TAG. This means that we are now saving
1251 1 virtual operand for every direct reference to TAG. */
1252 tree tag = referenced_var (i);
1253 mem_sym_stats_t tag_stats = mem_sym_stats (cfun, tag);
1254 mem_ref_stats->num_vuses -= tag_stats->num_direct_reads;
1255 mem_ref_stats->num_vdefs -= tag_stats->num_direct_writes;
1259 /* This is the first symbol in tag I's alias set that is
1260 being grouped under MPT. We will not save any
1261 virtual operands this time, but record that MPT is
1262 grouping a symbol from TAG's alias set so that the
1263 next time we get the savings. */
1264 bitmap_set_bit (mpt_stats->parent_tags, i);
1269 /* If MP_P->VAR is call-clobbered, and MPT is already grouping
1270 call-clobbered symbols, then we will save as many virtual
1271 operands as asm/call sites there are. */
1272 if (is_call_clobbered (mp_p->var))
1274 if (mpt_stats->has_call_clobbered_vars)
1275 mem_ref_stats->num_vdefs -= mem_ref_stats->num_call_sites
1276 + mem_ref_stats->num_asm_sites;
1278 mpt_stats->has_call_clobbered_vars = true;
1283 /* Helper for compute_memory_partitions. Transfer reference counts
1284 from pointers to their pointed-to sets. Counters for pointers were
1285 computed by update_alias_info. MEM_REF_STATS points to CFUN's
1286 memory reference information. */
1289 update_reference_counts (struct mem_ref_stats_d *mem_ref_stats)
1293 mem_sym_stats_t sym_stats;
1295 for (i = 1; i < num_ssa_names; i++)
1298 struct ptr_info_def *pi;
1302 && POINTER_TYPE_P (TREE_TYPE (ptr))
1303 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1304 && pi->memory_tag_needed)
1309 mem_sym_stats_t ptr_stats, tag_stats;
1311 /* If PTR has flow-sensitive points-to information, use
1312 PTR's name tag, otherwise use the symbol tag associated
1313 with PTR's symbol. */
1314 if (pi->name_mem_tag)
1315 tag = pi->name_mem_tag;
1317 tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
1319 ptr_stats = get_mem_sym_stats_for (ptr);
1320 tag_stats = get_mem_sym_stats_for (tag);
1322 /* TAG has as many direct references as dereferences we
1323 found for its parent pointer. */
1324 tag_stats->num_direct_reads += ptr_stats->num_direct_reads;
1325 tag_stats->num_direct_writes += ptr_stats->num_direct_writes;
1327 /* All the dereferences of pointer PTR are considered direct
1328 references to PTR's memory tag (TAG). In turn,
1329 references to TAG will become virtual operands for every
1330 symbol in TAG's alias set. So, for every symbol ALIAS in
1331 TAG's alias set, add as many indirect references to ALIAS
1332 as direct references there are for TAG. */
1333 if (MTAG_ALIASES (tag))
1334 EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, j, bj)
1336 tree alias = referenced_var (j);
1337 sym_stats = get_mem_sym_stats_for (alias);
1339 /* All the direct references to TAG are indirect references
to ALIAS.  */
1341 sym_stats->num_indirect_reads += ptr_stats->num_direct_reads;
1342 sym_stats->num_indirect_writes += ptr_stats->num_direct_writes;
1343 sym_stats->frequency_reads += ptr_stats->frequency_reads;
1344 sym_stats->frequency_writes += ptr_stats->frequency_writes;
1346 /* Indicate that TAG is one of ALIAS's parent tags. */
1347 if (sym_stats->parent_tags == NULL)
1348 sym_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
1349 bitmap_set_bit (sym_stats->parent_tags, DECL_UID (tag));
1354 /* Call-clobbered symbols are indirectly written at every
call and asm site.  */
1356 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1358 tree sym = referenced_var (i);
1359 sym_stats = get_mem_sym_stats_for (sym);
1360 sym_stats->num_indirect_writes += mem_ref_stats->num_call_sites
1361 + mem_ref_stats->num_asm_sites;
1364 /* Addressable symbols are indirectly written at some ASM sites.
1365 Since only ASM sites that clobber memory actually affect
1366 addressable symbols, this is an over-estimation. */
1367 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
1369 tree sym = referenced_var (i);
1370 sym_stats = get_mem_sym_stats_for (sym);
1371 sym_stats->num_indirect_writes += mem_ref_stats->num_asm_sites;
1376 /* Helper for compute_memory_partitions. Add all memory symbols to
1377 *MP_INFO_P and compute the initial estimate for the total number of
1378 virtual operands needed. MEM_REF_STATS points to CFUN's memory
1379 reference information. On exit, *TAGS_P will contain the list of
1380 memory tags whose alias set need to be rewritten after partitioning.  */
1384 build_mp_info (struct mem_ref_stats_d *mem_ref_stats,
1385 VEC(mem_sym_stats_t,heap) **mp_info_p,
1386 VEC(tree,heap) **tags_p)
1389 referenced_var_iterator rvi;
1391 FOR_EACH_REFERENCED_VAR (var, rvi)
1393 mem_sym_stats_t sym_stats;
1396 /* We are only interested in memory symbols other than MPTs. */
1397 if (is_gimple_reg (var) || TREE_CODE (var) == MEMORY_PARTITION_TAG)
1400 /* Collect memory tags into the TAGS array so that we can
1401 rewrite their alias sets after partitioning. */
1402 if (MTAG_P (var) && MTAG_ALIASES (var))
1403 VEC_safe_push (tree, heap, *tags_p, var);
1405 /* Since we are going to re-compute partitions, any symbols that
1406 used to belong to a partition must be detached from it and
1407 marked for renaming. */
1408 if ((old_mpt = memory_partition (var)) != NULL)
1410 mark_sym_for_renaming (old_mpt);
1411 set_memory_partition (var, NULL_TREE);
1412 mark_sym_for_renaming (var);
1415 sym_stats = get_mem_sym_stats_for (var);
1417 /* Add VAR's reference info to MP_INFO. Note that the only
1418 symbols that make sense to partition are those that have
1419 indirect references. If a symbol S is always directly
1420 referenced, partitioning it will not reduce the number of
1421 virtual operands. The only symbols that are profitable to
1422 partition are those that belong to alias sets and/or are
call-clobbered.  */
1424 if (sym_stats->num_indirect_reads > 0
1425 || sym_stats->num_indirect_writes > 0)
1426 VEC_safe_push (mem_sym_stats_t, heap, *mp_info_p, sym_stats);
1428 /* Update the number of estimated VOPS. Note that direct
1429 references to memory tags are always counted as indirect
1430 references to their alias set members, so if a memory tag has
1431 aliases, do not count its direct references to avoid double
accounting.  */
1433 if (!MTAG_P (var) || !MTAG_ALIASES (var))
1435 mem_ref_stats->num_vuses += sym_stats->num_direct_reads;
1436 mem_ref_stats->num_vdefs += sym_stats->num_direct_writes;
1439 mem_ref_stats->num_vuses += sym_stats->num_indirect_reads;
1440 mem_ref_stats->num_vdefs += sym_stats->num_indirect_writes;
1445 /* Compute memory partitions. A memory partition (MPT) is an
1446 arbitrary grouping of memory symbols, such that references to one
1447 member of the group are considered a reference to all the members of
1450 As opposed to alias sets in memory tags, the grouping into
1451 partitions is completely arbitrary and only done to reduce the
1452 number of virtual operands. The only rule that needs to be
1453 observed when creating memory partitions is that given two memory
1454 partitions MPT.i and MPT.j, they must not contain symbols in
1457 Memory partitions are used when putting the program into Memory-SSA
1458 form. In particular, in Memory-SSA PHI nodes are not computed for
1459 individual memory symbols. They are computed for memory
1460 partitions. This reduces the amount of PHI nodes in the SSA graph
1461 at the expense of precision (i.e., it makes unrelated stores affect
1464 However, it is possible to increase precision by changing this
1465 partitioning scheme. For instance, if the partitioning scheme is
1466 such that get_mpt_for is the identity function (that is,
1467 get_mpt_for (s) = s), this will result in ultimate precision at the
1468 expense of huge SSA webs.
1470 At the other extreme, a partitioning scheme that groups all the
1471 symbols in the same set results in minimal SSA webs and almost
1472 total loss of precision.
1474 The partitioning heuristic uses three parameters to decide the
1475 order in which symbols are processed. The list of symbols is
1476 sorted so that symbols that are more likely to be partitioned are
1477 near the top of the list:
1479 - Execution frequency. If a memory reference is in a frequently
1480 executed code path, grouping it into a partition may block useful
1481 transformations and cause sub-optimal code generation. So, the
1482 partition heuristic tries to avoid grouping symbols with high
1483 execution frequency scores. Execution frequency is taken
1484 directly from the basic blocks where every reference is made (see
1485 update_mem_sym_stats_from_stmt), which in turn uses the
1486 profile guided machinery, so if the program is compiled with PGO
1487 enabled, more accurate partitioning decisions will be made.
1489 - Number of references. Symbols with few references in the code,
1490 are partitioned before symbols with many references.
1492 - NO_ALIAS attributes. Symbols with any of the NO_ALIAS*
1493 attributes are partitioned after symbols marked MAY_ALIAS.
1495 Once the list is sorted, the partitioning proceeds as follows:
1497 1- For every symbol S in MP_INFO, create a new memory partition MP,
1498 if necessary. To avoid memory partitions that contain symbols
1499 from non-conflicting alias sets, memory partitions are
1500 associated to the memory tag that holds S in its alias set. So,
1501 when looking for a memory partition for S, the memory partition
1502 associated with one of the memory tags holding S is chosen. If
1503 none exists, a new one is created.
1505 2- Add S to memory partition MP.
1507 3- Reduce by 1 the number of VOPS for every memory tag holding S.
1509 4- If the total number of VOPS is less than MAX_ALIASED_VOPS or the
1510 average number of VOPS per statement is less than
1511 AVG_ALIASED_VOPS, stop. Otherwise, go to the next symbol in the
sorted list.  */
1515 compute_memory_partitions (void)
1519 mem_sym_stats_t mp_p;
1520 VEC(mem_sym_stats_t,heap) *mp_info;
1522 VEC(tree,heap) *tags;
1523 struct mem_ref_stats_d *mem_ref_stats;
1524 int prev_max_aliased_vops;
1526 mem_ref_stats = gimple_mem_ref_stats (cfun);
1527 gcc_assert (mem_ref_stats->num_vuses == 0 && mem_ref_stats->num_vdefs == 0);
1529 if (mem_ref_stats->num_mem_stmts == 0)
1532 timevar_push (TV_MEMORY_PARTITIONING);
/* MAX_ALIASED_VOPS is restored at the end of this function, so remember
   its incoming value before clamping it below.  */
1536 prev_max_aliased_vops = MAX_ALIASED_VOPS;
1538 /* Since we clearly cannot lower the number of virtual operands
1539 below the total number of memory statements in the function, we
1540 may need to adjust MAX_ALIASED_VOPS beforehand. */
1541 if (MAX_ALIASED_VOPS < mem_ref_stats->num_mem_stmts)
1542 MAX_ALIASED_VOPS = mem_ref_stats->num_mem_stmts;
1544 /* Update reference stats for all the pointed-to variables and
memory tags.  */
1546 update_reference_counts (mem_ref_stats);
1548 /* Add all the memory symbols to MP_INFO. */
1549 build_mp_info (mem_ref_stats, &mp_info, &tags);
1551 /* No partitions required if we are below the threshold. */
1552 if (!need_to_partition_p (mem_ref_stats))
1555 fprintf (dump_file, "\nMemory partitioning NOT NEEDED for %s\n",
1556 get_name (current_function_decl));
1560 /* Sort the MP_INFO array so that symbols that should be partitioned
1561 first are near the top of the list. */
1562 sort_mp_info (mp_info);
1566 fprintf (dump_file, "\nMemory partitioning NEEDED for %s\n\n",
1567 get_name (current_function_decl));
1568 fprintf (dump_file, "Memory symbol references before partitioning:\n");
1569 dump_mp_info (dump_file, mp_info);
1572 /* Create partitions for variables in MP_INFO until we have enough
1573 to lower the total number of VOPS below MAX_ALIASED_VOPS or if
1574 the average number of VOPS per statement is below
1575 AVG_ALIASED_VOPS. */
1576 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
1580 /* If we are below the threshold, stop. */
1581 if (!need_to_partition_p (mem_ref_stats))
1584 mpt = find_partition_for (mp_p);
1585 estimate_vop_reduction (mem_ref_stats, mp_p, mpt);
1588 /* After partitions have been created, rewrite alias sets to use
1589 them instead of the original symbols. This way, if the alias set
1590 was computed as { a b c d e f }, and the subset { b e f } was
1591 grouped into partition MPT.3, then the new alias set for the tag
1592 will be { a c d MPT.3 }.
1594 Note that this is not strictly necessary. The operand scanner
1595 will always check if a symbol belongs to a partition when adding
1596 virtual operands. However, by reducing the size of the alias
1597 sets to be scanned, the work needed inside the operand scanner is
1598 significantly reduced. */
1599 new_aliases = BITMAP_ALLOC (&alias_bitmap_obstack);
1601 for (i = 0; VEC_iterate (tree, tags, i, tag); i++)
1603 rewrite_alias_set_for (tag, new_aliases);
1604 bitmap_clear (new_aliases);
1607 BITMAP_FREE (new_aliases);
1611 fprintf (dump_file, "\nMemory symbol references after partitioning:\n");
1612 dump_mp_info (dump_file, mp_info);
1616 /* Free allocated memory. */
1617 VEC_free (mem_sym_stats_t, heap, mp_info);
1618 VEC_free (tree, heap, tags);
1620 MAX_ALIASED_VOPS = prev_max_aliased_vops;
1622 timevar_pop (TV_MEMORY_PARTITIONING);
1625 /* Compute may-alias information for every variable referenced in function
1628 Alias analysis proceeds in 3 main phases:
1630 1- Points-to and escape analysis.
1632 This phase walks the use-def chains in the SSA web looking for three
1635 * Assignments of the form P_i = &VAR
1636 * Assignments of the form P_i = malloc()
1637 * Pointers and ADDR_EXPR that escape the current function.
1639 The concept of 'escaping' is the same one used in the Java world. When
1640 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
1641 outside of the current function. So, assignment to global variables,
1642 function arguments and returning a pointer are all escape sites, as are
1643 conversions between pointers and integers.
1645 This is where we are currently limited. Since not everything is renamed
1646 into SSA, we lose track of escape properties when a pointer is stashed
1647 inside a field in a structure, for instance. In those cases, we are
1648 assuming that the pointer does escape.
1650 We use escape analysis to determine whether a variable is
1651 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
1652 is call-clobbered. If a pointer P_i escapes, then all the variables
1653 pointed-to by P_i (and its memory tag) also escape.
1655 2- Compute flow-sensitive aliases
1657 We have two classes of memory tags. Memory tags associated with the
1658 pointed-to data type of the pointers in the program. These tags are
1659 called "symbol memory tag" (SMT). The other class are those associated
1660 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
1661 when adding operands for an INDIRECT_REF *P_i, we will first check
1662 whether P_i has a name tag, if it does we use it, because that will have
1663 more precise aliasing information. Otherwise, we use the standard symbol
1666 In this phase, we go through all the pointers we found in points-to
1667 analysis and create alias sets for the name memory tags associated with
1668 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
1669 it points to and its tag.
1672 3- Compute flow-insensitive aliases
1674 This pass will compare the alias set of every symbol memory tag and
1675 every addressable variable found in the program. Given a symbol
1676 memory tag SMT and an addressable variable V. If the alias sets of
1677 SMT and V conflict (as computed by may_alias_p), then V is marked
1678 as an alias tag and added to the alias set of SMT.
1680 For instance, consider the following function:
1696 After aliasing analysis has finished, the symbol memory tag for pointer
1697 'p' will have two aliases, namely variables 'a' and 'b'. Every time
1698 pointer 'p' is dereferenced, we want to mark the operation as a
1699 potential reference to 'a' and 'b'.
1709 # p_1 = PHI <p_4(1), p_6(2)>;
1724 In certain cases, the list of may aliases for a pointer may grow too
1725 large. This may cause an explosion in the number of virtual operands
1726 inserted in the code. Resulting in increased memory consumption and
1729 When the number of virtual operands needed to represent aliased
1730 loads and stores grows too large (configurable with option --param
1731 max-aliased-vops and --param avg-aliased-vops), alias sets are
1732 grouped to avoid severe compile-time slow downs and memory
1733 consumption. See compute_memory_partitions. */
1736 compute_may_aliases (void)
1738 struct alias_info *ai;
1740 timevar_push (TV_TREE_MAY_ALIAS);
1742 memset (&alias_stats, 0, sizeof (alias_stats));
1744 /* Initialize aliasing information. */
1745 ai = init_alias_info ();
1747 /* For each pointer P_i, determine the sets of variables that P_i may
1748 point-to. For every addressable variable V, determine whether the
1749 address of V escapes the current function, making V call-clobbered
1750 (i.e., whether &V is stored in a global variable or if it's passed as a
1751 function call argument). */
1752 compute_points_to_sets ();
1754 /* Update various related attributes like escaped addresses,
1755 pointer dereferences for loads and stores. This is used
1756 when creating name tags and alias sets. */
1757 update_alias_info (ai);
1759 /* Collect all pointers and addressable variables, compute alias sets,
1760 create memory tags for pointers and promote variables whose address is
1761 not needed anymore. */
1762 setup_pointers_and_addressables (ai);
1764 /* Compute type-based flow-insensitive aliasing for all the type
memory tags.  */
1766 compute_flow_insensitive_aliasing (ai);
1768 /* Compute flow-sensitive, points-to based aliasing for all the name
memory tags.  */
1770 compute_flow_sensitive_aliasing (ai);
1772 /* Compute call clobbering information. */
1773 compute_call_clobbered (ai);
1775 /* If the program makes no reference to global variables, but it
1776 contains a mixture of pure and non-pure functions, then we need
1777 to create use-def and def-def links between these functions to
1778 avoid invalid transformations on them. */
1779 maybe_create_global_var ();
1781 /* Compute memory partitions for every memory variable. */
1782 compute_memory_partitions ();
1784 /* Remove partitions with no symbols. Partitions may end up with an
1785 empty MPT_SYMBOLS set if a previous round of alias analysis
1786 needed to partition more symbols. Since we don't need those
1787 partitions anymore, remove them to free up the space. */
1791 VEC(tree,heap) *mpt_table;
1793 mpt_table = gimple_ssa_operands (cfun)->mpt_table;
1795 while (i < VEC_length (tree, mpt_table))
1797 mpt = VEC_index (tree, mpt_table, i);
1798 if (MPT_SYMBOLS (mpt) == NULL)
1799 VEC_unordered_remove (tree, mpt_table, i);
1805 /* Populate all virtual operands and newly promoted register operands. */
1807 gimple_stmt_iterator gsi;
1810 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1811 update_stmt_if_modified (gsi_stmt (gsi));
1814 /* Debugging dumps. */
1817 dump_mem_ref_stats (dump_file);
1818 dump_alias_info (dump_file);
1819 dump_points_to_info (dump_file);
1821 if (dump_flags & TDF_STATS)
1822 dump_alias_stats (dump_file);
1824 if (dump_flags & TDF_DETAILS)
1825 dump_referenced_vars (dump_file);
1828 /* Report strict aliasing violations. */
1829 strict_aliasing_warning_backend ();
1831 /* Deallocate memory used by aliasing data structures. */
1832 delete_alias_info (ai);
1834 if (need_ssa_update_p ())
1835 update_ssa (TODO_update_ssa);
1837 timevar_pop (TV_TREE_MAY_ALIAS);
1842 /* Data structure used to count the number of dereferences to PTR
1843 inside an expression. */
/* NOTE(review): the struct declaration line, the PTR field and the
   num_loads field (used by count_ptr_derefs below) are elided from
   this listing — confirm against the full source.  */
1847 unsigned num_stores;
1852 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
1853 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
1856 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
1858 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1859 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
1861 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
1862 pointer 'ptr' is *not* dereferenced, it is simply used to compute
1863 the address of 'fld' as 'ptr + offsetof(fld)'. */
1864 if (TREE_CODE (*tp) == ADDR_EXPR)
1870 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
/* NOTE(review): the condition distinguishing stores from loads is
   elided here — presumably based on wi_p->is_lhs; confirm against
   the full source.  */
1873 count_p->num_stores++;
1875 count_p->num_loads++;
1882 /* Count the number of direct and indirect uses for pointer PTR in
1883 statement STMT. The number of direct uses is stored in
1884 *NUM_USES_P. Indirect references are counted separately depending
1885 on whether they are store or load operations. The counts are
1886 stored in *NUM_STORES_P and *NUM_LOADS_P. */
1889 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
1890 unsigned *num_loads_p, unsigned *num_stores_p)
1899 /* Find out the total number of uses of PTR in STMT. */
1900 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
1904 /* Now count the number of indirect references to PTR. This is
1905 truly awful, but we don't have much choice. There are no parent
1906 pointers inside INDIRECT_REFs, so an expression like
1907 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
1908 find all the indirect and direct uses of x_1 inside. The only
1909 shortcut we can take is the fact that GIMPLE only allows
1910 INDIRECT_REFs inside the expressions below. */
1911 if (is_gimple_assign (stmt)
1912 || gimple_code (stmt) == GIMPLE_RETURN
1913 || gimple_code (stmt) == GIMPLE_ASM
1914 || is_gimple_call (stmt))
1916 struct walk_stmt_info wi;
1917 struct count_ptr_d count;
1920 count.num_stores = 0;
1921 count.num_loads = 0;
1923 memset (&wi, 0, sizeof (wi));
1925 walk_gimple_op (stmt, count_ptr_derefs, &wi);
1927 *num_stores_p = count.num_stores;
1928 *num_loads_p = count.num_loads;
/* Sanity check: every indirect reference is also a use, so the
   dereference counts can never exceed the total use count.  */
1931 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
1934 /* Remove memory references stats for function FN. */
1937 delete_mem_ref_stats (struct function *fn)
1939 if (gimple_mem_ref_stats (fn)->mem_sym_stats)
/* Release the allocation pool backing the per-symbol stats together
   with the pointer map that indexes them.  */
1941 free_alloc_pool (mem_sym_stats_pool);
1942 pointer_map_destroy (gimple_mem_ref_stats (fn)->mem_sym_stats);
1944 gimple_mem_ref_stats (fn)->mem_sym_stats = NULL;
1948 /* Initialize memory reference stats. */
1951 init_mem_ref_stats (void)
1953 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
/* Per-symbol stats records come from this pool; it is released in
   delete_mem_ref_stats.  */
1955 mem_sym_stats_pool = create_alloc_pool ("Mem sym stats",
1956 sizeof (struct mem_sym_stats_d),
/* Zero all counters and start with a fresh stats map.  */
1958 memset (mem_ref_stats, 0, sizeof (struct mem_ref_stats_d));
1959 mem_ref_stats->mem_sym_stats = pointer_map_create ();
1963 /* Helper for init_alias_info. Reset existing aliasing information. */
1966 reset_alias_info (void)
1968 referenced_var_iterator rvi;
1971 bitmap active_nmts, all_nmts;
1973 /* Clear the set of addressable variables. We do not need to clear
1974 the TREE_ADDRESSABLE bit on every symbol because we are going to
1975 re-compute addressability here. */
1976 bitmap_clear (gimple_addressable_vars (cfun));
1978 active_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);
1979 all_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);
1981 /* Clear flow-insensitive alias information from each symbol. */
1982 FOR_EACH_REFERENCED_VAR (var, rvi)
1984 if (is_gimple_reg (var))
1988 MTAG_ALIASES (var) = NULL;
1990 /* Memory partition information will be computed from scratch. */
1991 if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
1992 MPT_SYMBOLS (var) = NULL;
1994 /* Collect all the name tags to determine if we have any
1995 orphaned that need to be removed from the IL. A name tag
1996 will be orphaned if it is not associated with any active SSA
name.  */
1998 if (TREE_CODE (var) == NAME_MEMORY_TAG)
1999 bitmap_set_bit (all_nmts, DECL_UID (var));
2001 /* Since we are about to re-discover call-clobbered
2002 variables, clear the call-clobbered flag. */
2003 clear_call_clobbered (var);
2006 /* There should be no call-clobbered variable left. */
2007 gcc_assert (bitmap_empty_p (gimple_call_clobbered_vars (cfun)));
2009 /* Clear the call-used variables. */
2010 bitmap_clear (gimple_call_used_vars (cfun));
2012 /* Clear flow-sensitive points-to information from each SSA name. */
2013 for (i = 1; i < num_ssa_names; i++)
2015 tree name = ssa_name (i);
2017 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
2020 if (SSA_NAME_PTR_INFO (name))
2022 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
2024 /* Clear all the flags but keep the name tag to
2025 avoid creating new temporaries unnecessarily. If
2026 this pointer is found to point to a subset or
2027 superset of its former points-to set, then a new
2028 tag will need to be created in create_name_tags. */
2029 pi->pt_anything = 0;
2031 pi->value_escapes_p = 0;
2032 pi->memory_tag_needed = 0;
2033 pi->is_dereferenced = 0;
2035 bitmap_clear (pi->pt_vars);
2037 /* Add NAME's name tag to the set of active tags. */
2038 if (pi->name_mem_tag)
2039 bitmap_set_bit (active_nmts, DECL_UID (pi->name_mem_tag));
2043 /* Name memory tags that are no longer associated with an SSA name
2044 are considered stale and should be removed from the IL. All the
2045 name tags that are in the set ALL_NMTS but not in ACTIVE_NMTS are
2046 considered stale and marked for renaming. */
2047 bitmap_and_compl_into (all_nmts, active_nmts);
2048 mark_set_for_renaming (all_nmts);
2050 BITMAP_FREE (all_nmts);
2051 BITMAP_FREE (active_nmts);
2055 /* Initialize the data structures used for alias analysis.
   Returns a freshly allocated alias_info; the caller releases it
   with delete_alias_info (see compute_may_aliases).  */
2057 static struct alias_info *
2058 init_alias_info (void)
2060 struct alias_info *ai;
2061 referenced_var_iterator rvi;
/* Tracks whether the obstack below has been set up in a previous
   invocation, so it can be released before being re-initialized.  */
2063 static bool alias_bitmap_obstack_initialized;
2065 ai = XCNEW (struct alias_info);
2066 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
2067 sbitmap_zero (ai->ssa_names_visited);
2068 ai->processed_ptrs = VEC_alloc (tree, heap, 50);
2069 ai->dereferenced_ptrs = pointer_set_create ();
2071 /* Clear out all memory reference stats. */
2072 init_mem_ref_stats ();
2074 /* If aliases have been computed before, clear existing information. */
2075 if (gimple_aliases_computed_p (cfun))
2076 reset_alias_info ();
2079 /* If this is the first time we compute aliasing information,
2080 every non-register symbol will need to be put into SSA form
2081 (the initial SSA form only operates on GIMPLE registers). */
2082 FOR_EACH_REFERENCED_VAR (var, rvi)
2083 if (!is_gimple_reg (var))
2084 mark_sym_for_renaming (var);
2087 /* Next time, we will need to reset alias information. */
2088 cfun->gimple_df->aliases_computed_p = true;
2089 if (alias_bitmap_obstack_initialized)
2090 bitmap_obstack_release (&alias_bitmap_obstack);
2091 bitmap_obstack_initialize (&alias_bitmap_obstack);
2092 alias_bitmap_obstack_initialized = true;
2098 /* Deallocate memory used by alias analysis. */
2101 delete_alias_info (struct alias_info *ai)
2105 sbitmap_free (ai->ssa_names_visited);
2107 VEC_free (tree, heap, ai->processed_ptrs);
/* Free each per-variable record before the arrays that own them.  */
2109 for (i = 0; i < ai->num_addressable_vars; i++)
2110 free (ai->addressable_vars[i]);
2111 free (ai->addressable_vars);
2113 for (i = 0; i < ai->num_pointers; i++)
2114 free (ai->pointers[i]);
2115 free (ai->pointers);
2117 pointer_set_destroy (ai->dereferenced_ptrs);
2120 delete_mem_ref_stats (cfun);
2121 delete_points_to_sets ();
2125 /* Used for hashing to identify pointer infos with identical
points-to sets (pt_vars).  */
2129 eq_ptr_info (const void *p1, const void *p2)
2131 const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
2132 const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
2133 return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
/* Hash a ptr_info_def by the bitmap of its points-to set; pairs with
   eq_ptr_info above for the hash table used in create_name_tags.  */
2137 ptr_info_hash (const void *p)
2139 const struct ptr_info_def *n = (const struct ptr_info_def *) p;
2140 return bitmap_hash (n->pt_vars);
2144 /* Create name tags for all the pointers that have been dereferenced.
2145 We only create a name tag for a pointer P if P is found to point to
2146 a set of variables (so that we can alias them to *P) or if it is
2147 the result of a call to malloc (which means that P cannot point to
2148 anything else nor alias any other variable).
2150 If two pointers P and Q point to the same set of variables, they
2151 are assigned the same name tag. */
2154 create_name_tags (void)
2157 VEC (tree, heap) *with_ptvars = NULL;
2161 /* Collect the list of pointers with a non-empty points to set. */
2162 for (i = 1; i < num_ssa_names; i++)
2164 tree ptr = ssa_name (i);
2165 struct ptr_info_def *pi;
2168 || !POINTER_TYPE_P (TREE_TYPE (ptr))
2169 || !SSA_NAME_PTR_INFO (ptr))
2172 pi = SSA_NAME_PTR_INFO (ptr);
2174 if (pi->pt_anything || !pi->memory_tag_needed)
2176 /* No name tags for pointers that have not been
2177 dereferenced or point to an arbitrary location. */
2178 pi->name_mem_tag = NULL_TREE;
2182 /* Set pt_anything on the pointers without pt_vars filled in so
2183 that they are assigned a symbol tag. */
2184 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
2185 VEC_safe_push (tree, heap, with_ptvars, ptr);
2187 set_pt_anything (ptr);
2190 /* If we didn't find any pointers with pt_vars set, we're done. */
2194 ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);
2196 /* Now go through the pointers with pt_vars, and find a name tag
2197 with the same pt_vars as this pointer, or create one if one
doesn't exist.  */
2199 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
2201 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2202 tree old_name_tag = pi->name_mem_tag;
2203 struct ptr_info_def **slot;
2205 /* If PTR points to a set of variables, check if we don't
2206 have another pointer Q with the same points-to set before
2207 creating a tag. If so, use Q's tag instead of creating a
new one.
2210 This is important for not creating unnecessary symbols
2211 and also for copy propagation. If we ever need to
2212 propagate PTR into Q or vice-versa, we would run into
2213 problems if they both had different name tags because
2214 they would have different SSA version numbers (which
2215 would force us to take the name tags in and out of SSA). */
2216 slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
2218 pi->name_mem_tag = (*slot)->name_mem_tag;
2223 /* If we didn't find a pointer with the same points-to set
2224 as PTR, create a new name tag if needed. */
2225 if (pi->name_mem_tag == NULL_TREE)
2226 pi->name_mem_tag = get_nmt_for (ptr);
2229 /* If the new name tag computed for PTR is different than
2230 the old name tag that it used to have, then the old tag
2231 needs to be removed from the IL, so we mark it for
renaming.  */
2233 if (old_name_tag && old_name_tag != pi->name_mem_tag)
2234 mark_sym_for_renaming (old_name_tag);
2236 /* Inherit volatility from the pointed-to type. */
2237 TREE_THIS_VOLATILE (pi->name_mem_tag)
2238 |= TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
2240 /* Mark the new name tag for renaming. */
2241 mark_sym_for_renaming (pi->name_mem_tag);
2244 htab_delete (ptr_hash);
2246 VEC_free (tree, heap, with_ptvars);
2250 /* Union the alias set SET into the may-aliases for TAG. */
2253 union_alias_set_into (tree tag, bitmap set)
2255 bitmap ma = MTAG_ALIASES (tag);
/* An empty SET contributes nothing; the early-exit statement after
   this check is missing from this extract.  */
2257 if (bitmap_empty_p (set))
/* Lazily allocate TAG's alias bitmap on the shared alias obstack
   before OR-ing SET into it.  */
2261 ma = MTAG_ALIASES (tag) = BITMAP_ALLOC (&alias_bitmap_obstack);
2262 bitmap_ior_into (ma, set);
2266 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
2267 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
2268 name tag and the variables it points-to are call-clobbered. Finally, if
2269 P_i escapes and we could not determine where it points to, then all the
2270 variables in the same alias set as *P_i are marked call-clobbered. This
2271 is necessary because we must assume that P_i may take the address of any
2272 variable in the same alias set. */
2275 compute_flow_sensitive_aliasing (struct alias_info *ai)
2280 timevar_push (TV_FLOW_SENSITIVE);
/* First resolve points-to information for each processed pointer;
   pointers the solver cannot resolve are degraded to pt_anything.  */
2282 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2284 if (!find_what_p_points_to (ptr))
2285 set_pt_anything (ptr);
2288 create_name_tags ();
/* Then populate the may-alias set of each name tag from the
   pointer's pt_vars bitmap.  */
2290 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2292 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2294 /* Set up aliasing information for PTR's name memory tag (if it has
2295 one). Note that only pointers that have been dereferenced will
2296 have a name memory tag. */
2297 if (pi->name_mem_tag && pi->pt_vars)
2299 if (!bitmap_empty_p (pi->pt_vars))
2300 union_alias_set_into (pi->name_mem_tag, pi->pt_vars);
2303 timevar_pop (TV_FLOW_SENSITIVE);
2307 /* Return TRUE if at least one symbol in TAG2's alias set is also
2308 present in TAG1's alias set. */
2311 have_common_aliases_p (bitmap tag1aliases, bitmap tag2aliases)
2314 /* This is the old behavior of have_common_aliases_p, which is to
2315 return false if both sets are empty, or one set is and the other
/* Either bitmap being NULL means "no aliases recorded"; the return
   statement for this case is missing from this extract.  */
2317 if (tag1aliases == NULL || tag2aliases == NULL)
2320 return bitmap_intersect_p (tag1aliases, tag2aliases);
2323 /* Compute type-based alias sets. Traverse all the pointers and
2324 addressable variables found in setup_pointers_and_addressables.
2326 For every pointer P in AI->POINTERS and addressable variable V in
2327 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
2328 memory tag (SMT) if their alias sets conflict. V is then marked as
2329 an aliased symbol so that the operand scanner knows that statements
2330 containing V have aliased operands. */
2333 compute_flow_insensitive_aliasing (struct alias_info *ai)
2335 referenced_var_iterator rvi;
2339 timevar_push (TV_FLOW_INSENSITIVE);
/* Phase 1: pair every pointer's SMT with every addressable variable
   whose alias set conflicts (O(pointers * addressables)).  */
2340 /* For every pointer P, determine which addressable variables may alias
2341 with P's symbol memory tag. */
2342 for (i = 0; i < ai->num_pointers; i++)
2345 struct alias_map_d *p_map = ai->pointers[i];
2346 tree tag = symbol_mem_tag (p_map->var);
2349 for (j = 0; j < ai->num_addressable_vars; j++)
2351 struct alias_map_d *v_map;
2354 v_map = ai->addressable_vars[j];
/* NOTE(review): the assignment of `var` (presumably from v_map->var)
   is among the lines missing from this extract.  */
2356 v_ann = var_ann (var);
2358 /* We used to skip variables that have never been written to
2359 if the memory tag has been never written to directly (or
2360 either of them were call clobbered). This is not enough
2361 though, as this misses writes through the tags aliases.
2362 So, for correctness we need to include any aliased
2365 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
2367 /* Add VAR to TAG's may-aliases set. */
2368 add_may_alias (tag, var);
/* Phase 2: connect SMTs that conflict by alias set but share no
   symbols, to avoid the independence bug described below.  */
2373 /* Since this analysis is based exclusively on symbols, it fails to
2374 handle cases where two pointers P and Q have different memory
2375 tags with conflicting alias set numbers but no aliased symbols in
2378 For example, suppose that we have two memory tags SMT.1 and SMT.2
2381 may-aliases (SMT.1) = { a }
2382 may-aliases (SMT.2) = { b }
2384 and the alias set number of SMT.1 conflicts with that of SMT.2.
2385 Since they don't have symbols in common, loads and stores from
2386 SMT.1 and SMT.2 will seem independent of each other, which will
2387 lead to the optimizers making invalid transformations (see
2388 testsuite/gcc.c-torture/execute/pr15262-[12].c).
2390 To avoid this problem, we do a final traversal of AI->POINTERS
2391 looking for pairs of pointers that have no aliased symbols in
2392 common and yet have conflicting alias set numbers. */
2393 for (i = 0; i < ai->num_pointers; i++)
2396 struct alias_map_d *p_map1 = ai->pointers[i];
2397 tree tag1 = symbol_mem_tag (p_map1->var);
2398 bitmap may_aliases1 = MTAG_ALIASES (tag1);
2400 for (j = 0; j < ai->num_pointers; j++)
2402 struct alias_map_d *p_map2 = ai->pointers[j];
2403 tree tag2 = symbol_mem_tag (p_map2->var);
2404 bitmap may_aliases2 = may_aliases (tag2);
2406 /* By convention tags don't alias themselves. */
2410 /* If the pointers may not point to each other, do nothing. */
2411 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
2414 /* The two pointers may alias each other. If they already have
2415 symbols in common, do nothing. */
2416 if (have_common_aliases_p (may_aliases1, may_aliases2))
2419 add_may_alias (tag1, tag2);
/* Phase 3: heap variables have unknown effective type, so they must
   alias every SMT.  */
2423 /* We have to add all HEAP variables to all SMTs aliases bitmaps.
2424 As we don't know which effective type the HEAP will have we cannot
2425 do better here and we need the conflicts with obfuscated pointers
2426 (a simple (*(int[n] *)ptr)[i] will do, with ptr from a VLA array
2428 for (i = 0; i < ai->num_pointers; i++)
2430 struct alias_map_d *p_map = ai->pointers[i];
2431 tree tag = symbol_mem_tag (p_map->var);
2433 FOR_EACH_REFERENCED_VAR (var, rvi)
2435 if (var_ann (var)->is_heapvar)
2436 add_may_alias (tag, var);
2440 timevar_pop (TV_FLOW_INSENSITIVE);
2444 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
2447 create_alias_map_for (tree var, struct alias_info *ai)
2449 struct alias_map_d *alias_map;
/* Allocate a zeroed map entry, record VAR and its TBAA alias set,
   and append it to the addressable-vars array.  */
2450 alias_map = XCNEW (struct alias_map_d);
2451 alias_map->var = var;
2452 alias_map->set = get_alias_set (var);
2453 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
2457 /* Update related alias information kept in AI. This is used when
2458 building name tags, alias sets and deciding grouping heuristics.
2459 STMT is the statement to process. This function also updates
2460 ADDRESSABLE_VARS. */
/* NOTE(review): this extract is missing lines (embedded numbering has
   gaps), so several declarations and closing braces are not visible.  */
2463 update_alias_info_1 (gimple stmt, struct alias_info *ai)
2466 use_operand_p use_p;
2468 bool stmt_dereferences_ptr_p;
2469 enum escape_type stmt_escape_type = is_escape_site (stmt);
2470 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
2472 stmt_dereferences_ptr_p = false;
/* Account call/asm sites in the function-wide memory-reference
   statistics.  */
2474 if (stmt_escape_type == ESCAPE_TO_CALL
2475 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
2477 mem_ref_stats->num_call_sites++;
2478 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
2479 mem_ref_stats->num_pure_const_call_sites++;
2481 else if (stmt_escape_type == ESCAPE_TO_ASM)
2482 mem_ref_stats->num_asm_sites++;
2484 /* Mark all the variables whose address are taken by the statement. */
2485 addr_taken = gimple_addresses_taken (stmt);
2487 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
2489 /* Process each operand use. For pointers, determine whether they
2490 are dereferenced by the statement, or whether their value
2492 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
2496 struct ptr_info_def *pi;
2497 unsigned num_uses, num_loads, num_stores;
2499 op = USE_FROM_PTR (use_p);
2501 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
2502 to the set of addressable variables. */
2503 if (TREE_CODE (op) == ADDR_EXPR)
2505 bitmap addressable_vars = gimple_addressable_vars (cfun);
2507 gcc_assert (gimple_code (stmt) == GIMPLE_PHI);
2508 gcc_assert (addressable_vars);
2510 /* PHI nodes don't have annotations for pinning the set
2511 of addresses taken, so we collect them here.
2513 FIXME, should we allow PHI nodes to have annotations
2514 so that they can be treated like regular statements?
2515 Currently, they are treated as second-class
2517 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
2521 /* Ignore constants (they may occur in PHI node arguments). */
2522 if (TREE_CODE (op) != SSA_NAME)
2525 var = SSA_NAME_VAR (op);
2526 v_ann = var_ann (var);
2528 /* The base variable of an SSA name must be a GIMPLE register, and thus
2529 it cannot be aliased. */
2530 gcc_assert (!may_be_aliased (var));
2532 /* We are only interested in pointers. */
2533 if (!POINTER_TYPE_P (TREE_TYPE (op)))
2536 pi = get_ptr_info (op);
2538 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
2539 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
2541 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
2542 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
2545 /* If STMT is a PHI node, then it will not have pointer
2546 dereferences and it will not be an escape point. */
2547 if (gimple_code (stmt) == GIMPLE_PHI)
2550 /* Determine whether OP is a dereferenced pointer, and if STMT
2551 is an escape point, whether OP escapes. */
2552 count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
2554 /* For directly dereferenced pointers we can apply
2555 TBAA-pruning to their points-to set. We may not count the
2556 implicit dereferences &PTR->FLD here. */
2557 if (num_loads + num_stores > 0)
2558 pi->is_dereferenced = 1;
2560 /* Handle a corner case involving address expressions of the
2561 form '&PTR->FLD'. The problem with these expressions is that
2562 they do not represent a dereference of PTR. However, if some
2563 other transformation propagates them into an INDIRECT_REF
2564 expression, we end up with '*(&PTR->FLD)' which is folded
2567 So, if the original code had no other dereferences of PTR,
2568 the aliaser will not create memory tags for it, and when
2569 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
2570 memory operations will receive no VDEF/VUSE operands.
2572 One solution would be to have count_uses_and_derefs consider
2573 &PTR->FLD a dereference of PTR. But that is wrong, since it
2574 is not really a dereference but an offset calculation.
2576 What we do here is to recognize these special ADDR_EXPR
2577 nodes. Since these expressions are never GIMPLE values (they
2578 are not GIMPLE invariants), they can only appear on the RHS
2579 of an assignment and their base address is always an
2580 INDIRECT_REF expression. */
2581 if (is_gimple_assign (stmt)
2582 && gimple_assign_rhs_code (stmt) == ADDR_EXPR
2583 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2585 /* If the RHS if of the form &PTR->FLD and PTR == OP, then
2586 this represents a potential dereference of PTR. */
2587 tree rhs = gimple_assign_rhs1 (stmt);
2588 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2589 if (TREE_CODE (base) == INDIRECT_REF
2590 && TREE_OPERAND (base, 0) == op)
2594 if (num_loads + num_stores > 0)
2596 /* Mark OP as dereferenced. In a subsequent pass,
2597 dereferenced pointers that point to a set of
2598 variables will be assigned a name tag to alias
2599 all the variables OP points to. */
2600 pi->memory_tag_needed = 1;
2602 /* ??? For always executed direct dereferences we can
2603 apply TBAA-pruning to their escape set. */
2605 /* Mark OP as being dereferenced. */
2606 pointer_set_insert (ai->dereferenced_ptrs, var);
2608 /* Update the frequency estimate for all the dereferences of
2610 update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);
2612 /* Indicate that STMT contains pointer dereferences. */
2613 stmt_dereferences_ptr_p = true;
/* num_loads + num_stores < num_uses means at least one use of OP
   is direct (not through a dereference), so its value escapes.  */
2616 if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
2618 /* If STMT is an escape point and STMT contains at
2619 least one direct use of OP, then the value of OP
2620 escapes and so the pointed-to variables need to
2621 be marked call-clobbered. */
2622 pi->value_escapes_p = 1;
2623 pi->escape_mask |= stmt_escape_type;
2625 /* If the statement makes a function call, assume
2626 that pointer OP will be dereferenced in a store
2627 operation inside the called function. */
2628 if (is_gimple_call (stmt)
2629 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
2631 pointer_set_insert (ai->dereferenced_ptrs, var);
2632 pi->memory_tag_needed = 1;
/* PHI nodes reference no memory directly; stop here for them.  */
2637 if (gimple_code (stmt) == GIMPLE_PHI)
2640 /* Mark stored variables in STMT as being written to and update the
2641 memory reference stats for all memory symbols referenced by STMT. */
2642 if (gimple_references_memory_p (stmt))
2647 mem_ref_stats->num_mem_stmts++;
2649 /* Notice that we only update memory reference stats for symbols
2650 loaded and stored by the statement if the statement does not
2651 contain pointer dereferences and it is not a call/asm site.
2652 This is to avoid double accounting problems when creating
2653 memory partitions. After computing points-to information,
2654 pointer dereference statistics are used to update the
2655 reference stats of the pointed-to variables, so here we
2656 should only update direct references to symbols.
2658 Indirect references are not updated here for two reasons: (1)
2659 The first time we compute alias information, the sets
2660 LOADED/STORED are empty for pointer dereferences, (2) After
2661 partitioning, LOADED/STORED may have references to
2662 partitions, not the original pointed-to variables. So, if we
2663 always counted LOADED/STORED here and during partitioning, we
2664 would count many symbols more than once.
2666 This does cause some imprecision when a statement has a
2667 combination of direct symbol references and pointer
2668 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
2669 memory symbols in its argument list, but these cases do not
2670 occur so frequently as to constitute a serious problem. */
2671 if (!stmt_dereferences_ptr_p
2672 && stmt_escape_type != ESCAPE_TO_CALL
2673 && stmt_escape_type != ESCAPE_TO_PURE_CONST
2674 && stmt_escape_type != ESCAPE_TO_ASM)
2676 if (gimple_stored_syms (stmt))
2677 EXECUTE_IF_SET_IN_BITMAP (gimple_stored_syms (stmt), 0, i, bi)
2678 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 0, 1);
2680 if (gimple_loaded_syms (stmt))
2681 EXECUTE_IF_SET_IN_BITMAP (gimple_loaded_syms (stmt), 0, i, bi)
2682 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
2687 /* Update various related attributes like escaped addresses,
2688 pointer dereferences for loads and stores. This is used
2689 when creating name tags and alias sets. */
2692 update_alias_info (struct alias_info *ai)
2698 gimple_stmt_iterator gsi;
/* For each basic block (the FOR_EACH_BB line is missing from this
   extract): process PHIs for GIMPLE registers, then every statement.  */
2701 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2703 phi = gsi_stmt (gsi);
2704 if (is_gimple_reg (PHI_RESULT (phi)))
2705 update_alias_info_1 (phi, ai);
2708 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2709 update_alias_info_1 (gsi_stmt (gsi), ai);
2713 /* Create memory tags for all the dereferenced pointers and build the
2714 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
2715 sets. Based on the address escape and points-to information collected
2716 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
2717 variables whose address is not needed anymore. */
2720 setup_pointers_and_addressables (struct alias_info *ai)
2722 size_t num_addressable_vars, num_pointers;
2723 referenced_var_iterator rvi;
2725 VEC (tree, heap) *varvec = NULL;
2726 safe_referenced_var_iterator srvi;
/* Pass 1: count how many entries each array will need.  */
2728 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
2729 num_addressable_vars = num_pointers = 0;
2731 FOR_EACH_REFERENCED_VAR (var, rvi)
2733 if (may_be_aliased (var))
2734 num_addressable_vars++;
2736 if (POINTER_TYPE_P (TREE_TYPE (var)))
2738 /* Since we don't keep track of volatile variables, assume that
2739 these pointers are used in indirect store operations. */
2740 if (TREE_THIS_VOLATILE (var))
2741 pointer_set_insert (ai->dereferenced_ptrs, var);
/* NOTE(review): the num_pointers increment is among the lines
   missing from this extract.  */
2747 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
2748 always going to be slightly bigger than we actually need them
2749 because some TREE_ADDRESSABLE variables will be marked
2750 non-addressable below and only pointers with unique symbol tags are
2751 going to be added to POINTERS. */
2752 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
2753 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
2754 ai->num_addressable_vars = 0;
2755 ai->num_pointers = 0;
/* Pass 2: populate the arrays and demote variables whose address is
   no longer taken.  */
2757 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
2759 /* Name memory tags already have flow-sensitive aliasing
2760 information, so they need not be processed by
2761 compute_flow_insensitive_aliasing. Similarly, symbol memory
2762 tags are already accounted for when we process their
2765 Structure fields, on the other hand, have to have some of this
2766 information processed for them, but it's pointless to mark them
2767 non-addressable (since they are fake variables anyway). */
2771 /* Remove the ADDRESSABLE flag from every addressable variable whose
2772 address is not needed anymore. This is caused by the propagation
2773 of ADDR_EXPR constants into INDIRECT_REF expressions and the
2774 removal of dead pointer assignments done by the early scalar
2776 if (TREE_ADDRESSABLE (var))
2778 if (!bitmap_bit_p (gimple_addressable_vars (cfun), DECL_UID (var))
2779 && TREE_CODE (var) != RESULT_DECL
2780 && !is_global_var (var))
2782 bool okay_to_mark = true;
2784 /* Since VAR is now a regular GIMPLE register, we will need
2785 to rename VAR into SSA afterwards. */
2786 mark_sym_for_renaming (var);
2788 /* The address of VAR is not needed, remove the
2789 addressable bit, so that it can be optimized as a
2790 regular variable. */
2793 /* The memory partition holding VAR will no longer
2794 contain VAR, and statements referencing it will need
2796 if (memory_partition (var))
2797 mark_sym_for_renaming (memory_partition (var));
2799 mark_non_addressable (var);
2804 /* Global variables and addressable locals may be aliased. Create an
2805 entry in ADDRESSABLE_VARS for VAR. */
2806 if (may_be_aliased (var))
2808 create_alias_map_for (var, ai);
2809 mark_sym_for_renaming (var);
2812 /* Add pointer variables that have been dereferenced to the POINTERS
2813 array and create a symbol memory tag for them. */
2814 if (POINTER_TYPE_P (TREE_TYPE (var)))
2816 if (pointer_set_contains (ai->dereferenced_ptrs, var))
2821 /* If pointer VAR still doesn't have a memory tag
2822 associated with it, create it now or re-use an
2824 tag = get_smt_for (var, ai);
2825 t_ann = var_ann (tag);
2827 /* The symbol tag will need to be renamed into SSA
2828 afterwards. Note that we cannot do this inside
2829 get_smt_for because aliasing may run multiple times
2830 and we only create symbol tags the first time. */
2831 mark_sym_for_renaming (tag);
2833 /* Similarly, if pointer VAR used to have another type
2834 tag, we will need to process it in the renamer to
2835 remove the stale virtual operands. */
2836 old_tag = symbol_mem_tag (var);
2838 mark_sym_for_renaming (old_tag);
2840 /* Associate the tag with pointer VAR. */
2841 set_symbol_mem_tag (var, tag);
2845 /* The pointer has not been dereferenced. If it had a
2846 symbol memory tag, remove it and mark the old tag for
2847 renaming to remove it out of the IL. */
2848 tree tag = symbol_mem_tag (var);
2851 mark_sym_for_renaming (tag);
2852 set_symbol_mem_tag (var, NULL_TREE);
2858 VEC_free (tree, heap, varvec);
2862 /* Determine whether to use .GLOBAL_VAR to model call clobbering
2863 semantics. If the function makes no references to global
2864 variables and contains at least one call to a non-pure function,
2865 then we need to mark the side-effects of the call using .GLOBAL_VAR
2866 to represent all possible global memory referenced by the callee. */
2869 maybe_create_global_var (void)
2871 /* No need to create it, if we have one already. */
2872 if (gimple_global_var (cfun) == NULL_TREE)
2874 struct mem_ref_stats_d *stats = gimple_mem_ref_stats (cfun);
2876 /* Create .GLOBAL_VAR if there are no call-clobbered
2877 variables and the program contains a mixture of pure/const
2878 and regular function calls. This is to avoid the problem
2879 described in PR 20115:
2882 int func_pure (void) { return X; }
2883 int func_non_pure (int a) { X += a; }
2886 int a = func_pure ();
2892 Since foo() has no call-clobbered variables, there is
2893 no relationship between the calls to func_pure and
2894 func_non_pure. Since func_pure has no side-effects, value
2895 numbering optimizations elide the second call to func_pure.
2896 So, if we have some pure/const and some regular calls in the
2897 program we create .GLOBAL_VAR to avoid missing these
/* All four conditions must hold: no call-clobbered vars, at least
   one call, at least one pure/const call, and at least one
   non-pure call (the strict inequality on the last line).  */
2899 if (bitmap_empty_p (gimple_call_clobbered_vars (cfun))
2900 && stats->num_call_sites > 0
2901 && stats->num_pure_const_call_sites > 0
2902 && stats->num_call_sites > stats->num_pure_const_call_sites)
2903 create_global_var ();
2908 /* Return TRUE if pointer PTR may point to variable VAR.
2910 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
2911 This is needed because when checking for type conflicts we are
2912 interested in the alias set of the memory location pointed-to by
2913 PTR. The alias set of PTR itself is irrelevant.
2915 VAR_ALIAS_SET is the alias set for VAR. */
2918 may_alias_p (tree ptr, alias_set_type mem_alias_set,
2919 tree var, alias_set_type var_alias_set,
2920 bool alias_set_only)
2924 alias_stats.alias_queries++;
2925 alias_stats.simple_queries++;
2927 /* By convention, a variable cannot alias itself. */
2928 mem = symbol_mem_tag (ptr);
/* NOTE(review): the self-alias comparison guarding this branch is
   among the lines missing from this extract.  */
2931 alias_stats.alias_noalias++;
2932 alias_stats.simple_resolved++;
2936 /* If -fargument-noalias-global is > 2, pointer arguments may
2937 not point to anything else. */
2938 if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
2940 alias_stats.alias_noalias++;
2941 alias_stats.simple_resolved++;
2945 /* If -fargument-noalias-global is > 1, pointer arguments may
2946 not point to global variables. */
2947 if (flag_argument_noalias > 1 && is_global_var (var)
2948 && TREE_CODE (ptr) == PARM_DECL)
2950 alias_stats.alias_noalias++;
2951 alias_stats.simple_resolved++;
2955 /* If the pointed to memory has alias set zero, or the pointer
2956 is ref-all, or the pointer decl is marked that no TBAA is to
2957 be applied, the MEM can alias VAR. */
2958 if (mem_alias_set == 0
2959 || DECL_POINTER_ALIAS_SET (ptr) == 0
2960 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (ptr))
2961 || DECL_NO_TBAA_P (ptr))
2963 alias_stats.alias_mayalias++;
2964 alias_stats.simple_resolved++;
2968 gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
2970 alias_stats.tbaa_queries++;
2972 /* If the alias sets don't conflict then MEM cannot alias VAR. */
2973 if (mem_alias_set != var_alias_set
2974 && !alias_set_subset_of (mem_alias_set, var_alias_set))
2976 alias_stats.alias_noalias++;
2977 alias_stats.tbaa_resolved++;
2981 /* If VAR is a record or union type, PTR cannot point into VAR
2982 unless there is some explicit address operation in the
2983 program that can reference a field of the type pointed-to by
2984 PTR. This also assumes that the types of both VAR and PTR
2985 are contained within the compilation unit, and that there is
2986 no fancy addressing arithmetic associated with any of the
2988 if (mem_alias_set != 0 && var_alias_set != 0)
2990 tree ptr_type = TREE_TYPE (ptr);
2991 tree var_type = TREE_TYPE (var);
2993 /* The star count is -1 if the type at the end of the
2994 pointer_to chain is not a record or union type. */
/* This branch is currently dead: the `0 /* FIXME tuples ...` operand
   disables the ipa-type-escape refinement pending tuples work.  */
2995 if (!alias_set_only &&
2996 0 /* FIXME tuples ipa_type_escape_star_count_of_interesting_type (var_type) >= 0*/)
2998 int ptr_star_count = 0;
3000 /* ipa_type_escape_star_count_of_interesting_type is a
3001 little too restrictive for the pointer type, need to
3002 allow pointers to primitive types as long as those
3003 types cannot be pointers to everything. */
3004 while (POINTER_TYPE_P (ptr_type))
3006 /* Strip the *s off. */
3007 ptr_type = TREE_TYPE (ptr_type);
3011 /* There does not appear to be a better test to see if
3012 the pointer type was one of the pointer to everything
3014 if (ptr_star_count > 0)
3016 alias_stats.structnoaddress_queries++;
3017 if (ipa_type_escape_field_does_not_clobber_p (var_type,
3020 alias_stats.structnoaddress_resolved++;
3021 alias_stats.alias_noalias++;
3025 else if (ptr_star_count == 0)
3027 /* If PTR_TYPE was not really a pointer to type, it cannot
3029 alias_stats.structnoaddress_queries++;
3030 alias_stats.structnoaddress_resolved++;
3031 alias_stats.alias_noalias++;
/* Default: nothing proved no-alias, so report may-alias.  */
3037 alias_stats.alias_mayalias++;
3041 /* Return true, if PTR may point to a global variable. */
3044 may_point_to_global_var (tree ptr)
3046 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
/* Without points-to info or a name tag we must answer conservatively
   (the return statement for this case is missing from this extract).  */
3048 /* If we do not have points-to information for this variable,
3051 || !pi->name_mem_tag)
3054 /* The name memory tag is marked as global variable if the points-to
3055 set contains a global variable. */
3056 return is_global_var (pi->name_mem_tag);
3059 /* Add ALIAS to the set of variables that may alias VAR. */
3062 add_may_alias (tree var, tree alias)
3064 /* Don't allow self-referential aliases. */
3065 gcc_assert (var != alias);
3067 /* ALIAS must be addressable if it's being added to an alias set. */
3069 TREE_ADDRESSABLE (alias) = 1;
3071 gcc_assert (may_be_aliased (alias));
3074 /* VAR must be a symbol or a name tag. */
3075 gcc_assert (TREE_CODE (var) == SYMBOL_MEMORY_TAG
3076 || TREE_CODE (var) == NAME_MEMORY_TAG);
/* Lazily allocate VAR's alias bitmap, then record ALIAS by DECL_UID.  */
3078 if (MTAG_ALIASES (var) == NULL)
3079 MTAG_ALIASES (var) = BITMAP_ALLOC (&alias_bitmap_obstack);
3081 bitmap_set_bit (MTAG_ALIASES (var), DECL_UID (alias));
3085 /* Mark pointer PTR as pointing to an arbitrary memory location. */
3088 set_pt_anything (tree ptr)
3090 struct ptr_info_def *pi = get_ptr_info (ptr);
3092 pi->pt_anything = 1;
3093 /* Anything includes global memory. */
3094 pi->pt_global_mem = 1;
3097 /* The pointer used to have a name tag, but we now found it pointing
3098 to an arbitrary location. The name tag needs to be renamed and
3099 disassociated from PTR. */
3100 if (pi->name_mem_tag)
3102 mark_sym_for_renaming (pi->name_mem_tag);
3103 pi->name_mem_tag = NULL_TREE;
3108 /* Return true if STMT is an "escape" site from the current function. Escape
3109 sites those statements which might expose the address of a variable
3110 outside the current function. STMT is an escape site iff:
3112 1- STMT is a function call, or
3113 2- STMT is an __asm__ expression, or
3114 3- STMT is an assignment to a non-local variable, or
3115 4- STMT is a return statement.
3117 Return the type of escape site found, if we found one, or NO_ESCAPE
3121 is_escape_site (gimple stmt)
3123 if (is_gimple_call (stmt))
3125 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3126 return ESCAPE_TO_PURE_CONST;
3128 return ESCAPE_TO_CALL;
3130 else if (gimple_code (stmt) == GIMPLE_ASM)
3131 return ESCAPE_TO_ASM;
3132 else if (is_gimple_assign (stmt))
3134 tree lhs = gimple_assign_lhs (stmt);
3136 /* Get to the base of _REF nodes. */
3137 if (TREE_CODE (lhs) != SSA_NAME)
3138 lhs = get_base_address (lhs);
3140 /* If we couldn't recognize the LHS of the assignment, assume that it
3141 is a non-local store. */
3142 if (lhs == NULL_TREE)
3143 return ESCAPE_UNKNOWN;
3145 if (gimple_assign_cast_p (stmt))
3147 tree from = TREE_TYPE (gimple_assign_rhs1 (stmt));
3148 tree to = TREE_TYPE (lhs);
3150 /* If the RHS is a conversion between a pointer and an integer, the
3151 pointer escapes since we can't track the integer. */
3152 if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
3153 return ESCAPE_BAD_CAST;
3156 /* If the LHS is an SSA name, it can't possibly represent a non-local
3158 if (TREE_CODE (lhs) == SSA_NAME)
3161 /* If the LHS is a non-global decl, it isn't a non-local memory store.
3162 If the LHS escapes, the RHS escape is dealt with in the PTA solver. */
/* NOTE(review): the DECL_P(lhs) half of this condition is among the
   lines missing from this extract.  */
3164 && !is_global_var (lhs))
3167 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
3168 local variables we cannot be sure if it will escape, because we
3169 don't have information about objects not in SSA form. Need to
3170 implement something along the lines of
3172 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
3173 Midkiff, ``Escape analysis for java,'' in Proceedings of the
3174 Conference on Object-Oriented Programming Systems, Languages, and
3175 Applications (OOPSLA), pp. 1-19, 1999. */
3176 return ESCAPE_STORED_IN_GLOBAL;
3178 else if (gimple_code (stmt) == GIMPLE_RETURN)
3179 return ESCAPE_TO_RETURN;
3184 /* Create a new memory tag of type TYPE.
3185 Does NOT push it into the current binding. */
3188 create_tag_raw (enum tree_code code, tree type, const char *prefix)
3192 tmp_var = build_decl (code, create_tmp_var_name (prefix), type);
3194 /* Memory tags are always writable and non-static. */
3195 TREE_READONLY (tmp_var) = 0;
3196 TREE_STATIC (tmp_var) = 0;
3198 /* It doesn't start out global. */
3199 MTAG_GLOBAL (tmp_var) = 0;
/* Mark used so the tag is not flagged as an unused variable.  */
3200 TREE_USED (tmp_var) = 1;
3205 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
3206 is considered to represent all the pointers whose pointed-to types are
3207 in the same alias set class. Otherwise, the tag represents a single
3208 SSA_NAME pointer variable. */
3211 create_memory_tag (tree type, bool is_type_tag)
/* SMT = symbol memory tag (per alias-set class); NMT = name memory
   tag (per SSA pointer).  */
3213 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
3214 type, (is_type_tag) ? "SMT" : "NMT");
3216 /* By default, memory tags are local variables. Alias analysis will
3217 determine whether they should be considered globals. */
3218 DECL_CONTEXT (tag) = current_function_decl;
3220 /* Memory tags are by definition addressable. */
3221 TREE_ADDRESSABLE (tag) = 1;
3223 set_symbol_mem_tag (tag, NULL_TREE);
3225 /* Add the tag to the symbol table. */
3226 add_referenced_var (tag);
3232 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
3233 This is used if P_i has been found to point to a specific set of
3234 variables or to a non-aliased memory location like the address returned
3235 by malloc functions. */
3238 get_nmt_for (tree ptr)
3240 struct ptr_info_def *pi = get_ptr_info (ptr);
3241 tree tag = pi->name_mem_tag;
/* Only create a fresh tag if the pointer doesn't already have one;
   the tag's type is the pointed-to type of PTR.  */
3243 if (tag == NULL_TREE)
3244 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
3249 /* Return the symbol memory tag associated to pointer PTR. A memory
3250 tag is an artificial variable that represents the memory location
3251 pointed-to by PTR. It is used to model the effects of pointer
3252 de-references on addressable variables.
3254 AI points to the data gathered during alias analysis. This
3255 function populates the array AI->POINTERS. */
/* NOTE(review): declarations of `i' and `tag' (used below) and the body
   of the cache-lookup loop are elided from this excerpt.  */
3258 get_smt_for (tree ptr, struct alias_info *ai)
3262 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
3263 alias_set_type tag_set;
3265 /* Get the alias set to be used for the pointed-to memory. If that
3266 differs from what we would get from looking at the type adjust
3267 the tag_type to void to make sure we get a proper alias set from
3268 just looking at the SMT we create. */
3269 tag_set = get_alias_set (tag_type);
3270 if (TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (ptr))
3271 /* This is overly conservative but we do not want to assign
3272 restrict alias sets here (which if they are not assigned
3273 are -2 but still "known"). */
3274 || DECL_POINTER_ALIAS_SET_KNOWN_P (ptr))
3277 tag_type = void_type_node;
3280 /* To avoid creating unnecessary memory tags, only create one memory tag
3281 per alias set class. Note that it may be tempting to group
3282 memory tags based on conflicting alias sets instead of
3283 equivalence. That would be wrong because alias sets are not
3284 necessarily transitive (as demonstrated by the libstdc++ test
3285 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
3286 such that conflicts (A, B) == true and conflicts (A, C) == true,
3287 it does not necessarily follow that conflicts (B, C) == true. */
3288 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
3290 struct alias_map_d *curr = ai->pointers[i];
3291 tree curr_tag = symbol_mem_tag (curr->var);
/* Presumably a matching set reuses `curr_tag' and breaks out;
   that statement is elided here — confirm against full source.  */
3292 if (tag_set == curr->set)
3299 /* If VAR cannot alias with any of the existing memory tags, create a new
3300 tag for PTR and add it to the POINTERS array. */
3301 if (tag == NULL_TREE)
3303 struct alias_map_d *alias_map;
3305 /* If PTR did not have a symbol tag already, create a new SMT.*
3306 artificial variable representing the memory location
3307 pointed-to by PTR. */
3308 tag = symbol_mem_tag (ptr);
/* Reuse PTR's existing SMT only if its alias set still matches.  */
3309 if (tag == NULL_TREE
3310 || tag_set != get_alias_set (tag))
3311 tag = create_memory_tag (tag_type, true);
3313 /* Add PTR to the POINTERS array. Note that we are not interested in
3314 PTR's alias set. Instead, we cache the alias set for the memory that
3316 alias_map = XCNEW (struct alias_map_d);
3317 alias_map->var = ptr;
3318 alias_map->set = tag_set;
3319 ai->pointers[ai->num_pointers++] = alias_map;
3322 /* If the pointed-to type is volatile, so is the tag. */
3323 TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
3325 /* Make sure that the symbol tag has the same alias set as the
3326 pointed-to type or at least accesses through the pointer will
3327 alias that set. The latter can happen after the vectorizer
3328 created pointers of vector type. */
3329 gcc_assert (tag_set == get_alias_set (tag)
3330 || alias_set_subset_of (tag_set, get_alias_set (tag)));
3336 /* Create GLOBAL_VAR, an artificial global variable to act as a
3337 representative of all the variables that may be clobbered by function
/* NOTE(review): the `static void' declarator and the second build_decl
   argument line are elided from this excerpt.  */
3341 create_global_var (void)
3343 tree global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
/* Artificial, non-readonly, externally visible static — never a real
   user variable, but treated as global storage by the IL.  */
3345 DECL_ARTIFICIAL (global_var) = 1;
3346 TREE_READONLY (global_var) = 0;
3347 DECL_EXTERNAL (global_var) = 1;
3348 TREE_STATIC (global_var) = 1;
3349 TREE_USED (global_var) = 1;
3350 DECL_CONTEXT (global_var) = NULL_TREE;
3351 TREE_THIS_VOLATILE (global_var) = 0;
3352 TREE_ADDRESSABLE (global_var) = 0;
/* Register the new decl: annotate, mark call-clobbered, add it to the
   referenced-vars table, queue for SSA renaming, and cache on cfun.  */
3354 create_var_ann (global_var);
3355 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
3356 add_referenced_var (global_var);
3357 mark_sym_for_renaming (global_var);
3358 cfun->gimple_df->global_var = global_var;
3362 /* Dump alias statistics on FILE. */
/* Prints the counters accumulated in the file-scope `alias_stats'
   structure for the current function.  */
3365 dump_alias_stats (FILE *file)
3367 const char *funcname
3368 = lang_hooks.decl_printable_name (current_function_decl, 2);
3369 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
3370 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
3371 fprintf (file, "Total alias mayalias results:\t%u\n",
3372 alias_stats.alias_mayalias);
3373 fprintf (file, "Total alias noalias results:\t%u\n",
3374 alias_stats.alias_noalias);
3375 fprintf (file, "Total simple queries:\t%u\n",
3376 alias_stats.simple_queries);
3377 fprintf (file, "Total simple resolved:\t%u\n",
3378 alias_stats.simple_resolved);
3379 fprintf (file, "Total TBAA queries:\t%u\n",
3380 alias_stats.tbaa_queries);
3381 fprintf (file, "Total TBAA resolved:\t%u\n",
3382 alias_stats.tbaa_resolved);
3383 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
3384 alias_stats.structnoaddress_queries);
3385 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
3386 alias_stats.structnoaddress_resolved);
3390 /* Dump alias information on FILE. */
/* NOTE(review): declarations of `i' and `var' used below are elided
   from this excerpt.  */
3393 dump_alias_info (FILE *file)
3396 const char *funcname
3397 = lang_hooks.decl_printable_name (current_function_decl, 2);
3398 referenced_var_iterator rvi;
3401 fprintf (file, "\nAlias information for %s\n\n", funcname);
3403 dump_memory_partitions (file);
3405 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
3407 fprintf (file, "Aliased symbols\n\n");
3409 FOR_EACH_REFERENCED_VAR (var, rvi)
3411 if (may_be_aliased (var))
3412 dump_variable (file, var);
3415 fprintf (file, "\nDereferenced pointers\n\n");
/* A symbol memory tag on a var marks it as a dereferenced pointer.  */
3417 FOR_EACH_REFERENCED_VAR (var, rvi)
3418 if (symbol_mem_tag (var))
3419 dump_variable (file, var);
3421 fprintf (file, "\nSymbol memory tags\n\n");
3423 FOR_EACH_REFERENCED_VAR (var, rvi)
3425 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
3426 dump_variable (file, var);
3429 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
3431 fprintf (file, "SSA_NAME pointers\n\n");
/* SSA name 0 is unused, hence the loop starts at 1.  */
3432 for (i = 1; i < num_ssa_names; i++)
3434 tree ptr = ssa_name (i);
3435 struct ptr_info_def *pi;
3437 if (ptr == NULL_TREE)
3440 pi = SSA_NAME_PTR_INFO (ptr);
/* NOTE(review): a `pi &&' conjunct appears to be elided between these
   two condition lines — confirm against full source.  */
3441 if (!SSA_NAME_IN_FREE_LIST (ptr)
3443 && pi->name_mem_tag)
3444 dump_points_to_info_for (file, ptr);
3447 fprintf (file, "\nName memory tags\n\n");
3449 FOR_EACH_REFERENCED_VAR (var, rvi)
3451 if (TREE_CODE (var) == NAME_MEMORY_TAG)
3452 dump_variable (file, var);
3455 fprintf (file, "\n");
3459 /* Dump alias information on stderr. */
/* Debugger convenience wrapper around dump_alias_info.  */
3462 debug_alias_info (void)
3464 dump_alias_info (stderr);
3468 /* Return the alias information associated with pointer T. It creates a
3469 new instance if none existed. */
3471 struct ptr_info_def *
3472 get_ptr_info (tree t)
3474 struct ptr_info_def *pi;
/* T must be an SSA_NAME of pointer type.  */
3476 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
3478 pi = SSA_NAME_PTR_INFO (t);
/* Lazily allocate a zeroed ptr_info_def in GC memory on first request.
   NOTE(review): the `if (pi == NULL)' guard and `return pi;' are elided
   from this excerpt.  */
3481 pi = GGC_CNEW (struct ptr_info_def);
3482 SSA_NAME_PTR_INFO (t) = pi;
3488 /* Dump points-to information for SSA_NAME PTR into FILE. */
3491 dump_points_to_info_for (FILE *file, tree ptr)
3493 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3495 print_generic_expr (file, ptr, dump_flags);
/* NOTE(review): a guard such as `if (pi)' is presumably elided before the
   dereferences below — confirm against full source.  */
3499 if (pi->name_mem_tag)
3501 fprintf (file, ", name memory tag: ");
3502 print_generic_expr (file, pi->name_mem_tag, dump_flags);
3505 if (pi->is_dereferenced)
3506 fprintf (file, ", is dereferenced");
3507 else if (pi->memory_tag_needed)
3508 fprintf (file, ", is dereferenced in call");
3510 if (pi->value_escapes_p)
3511 fprintf (file, ", its value escapes");
3513 if (pi->pt_anything)
3514 fprintf (file, ", points-to anything");
/* The pt_null and pt_vars conditions guarding the next two prints are
   elided from this excerpt.  */
3517 fprintf (file, ", points-to NULL");
3521 fprintf (file, ", points-to vars: ");
3522 dump_decl_set (file, pi->pt_vars);
3526 fprintf (file, "\n");
3530 /* Dump points-to information for VAR into stderr. */
/* Debugger convenience wrapper around dump_points_to_info_for.  */
3533 debug_points_to_info_for (tree var)
3535 dump_points_to_info_for (stderr, var);
3539 /* Dump points-to information into FILE. NOTE: This function is slow, as
3540 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
/* NOTE(review): declarations of `bb', `var', `def', `iter' and `fname',
   plus the FOR_EACH_BB wrapper around the per-block loops, are elided
   from this excerpt.  */
3543 dump_points_to_info (FILE *file ATTRIBUTE_UNUSED)
3546 gimple_stmt_iterator si;
3549 lang_hooks.decl_printable_name (current_function_decl, 2);
3550 referenced_var_iterator rvi;
3553 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
3555 /* First dump points-to information for the default definitions of
3556 pointer variables. This is necessary because default definitions are
3557 not part of the code. */
3558 FOR_EACH_REFERENCED_VAR (var, rvi)
3560 if (POINTER_TYPE_P (TREE_TYPE (var)))
3562 tree def = gimple_default_def (cfun, var);
/* Presumably guarded by `if (def)' (elided) before dumping.  */
3564 dump_points_to_info_for (file, def);
3568 /* Dump points-to information for every pointer defined in the program. */
3571 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
3573 gimple phi = gsi_stmt (si);
3574 tree ptr = PHI_RESULT (phi);
3575 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
3576 dump_points_to_info_for (file, ptr);
3579 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
3581 gimple stmt = gsi_stmt (si);
3583 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
3584 if (TREE_CODE (def) == SSA_NAME
3585 && POINTER_TYPE_P (TREE_TYPE (def)))
3586 dump_points_to_info_for (file, def);
3590 fprintf (file, "\n");
3594 /* Dump points-to info pointed to by PTO into STDERR. */
/* Debugger convenience wrapper around dump_points_to_info.  */
3597 debug_points_to_info (void)
3599 dump_points_to_info (stderr);
3602 /* Dump to FILE the list of variables that may be aliasing VAR. */
/* NOTE(review): declarations of `aliases', `al', `i', `bi' and the guard
   for an empty alias set are elided from this excerpt.  */
3605 dump_may_aliases_for (FILE *file, tree var)
3609 aliases = MTAG_ALIASES (var);
3616 fprintf (file, "{ ");
/* Each bit in the alias bitmap is a referenced-var UID.  */
3617 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
3619 al = referenced_var (i);
3620 print_generic_expr (file, al, dump_flags);
3621 fprintf (file, " ");
3623 fprintf (file, "}");
3628 /* Dump to stderr the list of variables that may be aliasing VAR. */
/* Debugger convenience wrapper around dump_may_aliases_for.  */
3631 debug_may_aliases_for (tree var)
3633 dump_may_aliases_for (stderr, var);
3636 /* Return true if VAR may be aliased. */
/* NOTE(review): several `return' statements and the first half of the
   global-visibility condition are elided from this excerpt.  */
3639 may_be_aliased (tree var)
/* Address-taken variables are always potentially aliased.  */
3642 if (TREE_ADDRESSABLE (var))
3645 /* Globally visible variables can have their addresses taken by other
3646 translation units. */
3648 && MTAG_GLOBAL (var))
3650 else if (!MTAG_P (var)
3651 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
3654 /* Automatic variables can't have their addresses escape any other
3655 way. This must be after the check for global variables, as
3656 extern declarations do not have TREE_STATIC set. */
3657 if (!TREE_STATIC (var))
3663 /* The following is based on code in add_stmt_operand to ensure that the
3664 same defs/uses/vdefs/vuses will be found after replacing a reference
3665 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
3666 is the address of var. Return a memtag for the ptr, after adding the
3667 proper may_aliases to it (which are the aliases of var, if it has any,
/* NOTE(review): the `static tree' declarator, the first conjunct of the
   Case-1 condition, and the return statements are elided here.  */
3671 add_may_alias_for_new_tag (tree tag, tree var)
3673 bitmap aliases = NULL;
/* Presumably guarded by `if (MTAG_P (var))' (elided) — only memory tags
   carry alias sets.  */
3676 aliases = may_aliases (var);
3678 /* Case 1: |aliases| == 1 */
3680 && bitmap_single_bit_set_p (aliases))
3682 tree ali = referenced_var (bitmap_first_set_bit (aliases));
/* A sole SMT alias presumably replaces TAG as the result (elided).  */
3683 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
3687 /* Case 2: |aliases| == 0 */
3688 if (aliases == NULL)
3689 add_may_alias (tag, var);
3692 /* Case 3: |aliases| > 1 */
3693 union_alias_set_into (tag, aliases);
3698 /* Create a new symbol tag for PTR. Construct the may-alias list of
3699 this type tag so that it has the aliasing of VAR according to the
3700 location accessed by EXPR.
3702 Note, the set of aliases represented by the new symbol tag are not
3703 marked for renaming. */
/* NOTE(review): declarations of `tag' and `ref', and any use of the
   offset/size/maxsize results, are elided from this excerpt.  */
3706 new_type_alias (tree ptr, tree var, tree expr)
3708 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
3710 tree ali = NULL_TREE;
3711 HOST_WIDE_INT offset, size, maxsize;
/* PTR must not already own a symbol tag, and VAR must be a real decl.  */
3714 gcc_assert (symbol_mem_tag (ptr) == NULL_TREE);
3715 gcc_assert (!MTAG_P (var));
3717 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
3720 tag = create_memory_tag (tag_type, true);
3721 set_symbol_mem_tag (ptr, tag);
/* ALI may differ from TAG when an existing tag is reused (Case 1 in
   add_may_alias_for_new_tag), so the tag is re-set afterwards.  */
3723 ali = add_may_alias_for_new_tag (tag, var);
3725 set_symbol_mem_tag (ptr, ali);
3726 MTAG_GLOBAL (tag) = is_global_var (var);
3730 /* Reset the call_clobbered flags on our referenced vars. In
3731 theory, this only needs to be done for globals. */
/* NOTE(review): declarator and `var' declaration are elided here; used
   as the execute hook of pass_reset_cc_flags below.  */
3734 reset_cc_flags (void)
3737 referenced_var_iterator rvi;
3739 FOR_EACH_REFERENCED_VAR (var, rvi)
3740 var_ann (var)->call_clobbered = false;
/* Pass descriptor: clears per-variable call-clobbered flags so alias
   analysis can recompute them.  NOTE(review): several initializer fields
   (name, gate, timevar, etc.) and the closing brace are elided from this
   excerpt.  */
struct gimple_opt_pass pass_reset_cc_flags =
3750 reset_cc_flags, /* execute */
3753 0, /* static_pass_number */
3755 PROP_referenced_vars |PROP_cfg, /* properties_required */
3756 0, /* properties_provided */
3757 0, /* properties_destroyed */
3758 0, /* todo_flags_start */
3759 0 /* todo_flags_finish */
3766 struct gimple_opt_pass pass_build_alias =
3775 0, /* static_pass_number */
3777 PROP_cfg | PROP_ssa, /* properties_required */
3778 PROP_alias, /* properties_provided */
3779 0, /* properties_destroyed */
3780 0, /* todo_flags_start */
3781 TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */