1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
33 #include "langhooks.h"
36 #include "diagnostic.h"
37 #include "tree-dump.h"
38 #include "tree-gimple.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "tree-pass.h"
42 #include "tree-ssa-structalias.h"
45 #include "ipa-type-escape.h"
49 #include "pointer-set.h"
50 #include "alloc-pool.h"
52 /* Broad overview of how aliasing works:
54 First we compute points-to sets, which is done in
55 tree-ssa-structalias.c
57 During points-to set constraint finding, a bunch of little bits of
58 information is collected.
59 This is not done because it is necessary for points-to, but because
60 points-to has to walk every statement anyway. The function performing
61 this collecting is update_alias_info.
63 Bits update_alias_info collects include:
64 1. Directly escaping variables and variables whose value escapes
65 (using is_escape_site). This is the set of variables and values that
66 escape prior to transitive closure of the clobbers.
67 2. The set of variables dereferenced on the LHS (into
68 dereferenced_ptr_stores)
69 3. The set of variables dereferenced on the RHS (into
70 dereferenced_ptr_loads)
71 4. The set of all pointers we saw.
72 5. The number of loads and stores for each variable
73 6. The number of statements touching memory
74 7. The set of address taken variables.
77 #1 is computed by a combination of is_escape_site, and counting the
78 number of uses/deref operators. This function properly accounts for
79 situations like &ptr->field, which is *not* a dereference.
81 After points-to sets are computed, the sets themselves still
82 contain points-to specific variables, such as a variable that says
83 the pointer points to anything, a variable that says the pointer
84 points to readonly memory, etc.
86 These are eliminated in a later phase, as we will see.
88 The rest of the phases are located in tree-ssa-alias.c
90 The next phase after points-to set computation is called
91 "setup_pointers_and_addressables"
93 This pass does 3 main things:
95 1. All variables that can have TREE_ADDRESSABLE removed safely (IE
96 non-globals whose address is not taken), have TREE_ADDRESSABLE removed.
98 2. All variables that may be aliased (which is the set of addressable
99 variables and globals) at all, are marked for renaming, and have
100 symbol memory tags created for them.
101 3. All variables which are stored into have their SMT's added to
105 After this function is run, all variables that will ever have an
106 SMT, have one, though its aliases are not filled in.
108 The next phase is to compute flow-insensitive aliasing, which in
109 our case, is a misnomer. It is really computing aliasing that
110 requires no transitive closure to be correct. In particular, it
111 uses stack vs non-stack, TBAA, etc, to determine whether two
112 symbols could *ever* alias . This phase works by going through all
113 the pointers we collected during update_alias_info, and for every
114 addressable variable in the program, seeing if they alias. If so,
115 the addressable variable is added to the symbol memory tag for the
118 As part of this, we handle symbol memory tags that conflict but
119 have no aliases in common, by forcing them to have a symbol in
120 common (through unioning alias sets or adding one as an alias of
121 the other), or by adding one as an alias of another. The case of
122 conflicts with no aliases in common occurs mainly due to aliasing
123 we cannot see. In particular, it generally means we have a load
124 through a pointer whose value came from outside the function.
125 Without an addressable symbol to point to, they would get the wrong
128 After flow insensitive aliasing is computed, we compute name tags
129 (called compute_flow_sensitive_info). We walk each pointer we
130 collected and see if it has a usable points-to set. If so, we
131 generate a name tag using that pointer, and make an alias bitmap for
132 it. Name tags are shared between all things with the same alias
133 bitmap. The alias bitmap will be translated from what points-to
134 computed. In particular, the "anything" variable in points-to will be
135 transformed into a pruned set of SMT's and their aliases that
136 compute_flow_insensitive_aliasing computed.
137 Note that since 4.3, every pointer that points-to computed a solution for
138 will get a name tag (whereas before 4.3, only those whose set did
139 *not* include the anything variable would). At the point where name
140 tags are all assigned, symbol memory tags are dead, and could be
141 deleted, *except* on global variables. Global variables still use
142 symbol memory tags as of right now.
144 After name tags are computed, the set of clobbered variables is
145 transitively closed. In particular, we compute the set of clobbered
146 variables based on the initial set of clobbers, plus the aliases of
147 pointers which either escape, or have their value escape.
149 After this, maybe_create_global_var is run, which handles a corner
150 case where we have no call clobbered variables, but have pure and
153 Staring at this function, I now remember it is a hack for the fact
154 that we do not mark all globals in the program as call clobbered for a
155 function unless they are actually used in that function. Instead, we
156 only mark the set that is actually clobbered. As a result, you can
157 end up with situations where you have no call clobbered vars set.
159 After maybe_create_global_var, we set pointers with the REF_ALL flag
160 to have alias sets that include all clobbered
161 memory tags and variables.
163 After this, memory partitioning is computed (by the function
164 compute_memory_partitions) and alias sets are reworked accordingly.
166 Lastly, we delete partitions with no symbols, and clean up after
169 /* Structure to map a variable to its alias set. */
172 /* Variable and its alias set. */
178 /* Counters used to display statistics on alias analysis. */
181 unsigned int alias_queries;
182 unsigned int alias_mayalias;
183 unsigned int alias_noalias;
184 unsigned int simple_queries;
185 unsigned int simple_resolved;
186 unsigned int tbaa_queries;
187 unsigned int tbaa_resolved;
188 unsigned int structnoaddress_queries;
189 unsigned int structnoaddress_resolved;
193 /* Local variables. */
194 static struct alias_stats_d alias_stats;
195 static bitmap_obstack alias_bitmap_obstack;
197 /* Local functions. */
198 static void compute_flow_insensitive_aliasing (struct alias_info *);
199 static void finalize_ref_all_pointers (struct alias_info *);
200 static void dump_alias_stats (FILE *);
201 static bool may_alias_p (tree, alias_set_type, tree, alias_set_type, bool);
202 static tree create_memory_tag (tree type, bool is_type_tag);
203 static tree get_smt_for (tree, struct alias_info *);
204 static tree get_nmt_for (tree);
205 static void add_may_alias (tree, tree);
206 static struct alias_info *init_alias_info (void);
207 static void delete_alias_info (struct alias_info *);
208 static void compute_flow_sensitive_aliasing (struct alias_info *);
209 static void setup_pointers_and_addressables (struct alias_info *);
210 static void create_global_var (void);
211 static void maybe_create_global_var (void);
212 static void set_pt_anything (tree);
214 void debug_mp_info (VEC(mem_sym_stats_t,heap) *);
216 static alloc_pool mem_sym_stats_pool;
218 /* Return memory reference stats for symbol VAR. Create a new slot in
219 cfun->gimple_df->mem_sym_stats if needed. */
221 static struct mem_sym_stats_d *
222 get_mem_sym_stats_for (tree var)
225 struct mem_sym_stats_d *stats;
226 struct pointer_map_t *map = gimple_mem_ref_stats (cfun)->mem_sym_stats;
230 slot = pointer_map_insert (map, var);
233 stats = pool_alloc (mem_sym_stats_pool);
234 memset (stats, 0, sizeof (*stats));
236 *slot = (void *) stats;
239 stats = (struct mem_sym_stats_d *) *slot;
245 /* Return memory reference statistics for variable VAR in function FN.
246 This is computed by alias analysis, but it is not kept
247 incrementally up-to-date. So, these stats are only accurate if
248 pass_may_alias has been run recently. If no alias information
249 exists, this function returns NULL. */
251 static mem_sym_stats_t
252 mem_sym_stats (struct function *fn, tree var)
255 struct pointer_map_t *stats_map = gimple_mem_ref_stats (fn)->mem_sym_stats;
257 if (stats_map == NULL)
260 slot = pointer_map_contains (stats_map, var);
264 return (mem_sym_stats_t) *slot;
268 /* Set MPT to be the memory partition associated with symbol SYM. */
271 set_memory_partition (tree sym, tree mpt)
273 #if defined ENABLE_CHECKING
275 gcc_assert (TREE_CODE (mpt) == MEMORY_PARTITION_TAG
276 && !is_gimple_reg (sym));
279 var_ann (sym)->mpt = mpt;
282 if (MPT_SYMBOLS (mpt) == NULL)
283 MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&alias_bitmap_obstack);
285 bitmap_set_bit (MPT_SYMBOLS (mpt), DECL_UID (sym));
287 /* MPT inherits the call-clobbering attributes from SYM. */
288 if (is_call_clobbered (sym))
290 MTAG_GLOBAL (mpt) = 1;
291 mark_call_clobbered (mpt, ESCAPE_IS_GLOBAL);
297 /* Mark variable VAR as being non-addressable. */
300 mark_non_addressable (tree var)
304 if (!TREE_ADDRESSABLE (var))
307 mpt = memory_partition (var);
310 var_ann (var)->call_clobbered = false;
312 bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
313 TREE_ADDRESSABLE (var) = 0;
317 /* Note that it's possible for a symbol to have an associated
318 MPT and the MPT have a NULL empty set. During
319 init_alias_info, all MPTs get their sets cleared out, but the
320 symbols still point to the old MPTs that used to hold them.
321 This is done so that compute_memory_partitions can now which
322 symbols are losing or changing partitions and mark them for
324 if (MPT_SYMBOLS (mpt))
325 bitmap_clear_bit (MPT_SYMBOLS (mpt), DECL_UID (var));
326 set_memory_partition (var, NULL_TREE);
331 /* qsort comparison function to sort type/name tags by DECL_UID. */
334 sort_tags_by_id (const void *pa, const void *pb)
336 const_tree const a = *(const_tree const *)pa;
337 const_tree const b = *(const_tree const *)pb;
339 return DECL_UID (a) - DECL_UID (b);
342 /* Initialize WORKLIST to contain those memory tags that are marked call
343 clobbered. Initialized WORKLIST2 to contain the reasons these
344 memory tags escaped. */
347 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
348 VEC (int, heap) **worklist2,
351 referenced_var_iterator rvi;
354 FOR_EACH_REFERENCED_VAR (curr, rvi)
356 if (MTAG_P (curr) && is_call_clobbered (curr))
358 VEC_safe_push (tree, heap, *worklist, curr);
359 VEC_safe_push (int, heap, *worklist2,
360 var_ann (curr)->escape_mask);
361 bitmap_set_bit (on_worklist, DECL_UID (curr));
366 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
367 ALIAS is not already marked call clobbered, and is a memory
371 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
372 VEC (int, heap) **worklist2, int reason,
375 if (MTAG_P (alias) && !is_call_clobbered (alias)
376 && !bitmap_bit_p (on_worklist, DECL_UID (alias)))
378 VEC_safe_push (tree, heap, *worklist, alias);
379 VEC_safe_push (int, heap, *worklist2, reason);
380 bitmap_set_bit (on_worklist, DECL_UID (alias));
384 /* Mark aliases of TAG as call clobbered, and place any tags on the
385 alias list that were not already call clobbered on WORKLIST. */
388 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
389 VEC (int, heap) **worklist2,
390 bitmap on_worklist, bitmap queued)
393 referenced_var_iterator ri;
395 var_ann_t ta = var_ann (tag);
399 aliases = may_aliases (tag);
403 FOR_EACH_REFERENCED_VAR_IN_BITMAP (aliases, entry, ri)
405 /* If you clobber one part of a structure, you
406 clobber the entire thing. While this does not make
407 the world a particularly nice place, it is necessary
408 in order to allow C/C++ tricks that involve
409 pointer arithmetic to work. */
410 if (TREE_CODE (entry) == STRUCT_FIELD_TAG)
411 bitmap_set_bit (queued, DECL_UID (SFT_PARENT_VAR (entry)));
412 else if (!unmodifiable_var_p (entry))
414 add_to_worklist (entry, worklist, worklist2, ta->escape_mask,
416 mark_call_clobbered (entry, ta->escape_mask);
419 if (!bitmap_empty_p (queued))
421 FOR_EACH_REFERENCED_VAR_IN_BITMAP (queued, entry, ri)
423 subvar_t svars = get_subvars_for_var (entry);
427 for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
428 if (!unmodifiable_var_p (subvar))
429 mark_call_clobbered (subvar, ta->escape_mask);
431 bitmap_clear (queued);
435 /* Tags containing global vars need to be marked as global.
436 Tags containing call clobbered vars need to be marked as call
440 compute_tag_properties (void)
442 referenced_var_iterator rvi;
445 VEC (tree, heap) *taglist = NULL;
447 FOR_EACH_REFERENCED_VAR (tag, rvi)
449 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
451 VEC_safe_push (tree, heap, taglist, tag);
454 /* We sort the taglist by DECL_UID, for two reasons.
455 1. To get a sequential ordering to make the bitmap accesses
457 2. Because of the way we compute aliases, it's more likely that
458 an earlier tag is included in a later tag, and this will reduce
459 the number of iterations.
461 If we had a real tag graph, we would just topo-order it and be
463 qsort (VEC_address (tree, taglist),
464 VEC_length (tree, taglist),
468 /* Go through each tag not marked as global, and if it aliases
469 global vars, mark it global.
471 If the tag contains call clobbered vars, mark it call
474 This loop iterates because tags may appear in the may-aliases
475 list of other tags when we group. */
482 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
485 referenced_var_iterator ri;
487 bool tagcc = is_call_clobbered (tag);
488 bool tagglobal = MTAG_GLOBAL (tag);
490 if (tagcc && tagglobal)
493 ma = may_aliases (tag);
497 FOR_EACH_REFERENCED_VAR_IN_BITMAP (ma, entry, ri)
499 /* Call clobbered entries cause the tag to be marked
501 if (!tagcc && is_call_clobbered (entry))
503 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
508 /* Global vars cause the tag to be marked global. */
509 if (!tagglobal && is_global_var (entry))
511 MTAG_GLOBAL (tag) = true;
516 /* Early exit once both global and cc are set, since the
517 loop can't do any more than that. */
518 if (tagcc && tagglobal)
523 VEC_free (tree, heap, taglist);
526 /* Set up the initial variable clobbers and globalness.
527 When this function completes, only tags whose aliases need to be
528 clobbered will be set clobbered. Tags clobbered because they
529 contain call clobbered vars are handled in compute_tag_properties. */
532 set_initial_properties (struct alias_info *ai)
535 referenced_var_iterator rvi;
540 /* Temporary bitmap to avoid quadratic behavior in marking
542 queued = BITMAP_ALLOC (&alias_bitmap_obstack);
544 FOR_EACH_REFERENCED_VAR (var, rvi)
546 if (is_global_var (var)
547 && (!var_can_have_subvars (var)
548 || get_subvars_for_var (var) == NULL))
550 if (!unmodifiable_var_p (var))
551 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
553 else if (TREE_CODE (var) == PARM_DECL
554 && gimple_default_def (cfun, var)
555 && POINTER_TYPE_P (TREE_TYPE (var)))
557 tree def = gimple_default_def (cfun, var);
558 get_ptr_info (def)->value_escapes_p = 1;
559 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
563 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
565 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
566 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
568 if (pi->value_escapes_p)
570 /* If PTR escapes then its associated memory tags and
571 pointed-to variables are call-clobbered. */
572 if (pi->name_mem_tag)
573 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
576 mark_call_clobbered (tag, pi->escape_mask);
580 referenced_var_iterator ri;
582 FOR_EACH_REFERENCED_VAR_IN_BITMAP (pi->pt_vars, alias, ri)
584 /* If you clobber one part of a structure, you
585 clobber the entire thing. While this does not make
586 the world a particularly nice place, it is necessary
587 in order to allow C/C++ tricks that involve
588 pointer arithmetic to work. */
589 if (TREE_CODE (alias) == STRUCT_FIELD_TAG)
590 bitmap_set_bit (queued, DECL_UID (SFT_PARENT_VAR (alias)));
591 else if (!unmodifiable_var_p (alias))
592 mark_call_clobbered (alias, pi->escape_mask);
594 /* Process variables we need to clobber all parts of. */
595 if (!bitmap_empty_p (queued))
597 FOR_EACH_REFERENCED_VAR_IN_BITMAP (queued, alias, ri)
599 subvar_t svars = get_subvars_for_var (alias);
603 for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
604 if (!unmodifiable_var_p (subvar))
605 mark_call_clobbered (subvar, pi->escape_mask);
607 bitmap_clear (queued);
612 /* If the name tag is call clobbered, so is the symbol tag
613 associated with the base VAR_DECL. */
616 && is_call_clobbered (pi->name_mem_tag))
617 mark_call_clobbered (tag, pi->escape_mask);
619 /* Name tags and symbol tags that we don't know where they point
620 to, might point to global memory, and thus, are clobbered.
622 FIXME: This is not quite right. They should only be
623 clobbered if value_escapes_p is true, regardless of whether
624 they point to global memory or not.
625 So removing this code and fixing all the bugs would be nice.
626 It is the cause of a bunch of clobbering. */
627 if ((pi->pt_global_mem || pi->pt_anything)
628 && pi->is_dereferenced && pi->name_mem_tag)
630 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
631 MTAG_GLOBAL (pi->name_mem_tag) = true;
634 if ((pi->pt_global_mem || pi->pt_anything)
635 && pi->is_dereferenced
638 mark_call_clobbered (tag, ESCAPE_IS_GLOBAL);
639 MTAG_GLOBAL (tag) = true;
643 BITMAP_FREE (queued);
646 /* Compute which variables need to be marked call clobbered because
647 their tag is call clobbered, and which tags need to be marked
648 global because they contain global variables. */
651 compute_call_clobbered (struct alias_info *ai)
653 VEC (tree, heap) *worklist = NULL;
654 VEC (int,heap) *worklist2 = NULL;
655 bitmap on_worklist, queued;
657 timevar_push (TV_CALL_CLOBBER);
658 on_worklist = BITMAP_ALLOC (NULL);
659 queued = BITMAP_ALLOC (NULL);
661 set_initial_properties (ai);
662 init_transitive_clobber_worklist (&worklist, &worklist2, on_worklist);
663 while (VEC_length (tree, worklist) != 0)
665 tree curr = VEC_pop (tree, worklist);
666 int reason = VEC_pop (int, worklist2);
668 bitmap_clear_bit (on_worklist, DECL_UID (curr));
669 mark_call_clobbered (curr, reason);
670 mark_aliases_call_clobbered (curr, &worklist, &worklist2,
671 on_worklist, queued);
673 VEC_free (tree, heap, worklist);
674 VEC_free (int, heap, worklist2);
675 BITMAP_FREE (on_worklist);
676 BITMAP_FREE (queued);
677 compute_tag_properties ();
678 timevar_pop (TV_CALL_CLOBBER);
682 /* Dump memory partition information to FILE. */
685 dump_memory_partitions (FILE *file)
691 fprintf (file, "\nMemory partitions\n\n");
692 for (i = 0, npart = 0, nsyms = 0;
693 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
698 bitmap syms = MPT_SYMBOLS (mpt);
699 unsigned long n = (syms) ? bitmap_count_bits (syms) : 0;
701 fprintf (file, "#%u: ", i);
702 print_generic_expr (file, mpt, 0);
703 fprintf (file, ": %lu elements: ", n);
704 dump_decl_set (file, syms);
710 fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
714 /* Dump memory partition information to stderr. */
717 debug_memory_partitions (void)
719 dump_memory_partitions (stderr);
723 /* Return true if memory partitioning is required given the memory
724 reference estimates in STATS. */
727 need_to_partition_p (struct mem_ref_stats_d *stats)
729 long num_vops = stats->num_vuses + stats->num_vdefs;
730 long avg_vops = CEIL (num_vops, stats->num_mem_stmts);
731 return (num_vops > (long) MAX_ALIASED_VOPS
732 && avg_vops > (long) AVG_ALIASED_VOPS);
736 /* Count the actual number of virtual operators in CFUN. Note that
737 this is only meaningful after virtual operands have been populated,
738 so it should be invoked at the end of compute_may_aliases.
740 The number of virtual operators are stored in *NUM_VDEFS_P and
741 *NUM_VUSES_P, the number of partitioned symbols in
742 *NUM_PARTITIONED_P and the number of unpartitioned symbols in
743 *NUM_UNPARTITIONED_P.
745 If any of these pointers is NULL the corresponding count is not
749 count_mem_refs (long *num_vuses_p, long *num_vdefs_p,
750 long *num_partitioned_p, long *num_unpartitioned_p)
752 block_stmt_iterator bsi;
754 long num_vdefs, num_vuses, num_partitioned, num_unpartitioned;
755 referenced_var_iterator rvi;
758 num_vuses = num_vdefs = num_partitioned = num_unpartitioned = 0;
760 if (num_vuses_p || num_vdefs_p)
762 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
764 tree stmt = bsi_stmt (bsi);
765 if (stmt_references_memory_p (stmt))
767 num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
768 num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
772 if (num_partitioned_p || num_unpartitioned_p)
773 FOR_EACH_REFERENCED_VAR (sym, rvi)
775 if (is_gimple_reg (sym))
778 if (memory_partition (sym))
785 *num_vdefs_p = num_vdefs;
788 *num_vuses_p = num_vuses;
790 if (num_partitioned_p)
791 *num_partitioned_p = num_partitioned;
793 if (num_unpartitioned_p)
794 *num_unpartitioned_p = num_unpartitioned;
798 /* The list is sorted by increasing partitioning score (PSCORE).
799 This score is computed such that symbols with high scores are
800 those that are least likely to be partitioned. Given a symbol
801 MP->VAR, PSCORE(S) is the result of the following weighted sum
803 PSCORE(S) = FW * 64 + FR * 32
810 FW Execution frequency of writes to S
811 FR Execution frequency of reads from S
812 DW Number of direct writes to S
813 DR Number of direct reads from S
814 IW Number of indirect writes to S
815 IR Number of indirect reads from S
816 NO_ALIAS State of the NO_ALIAS* flags
818 The basic idea here is that symbols that are frequently
819 written-to in hot paths of the code are the last to be considered
823 mem_sym_score (mem_sym_stats_t mp)
825 /* Unpartitionable SFTs are automatically thrown to the bottom of
826 the list. They are not stored in partitions, but they are used
827 for computing overall statistics. */
828 if (TREE_CODE (mp->var) == STRUCT_FIELD_TAG
829 && SFT_UNPARTITIONABLE_P (mp->var))
832 return mp->frequency_writes * 64 + mp->frequency_reads * 32
833 + mp->num_direct_writes * 16 + mp->num_direct_reads * 8
834 + mp->num_indirect_writes * 4 + mp->num_indirect_reads * 2
835 + var_ann (mp->var)->noalias_state;
839 /* Dump memory reference stats for function CFUN to FILE. */
842 dump_mem_ref_stats (FILE *file)
844 long actual_num_vuses, actual_num_vdefs;
845 long num_partitioned, num_unpartitioned;
846 struct mem_ref_stats_d *stats;
848 stats = gimple_mem_ref_stats (cfun);
850 count_mem_refs (&actual_num_vuses, &actual_num_vdefs, &num_partitioned,
853 fprintf (file, "\nMemory reference statistics for %s\n\n",
854 lang_hooks.decl_printable_name (current_function_decl, 2));
856 fprintf (file, "Number of memory statements: %ld\n",
857 stats->num_mem_stmts);
858 fprintf (file, "Number of call sites: %ld\n",
859 stats->num_call_sites);
860 fprintf (file, "Number of pure/const call sites: %ld\n",
861 stats->num_pure_const_call_sites);
862 fprintf (file, "Number of asm sites: %ld\n",
863 stats->num_asm_sites);
864 fprintf (file, "Estimated number of loads: %ld (%ld/stmt)\n",
866 (stats->num_mem_stmts)
867 ? CEIL (stats->num_vuses, stats->num_mem_stmts)
869 fprintf (file, "Actual number of loads: %ld (%ld/stmt)\n",
871 (stats->num_mem_stmts)
872 ? CEIL (actual_num_vuses, stats->num_mem_stmts)
875 if (actual_num_vuses > stats->num_vuses + (stats->num_vuses / 25))
876 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
878 fprintf (file, "Estimated number of stores: %ld (%ld/stmt)\n",
880 (stats->num_mem_stmts)
881 ? CEIL (stats->num_vdefs, stats->num_mem_stmts)
883 fprintf (file, "Actual number of stores: %ld (%ld/stmt)\n",
885 (stats->num_mem_stmts)
886 ? CEIL (actual_num_vdefs, stats->num_mem_stmts)
889 if (actual_num_vdefs > stats->num_vdefs + (stats->num_vdefs / 25))
890 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
892 fprintf (file, "Partitioning thresholds: MAX = %d AVG = %d "
893 "(%sNEED TO PARTITION)\n", MAX_ALIASED_VOPS, AVG_ALIASED_VOPS,
894 stats->num_mem_stmts && need_to_partition_p (stats) ? "" : "NO ");
895 fprintf (file, "Number of partitioned symbols: %ld\n", num_partitioned);
896 fprintf (file, "Number of unpartitioned symbols: %ld\n", num_unpartitioned);
900 /* Dump memory reference stats for function FN to stderr. */
903 debug_mem_ref_stats (void)
905 dump_mem_ref_stats (stderr);
909 /* Dump memory reference stats for variable VAR to FILE. */
912 dump_mem_sym_stats (FILE *file, tree var)
914 mem_sym_stats_t stats = mem_sym_stats (cfun, var);
919 fprintf (file, "read frequency: %6ld, write frequency: %6ld, "
920 "direct reads: %3ld, direct writes: %3ld, "
921 "indirect reads: %4ld, indirect writes: %4ld, symbol: ",
922 stats->frequency_reads, stats->frequency_writes,
923 stats->num_direct_reads, stats->num_direct_writes,
924 stats->num_indirect_reads, stats->num_indirect_writes);
925 print_generic_expr (file, stats->var, 0);
926 fprintf (file, ", tags: ");
927 dump_decl_set (file, stats->parent_tags);
931 /* Dump memory reference stats for variable VAR to stderr. */
934 debug_mem_sym_stats (tree var)
936 dump_mem_sym_stats (stderr, var);
939 /* Dump memory reference stats for variable VAR to FILE. For use
940 of tree-dfa.c:dump_variable. */
943 dump_mem_sym_stats_for_var (FILE *file, tree var)
945 mem_sym_stats_t stats = mem_sym_stats (cfun, var);
950 fprintf (file, ", score: %ld", mem_sym_score (stats));
951 fprintf (file, ", direct reads: %ld", stats->num_direct_reads);
952 fprintf (file, ", direct writes: %ld", stats->num_direct_writes);
953 fprintf (file, ", indirect reads: %ld", stats->num_indirect_reads);
954 fprintf (file, ", indirect writes: %ld", stats->num_indirect_writes);
957 /* Dump memory reference stats for all memory symbols to FILE. */
960 dump_all_mem_sym_stats (FILE *file)
962 referenced_var_iterator rvi;
965 FOR_EACH_REFERENCED_VAR (sym, rvi)
967 if (is_gimple_reg (sym))
970 dump_mem_sym_stats (file, sym);
975 /* Dump memory reference stats for all memory symbols to stderr. */
978 debug_all_mem_sym_stats (void)
980 dump_all_mem_sym_stats (stderr);
984 /* Dump the MP_INFO array to FILE. */
987 dump_mp_info (FILE *file, VEC(mem_sym_stats_t,heap) *mp_info)
990 mem_sym_stats_t mp_p;
992 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
993 if (!mp_p->partitioned_p)
994 dump_mem_sym_stats (file, mp_p->var);
998 /* Dump the MP_INFO array to stderr. */
1001 debug_mp_info (VEC(mem_sym_stats_t,heap) *mp_info)
1003 dump_mp_info (stderr, mp_info);
1007 /* Update memory reference stats for symbol VAR in statement STMT.
1008 NUM_DIRECT_READS and NUM_DIRECT_WRITES specify the number of times
1009 that VAR is read/written in STMT (indirect reads/writes are not
1010 recorded by this function, see compute_memory_partitions). */
1013 update_mem_sym_stats_from_stmt (tree var, tree stmt, long num_direct_reads,
1014 long num_direct_writes)
1016 mem_sym_stats_t stats;
1018 gcc_assert (num_direct_reads >= 0 && num_direct_writes >= 0);
1020 stats = get_mem_sym_stats_for (var);
1022 stats->num_direct_reads += num_direct_reads;
1023 stats->frequency_reads += ((long) bb_for_stmt (stmt)->frequency
1024 * num_direct_reads);
1026 stats->num_direct_writes += num_direct_writes;
1027 stats->frequency_writes += ((long) bb_for_stmt (stmt)->frequency
1028 * num_direct_writes);
1032 /* Given two MP_INFO entries MP1 and MP2, return -1 if MP1->VAR should
1033 be partitioned before MP2->VAR, 0 if they are the same or 1 if
1034 MP1->VAR should be partitioned after MP2->VAR. */
1037 compare_mp_info_entries (mem_sym_stats_t mp1, mem_sym_stats_t mp2)
1039 long pscore1 = mem_sym_score (mp1);
1040 long pscore2 = mem_sym_score (mp2);
1042 if (pscore1 < pscore2)
1044 else if (pscore1 > pscore2)
1047 return DECL_UID (mp1->var) - DECL_UID (mp2->var);
1051 /* Comparison routine for qsort. The list is sorted by increasing
1052 partitioning score (PSCORE). This score is computed such that
1053 symbols with high scores are those that are least likely to be
1057 mp_info_cmp (const void *p, const void *q)
1059 mem_sym_stats_t e1 = *((const mem_sym_stats_t *) p);
1060 mem_sym_stats_t e2 = *((const mem_sym_stats_t *) q);
1061 return compare_mp_info_entries (e1, e2);
1065 /* Sort the array of reference counts used to compute memory partitions.
1066 Elements are sorted in ascending order of execution frequency and
1067 descending order of virtual operators needed. */
1070 sort_mp_info (VEC(mem_sym_stats_t,heap) *list)
1072 unsigned num = VEC_length (mem_sym_stats_t, list);
1079 if (compare_mp_info_entries (VEC_index (mem_sym_stats_t, list, 0),
1080 VEC_index (mem_sym_stats_t, list, 1)) > 0)
1082 /* Swap elements if they are in the wrong order. */
1083 mem_sym_stats_t tmp = VEC_index (mem_sym_stats_t, list, 0);
1084 VEC_replace (mem_sym_stats_t, list, 0,
1085 VEC_index (mem_sym_stats_t, list, 1));
1086 VEC_replace (mem_sym_stats_t, list, 1, tmp);
1092 /* There are 3 or more elements, call qsort. */
1093 qsort (VEC_address (mem_sym_stats_t, list),
1094 VEC_length (mem_sym_stats_t, list),
1095 sizeof (mem_sym_stats_t),
1100 /* Return the memory partition tag (MPT) associated with memory
1104 get_mpt_for (tree sym)
1108 /* Don't create a new tag unnecessarily. */
1109 mpt = memory_partition (sym);
1110 if (mpt == NULL_TREE)
1112 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
1113 TREE_ADDRESSABLE (mpt) = 0;
1114 add_referenced_var (mpt);
1115 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
1116 gcc_assert (MPT_SYMBOLS (mpt) == NULL);
1117 set_memory_partition (sym, mpt);
1124 /* Add MP_P->VAR to a memory partition and return the partition. */
1127 find_partition_for (mem_sym_stats_t mp_p)
1130 VEC(tree,heap) *mpt_table;
1133 mpt_table = gimple_ssa_operands (cfun)->mpt_table;
1136 /* Find an existing partition for MP_P->VAR. */
1137 for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
1139 mem_sym_stats_t mpt_stats;
1141 /* If MPT does not have any symbols yet, use it. */
1142 if (MPT_SYMBOLS (mpt) == NULL)
1145 /* Otherwise, see if MPT has common parent tags with MP_P->VAR,
1146 but avoid grouping clobbered variables with non-clobbered
1147 variables (otherwise, this tends to creates a single memory
1148 partition because other call-clobbered variables may have
1149 common parent tags with non-clobbered ones). */
1150 mpt_stats = get_mem_sym_stats_for (mpt);
1151 if (mp_p->parent_tags
1152 && mpt_stats->parent_tags
1153 && is_call_clobbered (mpt) == is_call_clobbered (mp_p->var)
1154 && bitmap_intersect_p (mpt_stats->parent_tags, mp_p->parent_tags))
1157 /* If no common parent tags are found, see if both MPT and
1158 MP_P->VAR are call-clobbered. */
1159 if (is_call_clobbered (mpt) && is_call_clobbered (mp_p->var))
1163 if (mpt == NULL_TREE)
1164 mpt = get_mpt_for (mp_p->var);
1166 set_memory_partition (mp_p->var, mpt);
1168 mp_p->partitioned_p = true;
1170 mark_sym_for_renaming (mp_p->var);
1171 mark_sym_for_renaming (mpt);
1177 /* Rewrite the alias set for TAG to use the newly created partitions.
1178 If TAG is NULL, rewrite the set of call-clobbered variables.
1179 NEW_ALIASES is a scratch bitmap to build the new set of aliases for
1183 rewrite_alias_set_for (tree tag, bitmap new_aliases)
1185 referenced_var_iterator ri;
/* For every symbol in TAG's alias set, substitute the partition the
   symbol was thrown into (if any); unpartitioned symbols are kept
   unchanged in the rebuilt set.  */
1188 FOR_EACH_REFERENCED_VAR_IN_BITMAP (MTAG_ALIASES (tag), sym, ri)
1190 mpt = memory_partition (sym);
1192 bitmap_set_bit (new_aliases, DECL_UID (mpt));
1194 bitmap_set_bit (new_aliases, DECL_UID (sym));
1197 /* Rebuild the may-alias array for TAG. */
1198 bitmap_copy (MTAG_ALIASES (tag), new_aliases);
1202 /* Determine how many virtual operands can be saved by partitioning
1203 MP_P->VAR into MPT. When a symbol S is thrown inside a partition
1204 P, every virtual operand that used to reference S will now
1205 reference P. Whether it reduces the number of virtual operands
1208 1- Direct references to S are never saved. Instead of the virtual
1209 operand to S, we will now have a virtual operand to P.
1211 2- Indirect references to S are reduced only for those memory tags
1212 holding S that already had other symbols partitioned into P.
1213 For instance, if a memory tag T has the alias set { a b S c },
1214 the first time we partition S into P, the alias set will become
1215 { a b P c }, so no virtual operands will be saved. However, if
1216 we now partition symbol 'c' into P, then the alias set for T
1217 will become { a b P }, so we will be saving one virtual operand
1218 for every indirect reference to 'c'.
1220 3- If S is call-clobbered, we save as many virtual operands as
1221 call/asm sites exist in the code, but only if other
1222 call-clobbered symbols have been grouped into P. The first
1223 call-clobbered symbol that we group does not produce any
1226 MEM_REF_STATS points to CFUN's memory reference information. */
1229 estimate_vop_reduction (struct mem_ref_stats_d *mem_ref_stats,
1230 mem_sym_stats_t mp_p, tree mpt)
1234 mem_sym_stats_t mpt_stats;
1236 /* We should only get symbols with indirect references here. */
1237 gcc_assert (mp_p->num_indirect_reads > 0 || mp_p->num_indirect_writes > 0);
1239 /* Note that the only statistics we keep for MPT is the set of
1240 parent tags to know which memory tags have had alias members
1241 partitioned, and the indicator has_call_clobbered_vars.
1242 Reference counts are not important for MPT. */
1243 mpt_stats = get_mem_sym_stats_for (mpt);
1245 /* Traverse all the parent tags for MP_P->VAR. For every tag T, if
1246 partition P is already grouping aliases of T, then reduce the
1247 number of virtual operands by the number of direct references
1249 if (mp_p->parent_tags)
1251 if (mpt_stats->parent_tags == NULL)
1252 mpt_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
1254 EXECUTE_IF_SET_IN_BITMAP (mp_p->parent_tags, 0, i, bi)
1256 if (bitmap_bit_p (mpt_stats->parent_tags, i))
1258 /* Partition MPT is already partitioning symbols in the
1259 alias set for TAG. This means that we are now saving
1260 1 virtual operand for every direct reference to TAG. */
1261 tree tag = referenced_var (i);
1262 mem_sym_stats_t tag_stats = mem_sym_stats (cfun, tag);
/* Direct references to TAG no longer need their own VOPs, so
   the global estimates shrink by TAG's direct read/write counts.  */
1263 mem_ref_stats->num_vuses -= tag_stats->num_direct_reads;
1264 mem_ref_stats->num_vdefs -= tag_stats->num_direct_writes;
1268 /* This is the first symbol in tag I's alias set that is
1269 being grouped under MPT. We will not save any
1270 virtual operands this time, but record that MPT is
1271 grouping a symbol from TAG's alias set so that the
1272 next time we get the savings. */
1273 bitmap_set_bit (mpt_stats->parent_tags, i);
1278 /* If MP_P->VAR is call-clobbered, and MPT is already grouping
1279 call-clobbered symbols, then we will save as many virtual
1280 operands as asm/call sites there are. */
1281 if (is_call_clobbered (mp_p->var))
1283 if (mpt_stats->has_call_clobbered_vars)
1284 mem_ref_stats->num_vdefs -= mem_ref_stats->num_call_sites
1285 + mem_ref_stats->num_asm_sites;
/* Remember that MPT now holds at least one call-clobbered symbol so
   the next grouped call-clobbered symbol yields the savings above.  */
1287 mpt_stats->has_call_clobbered_vars = true;
1292 /* Helper for compute_memory_partitions. Transfer reference counts
1293 from pointers to their pointed-to sets. Counters for pointers were
1294 computed by update_alias_info. MEM_REF_STATS points to CFUN's
1295 memory reference information. */
1298 update_reference_counts (struct mem_ref_stats_d *mem_ref_stats)
1300 referenced_var_iterator ri;
1301 mem_sym_stats_t sym_stats;
/* Walk all SSA names; only dereferenced pointers with points-to
   information contribute to the counts.  */
1305 for (i = 1; i < num_ssa_names; i++)
1308 struct ptr_info_def *pi;
1312 && POINTER_TYPE_P (TREE_TYPE (ptr))
1313 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1314 && pi->is_dereferenced)
1317 mem_sym_stats_t ptr_stats, tag_stats;
1319 /* If PTR has flow-sensitive points-to information, use
1320 PTR's name tag, otherwise use the symbol tag associated
1321 with PTR's symbol. */
1322 if (pi->name_mem_tag)
1323 tag = pi->name_mem_tag;
1325 tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
1327 ptr_stats = get_mem_sym_stats_for (ptr);
1328 tag_stats = get_mem_sym_stats_for (tag);
1330 /* TAG has as many direct references as dereferences we
1331 found for its parent pointer. */
1332 tag_stats->num_direct_reads += ptr_stats->num_direct_reads;
1333 tag_stats->num_direct_writes += ptr_stats->num_direct_writes;
1335 /* All the dereferences of pointer PTR are considered direct
1336 references to PTR's memory tag (TAG). In turn,
1337 references to TAG will become virtual operands for every
1338 symbol in TAG's alias set. So, for every symbol ALIAS in
1339 TAG's alias set, add as many indirect references to ALIAS
1340 as direct references there are for TAG. */
1341 if (MTAG_ALIASES (tag))
1342 FOR_EACH_REFERENCED_VAR_IN_BITMAP (MTAG_ALIASES (tag), alias, ri)
1344 sym_stats = get_mem_sym_stats_for (alias);
1346 /* All the direct references to TAG are indirect references
1348 sym_stats->num_indirect_reads += ptr_stats->num_direct_reads;
1349 sym_stats->num_indirect_writes += ptr_stats->num_direct_writes;
/* Execution-frequency scores propagate along with the counts so
   the partition heuristic can prefer cold symbols.  */
1350 sym_stats->frequency_reads += ptr_stats->frequency_reads;
1351 sym_stats->frequency_writes += ptr_stats->frequency_writes;
1353 /* Indicate that TAG is one of ALIAS's parent tags. */
1354 if (sym_stats->parent_tags == NULL)
1355 sym_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
1356 bitmap_set_bit (sym_stats->parent_tags, DECL_UID (tag));
1361 /* Call-clobbered symbols are indirectly written at every
1363 FOR_EACH_REFERENCED_VAR_IN_BITMAP (gimple_call_clobbered_vars (cfun), sym, ri)
1365 sym_stats = get_mem_sym_stats_for (sym);
1366 sym_stats->num_indirect_writes += mem_ref_stats->num_call_sites
1367 + mem_ref_stats->num_asm_sites;
1370 /* Addressable symbols are indirectly written at some ASM sites.
1371 Since only ASM sites that clobber memory actually affect
1372 addressable symbols, this is an over-estimation. */
1373 FOR_EACH_REFERENCED_VAR_IN_BITMAP (gimple_addressable_vars (cfun), sym, ri)
1375 sym_stats = get_mem_sym_stats_for (sym);
1376 sym_stats->num_indirect_writes += mem_ref_stats->num_asm_sites;
1381 /* Helper for compute_memory_partitions. Add all memory symbols to
1382 *MP_INFO_P and compute the initial estimate for the total number of
1383 virtual operands needed. MEM_REF_STATS points to CFUN's memory
1384 reference information. On exit, *TAGS_P will contain the list of
1385 memory tags whose alias set need to be rewritten after
1389 build_mp_info (struct mem_ref_stats_d *mem_ref_stats,
1390 VEC(mem_sym_stats_t,heap) **mp_info_p,
1391 VEC(tree,heap) **tags_p)
1394 referenced_var_iterator rvi;
1396 FOR_EACH_REFERENCED_VAR (var, rvi)
1398 mem_sym_stats_t sym_stats;
1401 /* We are only interested in memory symbols other than MPTs. */
1402 if (is_gimple_reg (var) || TREE_CODE (var) == MEMORY_PARTITION_TAG)
1405 /* Collect memory tags into the TAGS array so that we can
1406 rewrite their alias sets after partitioning. */
1407 if (MTAG_P (var) && MTAG_ALIASES (var))
1408 VEC_safe_push (tree, heap, *tags_p, var);
1410 /* Since we are going to re-compute partitions, any symbols that
1411 used to belong to a partition must be detached from it and
1412 marked for renaming. */
1413 if ((old_mpt = memory_partition (var)) != NULL)
1415 mark_sym_for_renaming (old_mpt);
1416 set_memory_partition (var, NULL_TREE);
1417 mark_sym_for_renaming (var);
1420 sym_stats = get_mem_sym_stats_for (var);
1422 /* Add VAR's reference info to MP_INFO. Note that the only
1423 symbols that make sense to partition are those that have
1424 indirect references. If a symbol S is always directly
1425 referenced, partitioning it will not reduce the number of
1426 virtual operators. The only symbols that are profitable to
1427 partition are those that belong to alias sets and/or are
1429 if (sym_stats->num_indirect_reads > 0
1430 || sym_stats->num_indirect_writes > 0)
1431 VEC_safe_push (mem_sym_stats_t, heap, *mp_info_p, sym_stats);
1433 /* Update the number of estimated VOPS. Note that direct
1434 references to memory tags are always counted as indirect
1435 references to their alias set members, so if a memory tag has
1436 aliases, do not count its direct references to avoid double
1438 if (!MTAG_P (var) || !MTAG_ALIASES (var))
1440 mem_ref_stats->num_vuses += sym_stats->num_direct_reads;
1441 mem_ref_stats->num_vdefs += sym_stats->num_direct_writes;
/* Indirect references always contribute to the VOP estimate,
   regardless of whether VAR is a tag.  */
1444 mem_ref_stats->num_vuses += sym_stats->num_indirect_reads;
1445 mem_ref_stats->num_vdefs += sym_stats->num_indirect_writes;
1450 /* Compute memory partitions. A memory partition (MPT) is an
1451 arbitrary grouping of memory symbols, such that references to one
1452 member of the group are considered a reference to all the members of
1455 As opposed to alias sets in memory tags, the grouping into
1456 partitions is completely arbitrary and only done to reduce the
1457 number of virtual operands. The only rule that needs to be
1458 observed when creating memory partitions is that given two memory
1459 partitions MPT.i and MPT.j, they must not contain symbols in
1462 Memory partitions are used when putting the program into Memory-SSA
1463 form. In particular, in Memory-SSA PHI nodes are not computed for
1464 individual memory symbols. They are computed for memory
1465 partitions. This reduces the amount of PHI nodes in the SSA graph
1466 at the expense of precision (i.e., it makes unrelated stores affect
1469 However, it is possible to increase precision by changing this
1470 partitioning scheme. For instance, if the partitioning scheme is
1471 such that get_mpt_for is the identity function (that is,
1472 get_mpt_for (s) = s), this will result in ultimate precision at the
1473 expense of huge SSA webs.
1475 At the other extreme, a partitioning scheme that groups all the
1476 symbols in the same set results in minimal SSA webs and almost
1477 total loss of precision.
1479 The partitioning heuristic uses three parameters to decide the
1480 order in which symbols are processed. The list of symbols is
1481 sorted so that symbols that are more likely to be partitioned are
1482 near the top of the list:
1484 - Execution frequency. If a memory reference is in a frequently
1485 executed code path, grouping it into a partition may block useful
1486 transformations and cause sub-optimal code generation. So, the
1487 partition heuristic tries to avoid grouping symbols with high
1488 execution frequency scores. Execution frequency is taken
1489 directly from the basic blocks where every reference is made (see
1490 update_mem_sym_stats_from_stmt), which in turn uses the
1491 profile guided machinery, so if the program is compiled with PGO
1492 enabled, more accurate partitioning decisions will be made.
1494 - Number of references. Symbols with few references in the code,
1495 are partitioned before symbols with many references.
1497 - NO_ALIAS attributes. Symbols with any of the NO_ALIAS*
1498 attributes are partitioned after symbols marked MAY_ALIAS.
1500 Once the list is sorted, the partitioning proceeds as follows:
1502 1- For every symbol S in MP_INFO, create a new memory partition MP,
1503 if necessary. To avoid memory partitions that contain symbols
1504 from non-conflicting alias sets, memory partitions are
1505 associated to the memory tag that holds S in its alias set. So,
1506 when looking for a memory partition for S, the memory partition
1507 associated with one of the memory tags holding S is chosen. If
1508 none exists, a new one is created.
1510 2- Add S to memory partition MP.
1512 3- Reduce by 1 the number of VOPS for every memory tag holding S.
1514 4- If the total number of VOPS is less than MAX_ALIASED_VOPS or the
1515 average number of VOPS per statement is less than
1516 AVG_ALIASED_VOPS, stop. Otherwise, go to the next symbol in the
1520 compute_memory_partitions (void)
1524 mem_sym_stats_t mp_p;
1525 VEC(mem_sym_stats_t,heap) *mp_info;
1527 VEC(tree,heap) *tags;
1528 struct mem_ref_stats_d *mem_ref_stats;
1529 int prev_max_aliased_vops;
1531 mem_ref_stats = gimple_mem_ref_stats (cfun);
1532 gcc_assert (mem_ref_stats->num_vuses == 0 && mem_ref_stats->num_vdefs == 0);
/* Nothing to partition when the function has no memory statements.  */
1534 if (mem_ref_stats->num_mem_stmts == 0)
1537 timevar_push (TV_MEMORY_PARTITIONING);
/* MAX_ALIASED_VOPS may be raised below; remember the user's value so
   it can be restored before returning.  */
1541 prev_max_aliased_vops = MAX_ALIASED_VOPS;
1543 /* Since we clearly cannot lower the number of virtual operators
1544 below the total number of memory statements in the function, we
1545 may need to adjust MAX_ALIASED_VOPS beforehand. */
1546 if (MAX_ALIASED_VOPS < mem_ref_stats->num_mem_stmts)
1547 MAX_ALIASED_VOPS = mem_ref_stats->num_mem_stmts;
1549 /* Update reference stats for all the pointed-to variables and
1551 update_reference_counts (mem_ref_stats);
1553 /* Add all the memory symbols to MP_INFO. */
1554 build_mp_info (mem_ref_stats, &mp_info, &tags);
1556 /* No partitions required if we are below the threshold. */
1557 if (!need_to_partition_p (mem_ref_stats))
1560 fprintf (dump_file, "\nMemory partitioning NOT NEEDED for %s\n",
1561 get_name (current_function_decl));
1565 /* Sort the MP_INFO array so that symbols that should be partitioned
1566 first are near the top of the list. */
1567 sort_mp_info (mp_info);
1571 fprintf (dump_file, "\nMemory partitioning NEEDED for %s\n\n",
1572 get_name (current_function_decl));
1573 fprintf (dump_file, "Memory symbol references before partitioning:\n");
1574 dump_mp_info (dump_file, mp_info);
1577 /* Create partitions for variables in MP_INFO until we have enough
1578 to lower the total number of VOPS below MAX_ALIASED_VOPS or if
1579 the average number of VOPS per statement is below
1580 AVG_ALIASED_VOPS. */
1581 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
1585 /* If we are below the threshold, stop. */
1586 if (!need_to_partition_p (mem_ref_stats))
1589 /* SFTs that are marked unpartitionable should not be added to
1590 partitions. These SFTs are special because they mark the
1591 first SFT into a structure where a pointer is pointing to.
1592 This is needed by the operand scanner to find adjacent
1593 fields. See add_vars_for_offset for details. */
1594 if (TREE_CODE (mp_p->var) == STRUCT_FIELD_TAG
1595 && SFT_UNPARTITIONABLE_P (mp_p->var))
1598 mpt = find_partition_for (mp_p);
1599 estimate_vop_reduction (mem_ref_stats, mp_p, mpt);
1602 /* After partitions have been created, rewrite alias sets to use
1603 them instead of the original symbols. This way, if the alias set
1604 was computed as { a b c d e f }, and the subset { b e f } was
1605 grouped into partition MPT.3, then the new alias set for the tag
1606 will be { a c d MPT.3 }.
1608 Note that this is not strictly necessary. The operand scanner
1609 will always check if a symbol belongs to a partition when adding
1610 virtual operands. However, by reducing the size of the alias
1611 sets to be scanned, the work needed inside the operand scanner is
1612 significantly reduced. */
1613 new_aliases = BITMAP_ALLOC (&alias_bitmap_obstack);
/* NEW_ALIASES is reused as scratch space for every tag; it is cleared
   between iterations.  */
1615 for (i = 0; VEC_iterate (tree, tags, i, tag); i++)
1617 rewrite_alias_set_for (tag, new_aliases);
1618 bitmap_clear (new_aliases);
1621 BITMAP_FREE (new_aliases);
1625 fprintf (dump_file, "\nMemory symbol references after partitioning:\n");
1626 dump_mp_info (dump_file, mp_info);
1630 /* Free allocated memory. */
1631 VEC_free (mem_sym_stats_t, heap, mp_info);
1632 VEC_free (tree, heap, tags);
/* Restore the user-visible parameter adjusted at function entry.  */
1634 MAX_ALIASED_VOPS = prev_max_aliased_vops;
1636 timevar_pop (TV_MEMORY_PARTITIONING);
1640 /* Compute may-alias information for every variable referenced in function
1643 Alias analysis proceeds in 3 main phases:
1645 1- Points-to and escape analysis.
1647 This phase walks the use-def chains in the SSA web looking for three
1650 * Assignments of the form P_i = &VAR
1651 * Assignments of the form P_i = malloc()
1652 * Pointers and ADDR_EXPR that escape the current function.
1654 The concept of 'escaping' is the same one used in the Java world. When
1655 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
1656 outside of the current function. So, assignment to global variables,
1657 function arguments and returning a pointer are all escape sites, as are
1658 conversions between pointers and integers.
1660 This is where we are currently limited. Since not everything is renamed
1661 into SSA, we lose track of escape properties when a pointer is stashed
1662 inside a field in a structure, for instance. In those cases, we are
1663 assuming that the pointer does escape.
1665 We use escape analysis to determine whether a variable is
1666 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
1667 is call-clobbered. If a pointer P_i escapes, then all the variables
1668 pointed-to by P_i (and its memory tag) also escape.
1670 2- Compute flow-sensitive aliases
1672 We have two classes of memory tags. Memory tags associated with the
1673 pointed-to data type of the pointers in the program. These tags are
1674 called "symbol memory tag" (SMT). The other class are those associated
1675 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
1676 when adding operands for an INDIRECT_REF *P_i, we will first check
1677 whether P_i has a name tag, if it does we use it, because that will have
1678 more precise aliasing information. Otherwise, we use the standard symbol
1681 In this phase, we go through all the pointers we found in points-to
1682 analysis and create alias sets for the name memory tags associated with
1683 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
1684 it points to and its tag.
1687 3- Compute flow-insensitive aliases
1689 This pass will compare the alias set of every symbol memory tag and
1690 every addressable variable found in the program. Given a symbol
1691 memory tag SMT and an addressable variable V. If the alias sets of
1692 SMT and V conflict (as computed by may_alias_p), then V is marked
1693 as an alias tag and added to the alias set of SMT.
1695 For instance, consider the following function:
1711 After aliasing analysis has finished, the symbol memory tag for pointer
1712 'p' will have two aliases, namely variables 'a' and 'b'. Every time
1713 pointer 'p' is dereferenced, we want to mark the operation as a
1714 potential reference to 'a' and 'b'.
1724 # p_1 = PHI <p_4(1), p_6(2)>;
1739 In certain cases, the list of may aliases for a pointer may grow too
1740 large. This may cause an explosion in the number of virtual operands
1741 inserted in the code. Resulting in increased memory consumption and
1744 When the number of virtual operands needed to represent aliased
1745 loads and stores grows too large (configurable with option --param
1746 max-aliased-vops and --param avg-aliased-vops), alias sets are
1747 grouped to avoid severe compile-time slow downs and memory
1748 consumption. See compute_memory_partitions. */
1751 compute_may_aliases (void)
1753 struct alias_info *ai;
1755 timevar_push (TV_TREE_MAY_ALIAS);
1757 memset (&alias_stats, 0, sizeof (alias_stats));
1759 /* Initialize aliasing information. */
1760 ai = init_alias_info ();
1762 /* For each pointer P_i, determine the sets of variables that P_i may
1763 point-to. For every addressable variable V, determine whether the
1764 address of V escapes the current function, making V call-clobbered
1765 (i.e., whether &V is stored in a global variable or if it's passed as a
1766 function call argument). */
1767 compute_points_to_sets (ai);
1769 /* Collect all pointers and addressable variables, compute alias sets,
1770 create memory tags for pointers and promote variables whose address is
1771 not needed anymore. */
1772 setup_pointers_and_addressables (ai);
1774 /* Compute type-based flow-insensitive aliasing for all the type
1776 compute_flow_insensitive_aliasing (ai);
1778 /* Compute flow-sensitive, points-to based aliasing for all the name
1780 compute_flow_sensitive_aliasing (ai);
1782 /* Compute call clobbering information. */
1783 compute_call_clobbered (ai);
1785 /* If the program makes no reference to global variables, but it
1786 contains a mixture of pure and non-pure functions, then we need
1787 to create use-def and def-def links between these functions to
1788 avoid invalid transformations on them. */
1789 maybe_create_global_var ();
1791 /* If the program contains ref-all pointers, finalize may-alias information
1792 for them. This pass needs to be run after call-clobbering information
1793 has been computed. */
1794 if (ai->ref_all_symbol_mem_tag)
1795 finalize_ref_all_pointers (ai);
1797 /* Compute memory partitions for every memory variable. */
1798 compute_memory_partitions ();
1800 /* Remove partitions with no symbols. Partitions may end up with an
1801 empty MPT_SYMBOLS set if a previous round of alias analysis
1802 needed to partition more symbols. Since we don't need those
1803 partitions anymore, remove them to free up the space. */
1807 VEC(tree,heap) *mpt_table;
1809 mpt_table = gimple_ssa_operands (cfun)->mpt_table;
/* VEC_unordered_remove replaces element I with the last element, so I
   is only advanced when no removal happens.  */
1811 while (i < VEC_length (tree, mpt_table))
1813 mpt = VEC_index (tree, mpt_table, i);
1814 if (MPT_SYMBOLS (mpt) == NULL)
1815 VEC_unordered_remove (tree, mpt_table, i);
1821 /* Populate all virtual operands and newly promoted register operands. */
1823 block_stmt_iterator bsi;
1826 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1827 update_stmt_if_modified (bsi_stmt (bsi));
1830 /* Debugging dumps. */
1833 dump_mem_ref_stats (dump_file);
1834 dump_alias_info (dump_file);
1835 dump_points_to_info (dump_file);
1837 if (dump_flags & TDF_STATS)
1838 dump_alias_stats (dump_file);
1840 if (dump_flags & TDF_DETAILS)
1841 dump_referenced_vars (dump_file);
1844 /* Report strict aliasing violations. */
1845 strict_aliasing_warning_backend ();
1847 /* Deallocate memory used by aliasing data structures. */
1848 delete_alias_info (ai);
/* Renaming requests accumulated above (partitioned symbols, stale
   name tags) are flushed here in a single SSA update.  */
1850 if (need_ssa_update_p ())
1851 update_ssa (TODO_update_ssa);
1853 timevar_pop (TV_TREE_MAY_ALIAS);
1858 /* Data structure used to count the number of dereferences to PTR
1859 inside an expression. */
1867 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
1868 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
1871 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
1873 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
1875 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
1876 pointer 'ptr' is *not* dereferenced, it is simply used to compute
1877 the address of 'fld' as 'ptr + offsetof(fld)'. */
1878 if (TREE_CODE (*tp) == ADDR_EXPR)
/* A match: *TP is an indirect reference through exactly the pointer
   being counted (COUNT_P->PTR).  */
1884 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
1891 /* Count the number of direct and indirect uses for pointer PTR in
1892 statement STMT. The number of direct uses is stored in
1893 *NUM_USES_P. Indirect references are counted separately depending
1894 on whether they are store or load operations. The counts are
1895 stored in *NUM_STORES_P and *NUM_LOADS_P. */
1898 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
1899 unsigned *num_loads_p, unsigned *num_stores_p)
1908 /* Find out the total number of uses of PTR in STMT. */
1909 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
1913 /* Now count the number of indirect references to PTR. This is
1914 truly awful, but we don't have much choice. There are no parent
1915 pointers inside INDIRECT_REFs, so an expression like
1916 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
1917 find all the indirect and direct uses of x_1 inside. The only
1918 shortcut we can take is the fact that GIMPLE only allows
1919 INDIRECT_REFs inside the expressions below. */
1920 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1921 || (TREE_CODE (stmt) == RETURN_EXPR
1922 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1923 || TREE_CODE (stmt) == ASM_EXPR
1924 || TREE_CODE (stmt) == CALL_EXPR)
/* Split STMT into its LHS (stores) and RHS (loads).  For ASM_EXPRs
   the outputs and inputs are TREE_LISTs rather than single operands.  */
1928 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
1930 lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1931 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1933 else if (TREE_CODE (stmt) == RETURN_EXPR)
1935 tree e = TREE_OPERAND (stmt, 0);
1936 lhs = GIMPLE_STMT_OPERAND (e, 0);
1937 rhs = GIMPLE_STMT_OPERAND (e, 1);
1939 else if (TREE_CODE (stmt) == ASM_EXPR)
1941 lhs = ASM_OUTPUTS (stmt);
1942 rhs = ASM_INPUTS (stmt);
/* Dereferences found on the LHS are stores through PTR.  */
1951 && (TREE_CODE (lhs) == TREE_LIST
1953 || GIMPLE_STMT_P (lhs)))
1955 struct count_ptr_d count;
1958 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
1959 *num_stores_p = count.count;
/* Dereferences found on the RHS are loads through PTR.  */
1963 && (TREE_CODE (rhs) == TREE_LIST
1965 || GIMPLE_STMT_P (rhs)))
1967 struct count_ptr_d count;
1970 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
1971 *num_loads_p = count.count;
/* Every indirect reference is also a use, so the totals must agree.  */
1975 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
1978 /* Remove memory references stats for function FN. */
1981 delete_mem_ref_stats (struct function *fn)
/* Both the per-symbol stats pool and the pointer map exist only when
   stats were collected; free them together and clear the map pointer
   so later queries see no stale data.  */
1983 if (gimple_mem_ref_stats (fn)->mem_sym_stats)
1985 free_alloc_pool (mem_sym_stats_pool);
1986 pointer_map_destroy (gimple_mem_ref_stats (fn)->mem_sym_stats);
1988 gimple_mem_ref_stats (fn)->mem_sym_stats = NULL;
1992 /* Initialize memory reference stats. */
1995 init_mem_ref_stats (void)
1997 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
/* Per-symbol statistics are pool-allocated; the pool is a file-level
   variable freed by delete_mem_ref_stats.  */
1999 mem_sym_stats_pool = create_alloc_pool ("Mem sym stats",
2000 sizeof (struct mem_sym_stats_d),
/* Zero all counters, then create the symbol -> stats pointer map.  */
2002 memset (mem_ref_stats, 0, sizeof (struct mem_ref_stats_d));
2003 mem_ref_stats->mem_sym_stats = pointer_map_create ();
2007 /* Helper for init_alias_info. Reset existing aliasing information. */
2010 reset_alias_info (void)
2012 referenced_var_iterator rvi;
2015 bitmap active_nmts, all_nmts;
2017 /* Clear the set of addressable variables. We do not need to clear
2018 the TREE_ADDRESSABLE bit on every symbol because we are going to
2019 re-compute addressability here. */
2020 bitmap_clear (gimple_addressable_vars (cfun));
2022 active_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);
2023 all_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);
2025 /* Clear flow-insensitive alias information from each symbol. */
2026 FOR_EACH_REFERENCED_VAR (var, rvi)
2028 if (is_gimple_reg (var))
2032 MTAG_ALIASES (var) = NULL;
2034 /* Memory partition information will be computed from scratch. */
2035 if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
2036 MPT_SYMBOLS (var) = NULL;
2038 /* Collect all the name tags to determine if we have any
2039 orphaned tags that need to be removed from the IL. A name tag
2040 will be orphaned if it is not associated with any active SSA
2042 if (TREE_CODE (var) == NAME_MEMORY_TAG)
2043 bitmap_set_bit (all_nmts, DECL_UID (var));
2045 /* Since we are about to re-discover call-clobbered
2046 variables, clear the call-clobbered flag. Variables that
2047 are intrinsically call-clobbered (globals, local statics,
2048 etc) will not be marked by the aliasing code, so we can't
2049 remove them from CALL_CLOBBERED_VARS.
2051 NB: STRUCT_FIELDS are still call clobbered if they are for a
2052 global variable, so we *don't* clear their call clobberedness
2053 just because they are tags, though we will clear it if they
2054 aren't for global variables. */
2055 if (TREE_CODE (var) == NAME_MEMORY_TAG
2056 || TREE_CODE (var) == SYMBOL_MEMORY_TAG
2057 || TREE_CODE (var) == MEMORY_PARTITION_TAG
2058 || !is_global_var (var))
2059 clear_call_clobbered (var);
2062 /* Clear flow-sensitive points-to information from each SSA name. */
2063 for (i = 1; i < num_ssa_names; i++)
2065 tree name = ssa_name (i);
2067 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
2070 if (SSA_NAME_PTR_INFO (name))
2072 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
2074 /* Clear all the flags but keep the name tag to
2075 avoid creating new temporaries unnecessarily. If
2076 this pointer is found to point to a subset or
2077 superset of its former points-to set, then a new
2078 tag will need to be created in create_name_tags. */
2079 pi->pt_anything = 0;
2081 pi->value_escapes_p = 0;
2082 pi->is_dereferenced = 0;
2084 bitmap_clear (pi->pt_vars);
2086 /* Add NAME's name tag to the set of active tags. */
2087 if (pi->name_mem_tag)
2088 bitmap_set_bit (active_nmts, DECL_UID (pi->name_mem_tag));
2092 /* Name memory tags that are no longer associated with an SSA name
2093 are considered stale and should be removed from the IL. All the
2094 name tags that are in the set ALL_NMTS but not in ACTIVE_NMTS are
2095 considered stale and marked for renaming. */
2096 bitmap_and_compl_into (all_nmts, active_nmts);
2097 mark_set_for_renaming (all_nmts);
2099 BITMAP_FREE (all_nmts);
2100 BITMAP_FREE (active_nmts);
2104 /* Initialize the data structures used for alias analysis.  Returns a
   freshly allocated alias_info; the caller releases it with
   delete_alias_info.  */
2106 static struct alias_info *
2107 init_alias_info (void)
2109 struct alias_info *ai;
2110 referenced_var_iterator rvi;
2113 ai = XCNEW (struct alias_info);
2114 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
2115 sbitmap_zero (ai->ssa_names_visited);
2116 ai->processed_ptrs = VEC_alloc (tree, heap, 50);
2117 ai->written_vars = pointer_set_create ();
2118 ai->dereferenced_ptrs_store = pointer_set_create ();
2119 ai->dereferenced_ptrs_load = pointer_set_create ();
2121 /* Clear out all memory reference stats. */
2122 init_mem_ref_stats ();
2124 /* If aliases have been computed before, clear existing information. */
2125 if (gimple_aliases_computed_p (cfun))
2126 reset_alias_info ();
2129 /* If this is the first time we compute aliasing information,
2130 every non-register symbol will need to be put into SSA form
2131 (the initial SSA form only operates on GIMPLE registers). */
2132 FOR_EACH_REFERENCED_VAR (var, rvi)
2133 if (!is_gimple_reg (var))
2134 mark_sym_for_renaming (var);
2137 /* Next time, we will need to reset alias information. */
2138 cfun->gimple_df->aliases_computed_p = true;
/* All alias bitmaps live on a single obstack; release any previous
   round's allocations wholesale before starting a new round.  */
2139 if (alias_bitmap_obstack.elements != NULL)
2140 bitmap_obstack_release (&alias_bitmap_obstack);
2141 bitmap_obstack_initialize (&alias_bitmap_obstack);
2147 /* Deallocate memory used by alias analysis.  Frees AI itself along
   with every structure init_alias_info allocated, plus the per-function
   memory reference stats and points-to sets.  */
2150 delete_alias_info (struct alias_info *ai)
2154 sbitmap_free (ai->ssa_names_visited);
2156 VEC_free (tree, heap, ai->processed_ptrs);
/* addressable_vars and pointers are arrays of individually malloc'd
   entries; free each entry, then the array.  */
2158 for (i = 0; i < ai->num_addressable_vars; i++)
2159 free (ai->addressable_vars[i]);
2160 free (ai->addressable_vars);
2162 for (i = 0; i < ai->num_pointers; i++)
2163 free (ai->pointers[i]);
2164 free (ai->pointers);
2166 pointer_set_destroy (ai->written_vars);
2167 pointer_set_destroy (ai->dereferenced_ptrs_store);
2168 pointer_set_destroy (ai->dereferenced_ptrs_load);
2171 delete_mem_ref_stats (cfun);
2172 delete_points_to_sets ();
2176 /* Used for hashing to identify pointer infos with identical
/* htab equality callback: two ptr_info_defs are equal iff their
   points-to sets contain exactly the same variables.  */
2180 eq_ptr_info (const void *p1, const void *p2)
2182 const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
2183 const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
2184 return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
/* htab hash callback paired with eq_ptr_info: hash a ptr_info_def by
   its points-to bitmap so pointers with equal sets collide.  */
2188 ptr_info_hash (const void *p)
2190 const struct ptr_info_def *n = (const struct ptr_info_def *) p;
2191 return bitmap_hash (n->pt_vars);
2195 /* Create name tags for all the pointers that have been dereferenced.
2196 We only create a name tag for a pointer P if P is found to point to
2197 a set of variables (so that we can alias them to *P) or if it is
2198 the result of a call to malloc (which means that P cannot point to
2199 anything else nor alias any other variable).
2201 If two pointers P and Q point to the same set of variables, they
2202 are assigned the same name tag. */
2205 create_name_tags (void)
2208 VEC (tree, heap) *with_ptvars = NULL;
2212 /* Collect the list of pointers with a non-empty points to set. */
2213 for (i = 1; i < num_ssa_names; i++)
2215 tree ptr = ssa_name (i);
2216 struct ptr_info_def *pi;
2219 || !POINTER_TYPE_P (TREE_TYPE (ptr))
2220 || !SSA_NAME_PTR_INFO (ptr))
2223 pi = SSA_NAME_PTR_INFO (ptr);
2225 if (pi->pt_anything || !pi->is_dereferenced)
2227 /* No name tags for pointers that have not been
2228 dereferenced or point to an arbitrary location. */
2229 pi->name_mem_tag = NULL_TREE;
2233 /* Set pt_anything on the pointers without pt_vars filled in so
2234 that they are assigned a symbol tag. */
2235 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
2236 VEC_safe_push (tree, heap, with_ptvars, ptr);
2238 set_pt_anything (ptr);
2241 /* If we didn't find any pointers with pt_vars set, we're done. */
2245 ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);
2247 /* Now go through the pointers with pt_vars, and find a name tag
2248 with the same pt_vars as this pointer, or create one if one
2250 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
2252 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2253 tree old_name_tag = pi->name_mem_tag;
2254 struct ptr_info_def **slot;
2256 /* If PTR points to a set of variables, check if we don't
2257 have another pointer Q with the same points-to set before
2258 creating a tag. If so, use Q's tag instead of creating a
2261 This is important for not creating unnecessary symbols
2262 and also for copy propagation. If we ever need to
2263 propagate PTR into Q or vice-versa, we would run into
2264 problems if they both had different name tags because
2265 they would have different SSA version numbers (which
2266 would force us to take the name tags in and out of SSA). */
2267 slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
2269 pi->name_mem_tag = (*slot)->name_mem_tag;
2274 /* If we didn't find a pointer with the same points-to set
2275 as PTR, create a new name tag if needed. */
2276 if (pi->name_mem_tag == NULL_TREE)
2277 pi->name_mem_tag = get_nmt_for (ptr);
2280 /* If the new name tag computed for PTR is different than
2281 the old name tag that it used to have, then the old tag
2282 needs to be removed from the IL, so we mark it for
2284 if (old_name_tag && old_name_tag != pi->name_mem_tag)
2285 mark_sym_for_renaming (old_name_tag);
2287 /* Inherit volatility from the pointed-to type. */
2288 TREE_THIS_VOLATILE (pi->name_mem_tag)
2289 |= TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
2291 /* Mark the new name tag for renaming. */
2292 mark_sym_for_renaming (pi->name_mem_tag);
2295 htab_delete (ptr_hash);
2297 VEC_free (tree, heap, with_ptvars);
2301 /* Union the alias set SET into the may-aliases for TAG. */
2304 union_alias_set_into (tree tag, bitmap set)
2306 bitmap ma = MTAG_ALIASES (tag);
2308 if (bitmap_empty_p (set))
2312 ma = MTAG_ALIASES (tag) = BITMAP_ALLOC (&alias_bitmap_obstack);
2313 bitmap_ior_into (ma, set);
2317 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
2318 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
2319 name tag and the variables it points-to are call-clobbered. Finally, if
2320 P_i escapes and we could not determine where it points to, then all the
2321 variables in the same alias set as *P_i are marked call-clobbered. This
2322 is necessary because we must assume that P_i may take the address of any
2323 variable in the same alias set. */
2326 compute_flow_sensitive_aliasing (struct alias_info *ai)
2331 timevar_push (TV_FLOW_SENSITIVE);
2334 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2336 if (!find_what_p_points_to (ptr))
2337 set_pt_anything (ptr);
2340 create_name_tags ();
2342 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2344 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2345 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
2347 /* Set up aliasing information for PTR's name memory tag (if it has
2348 one). Note that only pointers that have been dereferenced will
2349 have a name memory tag. */
2350 if (pi->name_mem_tag && pi->pt_vars)
2352 if (!bitmap_empty_p (pi->pt_vars))
2354 union_alias_set_into (pi->name_mem_tag, pi->pt_vars);
2355 union_alias_set_into (tag, pi->pt_vars);
2356 bitmap_clear_bit (MTAG_ALIASES (tag), DECL_UID (tag));
2358 /* It may be the case that this the tag uid was the only
2359 bit we had set in the aliases list, and in this case,
2360 we don't want to keep an empty bitmap, as this
2361 asserts in tree-ssa-operands.c . */
2362 if (bitmap_empty_p (MTAG_ALIASES (tag)))
2363 BITMAP_FREE (MTAG_ALIASES (tag));
2367 timevar_pop (TV_FLOW_SENSITIVE);
2371 /* Return TRUE if at least one symbol in TAG2's alias set is also
2372 present in TAG1's alias set. */
2375 have_common_aliases_p (bitmap tag1aliases, bitmap tag2aliases)
2378 /* This is the old behavior of have_common_aliases_p, which is to
2379 return false if both sets are empty, or one set is and the other
2381 if ((tag1aliases == NULL && tag2aliases != NULL)
2382 || (tag2aliases == NULL && tag1aliases != NULL)
2383 || (tag1aliases == NULL && tag2aliases == NULL))
2386 return bitmap_intersect_p (tag1aliases, tag2aliases);
2389 /* Compute type-based alias sets. Traverse all the pointers and
2390 addressable variables found in setup_pointers_and_addressables.
2392 For every pointer P in AI->POINTERS and addressable variable V in
2393 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
2394 memory tag (SMT) if their alias sets conflict. V is then marked as
2395 an aliased symbol so that the operand scanner knows that statements
2396 containing V have aliased operands. */
2399 compute_flow_insensitive_aliasing (struct alias_info *ai)
2403 timevar_push (TV_FLOW_INSENSITIVE);
2404 /* For every pointer P, determine which addressable variables may alias
2405 with P's symbol memory tag. */
2406 for (i = 0; i < ai->num_pointers; i++)
2409 struct alias_map_d *p_map = ai->pointers[i];
2410 tree tag = symbol_mem_tag (p_map->var);
2413 /* Call-clobbering information is not finalized yet at this point. */
2414 if (PTR_IS_REF_ALL (p_map->var))
2417 for (j = 0; j < ai->num_addressable_vars; j++)
2419 struct alias_map_d *v_map;
2421 bool tag_stored_p, var_stored_p;
2423 v_map = ai->addressable_vars[j];
2425 v_ann = var_ann (var);
2427 /* Skip memory tags and variables that have never been
2428 written to. We also need to check if the variables are
2429 call-clobbered because they may be overwritten by
2431 tag_stored_p = pointer_set_contains (ai->written_vars, tag)
2432 || is_call_clobbered (tag);
2433 var_stored_p = pointer_set_contains (ai->written_vars, var)
2434 || is_call_clobbered (var);
2435 if (!tag_stored_p && !var_stored_p)
2438 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
2440 /* We should never have a var with subvars here, because
2441 they shouldn't get into the set of addressable vars */
2442 gcc_assert (!var_can_have_subvars (var)
2443 || get_subvars_for_var (var) == NULL);
2445 /* Add VAR to TAG's may-aliases set. */
2446 add_may_alias (tag, var);
2451 /* Since this analysis is based exclusively on symbols, it fails to
2452 handle cases where two pointers P and Q have different memory
2453 tags with conflicting alias set numbers but no aliased symbols in
2456 For example, suppose that we have two memory tags SMT.1 and SMT.2
2459 may-aliases (SMT.1) = { a }
2460 may-aliases (SMT.2) = { b }
2462 and the alias set number of SMT.1 conflicts with that of SMT.2.
2463 Since they don't have symbols in common, loads and stores from
2464 SMT.1 and SMT.2 will seem independent of each other, which will
2465 lead to the optimizers making invalid transformations (see
2466 testsuite/gcc.c-torture/execute/pr15262-[12].c).
2468 To avoid this problem, we do a final traversal of AI->POINTERS
2469 looking for pairs of pointers that have no aliased symbols in
2470 common and yet have conflicting alias set numbers. */
2471 for (i = 0; i < ai->num_pointers; i++)
2474 struct alias_map_d *p_map1 = ai->pointers[i];
2475 tree tag1 = symbol_mem_tag (p_map1->var);
2476 bitmap may_aliases1 = MTAG_ALIASES (tag1);
2478 if (PTR_IS_REF_ALL (p_map1->var))
2481 for (j = i + 1; j < ai->num_pointers; j++)
2483 struct alias_map_d *p_map2 = ai->pointers[j];
2484 tree tag2 = symbol_mem_tag (p_map2->var);
2485 bitmap may_aliases2 = may_aliases (tag2);
2487 if (PTR_IS_REF_ALL (p_map2->var))
2490 /* If the pointers may not point to each other, do nothing. */
2491 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
2494 /* The two pointers may alias each other. If they already have
2495 symbols in common, do nothing. */
2496 if (have_common_aliases_p (may_aliases1, may_aliases2))
2499 if (may_aliases2 && !bitmap_empty_p (may_aliases2))
2501 union_alias_set_into (tag1, may_aliases2);
2505 /* Since TAG2 does not have any aliases of its own, add
2506 TAG2 itself to the alias set of TAG1. */
2507 add_may_alias (tag1, tag2);
2512 timevar_pop (TV_FLOW_INSENSITIVE);
2516 /* Finalize may-alias information for ref-all pointers. Traverse all
2517 the addressable variables found in setup_pointers_and_addressables.
2519 If flow-sensitive alias analysis has attached a name memory tag to
2520 a ref-all pointer, we will use it for the dereferences because that
2521 will have more precise aliasing information. But if there is no
2522 name tag, we will use a special symbol tag that aliases all the
2523 call-clobbered addressable variables. */
2526 finalize_ref_all_pointers (struct alias_info *ai)
2530 /* First add the real call-clobbered variables. */
2531 for (i = 0; i < ai->num_addressable_vars; i++)
2533 tree var = ai->addressable_vars[i]->var;
2534 if (is_call_clobbered (var))
2535 add_may_alias (ai->ref_all_symbol_mem_tag, var);
2538 /* Then add the call-clobbered pointer memory tags. See
2539 compute_flow_insensitive_aliasing for the rationale. */
2540 for (i = 0; i < ai->num_pointers; i++)
2542 tree ptr = ai->pointers[i]->var, tag;
2543 /* Avoid adding to self and clean up. */
2544 if (PTR_IS_REF_ALL (ptr))
2546 struct ptr_info_def *pi = get_ptr_info (ptr);
2547 if (pi->is_dereferenced)
2548 pi->pt_anything = 0;
2551 tag = symbol_mem_tag (ptr);
2552 if (is_call_clobbered (tag))
2553 add_may_alias (ai->ref_all_symbol_mem_tag, tag);
2559 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
2562 create_alias_map_for (tree var, struct alias_info *ai)
2564 struct alias_map_d *alias_map;
2565 alias_map = XCNEW (struct alias_map_d);
2566 alias_map->var = var;
2567 alias_map->set = get_alias_set (var);
2568 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
2572 /* Create memory tags for all the dereferenced pointers and build the
2573 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
2574 sets. Based on the address escape and points-to information collected
2575 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
2576 variables whose address is not needed anymore. */
2579 setup_pointers_and_addressables (struct alias_info *ai)
2581 size_t num_addressable_vars, num_pointers;
2582 referenced_var_iterator rvi;
2584 VEC (tree, heap) *varvec = NULL;
2585 safe_referenced_var_iterator srvi;
2587 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
2588 num_addressable_vars = num_pointers = 0;
2590 FOR_EACH_REFERENCED_VAR (var, rvi)
2592 if (may_be_aliased (var))
2593 num_addressable_vars++;
2595 if (POINTER_TYPE_P (TREE_TYPE (var)))
2597 /* Since we don't keep track of volatile variables, assume that
2598 these pointers are used in indirect store operations. */
2599 if (TREE_THIS_VOLATILE (var))
2600 pointer_set_insert (ai->dereferenced_ptrs_store, var);
2606 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
2607 always going to be slightly bigger than we actually need them
2608 because some TREE_ADDRESSABLE variables will be marked
2609 non-addressable below and only pointers with unique symbol tags are
2610 going to be added to POINTERS. */
2611 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
2612 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
2613 ai->num_addressable_vars = 0;
2614 ai->num_pointers = 0;
2616 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
2620 /* Name memory tags already have flow-sensitive aliasing
2621 information, so they need not be processed by
2622 compute_flow_insensitive_aliasing. Similarly, symbol memory
2623 tags are already accounted for when we process their
2626 Structure fields, on the other hand, have to have some of this
2627 information processed for them, but it's pointless to mark them
2628 non-addressable (since they are fake variables anyway). */
2629 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
2632 /* Remove the ADDRESSABLE flag from every addressable variable whose
2633 address is not needed anymore. This is caused by the propagation
2634 of ADDR_EXPR constants into INDIRECT_REF expressions and the
2635 removal of dead pointer assignments done by the early scalar
2637 if (TREE_ADDRESSABLE (var))
2639 if (!bitmap_bit_p (gimple_addressable_vars (cfun), DECL_UID (var))
2640 && TREE_CODE (var) != RESULT_DECL
2641 && !is_global_var (var))
2643 bool okay_to_mark = true;
2645 /* Since VAR is now a regular GIMPLE register, we will need
2646 to rename VAR into SSA afterwards. */
2647 mark_sym_for_renaming (var);
2649 /* If VAR can have sub-variables, and any of its
2650 sub-variables has its address taken, then we cannot
2651 remove the addressable flag from VAR. */
2652 if (var_can_have_subvars (var)
2653 && (svars = get_subvars_for_var (var)))
2658 for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
2660 if (bitmap_bit_p (gimple_addressable_vars (cfun),
2662 okay_to_mark = false;
2663 mark_sym_for_renaming (subvar);
2667 /* The address of VAR is not needed, remove the
2668 addressable bit, so that it can be optimized as a
2669 regular variable. */
2672 /* The memory partition holding VAR will no longer
2673 contain VAR, and statements referencing it will need
2675 if (memory_partition (var))
2676 mark_sym_for_renaming (memory_partition (var));
2678 mark_non_addressable (var);
2683 /* Global variables and addressable locals may be aliased. Create an
2684 entry in ADDRESSABLE_VARS for VAR. */
2685 if (may_be_aliased (var))
2687 if (!var_can_have_subvars (var)
2688 || get_subvars_for_var (var) == NULL)
2689 create_alias_map_for (var, ai);
2691 mark_sym_for_renaming (var);
2694 /* Add pointer variables that have been dereferenced to the POINTERS
2695 array and create a symbol memory tag for them. */
2696 if (POINTER_TYPE_P (TREE_TYPE (var)))
2698 if ((pointer_set_contains (ai->dereferenced_ptrs_store, var)
2699 || pointer_set_contains (ai->dereferenced_ptrs_load, var)))
2704 /* If pointer VAR still doesn't have a memory tag
2705 associated with it, create it now or re-use an
2707 tag = get_smt_for (var, ai);
2708 t_ann = var_ann (tag);
2710 /* The symbol tag will need to be renamed into SSA
2711 afterwards. Note that we cannot do this inside
2712 get_smt_for because aliasing may run multiple times
2713 and we only create symbol tags the first time. */
2714 mark_sym_for_renaming (tag);
2716 /* Similarly, if pointer VAR used to have another type
2717 tag, we will need to process it in the renamer to
2718 remove the stale virtual operands. */
2719 old_tag = symbol_mem_tag (var);
2721 mark_sym_for_renaming (old_tag);
2723 /* Associate the tag with pointer VAR. */
2724 set_symbol_mem_tag (var, tag);
2726 /* If pointer VAR has been used in a store operation,
2727 then its memory tag must be marked as written-to. */
2728 if (pointer_set_contains (ai->dereferenced_ptrs_store, var))
2729 pointer_set_insert (ai->written_vars, tag);
2733 /* The pointer has not been dereferenced. If it had a
2734 symbol memory tag, remove it and mark the old tag for
2735 renaming to remove it out of the IL. */
2736 tree tag = symbol_mem_tag (var);
2739 mark_sym_for_renaming (tag);
2740 set_symbol_mem_tag (var, NULL_TREE);
2746 VEC_free (tree, heap, varvec);
2750 /* Determine whether to use .GLOBAL_VAR to model call clobbering
2751 semantics. If the function makes no references to global
2752 variables and contains at least one call to a non-pure function,
2753 then we need to mark the side-effects of the call using .GLOBAL_VAR
2754 to represent all possible global memory referenced by the callee. */
2757 maybe_create_global_var (void)
2759 /* No need to create it, if we have one already. */
2760 if (gimple_global_var (cfun) == NULL_TREE)
2762 struct mem_ref_stats_d *stats = gimple_mem_ref_stats (cfun);
2764 /* Create .GLOBAL_VAR if there are no call-clobbered
2765 variables and the program contains a mixture of pure/const
2766 and regular function calls. This is to avoid the problem
2767 described in PR 20115:
2770 int func_pure (void) { return X; }
2771 int func_non_pure (int a) { X += a; }
2774 int a = func_pure ();
2780 Since foo() has no call-clobbered variables, there is
2781 no relationship between the calls to func_pure and
2782 func_non_pure. Since func_pure has no side-effects, value
2783 numbering optimizations elide the second call to func_pure.
2784 So, if we have some pure/const and some regular calls in the
2785 program we create .GLOBAL_VAR to avoid missing these
2787 if (bitmap_empty_p (gimple_call_clobbered_vars (cfun))
2788 && stats->num_call_sites > 0
2789 && stats->num_pure_const_call_sites > 0
2790 && stats->num_call_sites > stats->num_pure_const_call_sites)
2791 create_global_var ();
2796 /* Return TRUE if pointer PTR may point to variable VAR.
2798 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
2799 This is needed because when checking for type conflicts we are
2800 interested in the alias set of the memory location pointed-to by
2801 PTR. The alias set of PTR itself is irrelevant.
2803 VAR_ALIAS_SET is the alias set for VAR. */
2806 may_alias_p (tree ptr, alias_set_type mem_alias_set,
2807 tree var, alias_set_type var_alias_set,
2808 bool alias_set_only)
2812 alias_stats.alias_queries++;
2813 alias_stats.simple_queries++;
2815 /* By convention, a variable cannot alias itself. */
2816 mem = symbol_mem_tag (ptr);
2819 alias_stats.alias_noalias++;
2820 alias_stats.simple_resolved++;
2824 /* If -fargument-noalias-global is > 2, pointer arguments may
2825 not point to anything else. */
2826 if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
2828 alias_stats.alias_noalias++;
2829 alias_stats.simple_resolved++;
2833 /* If -fargument-noalias-global is > 1, pointer arguments may
2834 not point to global variables. */
2835 if (flag_argument_noalias > 1 && is_global_var (var)
2836 && TREE_CODE (ptr) == PARM_DECL)
2838 alias_stats.alias_noalias++;
2839 alias_stats.simple_resolved++;
2843 /* If either MEM or VAR is a read-only global and the other one
2844 isn't, then PTR cannot point to VAR. */
2845 if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
2846 || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
2848 alias_stats.alias_noalias++;
2849 alias_stats.simple_resolved++;
2853 gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
2855 if (!DECL_NO_TBAA_P (ptr))
2857 alias_stats.tbaa_queries++;
2859 /* If the alias sets don't conflict then MEM cannot alias VAR. */
2860 if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
2862 alias_stats.alias_noalias++;
2863 alias_stats.tbaa_resolved++;
2867 /* If VAR is a record or union type, PTR cannot point into VAR
2868 unless there is some explicit address operation in the
2869 program that can reference a field of the type pointed-to by
2870 PTR. This also assumes that the types of both VAR and PTR
2871 are contained within the compilation unit, and that there is
2872 no fancy addressing arithmetic associated with any of the
2874 if (mem_alias_set != 0 && var_alias_set != 0)
2876 tree ptr_type = TREE_TYPE (ptr);
2877 tree var_type = TREE_TYPE (var);
2879 /* The star count is -1 if the type at the end of the
2880 pointer_to chain is not a record or union type. */
2881 if ((!alias_set_only) &&
2882 ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
2884 int ptr_star_count = 0;
2886 /* ipa_type_escape_star_count_of_interesting_type is a
2887 little too restrictive for the pointer type, need to
2888 allow pointers to primitive types as long as those
2889 types cannot be pointers to everything. */
2890 while (POINTER_TYPE_P (ptr_type))
2892 /* Strip the *s off. */
2893 ptr_type = TREE_TYPE (ptr_type);
2897 /* There does not appear to be a better test to see if
2898 the pointer type was one of the pointer to everything
2900 if (ptr_star_count > 0)
2902 alias_stats.structnoaddress_queries++;
2903 if (ipa_type_escape_field_does_not_clobber_p (var_type,
2906 alias_stats.structnoaddress_resolved++;
2907 alias_stats.alias_noalias++;
2911 else if (ptr_star_count == 0)
2913 /* If PTR_TYPE was not really a pointer to type, it cannot
2915 alias_stats.structnoaddress_queries++;
2916 alias_stats.structnoaddress_resolved++;
2917 alias_stats.alias_noalias++;
2924 alias_stats.alias_mayalias++;
2929 /* Add ALIAS to the set of variables that may alias VAR. */
2932 add_may_alias (tree var, tree alias)
2934 /* Don't allow self-referential aliases. */
2935 gcc_assert (var != alias);
2937 /* ALIAS must be addressable if it's being added to an alias set. */
2939 TREE_ADDRESSABLE (alias) = 1;
2941 gcc_assert (may_be_aliased (alias));
2944 /* VAR must be a symbol or a name tag. */
2945 gcc_assert (TREE_CODE (var) == SYMBOL_MEMORY_TAG
2946 || TREE_CODE (var) == NAME_MEMORY_TAG);
2948 if (MTAG_ALIASES (var) == NULL)
2949 MTAG_ALIASES (var) = BITMAP_ALLOC (&alias_bitmap_obstack);
2951 bitmap_set_bit (MTAG_ALIASES (var), DECL_UID (alias));
2955 /* Mark pointer PTR as pointing to an arbitrary memory location. */
2958 set_pt_anything (tree ptr)
2960 struct ptr_info_def *pi = get_ptr_info (ptr);
2962 pi->pt_anything = 1;
2965 /* The pointer used to have a name tag, but we now found it pointing
2966 to an arbitrary location. The name tag needs to be renamed and
2967 disassociated from PTR. */
2968 if (pi->name_mem_tag)
2970 mark_sym_for_renaming (pi->name_mem_tag);
2971 pi->name_mem_tag = NULL_TREE;
2976 /* Return true if STMT is an "escape" site from the current function. Escape
2977 sites those statements which might expose the address of a variable
2978 outside the current function. STMT is an escape site iff:
2980 1- STMT is a function call, or
2981 2- STMT is an __asm__ expression, or
2982 3- STMT is an assignment to a non-local variable, or
2983 4- STMT is a return statement.
2985 Return the type of escape site found, if we found one, or NO_ESCAPE
2989 is_escape_site (tree stmt)
2991 tree call = get_call_expr_in (stmt);
2992 if (call != NULL_TREE)
2994 if (!TREE_SIDE_EFFECTS (call))
2995 return ESCAPE_TO_PURE_CONST;
2997 return ESCAPE_TO_CALL;
2999 else if (TREE_CODE (stmt) == ASM_EXPR)
3000 return ESCAPE_TO_ASM;
3001 else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
3003 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3005 /* Get to the base of _REF nodes. */
3006 if (TREE_CODE (lhs) != SSA_NAME)
3007 lhs = get_base_address (lhs);
3009 /* If we couldn't recognize the LHS of the assignment, assume that it
3010 is a non-local store. */
3011 if (lhs == NULL_TREE)
3012 return ESCAPE_UNKNOWN;
3014 if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
3015 || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CONVERT_EXPR
3016 || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
3019 = TREE_TYPE (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0));
3020 tree to = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 1));
3022 /* If the RHS is a conversion between a pointer and an integer, the
3023 pointer escapes since we can't track the integer. */
3024 if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
3025 return ESCAPE_BAD_CAST;
3027 /* Same if the RHS is a conversion between a regular pointer and a
3028 ref-all pointer since we can't track the SMT of the former. */
3029 if (POINTER_TYPE_P (from) && !TYPE_REF_CAN_ALIAS_ALL (from)
3030 && POINTER_TYPE_P (to) && TYPE_REF_CAN_ALIAS_ALL (to))
3031 return ESCAPE_BAD_CAST;
3034 /* If the LHS is an SSA name, it can't possibly represent a non-local
3036 if (TREE_CODE (lhs) == SSA_NAME)
3039 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
3040 local variables we cannot be sure if it will escape, because we
3041 don't have information about objects not in SSA form. Need to
3042 implement something along the lines of
3044 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
3045 Midkiff, ``Escape analysis for java,'' in Proceedings of the
3046 Conference on Object-Oriented Programming Systems, Languages, and
3047 Applications (OOPSLA), pp. 1-19, 1999. */
3048 return ESCAPE_STORED_IN_GLOBAL;
3050 else if (TREE_CODE (stmt) == RETURN_EXPR)
3051 return ESCAPE_TO_RETURN;
3056 /* Create a new memory tag of type TYPE.
3057 Does NOT push it into the current binding. */
3060 create_tag_raw (enum tree_code code, tree type, const char *prefix)
3064 tmp_var = build_decl (code, create_tmp_var_name (prefix), type);
3066 /* Make the variable writable. */
3067 TREE_READONLY (tmp_var) = 0;
3069 /* It doesn't start out global. */
3070 MTAG_GLOBAL (tmp_var) = 0;
3071 TREE_STATIC (tmp_var) = 0;
3072 TREE_USED (tmp_var) = 1;
3077 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
3078 is considered to represent all the pointers whose pointed-to types are
3079 in the same alias set class. Otherwise, the tag represents a single
3080 SSA_NAME pointer variable. */
3083 create_memory_tag (tree type, bool is_type_tag)
3085 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
3086 type, (is_type_tag) ? "SMT" : "NMT");
3088 /* By default, memory tags are local variables. Alias analysis will
3089 determine whether they should be considered globals. */
3090 DECL_CONTEXT (tag) = current_function_decl;
3092 /* Memory tags are by definition addressable. */
3093 TREE_ADDRESSABLE (tag) = 1;
3095 set_symbol_mem_tag (tag, NULL_TREE);
3097 /* Add the tag to the symbol table. */
3098 add_referenced_var (tag);
3104 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
3105 This is used if P_i has been found to point to a specific set of
3106 variables or to a non-aliased memory location like the address returned
3107 by malloc functions. */
3110 get_nmt_for (tree ptr)
3112 struct ptr_info_def *pi = get_ptr_info (ptr);
3113 tree tag = pi->name_mem_tag;
3115 if (tag == NULL_TREE)
3116 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
3121 /* Return the symbol memory tag associated to pointer PTR. A memory
3122 tag is an artificial variable that represents the memory location
3123 pointed-to by PTR. It is used to model the effects of pointer
3124 de-references on addressable variables.
3126 AI points to the data gathered during alias analysis. This
3127 function populates the array AI->POINTERS. */
3130 get_smt_for (tree ptr, struct alias_info *ai)
3134 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
3135 alias_set_type tag_set = get_alias_set (tag_type);
3137 /* We use a unique memory tag for all the ref-all pointers. */
3138 if (PTR_IS_REF_ALL (ptr))
3140 if (!ai->ref_all_symbol_mem_tag)
3141 ai->ref_all_symbol_mem_tag = create_memory_tag (void_type_node, true);
3142 return ai->ref_all_symbol_mem_tag;
3145 /* To avoid creating unnecessary memory tags, only create one memory tag
3146 per alias set class. Note that it may be tempting to group
3147 memory tags based on conflicting alias sets instead of
3148 equivalence. That would be wrong because alias sets are not
3149 necessarily transitive (as demonstrated by the libstdc++ test
3150 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
3151 such that conflicts (A, B) == true and conflicts (A, C) == true,
3152 it does not necessarily follow that conflicts (B, C) == true. */
3153 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
3155 struct alias_map_d *curr = ai->pointers[i];
3156 tree curr_tag = symbol_mem_tag (curr->var);
3157 if (tag_set == curr->set)
3164 /* If VAR cannot alias with any of the existing memory tags, create a new
3165 tag for PTR and add it to the POINTERS array. */
3166 if (tag == NULL_TREE)
3168 struct alias_map_d *alias_map;
3170 /* If PTR did not have a symbol tag already, create a new SMT.*
3171 artificial variable representing the memory location
3172 pointed-to by PTR. */
3173 tag = symbol_mem_tag (ptr);
3174 if (tag == NULL_TREE)
3175 tag = create_memory_tag (tag_type, true);
3177 /* Add PTR to the POINTERS array. Note that we are not interested in
3178 PTR's alias set. Instead, we cache the alias set for the memory that
3180 alias_map = XCNEW (struct alias_map_d);
3181 alias_map->var = ptr;
3182 alias_map->set = tag_set;
3183 ai->pointers[ai->num_pointers++] = alias_map;
3186 /* If the pointed-to type is volatile, so is the tag. */
3187 TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
3189 /* Make sure that the symbol tag has the same alias set as the
3191 gcc_assert (tag_set == get_alias_set (tag));
3197 /* Create GLOBAL_VAR, an artificial global variable to act as a
3198 representative of all the variables that may be clobbered by function
3202 create_global_var (void)
3204 tree global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
3206 DECL_ARTIFICIAL (global_var) = 1;
3207 TREE_READONLY (global_var) = 0;
3208 DECL_EXTERNAL (global_var) = 1;
3209 TREE_STATIC (global_var) = 1;
3210 TREE_USED (global_var) = 1;
3211 DECL_CONTEXT (global_var) = NULL_TREE;
3212 TREE_THIS_VOLATILE (global_var) = 0;
3213 TREE_ADDRESSABLE (global_var) = 0;
3215 create_var_ann (global_var);
3216 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
3217 add_referenced_var (global_var);
3218 mark_sym_for_renaming (global_var);
3219 cfun->gimple_df->global_var = global_var;
3223 /* Dump alias statistics on FILE. */
3226 dump_alias_stats (FILE *file)
3228 const char *funcname
3229 = lang_hooks.decl_printable_name (current_function_decl, 2);
3230 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
3231 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
3232 fprintf (file, "Total alias mayalias results:\t%u\n",
3233 alias_stats.alias_mayalias);
3234 fprintf (file, "Total alias noalias results:\t%u\n",
3235 alias_stats.alias_noalias);
3236 fprintf (file, "Total simple queries:\t%u\n",
3237 alias_stats.simple_queries);
3238 fprintf (file, "Total simple resolved:\t%u\n",
3239 alias_stats.simple_resolved);
3240 fprintf (file, "Total TBAA queries:\t%u\n",
3241 alias_stats.tbaa_queries);
3242 fprintf (file, "Total TBAA resolved:\t%u\n",
3243 alias_stats.tbaa_resolved);
3244 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
3245 alias_stats.structnoaddress_queries);
3246 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
3247 alias_stats.structnoaddress_resolved);
/* NOTE(review): this region is a lossy extraction of GCC's
   tree-ssa-alias.c -- every line carries a fused original line number
   and several lines (the `static void' header, braces, the `tree var'
   and `unsigned i' declarations, and loop-body guards) are missing.
   Restore from the upstream file before compiling.  */
3251 /* Dump alias information on FILE. */
3254 dump_alias_info (FILE *file)
3257   const char *funcname
3258     = lang_hooks.decl_printable_name (current_function_decl, 2);
3259   referenced_var_iterator rvi;
3262   fprintf (file, "\nAlias information for %s\n\n", funcname);
3264   dump_memory_partitions (file);
3266   fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
3268   fprintf (file, "Aliased symbols\n\n");
3270   FOR_EACH_REFERENCED_VAR (var, rvi)
3272       if (may_be_aliased (var))
3273 	dump_variable (file, var);
3276   fprintf (file, "\nDereferenced pointers\n\n");
3278   FOR_EACH_REFERENCED_VAR (var, rvi)
3279     if (symbol_mem_tag (var))
3280       dump_variable (file, var);
3282   fprintf (file, "\nSymbol memory tags\n\n");
3284   FOR_EACH_REFERENCED_VAR (var, rvi)
3286       if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
3287 	dump_variable (file, var);
3290   fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
3292   fprintf (file, "SSA_NAME pointers\n\n");
/* Iterate SSA names starting at 1; slot 0 is never a valid SSA name.  */
3293   for (i = 1; i < num_ssa_names; i++)
3295       tree ptr = ssa_name (i);
3296       struct ptr_info_def *pi;
/* presumably followed by `continue;' for released names -- TODO confirm
   against upstream.  */
3298       if (ptr == NULL_TREE)
3301       pi = SSA_NAME_PTR_INFO (ptr);
3302       if (!SSA_NAME_IN_FREE_LIST (ptr)
3304 	  && pi->name_mem_tag)
3305 	dump_points_to_info_for (file, ptr);
3308   fprintf (file, "\nName memory tags\n\n");
3310   FOR_EACH_REFERENCED_VAR (var, rvi)
3312       if (TREE_CODE (var) == NAME_MEMORY_TAG)
3313 	dump_variable (file, var);
3316   fprintf (file, "\n");
/* Dump alias information on stderr.  Convenience wrapper around
   dump_alias_info for use from a debugger.  */

void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
3329 /* Return the alias information associated with pointer T. It creates a
3330 new instance if none existed. */
3332 struct ptr_info_def *
3333 get_ptr_info (tree t)
3335 struct ptr_info_def *pi;
3337 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
3339 pi = SSA_NAME_PTR_INFO (t);
3342 pi = GGC_CNEW (struct ptr_info_def);
3343 SSA_NAME_PTR_INFO (t) = pi;
/* NOTE(review): lossy extraction -- fused line numbers; the `void'
   header, braces, and the guards around the pt_null / pt_vars prints
   (presumably `if (pi)', `if (pi->pt_null)', `if (pi->pt_vars)') are
   missing.  Restore from upstream tree-ssa-alias.c before compiling.  */
3350 /* Dump points-to information for SSA_NAME PTR into FILE. */
3353 dump_points_to_info_for (FILE *file, tree ptr)
3355   struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3357   print_generic_expr (file, ptr, dump_flags);
3361       if (pi->name_mem_tag)
3363 	  fprintf (file, ", name memory tag: ");
3364 	  print_generic_expr (file, pi->name_mem_tag, dump_flags);
3367       if (pi->is_dereferenced)
3368 	fprintf (file, ", is dereferenced");
3370       if (pi->value_escapes_p)
3371 	fprintf (file, ", its value escapes");
3373       if (pi->pt_anything)
3374 	fprintf (file, ", points-to anything");
3377 	fprintf (file, ", points-to NULL");
3381 	  fprintf (file, ", points-to vars: ");
3382 	  dump_decl_set (file, pi->pt_vars);
3386   fprintf (file, "\n");
3390 /* Dump points-to information for VAR into stderr. */
3393 debug_points_to_info_for (tree var)
3395 dump_points_to_info_for (stderr, var);
/* NOTE(review): lossy extraction -- fused line numbers; the function
   header, braces, the basic-block loop (presumably FOR_EACH_BB), and
   several declarations (`basic_block bb', `tree phi', `tree def',
   `ssa_op_iter iter', `const char *fname') are missing.  Restore from
   upstream tree-ssa-alias.c before compiling.  */
3399 /* Dump points-to information into FILE.  NOTE: This function is slow, as
3400    it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
3403 dump_points_to_info (FILE *file)
3406   block_stmt_iterator si;
3409     lang_hooks.decl_printable_name (current_function_decl, 2);
3410   referenced_var_iterator rvi;
3413   fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
3415   /* First dump points-to information for the default definitions of
3416      pointer variables.  This is necessary because default definitions are
3417      not part of the code. */
3418   FOR_EACH_REFERENCED_VAR (var, rvi)
3420       if (POINTER_TYPE_P (TREE_TYPE (var)))
3422 	  tree def = gimple_default_def (cfun, var);
/* presumably guarded by `if (def)' in the original -- TODO confirm.  */
3424 	    dump_points_to_info_for (file, def);
3428   /* Dump points-to information for every pointer defined in the program. */
3433       for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3435 	  tree ptr = PHI_RESULT (phi);
3436 	  if (POINTER_TYPE_P (TREE_TYPE (ptr)))
3437 	    dump_points_to_info_for (file, ptr);
3440       for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
3442 	  tree stmt = bsi_stmt (si);
3444 	  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
3445 	    if (TREE_CODE (def) == SSA_NAME
3446 		&& POINTER_TYPE_P (TREE_TYPE (def)))
3447 	      dump_points_to_info_for (file, def);
3451   fprintf (file, "\n");
/* Dump points-to information for the current function into stderr.
   Debugger convenience wrapper around dump_points_to_info.  */

void
debug_points_to_info (void)
{
  dump_points_to_info (stderr);
}
3463 /* Dump to FILE the list of variables that may be aliasing VAR. */
3466 dump_may_aliases_for (FILE *file, tree var)
3470 aliases = MTAG_ALIASES (var);
3473 referenced_var_iterator ri;
3476 fprintf (file, "{ ");
3477 FOR_EACH_REFERENCED_VAR_IN_BITMAP (aliases, al, ri)
3479 print_generic_expr (file, al, dump_flags);
3480 fprintf (file, " ");
3482 fprintf (file, "}");
3487 /* Dump to stderr the list of variables that may be aliasing VAR. */
3490 debug_may_aliases_for (tree var)
3492 dump_may_aliases_for (stderr, var);
3496 /* Return true if VAR may be aliased. */
3499 may_be_aliased (tree var)
3502 if (TREE_ADDRESSABLE (var))
3505 /* Globally visible variables can have their addresses taken by other
3506 translation units. */
3508 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
3510 else if (!MTAG_P (var)
3511 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
3514 /* Automatic variables can't have their addresses escape any other
3515 way. This must be after the check for global variables, as
3516 extern declarations do not have TREE_STATIC set. */
3517 if (!TREE_STATIC (var))
3520 /* If we're in unit-at-a-time mode, then we must have seen all
3521 occurrences of address-of operators, and so we can trust
3522 TREE_ADDRESSABLE. Otherwise we can only be sure the variable
3523 isn't addressable if it's local to the current function. */
3524 if (flag_unit_at_a_time)
3527 if (decl_function_context (var) == current_function_decl)
3533 /* The following is based on code in add_stmt_operand to ensure that the
3534 same defs/uses/vdefs/vuses will be found after replacing a reference
3535 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
3536 is the address of var. Return a memtag for the ptr, after adding the
3537 proper may_aliases to it (which are the aliases of var, if it has any,
3541 add_may_alias_for_new_tag (tree tag, tree var)
3543 bitmap aliases = NULL;
3546 aliases = may_aliases (var);
3548 /* Case 1: |aliases| == 1 */
3550 && bitmap_single_bit_set_p (aliases))
3552 tree ali = referenced_var (bitmap_first_set_bit (aliases));
3553 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
3557 /* Case 2: |aliases| == 0 */
3558 if (aliases == NULL)
3559 add_may_alias (tag, var);
3562 /* Case 3: |aliases| > 1 */
3563 union_alias_set_into (tag, aliases);
/* NOTE(review): lossy extraction -- fused line numbers; the `void'
   header, braces, several declarations (`tree tag', `tree ref',
   `tree subvar', `subvar_t svars', `bool exact'), and the control-flow
   scaffolding around the overlap handling (`if (len)', `else' arms,
   the inner loop header) are missing.  Restore from upstream
   tree-ssa-alias.c before compiling.  */
3568 /* Create a new symbol tag for PTR.  Construct the may-alias list of this type
3569    tag so that it has the aliasing of VAR, or of the relevant subvars of VAR
3570    according to the location accessed by EXPR.
3572    Note, the set of aliases represented by the new symbol tag are not marked
3576 new_type_alias (tree ptr, tree var, tree expr)
3578   tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
3581   tree ali = NULL_TREE;
3582   HOST_WIDE_INT offset, size, maxsize;
3584   VEC (tree, heap) *overlaps = NULL;
3585   unsigned int len, i;
/* PTR must not already carry a symbol tag, and VAR must be a real
   variable, not a memory tag.  */
3589   gcc_assert (symbol_mem_tag (ptr) == NULL_TREE);
3590   gcc_assert (!MTAG_P (var));
3592   ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
3595   tag = create_memory_tag (tag_type, true);
3596   set_symbol_mem_tag (ptr, tag);
3598   /* Add VAR to the may-alias set of PTR's new symbol tag.  If VAR has
3599      subvars, add the subvars to the tag instead of the actual var. */
3600   if (var_can_have_subvars (ref)
3601       && (svars = get_subvars_for_var (ref)))
3603       for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
3607 	  if (overlap_subvar (offset, maxsize, subvar, &exact))
3608 	      VEC_safe_push (tree, heap, overlaps, subvar);
3610       gcc_assert (overlaps != NULL);
3612   else if (var_can_have_subvars (var)
3613 	   && (svars = get_subvars_for_var (var)))
3615       /* If the REF is not a direct access to VAR (e.g., it is a dereference
3616 	 of a pointer), we should scan the virtual operands of REF the same
3617 	 way as tree-ssa-operands do.  At the moment, this is somewhat
3618 	 difficult, so we just give up and add all the subvars of VAR.
3619 	 On mem-ssa branch, the scanning for virtual operands have been
3620 	 split from the rest of tree-ssa-operands, so it should be much
3621 	 easier to fix this problem correctly once mem-ssa is merged. */
3622       for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
3623 	VEC_safe_push (tree, heap, overlaps, subvar);
3625       gcc_assert (overlaps != NULL);
3628     ali = add_may_alias_for_new_tag (tag, var);
3630   len = VEC_length (tree, overlaps);
3633   if (dump_file && (dump_flags & TDF_DETAILS))
3634     fprintf (dump_file, "\nnumber of overlapping subvars = %u\n", len);
3637     ali = add_may_alias_for_new_tag (tag, VEC_index (tree, overlaps, 0));
3643       for (k = 0; VEC_iterate (tree, overlaps, k, sv_var); k++)
3645 	  ali = add_may_alias_for_new_tag (tag, sv_var);
3649 	      /* Can happen only if 'Case 1' of add_may_alias_for_new_tag
3650 		 took place.  Since more than one svar was found, we add
3651 		 'ali' as one of the may_aliases of the new tag. */
3652 	      add_may_alias (tag, ali);
3657       VEC_free (tree, heap, overlaps);
/* The tag finally recorded for PTR is whatever alias tag the helper
   settled on; copy VAR's readonly/global flags onto it.  */
3660   set_symbol_mem_tag (ptr, ali);
3661   TREE_READONLY (tag) = TREE_READONLY (var);
3662   MTAG_GLOBAL (tag) = is_global_var (var);
/* NOTE(review): lossy extraction -- fused line numbers; the struct
   bodies are incomplete (field names for the three bool flags, the
   typedef closers, and the used_part_map members `uid' / `to' are
   missing).  Restore from upstream tree-ssa-alias.c before compiling.  */
3665 /* This represents the used range of a variable. */
3667 typedef struct used_part
/* Lowest and highest byte/bit offsets seen used; minused starts at
   INT_MAX and maxused at 0 (see get_or_create_used_part_for).  */
3669   HOST_WIDE_INT minused;
3670   HOST_WIDE_INT maxused;
3671   /* True if we have an explicit use/def of some portion of this variable,
3672      even if it is all of it. i.e. a.b = 5 or temp = a.b. */
3674   /* True if we have an implicit use/def of some portion of this
3675      variable.  Implicit uses occur when we can't tell what part we
3676      are referencing, and have to make conservative assumptions. */
3678   /* True if the structure is only written to or taken its address. */
3682 /* An array of used_part structures, indexed by variable uid. */
/* Hash table mapping DECL_UIDs to used_part records; created and
   destroyed in create_structure_vars.  */
3684 static htab_t used_portions;
/* Hash-table entry pairing a variable uid with its used_part.  */
3686 struct used_part_map
3692 /* Return true if the uid in the two used part maps are equal. */
3695 used_part_map_eq (const void *va, const void *vb)
3697 const struct used_part_map *a = (const struct used_part_map *) va;
3698 const struct used_part_map *b = (const struct used_part_map *) vb;
3699 return (a->uid == b->uid);
3702 /* Hash a from uid in a used_part_map. */
3705 used_part_map_hash (const void *item)
3707 return ((const struct used_part_map *)item)->uid;
3710 /* Free a used part map element. */
3713 free_used_part_map (void *item)
3715 free (((struct used_part_map *)item)->to);
3719 /* Lookup a used_part structure for a UID. */
3722 up_lookup (unsigned int uid)
3724 struct used_part_map *h, in;
3726 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
3732 /* Insert the pair UID, TO into the used part hashtable. */
3735 up_insert (unsigned int uid, used_part_t to)
3737 struct used_part_map *h;
3740 h = XNEW (struct used_part_map);
3743 loc = htab_find_slot_with_hash (used_portions, h,
3747 *(struct used_part_map **) loc = h;
3751 /* Given a variable uid, UID, get or create the entry in the used portions
3752 table for the variable. */
3755 get_or_create_used_part_for (size_t uid)
3758 if ((up = up_lookup (uid)) == NULL)
3760 up = XCNEW (struct used_part);
3761 up->minused = INT_MAX;
3763 up->explicit_uses = false;
3764 up->implicit_uses = false;
3765 up->write_only = true;
3772 /* Create and return a structure sub-variable for field type FIELD at
3773 offset OFFSET, with size SIZE, of variable VAR. If ALIAS_SET not
3774 -1 this field is non-addressable and we should use this alias set
3778 create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
3779 unsigned HOST_WIDE_INT size, alias_set_type alias_set,
3780 bool base_for_components)
3782 tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
3784 /* We need to copy the various flags from VAR to SUBVAR, so that
3785 they are is_global_var iff the original variable was. */
3786 DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
3787 MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
3788 TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
3789 TREE_STATIC (subvar) = TREE_STATIC (var);
3790 TREE_READONLY (subvar) = TREE_READONLY (var);
3791 TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);
3793 /* Add the new variable to REFERENCED_VARS. */
3794 set_symbol_mem_tag (subvar, NULL);
3795 add_referenced_var (subvar);
3796 SFT_PARENT_VAR (subvar) = var;
3797 SFT_OFFSET (subvar) = offset;
3798 SFT_SIZE (subvar) = size;
3799 SFT_ALIAS_SET (subvar) = alias_set;
3800 SFT_BASE_FOR_COMPONENTS_P (subvar) = base_for_components;
3801 SFT_UNPARTITIONABLE_P (subvar) = false;
/* NOTE(review): lossy extraction -- fused line numbers; the `static
   void' header, braces, several declarations (`used_part_t up',
   `fieldoff_s *fo', `unsigned int i', `unsigned int fieldcount',
   `VEC(tree,gc) **subvars'), early-return guards, and parts of the
   field-validity loop are missing.  Restore from upstream
   tree-ssa-alias.c before compiling.  */
3807 /* Given an aggregate VAR, create the subvariables that represent its
3811 create_overlap_variables_for (tree var)
3813   VEC(fieldoff_s,heap) *fieldstack = NULL;
3815   size_t uid = DECL_UID (var);
/* Only variables with a recorded used range get subvars; presumably
   followed by `if (!up) return;' -- TODO confirm against upstream.  */
3817   up = up_lookup (uid);
3822   push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL,
3824   /* Make sure to not create SFTs for structs we won't generate variable
3825      infos for.  See tree-ssa-structalias.c:create_variable_info_for (). */
3826   if (VEC_length (fieldoff_s, fieldstack) > 1
3827       && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
3831       bool notokay = false;
3834       HOST_WIDE_INT lastfooffset = -1;
3835       HOST_WIDE_INT lastfosize = -1;
3836       tree lastfotype = NULL_TREE;
3838       /* Not all fields have DECL_SIZE set, and those that don't, we don't
3839 	 know their size, and thus, can't handle.
3840 	 The same is true of fields with DECL_SIZE that is not an integer
3841 	 constant (such as variable sized fields).
3842 	 Fields with offsets which are not constant will have an offset < 0
3843 	 We *could* handle fields that are constant sized arrays, but
3844 	 currently don't.  Doing so would require some extra changes to
3845 	 tree-ssa-operands.c. */
3847       for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3850 	      || TREE_CODE (fo->size) != INTEGER_CST
3859       /* The current heuristic we use is as follows:
3860 	 If the variable has no used portions in this function, no
3861 	 structure vars are created for it.
3863 	 If the variable has less than SALIAS_MAX_IMPLICIT_FIELDS,
3864 	 we always create structure vars for them.
3865 	 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
3866 	 some explicit uses, we create structure vars for them.
3867 	 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
3868 	 no explicit uses, we do not create structure vars for them.
3871       if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
3872 	  && !up->explicit_uses)
3874 	  if (dump_file && (dump_flags & TDF_DETAILS))
3876 	      fprintf (dump_file, "Variable ");
3877 	      print_generic_expr (dump_file, var, 0);
3878 	      fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
3883       /* Bail out, if we can't create overlap variables. */
3886 	  VEC_free (fieldoff_s, heap, fieldstack);
3890       /* Otherwise, create the variables. */
3891       subvars = lookup_subvars_for_var (var);
3892       *subvars = VEC_alloc (tree, gc, VEC_length (fieldoff_s, fieldstack));
3894       sort_fieldstack (fieldstack);
3896       for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); ++i)
3898 	  HOST_WIDE_INT fosize;
3899 	  tree currfotype, subvar;
3901 	  fosize = TREE_INT_CST_LOW (fo->size);
3902 	  currfotype = fo->type;
3904 	  /* If this field isn't in the used portion,
3905 	     or it has the exact same offset and size as the last
3906 	     field, skip it.  Note that we always need the field at
3907 	     offset 0 so we can properly handle pointers to the
3910 	  if ((fo->offset != 0
3911 	       && ((fo->offset <= up->minused
3912 		    && fo->offset + fosize <= up->minused)
3913 		   || fo->offset >= up->maxused))
3914 	      || (fo->offset == lastfooffset
3915 		  && fosize == lastfosize
3916 		  && currfotype == lastfotype))
3918 	  subvar = create_sft (var, fo->type, fo->offset,
3919 			       fosize, fo->alias_set, fo->base_for_components);
3920 	  VEC_quick_push (tree, *subvars, subvar);
3924 	      fprintf (dump_file, "structure field tag %s created for var %s",
3925 		       get_name (subvar), get_name (var));
3926 	      fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
3927 		       SFT_OFFSET (subvar));
3928 	      fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
3930 	      fprintf (dump_file, "\n");
3933 	  lastfotype = currfotype;
3934 	  lastfooffset = fo->offset;
3935 	  lastfosize = fosize;
3938       /* Once we have created subvars, the original is no longer call
3939 	 clobbered on its own.  Its call clobbered status depends
3940 	 completely on the call clobbered status of the subvars.
3942 	 add_referenced_var in the above loop will take care of
3943 	 marking subvars of global variables as call clobbered for us
3944 	 to start, since they are global as well. */
3945       clear_call_clobbered (var);
3948   VEC_free (fieldoff_s, heap, fieldstack);
/* NOTE(review): lossy extraction -- fused line numbers; the `static
   tree' header, braces, several case labels (the component-ref,
   ADDR_EXPR, CALL_EXPR and variable cases), `return NULL_TREE;'
   statements, and declarations (`tree ref', `used_part_t up',
   `tree var', `int i') are missing.  Restore from upstream
   tree-ssa-alias.c before compiling.  This is a walk_tree callback:
   LHS_P is non-NULL when walking the LHS of an assignment.  */
3952 /* Find the conservative answer to the question of what portions of what
3953    structures are used by this statement.  We assume that if we have a
3954    component ref with a known size + offset, that we only need that part
3955    of the structure.  For unknown cases, or cases where we do something
3956    to the whole structure, we assume we need to create fields for the
3957    entire structure. */
3960 find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
3962   switch (TREE_CODE (*tp))
3964     case GIMPLE_MODIFY_STMT:
3965       /* Recurse manually here to track whether the use is in the
3966 	 LHS of an assignment. */
3967       find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 0), walk_subtrees, tp);
3968       return find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 1),
3969 				 walk_subtrees, NULL);
3975 	HOST_WIDE_INT bitsize;
3976 	HOST_WIDE_INT bitmaxsize;
3977 	HOST_WIDE_INT bitpos;
3979 	ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
3981 	    && var_can_have_subvars (ref)
3982 	    && bitmaxsize != -1)
3984 	    size_t uid = DECL_UID (ref);
3987 	    up = get_or_create_used_part_for (uid);
/* Widen the recorded used range to cover [bitpos, bitpos+bitmaxsize).  */
3989 	    if (bitpos <= up->minused)
3990 	      up->minused = bitpos;
3991 	    if ((bitpos + bitmaxsize >= up->maxused))
3992 	      up->maxused = bitpos + bitmaxsize;
/* Exact-extent accesses are explicit uses; unknown extents are
   implicit (conservative) uses.  */
3994 	    if (bitsize == bitmaxsize)
3995 	      up->explicit_uses = true;
3997 	      up->implicit_uses = true;
/* LHS_P == NULL means this reference is a read, so the variable is
   not write-only.  */
3999 	      up->write_only = false;
4000 	    up_insert (uid, up);
4007       /* This is here to make sure we mark the entire base variable as used
4008 	 when you take its address.  Because our used portion analysis is
4009 	 simple, we aren't looking at casts or pointer arithmetic to see what
4010 	 happens when you take the address. */
4013 	tree var = get_base_address (TREE_OPERAND (*tp, 0));
4018 	    && var_can_have_subvars (var)
4019 	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
4022 	    size_t uid = DECL_UID (var);
4024 	    up = get_or_create_used_part_for (uid);
/* Taking the address uses the whole variable: minused presumably set
   to 0 on the missing line -- TODO confirm against upstream.  */
4027 	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
4028 	    up->implicit_uses = true;
4030 	      up->write_only = false;
4032 	    up_insert (uid, up);
/* CALL_EXPR case: only ADDR_EXPR arguments are walked, so address-of
   escapes through calls are recorded.  */
4041 	int nargs = call_expr_nargs (*tp);
4042 	for (i = 0; i < nargs; i++)
4044 	    tree *arg = &CALL_EXPR_ARG (*tp, i);
4045 	    if (TREE_CODE (*arg) == ADDR_EXPR)
4046 	      find_used_portions (arg, walk_subtrees, NULL);
/* Bare variable reference: conservatively mark the whole variable
   as implicitly used.  */
4057 	    && var_can_have_subvars (var)
4058 	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
4061 	    size_t uid = DECL_UID (var);
4063 	    up = get_or_create_used_part_for (uid);
4066 	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
4067 	    up->implicit_uses = true;
4069 	    up_insert (uid, up);
/* NOTE(review): lossy extraction -- fused line numbers; the `static
   unsigned int' header, braces, the FOR_EACH_BB loop headers, and
   declarations (`basic_block bb', `tree var', `tree phi',
   `use_operand_p use', `ssa_op_iter iter', `unsigned i',
   `bitmap_iterator bi') are missing.  Restore from upstream
   tree-ssa-alias.c before compiling.  Pass execute function: walks
   every statement to collect used portions, creates overlap
   (structure-field) variables, then flags statements that mention
   split variables for operand update.  */
4083 /* Create structure field variables for structures used in this function. */
4086 create_structure_vars (void)
4089   safe_referenced_var_iterator rvi;
4090   VEC (tree, heap) *varvec = NULL;
/* Table is created here and torn down (with free_used_part_map) once
   overlap variables have been created.  */
4093   used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
4094 			       free_used_part_map);
4098       block_stmt_iterator bsi;
4101       for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4106 	  FOR_EACH_PHI_ARG (use, phi, iter, SSA_OP_USE)
4108 	      tree op = USE_FROM_PTR (use);
4109 	      walk_tree_without_duplicates (&op, find_used_portions,
4114       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4116 	  walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
4121   FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
4123     /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
4126 	&& var_can_have_subvars (var)
4128 	&& TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
4129       create_overlap_variables_for (var);
4131   htab_delete (used_portions);
4132   VEC_free (tree, heap, varvec);
4134   /* Update SSA operands of statements mentioning variables we split. */
4135   if (gimple_in_ssa_p (cfun))
4138 	block_stmt_iterator bsi;
4139 	for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4141 	    tree stmt = bsi_stmt (bsi);
4142 	    bool update = false;
/* A statement needs its operands rebuilt if any stored, loaded, or
   address-taken symbol now has subvars.  */
4146 	    if (STORED_SYMS (stmt))
4147 	      EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
4149 		  tree sym = referenced_var_lookup (i);
4150 		  if (get_subvars_for_var (sym))
4157 	    if (LOADED_SYMS (stmt) && !update)
4158 	      EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
4160 		  tree sym = referenced_var_lookup (i);
4161 		  if (get_subvars_for_var (sym))
4168 	    if (stmt_ann (stmt)->addresses_taken && !update)
4169 	      EXECUTE_IF_SET_IN_BITMAP (stmt_ann (stmt)->addresses_taken,
4172 		  tree sym = referenced_var_lookup (i);
4173 		  if (get_subvars_for_var (sym))
/* Request a full alias rebuild: subvars invalidate prior info.  */
4185   return TODO_rebuild_alias;
4189 gate_structure_vars (void)
4191 return flag_tree_salias != 0;
/* NOTE(review): lossy extraction -- fused line numbers; several
   initializer fields of this tree_opt_pass (sub/next pointers, tv_id,
   properties list separators, and the closing letter/brace) are
   missing.  Restore from upstream tree-ssa-alias.c before compiling.  */
3194 struct tree_opt_pass pass_create_structure_vars =
4196   "salias",		 /* name */
4197   gate_structure_vars,	 /* gate */
4198   create_structure_vars, /* execute */
4201   0,			 /* static_pass_number */
4203   PROP_cfg,		 /* properties_required */
4204   0,			 /* properties_provided */
4205   0,			 /* properties_destroyed */
4206   0,			 /* todo_flags_start */
4207   TODO_dump_func,	 /* todo_flags_finish */
4211 /* Reset the call_clobbered flags on our referenced vars. In
4212 theory, this only needs to be done for globals. */
4215 reset_cc_flags (void)
4218 referenced_var_iterator rvi;
4220 FOR_EACH_REFERENCED_VAR (var, rvi)
4221 var_ann (var)->call_clobbered = false;
/* NOTE(review): lossy extraction -- fused line numbers; several
   initializer fields of this tree_opt_pass (name, gate, sub/next
   pointers, tv_id, and the closing letter/brace) are missing.
   Restore from upstream tree-ssa-alias.c before compiling.  */
4225 struct tree_opt_pass pass_reset_cc_flags =
4229   reset_cc_flags,	 /* execute */
4232   0,			 /* static_pass_number */
4234   PROP_referenced_vars |PROP_cfg, /* properties_required */
4235   0,			 /* properties_provided */
4236   0,			 /* properties_destroyed */
4237   0,			 /* todo_flags_start */
4238   0,			 /* todo_flags_finish */
4243 gate_build_alias (void)
4245 return !gate_structure_vars();
4249 struct tree_opt_pass pass_build_alias =
4251 "build_alias", /* name */
4252 gate_build_alias, /* gate */
4256 0, /* static_pass_number */
4258 PROP_cfg | PROP_ssa, /* properties_required */
4259 PROP_alias, /* properties_provided */
4260 0, /* properties_destroyed */
4261 0, /* todo_flags_start */
4262 TODO_rebuild_alias, /* todo_flags_finish */