1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
34 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
46 #include "ipa-type-escape.h"
50 #include "pointer-set.h"
/* NOTE(review): this listing is a truncated excerpt.  The `struct'
   headers that these comments and fields belong to (presumably
   `struct alias_map_d' and `struct alias_stats_d') fall in the
   missing lines -- confirm against the complete tree-ssa-alias.c
   before editing.  */
52 /* Structure to map a variable to its alias set. */
55 /* Variable and its alias set. */
61 /* Counters used to display statistics on alias analysis. */
/* Total alias queries and how many answered may-alias / no-alias.  */
64 unsigned int alias_queries;
65 unsigned int alias_mayalias;
66 unsigned int alias_noalias;
/* Queries answered by the cheap structural checks.  */
67 unsigned int simple_queries;
68 unsigned int simple_resolved;
/* Queries answered by type-based alias analysis.  */
69 unsigned int tbaa_queries;
70 unsigned int tbaa_resolved;
/* Queries answered because a struct field's address is never taken.  */
71 unsigned int structnoaddress_queries;
72 unsigned int structnoaddress_resolved;
76 /* Local variables. */
77 static struct alias_stats_d alias_stats;
/* Obstack from which the alias bitmaps in this file are allocated.  */
78 static bitmap_obstack alias_bitmap_obstack;
80 /* Local functions. */
/* Forward declarations for static helpers defined later in the file.  */
81 static void compute_flow_insensitive_aliasing (struct alias_info *);
82 static void finalize_ref_all_pointers (struct alias_info *);
83 static void dump_alias_stats (FILE *);
84 static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
85 static tree create_memory_tag (tree type, bool is_type_tag);
86 static tree get_smt_for (tree, struct alias_info *);
87 static tree get_nmt_for (tree);
88 static void add_may_alias (tree, tree);
89 static struct alias_info *init_alias_info (void);
90 static void delete_alias_info (struct alias_info *);
91 static void compute_flow_sensitive_aliasing (struct alias_info *);
92 static void setup_pointers_and_addressables (struct alias_info *);
93 static void create_global_var (void);
94 static void maybe_create_global_var (void);
95 static void set_pt_anything (tree);
/* Exported debug helper (callable from a debugger).  */
97 void debug_mp_info (VEC(mem_sym_stats_t,heap) *);
101 /* Return memory reference stats for symbol VAR. Create a new slot in
102 cfun->gimple_df->mem_sym_stats if needed. */
104 static struct mem_sym_stats_d *
105 get_mem_sym_stats_for (tree var)
108 struct mem_sym_stats_d *stats;
109 struct pointer_map_t *map = gimple_mem_ref_stats (cfun)->mem_sym_stats;
113 slot = pointer_map_insert (map, var);
116 stats = XCNEW (struct mem_sym_stats_d);
118 *slot = (void *) stats;
121 stats = (struct mem_sym_stats_d *) *slot;
127 /* Set MPT to be the memory partition associated with symbol SYM. */
130 set_memory_partition (tree sym, tree mpt)
132 #if defined ENABLE_CHECKING
134 gcc_assert (TREE_CODE (mpt) == MEMORY_PARTITION_TAG
135 && !is_gimple_reg (sym));
138 var_ann (sym)->mpt = mpt;
141 if (MPT_SYMBOLS (mpt) == NULL)
142 MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&alias_bitmap_obstack);
144 bitmap_set_bit (MPT_SYMBOLS (mpt), DECL_UID (sym));
146 /* MPT inherits the call-clobbering attributes from SYM. */
147 if (is_call_clobbered (sym))
149 MTAG_GLOBAL (mpt) = 1;
150 mark_call_clobbered (mpt, ESCAPE_IS_GLOBAL);
156 /* Mark variable VAR as being non-addressable. */
159 mark_non_addressable (tree var)
163 if (!TREE_ADDRESSABLE (var))
166 mpt = memory_partition (var);
169 var_ann (var)->call_clobbered = false;
171 bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
172 TREE_ADDRESSABLE (var) = 0;
176 /* Note that it's possible for a symbol to have an associated
177 MPT and the MPT have a NULL empty set. During
178 init_alias_info, all MPTs get their sets cleared out, but the
179 symbols still point to the old MPTs that used to hold them.
180 This is done so that compute_memory_partitions can now which
181 symbols are losing or changing partitions and mark them for
183 if (MPT_SYMBOLS (mpt))
184 bitmap_clear_bit (MPT_SYMBOLS (mpt), DECL_UID (var));
185 set_memory_partition (var, NULL_TREE);
190 /* qsort comparison function to sort type/name tags by DECL_UID. */
193 sort_tags_by_id (const void *pa, const void *pb)
195 tree a = *(tree *)pa;
196 tree b = *(tree *)pb;
198 return DECL_UID (a) - DECL_UID (b);
201 /* Initialize WORKLIST to contain those memory tags that are marked call
202 clobbered. Initialized WORKLIST2 to contain the reasons these
203 memory tags escaped. */
206 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
207 VEC (int, heap) **worklist2)
209 referenced_var_iterator rvi;
212 FOR_EACH_REFERENCED_VAR (curr, rvi)
214 if (MTAG_P (curr) && is_call_clobbered (curr))
216 VEC_safe_push (tree, heap, *worklist, curr);
217 VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
222 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
223 ALIAS is not already marked call clobbered, and is a memory
227 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
228 VEC (int, heap) **worklist2,
231 if (MTAG_P (alias) && !is_call_clobbered (alias))
233 VEC_safe_push (tree, heap, *worklist, alias);
234 VEC_safe_push (int, heap, *worklist2, reason);
238 /* Mark aliases of TAG as call clobbered, and place any tags on the
239 alias list that were not already call clobbered on WORKLIST. */
242 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
243 VEC (int, heap) **worklist2)
249 var_ann_t ta = var_ann (tag);
253 aliases = may_aliases (tag);
257 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
259 entry = referenced_var (i);
260 if (!unmodifiable_var_p (entry))
262 add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
263 mark_call_clobbered (entry, ta->escape_mask);
/* Propagate "global" and "call clobbered" from alias-set members up
   to their memory tags, iterating to a fixed point.
   NOTE(review): this excerpt is missing several interior lines
   (storage class/return type, braces, local declarations such as the
   iteration flag, `tag', `ma', `i', `bi', `k', the qsort comparator
   argument, and loop-control lines) -- it does not compile as shown.
   Verify against the full tree-ssa-alias.c before changing it.  */
268 /* Tags containing global vars need to be marked as global.
269 Tags containing call clobbered vars need to be marked as call
273 compute_tag_properties (void)
275 referenced_var_iterator rvi;
278 VEC (tree, heap) *taglist = NULL;
280 FOR_EACH_REFERENCED_VAR (tag, rvi)
282 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
284 VEC_safe_push (tree, heap, taglist, tag);
287 /* We sort the taglist by DECL_UID, for two reasons.
288 1. To get a sequential ordering to make the bitmap accesses
290 2. Because of the way we compute aliases, it's more likely that
291 an earlier tag is included in a later tag, and this will reduce
292 the number of iterations.
294 If we had a real tag graph, we would just topo-order it and be
296 qsort (VEC_address (tree, taglist),
297 VEC_length (tree, taglist),
301 /* Go through each tag not marked as global, and if it aliases
302 global vars, mark it global.
304 If the tag contains call clobbered vars, mark it call
307 This loop iterates because tags may appear in the may-aliases
308 list of other tags when we group. */
315 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
321 bool tagcc = is_call_clobbered (tag);
322 bool tagglobal = MTAG_GLOBAL (tag);
/* Nothing left to learn about this tag if both bits are set.  */
324 if (tagcc && tagglobal)
327 ma = may_aliases (tag);
331 EXECUTE_IF_SET_IN_BITMAP (ma, 0, i, bi)
333 entry = referenced_var (i);
334 /* Call clobbered entries cause the tag to be marked
336 if (!tagcc && is_call_clobbered (entry))
338 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
343 /* Global vars cause the tag to be marked global. */
344 if (!tagglobal && is_global_var (entry))
346 MTAG_GLOBAL (tag) = true;
351 /* Early exit once both global and cc are set, since the
352 loop can't do any more than that. */
353 if (tagcc && tagglobal)
358 VEC_free (tree, heap, taglist);
/* Seed the call-clobbered/global state used by compute_call_clobbered:
   globals, escaping pointer parameters, and everything reachable from
   escaping pointers get clobbered here.
   NOTE(review): this excerpt is missing interior lines (declarations
   of `var', `i', `ptr', `j', `bi', braces, and several guard
   conditions, e.g. around the pt_vars walk and the name-tag check at
   line 422) -- it does not compile as shown; consult the full file.  */
361 /* Set up the initial variable clobbers and globalness.
362 When this function completes, only tags whose aliases need to be
363 clobbered will be set clobbered. Tags clobbered because they
364 contain call clobbered vars are handled in compute_tag_properties. */
367 set_initial_properties (struct alias_info *ai)
370 referenced_var_iterator rvi;
374 FOR_EACH_REFERENCED_VAR (var, rvi)
/* Globals (without subvariables) are clobbered by any call.  */
376 if (is_global_var (var)
377 && (!var_can_have_subvars (var)
378 || get_subvars_for_var (var) == NULL))
380 if (!unmodifiable_var_p (var))
381 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
/* Incoming pointer parameters are assumed to escape to the caller.  */
383 else if (TREE_CODE (var) == PARM_DECL
384 && gimple_default_def (cfun, var)
385 && POINTER_TYPE_P (TREE_TYPE (var)))
387 tree def = gimple_default_def (cfun, var);
388 get_ptr_info (def)->value_escapes_p = 1;
389 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
393 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
395 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
396 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
398 if (pi->value_escapes_p)
400 /* If PTR escapes then its associated memory tags and
401 pointed-to variables are call-clobbered. */
402 if (pi->name_mem_tag)
403 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
406 mark_call_clobbered (tag, pi->escape_mask);
412 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
413 if (!unmodifiable_var_p (referenced_var (j)))
414 mark_call_clobbered (referenced_var (j), pi->escape_mask);
418 /* If the name tag is call clobbered, so is the symbol tag
419 associated with the base VAR_DECL. */
422 && is_call_clobbered (pi->name_mem_tag))
423 mark_call_clobbered (tag, pi->escape_mask);
425 /* Name tags and symbol tags that we don't know where they point
426 to, might point to global memory, and thus, are clobbered.
428 FIXME: This is not quite right. They should only be
429 clobbered if value_escapes_p is true, regardless of whether
430 they point to global memory or not.
431 So removing this code and fixing all the bugs would be nice.
432 It is the cause of a bunch of clobbering. */
433 if ((pi->pt_global_mem || pi->pt_anything)
434 && pi->is_dereferenced && pi->name_mem_tag)
436 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
437 MTAG_GLOBAL (pi->name_mem_tag) = true;
/* Same treatment for the symbol tag (third condition at line 442-443
   is missing from this excerpt).  */
440 if ((pi->pt_global_mem || pi->pt_anything)
441 && pi->is_dereferenced
444 mark_call_clobbered (tag, ESCAPE_IS_GLOBAL);
445 MTAG_GLOBAL (tag) = true;
450 /* Compute which variables need to be marked call clobbered because
451 their tag is call clobbered, and which tags need to be marked
452 global because they contain global variables. */
455 compute_call_clobbered (struct alias_info *ai)
457 VEC (tree, heap) *worklist = NULL;
458 VEC(int,heap) *worklist2 = NULL;
460 set_initial_properties (ai);
461 init_transitive_clobber_worklist (&worklist, &worklist2);
462 while (VEC_length (tree, worklist) != 0)
464 tree curr = VEC_pop (tree, worklist);
465 int reason = VEC_pop (int, worklist2);
467 mark_call_clobbered (curr, reason);
468 mark_aliases_call_clobbered (curr, &worklist, &worklist2);
470 VEC_free (tree, heap, worklist);
471 VEC_free (int, heap, worklist2);
472 compute_tag_properties ();
476 /* Dump memory partition information to FILE. */
479 dump_memory_partitions (FILE *file)
485 fprintf (file, "\nMemory partitions\n\n");
486 for (i = 0, npart = 0, nsyms = 0;
487 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
492 bitmap syms = MPT_SYMBOLS (mpt);
493 unsigned long n = (syms) ? bitmap_count_bits (syms) : 0;
495 fprintf (file, "#%u: ", i);
496 print_generic_expr (file, mpt, 0);
497 fprintf (file, ": %lu elements: ", n);
498 dump_decl_set (file, syms);
504 fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
508 /* Dump memory partition information to stderr. */
511 debug_memory_partitions (void)
513 dump_memory_partitions (stderr);
517 /* Return true if memory partitioning is required given the memory
518 reference estimates in STATS. */
521 need_to_partition_p (struct mem_ref_stats_d *stats)
523 long num_vops = stats->num_vuses + stats->num_vdefs;
524 long avg_vops = CEIL (num_vops, stats->num_mem_stmts);
525 return (num_vops > (long) MAX_ALIASED_VOPS
526 && avg_vops > (long) AVG_ALIASED_VOPS);
/* Count actual virtual operators and partitioned/unpartitioned
   symbols in the current function.
   NOTE(review): this excerpt is missing interior lines (the
   `basic_block bb' declaration and FOR_EACH_BB loop header, the
   `sym' declaration, `continue'/increment statements in the
   referenced-var walk, braces, and the `if (num_vdefs_p)' /
   `if (num_vuses_p)' guards before the stores at lines 579/582) --
   it does not compile as shown; consult the full file.  */
530 /* Count the actual number of virtual operators in CFUN. Note that
531 this is only meaningful after virtual operands have been populated,
532 so it should be invoked at the end of compute_may_aliases.
534 The number of virtual operators are stored in *NUM_VDEFS_P and
535 *NUM_VUSES_P, the number of partitioned symbols in
536 *NUM_PARTITIONED_P and the number of unpartitioned symbols in
537 *NUM_UNPARTITIONED_P.
539 If any of these pointers is NULL the corresponding count is not
543 count_mem_refs (long *num_vuses_p, long *num_vdefs_p,
544 long *num_partitioned_p, long *num_unpartitioned_p)
546 block_stmt_iterator bsi;
548 long num_vdefs, num_vuses, num_partitioned, num_unpartitioned;
549 referenced_var_iterator rvi;
552 num_vuses = num_vdefs = num_partitioned = num_unpartitioned = 0;
/* Only walk every statement when VOP counts were requested.  */
554 if (num_vuses_p || num_vdefs_p)
556 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
558 tree stmt = bsi_stmt (bsi);
559 if (stmt_references_memory_p (stmt))
561 num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
562 num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
/* Classify memory symbols by whether they live in a partition.  */
566 if (num_partitioned_p || num_unpartitioned_p)
567 FOR_EACH_REFERENCED_VAR (sym, rvi)
569 if (is_gimple_reg (sym))
572 if (memory_partition (sym))
579 *num_vdefs_p = num_vdefs;
582 *num_vuses_p = num_vuses;
584 if (num_partitioned_p)
585 *num_partitioned_p = num_partitioned;
587 if (num_unpartitioned_p)
588 *num_unpartitioned_p = num_unpartitioned;
/* Report estimated vs. actual memory-reference statistics for the
   current function.
   NOTE(review): this excerpt is missing interior lines (braces, the
   trailing argument of the count_mem_refs call at line 603, and
   several fprintf argument/continuation lines, e.g. 618/621,
   623/626, 632/635, 637/640) -- it does not compile as shown;
   consult the full file.  */
592 /* Dump memory reference stats for function CFUN to FILE. */
595 dump_mem_ref_stats (FILE *file)
597 long actual_num_vuses, actual_num_vdefs;
598 long num_partitioned, num_unpartitioned;
599 struct mem_ref_stats_d *stats;
601 stats = gimple_mem_ref_stats (cfun);
/* Recount VOPs from the IL to compare against the estimates.  */
603 count_mem_refs (&actual_num_vuses, &actual_num_vdefs, &num_partitioned,
606 fprintf (file, "\nMemory reference statistics for %s\n\n",
607 lang_hooks.decl_printable_name (current_function_decl, 2));
609 fprintf (file, "Number of memory statements: %ld\n",
610 stats->num_mem_stmts);
611 fprintf (file, "Number of call sites: %ld\n",
612 stats->num_call_sites);
613 fprintf (file, "Number of pure/const call sites: %ld\n",
614 stats->num_pure_const_call_sites);
615 fprintf (file, "Number of asm sites: %ld\n",
616 stats->num_asm_sites);
617 fprintf (file, "Estimated number of loads: %ld (%ld/stmt)\n",
619 (stats->num_mem_stmts)
620 ? CEIL (stats->num_vuses, stats->num_mem_stmts)
622 fprintf (file, "Actual number of loads: %ld (%ld/stmt)\n",
624 (stats->num_mem_stmts)
625 ? CEIL (actual_num_vuses, stats->num_mem_stmts)
/* Warn when the estimate undershoots reality by more than 25%.  */
628 if (actual_num_vuses > stats->num_vuses + (stats->num_vuses / 25))
629 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
631 fprintf (file, "Estimated number of stores: %ld (%ld/stmt)\n",
633 (stats->num_mem_stmts)
634 ? CEIL (stats->num_vdefs, stats->num_mem_stmts)
636 fprintf (file, "Actual number of stores: %ld (%ld/stmt)\n",
638 (stats->num_mem_stmts)
639 ? CEIL (actual_num_vdefs, stats->num_mem_stmts)
642 if (actual_num_vdefs > stats->num_vdefs + (stats->num_vdefs / 25))
643 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
645 fprintf (file, "Partitioning thresholds: MAX = %d AVG = %d "
646 "(%sNEED TO PARTITION)\n", MAX_ALIASED_VOPS, AVG_ALIASED_VOPS,
647 stats->num_mem_stmts && need_to_partition_p (stats) ? "" : "NO ");
648 fprintf (file, "Number of partitioned symbols: %ld\n", num_partitioned);
649 fprintf (file, "Number of unpartitioned symbols: %ld\n", num_unpartitioned);
653 /* Dump memory reference stats for function FN to stderr. */
656 debug_mem_ref_stats (void)
658 dump_mem_ref_stats (stderr);
662 /* Dump memory reference stats for variable VAR to FILE. */
665 dump_mem_sym_stats (FILE *file, tree var)
667 mem_sym_stats_t stats = mem_sym_stats (cfun, var);
672 fprintf (file, "read frequency: %6ld, write frequency: %6ld, "
673 "direct reads: %3ld, direct writes: %3ld, "
674 "indirect reads: %4ld, indirect writes: %4ld, symbol: ",
675 stats->frequency_reads, stats->frequency_writes,
676 stats->num_direct_reads, stats->num_direct_writes,
677 stats->num_indirect_reads, stats->num_indirect_writes);
678 print_generic_expr (file, stats->var, 0);
679 fprintf (file, ", tags: ");
680 dump_decl_set (file, stats->parent_tags);
684 /* Dump memory reference stats for variable VAR to stderr. */
687 debug_mem_sym_stats (tree var)
689 dump_mem_sym_stats (stderr, var);
693 /* Dump memory reference stats for all memory symbols to FILE. */
696 dump_all_mem_sym_stats (FILE *file)
698 referenced_var_iterator rvi;
701 FOR_EACH_REFERENCED_VAR (sym, rvi)
703 if (is_gimple_reg (sym))
706 dump_mem_sym_stats (file, sym);
711 /* Dump memory reference stats for all memory symbols to stderr. */
714 debug_all_mem_sym_stats (void)
716 dump_all_mem_sym_stats (stderr);
720 /* Dump the MP_INFO array to FILE. */
723 dump_mp_info (FILE *file, VEC(mem_sym_stats_t,heap) *mp_info)
726 mem_sym_stats_t mp_p;
728 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
729 if (!mp_p->partitioned_p)
730 dump_mem_sym_stats (file, mp_p->var);
734 /* Dump the MP_INFO array to stderr. */
737 debug_mp_info (VEC(mem_sym_stats_t,heap) *mp_info)
739 dump_mp_info (stderr, mp_info);
743 /* Update memory reference stats for symbol VAR in statement STMT.
744 NUM_DIRECT_READS and NUM_DIRECT_WRITES specify the number of times
745 that VAR is read/written in STMT (indirect reads/writes are not
746 recorded by this function, see compute_memory_partitions). */
749 update_mem_sym_stats_from_stmt (tree var, tree stmt, long num_direct_reads,
750 long num_direct_writes)
752 mem_sym_stats_t stats;
754 gcc_assert (num_direct_reads >= 0 && num_direct_writes >= 0);
756 stats = get_mem_sym_stats_for (var);
758 stats->num_direct_reads += num_direct_reads;
759 stats->frequency_reads += ((long) bb_for_stmt (stmt)->frequency
762 stats->num_direct_writes += num_direct_writes;
763 stats->frequency_writes += ((long) bb_for_stmt (stmt)->frequency
764 * num_direct_writes);
768 /* The list is sorted by increasing partitioning score (PSCORE).
769 This score is computed such that symbols with high scores are
770 those that are least likely to be partitioned. Given a symbol
771 MP->VAR, PSCORE(S) is the result of the following weighted sum
773 PSCORE(S) = FW * 64 + FR * 32
780 FW Execution frequency of writes to S
781 FR Execution frequency of reads from S
782 DW Number of direct writes to S
783 DR Number of direct reads from S
784 IW Number of indirect writes to S
785 IR Number of indirect reads from S
786 NO_ALIAS State of the NO_ALIAS* flags
788 The basic idea here is that symbols that are frequently
789 written-to in hot paths of the code are the last to be considered
793 pscore (mem_sym_stats_t mp)
795 return mp->frequency_writes * 64 + mp->frequency_reads * 32
796 + mp->num_direct_writes * 16 + mp->num_direct_reads * 8
797 + mp->num_indirect_writes * 4 + mp->num_indirect_reads * 2
798 + var_ann (mp->var)->noalias_state;
802 /* Given two MP_INFO entries MP1 and MP2, return -1 if MP1->VAR should
803 be partitioned before MP2->VAR, 0 if they are the same or 1 if
804 MP1->VAR should be partitioned after MP2->VAR. */
807 compare_mp_info_entries (mem_sym_stats_t mp1, mem_sym_stats_t mp2)
809 long pscore1 = pscore (mp1);
810 long pscore2 = pscore (mp2);
812 if (pscore1 < pscore2)
814 else if (pscore1 > pscore2)
821 /* Comparison routine for qsort. The list is sorted by increasing
822 partitioning score (PSCORE). This score is computed such that
823 symbols with high scores are those that are least likely to be
827 mp_info_cmp (const void *p, const void *q)
829 mem_sym_stats_t e1 = *((const mem_sym_stats_t *) p);
830 mem_sym_stats_t e2 = *((const mem_sym_stats_t *) q);
831 return compare_mp_info_entries (e1, e2);
835 /* Sort the array of reference counts used to compute memory partitions.
836 Elements are sorted in ascending order of execution frequency and
837 descending order of virtual operators needed. */
840 sort_mp_info (VEC(mem_sym_stats_t,heap) *list)
842 unsigned num = VEC_length (mem_sym_stats_t, list);
849 if (compare_mp_info_entries (VEC_index (mem_sym_stats_t, list, 0),
850 VEC_index (mem_sym_stats_t, list, 1)) > 0)
852 /* Swap elements if they are in the wrong order. */
853 mem_sym_stats_t tmp = VEC_index (mem_sym_stats_t, list, 0);
854 VEC_replace (mem_sym_stats_t, list, 0,
855 VEC_index (mem_sym_stats_t, list, 1));
856 VEC_replace (mem_sym_stats_t, list, 1, tmp);
862 /* There are 3 or more elements, call qsort. */
863 qsort (VEC_address (mem_sym_stats_t, list),
864 VEC_length (mem_sym_stats_t, list),
865 sizeof (mem_sym_stats_t),
870 /* Return the memory partition tag (MPT) associated with memory
874 get_mpt_for (tree sym)
878 /* Don't create a new tag unnecessarily. */
879 mpt = memory_partition (sym);
880 if (mpt == NULL_TREE)
882 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
883 TREE_ADDRESSABLE (mpt) = 0;
884 add_referenced_var (mpt);
885 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
886 gcc_assert (MPT_SYMBOLS (mpt) == NULL);
887 set_memory_partition (sym, mpt);
894 /* Add MP_P->VAR to a memory partition and return the partition. */
897 find_partition_for (mem_sym_stats_t mp_p)
900 VEC(tree,heap) *mpt_table;
903 mpt_table = gimple_ssa_operands (cfun)->mpt_table;
906 /* Find an existing partition for MP_P->VAR. */
907 for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
909 mem_sym_stats_t mpt_stats;
911 /* If MPT does not have any symbols yet, use it. */
912 if (MPT_SYMBOLS (mpt) == NULL)
915 /* Otherwise, see if MPT has common parent tags with MP_P->VAR,
916 but avoid grouping clobbered variables with non-clobbered
917 variables (otherwise, this tends to creates a single memory
918 partition because other call-clobbered variables may have
919 common parent tags with non-clobbered ones). */
920 mpt_stats = get_mem_sym_stats_for (mpt);
921 if (mp_p->parent_tags
922 && mpt_stats->parent_tags
923 && is_call_clobbered (mpt) == is_call_clobbered (mp_p->var)
924 && bitmap_intersect_p (mpt_stats->parent_tags, mp_p->parent_tags))
927 /* If no common parent tags are found, see if both MPT and
928 MP_P->VAR are call-clobbered. */
929 if (is_call_clobbered (mpt) && is_call_clobbered (mp_p->var))
933 if (mpt == NULL_TREE)
934 mpt = get_mpt_for (mp_p->var);
936 set_memory_partition (mp_p->var, mpt);
938 mp_p->partitioned_p = true;
940 mark_sym_for_renaming (mp_p->var);
941 mark_sym_for_renaming (mpt);
/* Replace partitioned symbols in TAG's alias set with their
   partition tags.
   NOTE(review): this excerpt is missing interior lines (function
   header, declarations of `i', `bi', `sym', `mpt', the TAG == NULL
   branch promised by the comment, and the `if (mpt)' / `else'
   around lines 964/966) -- it does not compile as shown; consult
   the full file.  */
947 /* Rewrite the alias set for TAG to use the newly created partitions.
948 If TAG is NULL, rewrite the set of call-clobbered variables.
949 NEW_ALIASES is a scratch bitmap to build the new set of aliases for
953 rewrite_alias_set_for (tree tag, bitmap new_aliases)
959 EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, i, bi)
961 sym = referenced_var (i);
962 mpt = memory_partition (sym);
/* Partitioned symbols are represented by their partition; the rest
   stay in the set unchanged.  */
964 bitmap_set_bit (new_aliases, DECL_UID (mpt));
966 bitmap_set_bit (new_aliases, DECL_UID (sym));
969 /* Rebuild the may-alias array for TAG. */
970 bitmap_copy (MTAG_ALIASES (tag), new_aliases);
/* Estimate and account the virtual-operand savings from moving
   MP_P->VAR into partition MPT, updating MEM_REF_STATS in place.
   NOTE(review): this excerpt is missing interior lines (function
   header/return type, declarations of `i' and `bi', braces, and an
   `else' around line 1040's comment block) -- it does not compile
   as shown; consult the full file.  */
974 /* Determine how many virtual operands can be saved by partitioning
975 MP_P->VAR into MPT. When a symbol S is thrown inside a partition
976 P, every virtual operand that used to reference S will now
977 reference P. Whether it reduces the number of virtual operands
980 1- Direct references to S are never saved. Instead of the virtual
981 operand to S, we will now have a virtual operand to P.
983 2- Indirect references to S are reduced only for those memory tags
984 holding S that already had other symbols partitioned into P.
985 For instance, if a memory tag T has the alias set { a b S c },
986 the first time we partition S into P, the alias set will become
987 { a b P c }, so no virtual operands will be saved. However, if
988 we now partition symbol 'c' into P, then the alias set for T
989 will become { a b P }, so we will be saving one virtual operand
990 for every indirect reference to 'c'.
992 3- Is S is call-clobbered, we save as many virtual operands as
993 call/asm sites exist in the code, but only if other
994 call-clobbered symbols have been grouped into P. The first
995 call-clobbered symbol that we group does not produce any
998 MEM_REF_STATS points to CFUN's memory reference information. */
1001 estimate_vop_reduction (struct mem_ref_stats_d *mem_ref_stats,
1002 mem_sym_stats_t mp_p, tree mpt)
1006 mem_sym_stats_t mpt_stats;
1008 /* We should only get symbols with indirect references here. */
1009 gcc_assert (mp_p->num_indirect_reads > 0 || mp_p->num_indirect_writes > 0);
1011 /* Note that the only statistics we keep for MPT is the set of
1012 parent tags to know which memory tags have had alias members
1013 partitioned, and the indicator has_call_clobbered_vars.
1014 Reference counts are not important for MPT. */
1015 mpt_stats = get_mem_sym_stats_for (mpt);
1017 /* Traverse all the parent tags for MP_P->VAR. For every tag T, if
1018 partition P is already grouping aliases of T, then reduce the
1019 number of virtual operands by the number of direct references
1021 if (mp_p->parent_tags)
1023 if (mpt_stats->parent_tags == NULL)
1024 mpt_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
1026 EXECUTE_IF_SET_IN_BITMAP (mp_p->parent_tags, 0, i, bi)
1028 if (bitmap_bit_p (mpt_stats->parent_tags, i))
1030 /* Partition MPT is already partitioning symbols in the
1031 alias set for TAG. This means that we are now saving
1032 1 virtual operand for every direct reference to TAG. */
1033 tree tag = referenced_var (i);
1034 mem_sym_stats_t tag_stats = mem_sym_stats (cfun, tag);
1035 mem_ref_stats->num_vuses -= tag_stats->num_direct_reads;
1036 mem_ref_stats->num_vdefs -= tag_stats->num_direct_writes;
1040 /* This is the first symbol in tag I's alias set that is
1041 being grouped under MPT. We will not save any
1042 virtual operands this time, but record that MPT is
1043 grouping a symbol from TAG's alias set so that the
1044 next time we get the savings. */
1045 bitmap_set_bit (mpt_stats->parent_tags, i);
1050 /* If MP_P->VAR is call-clobbered, and MPT is already grouping
1051 call-clobbered symbols, then we will save as many virtual
1052 operands as asm/call sites there are. */
1053 if (is_call_clobbered (mp_p->var))
1055 if (mpt_stats->has_call_clobbered_vars)
1056 mem_ref_stats->num_vdefs -= mem_ref_stats->num_call_sites
1057 + mem_ref_stats->num_asm_sites;
1059 mpt_stats->has_call_clobbered_vars = true;
/* Propagate per-pointer reference counts down to the symbols those
   pointers may point to (via their memory tags).
   NOTE(review): this excerpt is missing interior lines (function
   header, declarations of `i', `j', `bi', `bj', `tree ptr/tag', the
   `ptr = ssa_name (i)' fetch near line 1080, braces, and `else'
   lines) -- it does not compile as shown; consult the full file.  */
1064 /* Helper for compute_memory_partitions. Transfer reference counts
1065 from pointers to their pointed-to sets. Counters for pointers were
1066 computed by update_alias_info. MEM_REF_STATS points to CFUN's
1067 memory reference information. */
1070 update_reference_counts (struct mem_ref_stats_d *mem_ref_stats)
1074 mem_sym_stats_t sym_stats;
/* Walk every SSA name, looking for dereferenced pointers.  */
1076 for (i = 1; i < num_ssa_names; i++)
1079 struct ptr_info_def *pi;
1083 && POINTER_TYPE_P (TREE_TYPE (ptr))
1084 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1085 && pi->is_dereferenced)
1090 mem_sym_stats_t ptr_stats, tag_stats;
1092 /* If PTR has flow-sensitive points-to information, use
1093 PTR's name tag, otherwise use the symbol tag associated
1094 with PTR's symbol. */
1095 if (pi->name_mem_tag)
1096 tag = pi->name_mem_tag;
1098 tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
1100 ptr_stats = get_mem_sym_stats_for (ptr);
1101 tag_stats = get_mem_sym_stats_for (tag);
1103 /* TAG has as many direct references as dereferences we
1104 found for its parent pointer. */
1105 tag_stats->num_direct_reads += ptr_stats->num_direct_reads;
1106 tag_stats->num_direct_writes += ptr_stats->num_direct_writes;
1108 /* All the dereferences of pointer PTR are considered direct
1109 references to PTR's memory tag (TAG). In turn,
1110 references to TAG will become virtual operands for every
1111 symbol in TAG's alias set. So, for every symbol ALIAS in
1112 TAG's alias set, add as many indirect references to ALIAS
1113 as direct references there are for TAG. */
1114 if (MTAG_ALIASES (tag))
1115 EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, j, bj)
1117 tree alias = referenced_var (j);
1118 sym_stats = get_mem_sym_stats_for (alias);
1120 /* All the direct references to TAG are indirect references
1122 sym_stats->num_indirect_reads += ptr_stats->num_direct_reads;
1123 sym_stats->num_indirect_writes += ptr_stats->num_direct_writes;
1124 sym_stats->frequency_reads += ptr_stats->frequency_reads;
1125 sym_stats->frequency_writes += ptr_stats->frequency_writes;
1127 /* Indicate that TAG is one of ALIAS's parent tags. */
1128 if (sym_stats->parent_tags == NULL)
1129 sym_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
1130 bitmap_set_bit (sym_stats->parent_tags, DECL_UID (tag));
1135 /* Call-clobbered symbols are indirectly written at every
1137 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1139 tree sym = referenced_var (i);
1140 sym_stats = get_mem_sym_stats_for (sym);
1141 sym_stats->num_indirect_writes += mem_ref_stats->num_call_sites
1142 + mem_ref_stats->num_asm_sites;
1145 /* Addressable symbols are indirectly written at some ASM sites.
1146 Since only ASM sites that clobber memory actually affect
1147 addressable symbols, this is an over-estimation. */
1148 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
1150 tree sym = referenced_var (i);
1151 sym_stats = get_mem_sym_stats_for (sym);
1152 sym_stats->num_indirect_writes += mem_ref_stats->num_asm_sites;
/* Collect partition candidates and seed the VOP estimate used by
   compute_memory_partitions.
   NOTE(review): this excerpt is missing interior lines (function
   header, declarations of `var' and `old_mpt', braces, `continue'
   statements, and the `else' before line 1220) -- it does not
   compile as shown; consult the full file.  */
1157 /* Helper for compute_memory_partitions. Add all memory symbols to
1158 *MP_INFO_P and compute the initial estimate for the total number of
1159 virtual operands needed. MEM_REF_STATS points to CFUN's memory
1160 reference information. On exit, *TAGS_P will contain the list of
1161 memory tags whose alias set need to be rewritten after
1165 build_mp_info (struct mem_ref_stats_d *mem_ref_stats,
1166 VEC(mem_sym_stats_t,heap) **mp_info_p,
1167 VEC(tree,heap) **tags_p)
1170 referenced_var_iterator rvi;
1172 FOR_EACH_REFERENCED_VAR (var, rvi)
1174 mem_sym_stats_t sym_stats;
1177 /* We are only interested in memory symbols other than MPTs. */
1178 if (is_gimple_reg (var) || TREE_CODE (var) == MEMORY_PARTITION_TAG)
1181 /* Collect memory tags into the TAGS array so that we can
1182 rewrite their alias sets after partitioning. */
1183 if (MTAG_P (var) && MTAG_ALIASES (var))
1184 VEC_safe_push (tree, heap, *tags_p, var);
1186 /* Since we are going to re-compute partitions, any symbols that
1187 used to belong to a partition must be detached from it and
1188 marked for renaming. */
1189 if ((old_mpt = memory_partition (var)) != NULL)
1191 mark_sym_for_renaming (old_mpt);
1192 set_memory_partition (var, NULL_TREE);
1193 mark_sym_for_renaming (var);
1196 sym_stats = get_mem_sym_stats_for (var);
1198 /* Add VAR's reference info to MP_INFO. Note that the only
1199 symbols that make sense to partition are those that have
1200 indirect references. If a symbol S is always directly
1201 referenced, partitioning it will not reduce the number of
1202 virtual operators. The only symbols that are profitable to
1203 partition are those that belong to alias sets and/or are
1205 if (sym_stats->num_indirect_reads > 0
1206 || sym_stats->num_indirect_writes > 0)
1207 VEC_safe_push (mem_sym_stats_t, heap, *mp_info_p, sym_stats);
1209 /* Update the number of estimated VOPS. Note that direct
1210 references to memory tags are always counted as indirect
1211 references to their alias set members, so if a memory tag has
1212 aliases, do not count its direct references to avoid double
1214 if (!MTAG_P (var) || !MTAG_ALIASES (var))
1216 mem_ref_stats->num_vuses += sym_stats->num_direct_reads;
1217 mem_ref_stats->num_vdefs += sym_stats->num_direct_writes;
1220 mem_ref_stats->num_vuses += sym_stats->num_indirect_reads;
1221 mem_ref_stats->num_vdefs += sym_stats->num_indirect_writes;
1226 /* Compute memory partitions. A memory partition (MPT) is an
1227 arbitrary grouping of memory symbols, such that references to one
1228 member of the group is considered a reference to all the members of
1231 As opposed to alias sets in memory tags, the grouping into
1232 partitions is completely arbitrary and only done to reduce the
1233 number of virtual operands. The only rule that needs to be
1234 observed when creating memory partitions is that given two memory
1235 partitions MPT.i and MPT.j, they must not contain symbols in
1238 Memory partitions are used when putting the program into Memory-SSA
1239 form. In particular, in Memory-SSA PHI nodes are not computed for
1240 individual memory symbols. They are computed for memory
1241 partitions. This reduces the amount of PHI nodes in the SSA graph
1242 at the expense of precision (i.e., it makes unrelated stores affect
1245 However, it is possible to increase precision by changing this
1246 partitioning scheme. For instance, if the partitioning scheme is
1247 such that get_mpt_for is the identity function (that is,
1248 get_mpt_for (s) = s), this will result in ultimate precision at the
1249 expense of huge SSA webs.
1251 At the other extreme, a partitioning scheme that groups all the
1252 symbols in the same set results in minimal SSA webs and almost
1253 total loss of precision.
1255 The partitioning heuristic uses three parameters to decide the
1256 order in which symbols are processed. The list of symbols is
1257 sorted so that symbols that are more likely to be partitioned are
1258 near the top of the list:
1260 - Execution frequency. If a memory reference is in a frequently
1261 executed code path, grouping it into a partition may block useful
1262 transformations and cause sub-optimal code generation. So, the
1263 partition heuristic tries to avoid grouping symbols with high
1264 execution frequency scores. Execution frequency is taken
1265 directly from the basic blocks where every reference is made (see
1266 update_mem_sym_stats_from_stmt), which in turn uses the
1267 profile guided machinery, so if the program is compiled with PGO
1268 enabled, more accurate partitioning decisions will be made.
1270 - Number of references. Symbols with few references in the code
1271 are partitioned before symbols with many references.
1273 - NO_ALIAS attributes. Symbols with any of the NO_ALIAS*
1274 attributes are partitioned after symbols marked MAY_ALIAS.
1276 Once the list is sorted, the partitioning proceeds as follows:
1278 1- For every symbol S in MP_INFO, create a new memory partition MP,
1279 if necessary. To avoid memory partitions that contain symbols
1280 from non-conflicting alias sets, memory partitions are
1281 associated to the memory tag that holds S in its alias set. So,
1282 when looking for a memory partition for S, the memory partition
1283 associated with one of the memory tags holding S is chosen. If
1284 none exists, a new one is created.
1286 2- Add S to memory partition MP.
1288 3- Reduce by 1 the number of VOPS for every memory tag holding S.
1290 4- If the total number of VOPS is less than MAX_ALIASED_VOPS or the
1291 average number of VOPS per statement is less than
1292 AVG_ALIASED_VOPS, stop. Otherwise, go to the next symbol in the
/* Group memory symbols into memory partitions (MPTs) until the
   estimated number of virtual operands falls below the thresholds
   checked by need_to_partition_p.  Temporarily raises MAX_ALIASED_VOPS
   so it is never below the number of memory statements, and restores
   it before returning.  */
1296 compute_memory_partitions (void)
1300 mem_sym_stats_t mp_p;
1301 VEC(mem_sym_stats_t,heap) *mp_info;
1303 VEC(tree,heap) *tags;
1304 struct mem_ref_stats_d *mem_ref_stats;
1305 int prev_max_aliased_vops;
1307 mem_ref_stats = gimple_mem_ref_stats (cfun);
1308 gcc_assert (mem_ref_stats->num_vuses == 0 && mem_ref_stats->num_vdefs == 0);
1310 if (mem_ref_stats->num_mem_stmts == 0)
1313 timevar_push (TV_MEMORY_PARTITIONING);
/* Save MAX_ALIASED_VOPS so the adjustment below can be undone on exit.  */
1317 prev_max_aliased_vops = MAX_ALIASED_VOPS;
1319 /* Since we clearly cannot lower the number of virtual operators
1320 below the total number of memory statements in the function, we
1321 may need to adjust MAX_ALIASED_VOPS beforehand. */
1322 if (MAX_ALIASED_VOPS < mem_ref_stats->num_mem_stmts)
1323 MAX_ALIASED_VOPS = mem_ref_stats->num_mem_stmts;
1325 /* Update reference stats for all the pointed-to variables and
1327 update_reference_counts (mem_ref_stats);
1329 /* Add all the memory symbols to MP_INFO. */
1330 build_mp_info (mem_ref_stats, &mp_info, &tags);
1332 /* No partitions required if we are below the threshold. */
1333 if (!need_to_partition_p (mem_ref_stats))
1336 fprintf (dump_file, "\nMemory partitioning NOT NEEDED for %s\n",
1337 get_name (current_function_decl));
1341 /* Sort the MP_INFO array so that symbols that should be partitioned
1342 first are near the top of the list. */
1343 sort_mp_info (mp_info);
1347 fprintf (dump_file, "\nMemory partitioning NEEDED for %s\n\n",
1348 get_name (current_function_decl));
1349 fprintf (dump_file, "Memory symbol references before partitioning:\n");
1350 dump_mp_info (dump_file, mp_info);
1353 /* Create partitions for variables in MP_INFO until we have enough
1354 to lower the total number of VOPS below MAX_ALIASED_VOPS or if
1355 the average number of VOPS per statement is below
1356 AVG_ALIASED_VOPS. */
1357 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
1361 /* If we are below the threshold, stop. */
1362 if (!need_to_partition_p (mem_ref_stats))
1365 mpt = find_partition_for (mp_p);
1366 estimate_vop_reduction (mem_ref_stats, mp_p, mpt);
1369 /* After partitions have been created, rewrite alias sets to use
1370 them instead of the original symbols. This way, if the alias set
1371 was computed as { a b c d e f }, and the subset { b e f } was
1372 grouped into partition MPT.3, then the new alias set for the tag
1373 will be { a c d MPT.3 }.
1375 Note that this is not strictly necessary. The operand scanner
1376 will always check if a symbol belongs to a partition when adding
1377 virtual operands. However, by reducing the size of the alias
1378 sets to be scanned, the work needed inside the operand scanner is
1379 significantly reduced. */
1380 new_aliases = BITMAP_ALLOC (NULL);
1382 for (i = 0; VEC_iterate (tree, tags, i, tag); i++)
1384 rewrite_alias_set_for (tag, new_aliases);
/* NEW_ALIASES is reused as scratch space for each tag.  */
1385 bitmap_clear (new_aliases);
1388 BITMAP_FREE (new_aliases);
1392 fprintf (dump_file, "\nMemory symbol references after partitioning:\n");
1393 dump_mp_info (dump_file, mp_info);
1397 /* Free allocated memory. */
1398 VEC_free (mem_sym_stats_t, heap, mp_info);
1399 VEC_free (tree, heap, tags);
/* Restore the value saved at entry.  */
1401 MAX_ALIASED_VOPS = prev_max_aliased_vops;
1403 timevar_pop (TV_MEMORY_PARTITIONING);
1407 /* Compute may-alias information for every variable referenced in function
1410 Alias analysis proceeds in 3 main phases:
1412 1- Points-to and escape analysis.
1414 This phase walks the use-def chains in the SSA web looking for three
1417 * Assignments of the form P_i = &VAR
1418 * Assignments of the form P_i = malloc()
1419 * Pointers and ADDR_EXPR that escape the current function.
1421 The concept of 'escaping' is the same one used in the Java world. When
1422 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
1423 outside of the current function. So, assignment to global variables,
1424 function arguments and returning a pointer are all escape sites, as are
1425 conversions between pointers and integers.
1427 This is where we are currently limited. Since not everything is renamed
1428 into SSA, we lose track of escape properties when a pointer is stashed
1429 inside a field in a structure, for instance. In those cases, we are
1430 assuming that the pointer does escape.
1432 We use escape analysis to determine whether a variable is
1433 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
1434 is call-clobbered. If a pointer P_i escapes, then all the variables
1435 pointed-to by P_i (and its memory tag) also escape.
1437 2- Compute flow-sensitive aliases
1439 We have two classes of memory tags. Memory tags associated with the
1440 pointed-to data type of the pointers in the program. These tags are
1441 called "symbol memory tag" (SMT). The other class are those associated
1442 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
1443 when adding operands for an INDIRECT_REF *P_i, we will first check
1444 whether P_i has a name tag, if it does we use it, because that will have
1445 more precise aliasing information. Otherwise, we use the standard symbol
1448 In this phase, we go through all the pointers we found in points-to
1449 analysis and create alias sets for the name memory tags associated with
1450 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
1451 it points to and its tag.
1454 3- Compute flow-insensitive aliases
1456 This pass will compare the alias set of every symbol memory tag and
1457 every addressable variable found in the program. Given a symbol
1458 memory tag SMT and an addressable variable V. If the alias sets of
1459 SMT and V conflict (as computed by may_alias_p), then V is marked
1460 as an alias tag and added to the alias set of SMT.
1462 For instance, consider the following function:
1478 After aliasing analysis has finished, the symbol memory tag for pointer
1479 'p' will have two aliases, namely variables 'a' and 'b'. Every time
1480 pointer 'p' is dereferenced, we want to mark the operation as a
1481 potential reference to 'a' and 'b'.
1491 # p_1 = PHI <p_4(1), p_6(2)>;
1506 In certain cases, the list of may aliases for a pointer may grow too
1507 large. This may cause an explosion in the number of virtual operands
1508 inserted in the code, resulting in increased memory consumption and
1511 When the number of virtual operands needed to represent aliased
1512 loads and stores grows too large (configurable with option --param
1513 max-aliased-vops and --param avg-aliased-vops), alias sets are
1514 grouped to avoid severe compile-time slow downs and memory
1515 consumption. See compute_memory_partitions. */
/* Entry point of the may-alias pass.  Runs points-to/escape analysis,
   flow-sensitive and flow-insensitive aliasing, call-clobber analysis,
   memory partitioning, and finally updates statement operands and emits
   debug dumps.  See the large comment above for the phase breakdown.  */
1518 compute_may_aliases (void)
1520 struct alias_info *ai;
1522 memset (&alias_stats, 0, sizeof (alias_stats));
1524 /* Initialize aliasing information. */
1525 ai = init_alias_info ();
1527 /* For each pointer P_i, determine the sets of variables that P_i may
1528 point-to. For every addressable variable V, determine whether the
1529 address of V escapes the current function, making V call-clobbered
1530 (i.e., whether &V is stored in a global variable or if it is passed as a
1531 function call argument). */
1532 compute_points_to_sets (ai);
1534 /* Collect all pointers and addressable variables, compute alias sets,
1535 create memory tags for pointers and promote variables whose address is
1536 not needed anymore. */
1537 setup_pointers_and_addressables (ai);
1539 /* Compute type-based flow-insensitive aliasing for all the type
1541 compute_flow_insensitive_aliasing (ai);
1543 /* Compute flow-sensitive, points-to based aliasing for all the name
1545 compute_flow_sensitive_aliasing (ai);
1547 /* Compute call clobbering information. */
1548 compute_call_clobbered (ai);
1550 /* If the program makes no reference to global variables, but it
1551 contains a mixture of pure and non-pure functions, then we need
1552 to create use-def and def-def links between these functions to
1553 avoid invalid transformations on them. */
1554 maybe_create_global_var ();
1556 /* If the program contains ref-all pointers, finalize may-alias information
1557 for them. This pass needs to be run after call-clobbering information
1558 has been computed. */
1559 if (ai->ref_all_symbol_mem_tag)
1560 finalize_ref_all_pointers (ai);
1562 /* Compute memory partitions for every memory variable. */
1563 compute_memory_partitions ();
1565 /* Remove partitions with no symbols. Partitions may end up with an
1566 empty MPT_SYMBOLS set if a previous round of alias analysis
1567 needed to partition more symbols. Since we don't need those
1568 partitions anymore, remove them to free up the space. */
1572 VEC(tree,heap) *mpt_table;
1574 mpt_table = gimple_ssa_operands (cfun)->mpt_table;
1576 while (i < VEC_length (tree, mpt_table))
1578 mpt = VEC_index (tree, mpt_table, i);
1579 if (MPT_SYMBOLS (mpt) == NULL)
/* Order of MPT_TABLE does not matter, so use the O(1) removal.  */
1580 VEC_unordered_remove (tree, mpt_table, i);
1586 /* Populate all virtual operands and newly promoted register operands. */
1588 block_stmt_iterator bsi;
1591 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1592 update_stmt_if_modified (bsi_stmt (bsi));
1595 /* Debugging dumps. */
1598 dump_mem_ref_stats (dump_file);
1599 dump_alias_info (dump_file);
1600 dump_points_to_info (dump_file);
1602 if (dump_flags & TDF_STATS)
1603 dump_alias_stats (dump_file);
1605 if (dump_flags & TDF_DETAILS)
1606 dump_referenced_vars (dump_file);
1609 /* Report strict aliasing violations. */
1610 strict_aliasing_warning_backend ();
1612 /* Deallocate memory used by aliasing data structures. */
1613 delete_alias_info (ai);
/* Pass descriptor for the may-alias pass; the pass manager invokes
   compute_may_aliases via the execute hook.  */
1619 struct tree_opt_pass pass_may_alias =
1623 compute_may_aliases, /* execute */
1626 0, /* static_pass_number */
1627 TV_TREE_MAY_ALIAS, /* tv_id */
1628 PROP_cfg | PROP_ssa, /* properties_required */
1629 PROP_alias, /* properties_provided */
1630 0, /* properties_destroyed */
1631 0, /* todo_flags_start */
1636 | TODO_verify_stmts, /* todo_flags_finish */
1641 /* Data structure used to count the number of dereferences to PTR
1642 inside an expression. */
1650 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
1651 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.
   DATA is a struct count_ptr_d carrying the pointer being searched for
   and the running dereference count.  */
1654 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
1656 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
1658 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
1659 pointer 'ptr' is *not* dereferenced, it is simply used to compute
1660 the address of 'fld' as 'ptr + offsetof(fld)'. */
1661 if (TREE_CODE (*tp) == ADDR_EXPR)
/* A dereference of the tracked pointer itself: *PTR or a variant.  */
1667 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
1674 /* Count the number of direct and indirect uses for pointer PTR in
1675 statement STMT. The number of direct uses is stored in
1676 *NUM_USES_P. Indirect references are counted separately depending
1677 on whether they are store or load operations. The counts are
1678 stored in *NUM_STORES_P and *NUM_LOADS_P. */
1681 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
1682 unsigned *num_loads_p, unsigned *num_stores_p)
1691 /* Find out the total number of uses of PTR in STMT. */
1692 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
1696 /* Now count the number of indirect references to PTR. This is
1697 truly awful, but we don't have much choice. There are no parent
1698 pointers inside INDIRECT_REFs, so an expression like
1699 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
1700 find all the indirect and direct uses of x_1 inside. The only
1701 shortcut we can take is the fact that GIMPLE only allows
1702 INDIRECT_REFs inside the expressions below. */
1703 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1704 || (TREE_CODE (stmt) == RETURN_EXPR
1705 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1706 || TREE_CODE (stmt) == ASM_EXPR
1707 || TREE_CODE (stmt) == CALL_EXPR)
/* Decompose STMT into the expressions that may contain INDIRECT_REFs:
   an LHS (stores) and an RHS (loads).  */
1711 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
1713 lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1714 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1716 else if (TREE_CODE (stmt) == RETURN_EXPR)
1718 tree e = TREE_OPERAND (stmt, 0);
1719 lhs = GIMPLE_STMT_OPERAND (e, 0);
1720 rhs = GIMPLE_STMT_OPERAND (e, 1);
1722 else if (TREE_CODE (stmt) == ASM_EXPR)
1724 lhs = ASM_OUTPUTS (stmt);
1725 rhs = ASM_INPUTS (stmt);
/* Walk the LHS looking for dereferences of PTR; those are stores.  */
1734 && (TREE_CODE (lhs) == TREE_LIST
1736 || GIMPLE_STMT_P (lhs)))
1738 struct count_ptr_d count;
1741 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
1742 *num_stores_p = count.count;
/* Likewise for the RHS; dereferences there are loads.  */
1746 && (TREE_CODE (rhs) == TREE_LIST
1748 || GIMPLE_STMT_P (rhs)))
1750 struct count_ptr_d count;
1753 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
1754 *num_loads_p = count.count;
/* Every indirect reference is also a use, so the totals must agree.  */
1758 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
1762 /* Helper for delete_mem_ref_stats. Free all the slots in the
1763 mem_sym_stats map.
   Called once per map entry by pointer_map_traverse; VALUE is the
   per-symbol statistics record to release.  */
1766 delete_mem_sym_stats (void *key ATTRIBUTE_UNUSED, void **value,
1767 void *data ATTRIBUTE_UNUSED)
1775 /* Remove memory references stats for function FN. */
1778 delete_mem_ref_stats (struct function *fn)
1780 if (gimple_mem_ref_stats (fn)->mem_sym_stats)
/* Free every per-symbol record, then the map itself.  */
1782 pointer_map_traverse (gimple_mem_ref_stats (fn)->mem_sym_stats,
1783 delete_mem_sym_stats, NULL);
1784 pointer_map_destroy (gimple_mem_ref_stats (fn)->mem_sym_stats);
1787 gimple_mem_ref_stats (fn)->mem_sym_stats = NULL;
1791 /* Initialize memory reference stats.
   Any statistics left over from a previous run are released first,
   then the whole structure is reset and a fresh symbol map created.  */
1794 init_mem_ref_stats (void)
1796 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
1798 if (mem_ref_stats->mem_sym_stats)
1799 delete_mem_ref_stats (cfun);
1801 memset (mem_ref_stats, 0, sizeof (struct mem_ref_stats_d));
1802 mem_ref_stats->mem_sym_stats = pointer_map_create ();
1806 /* Initialize the data structures used for alias analysis.
   Allocates a fresh alias_info, and if aliasing was computed in a
   previous round, clears all stale flow-sensitive and
   flow-insensitive information so this round starts from scratch.  */
1808 static struct alias_info *
1809 init_alias_info (void)
1811 struct alias_info *ai;
1812 referenced_var_iterator rvi;
1815 ai = XCNEW (struct alias_info);
1816 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
1817 sbitmap_zero (ai->ssa_names_visited);
1818 ai->processed_ptrs = VEC_alloc (tree, heap, 50);
1819 ai->written_vars = pointer_set_create ();
1820 ai->dereferenced_ptrs_store = pointer_set_create ();
1821 ai->dereferenced_ptrs_load = pointer_set_create ();
1823 /* Clear out all memory reference stats. */
1824 init_mem_ref_stats ();
1826 /* If aliases have been computed before, clear existing information. */
1827 if (gimple_aliases_computed_p (cfun))
1831 bitmap_obstack_release (&alias_bitmap_obstack);
1833 /* Similarly, clear the set of addressable variables. In this
1834 case, we can just clear the set because addressability is
1835 only computed here. */
1836 bitmap_clear (gimple_addressable_vars (cfun));
1838 /* Clear flow-insensitive alias information from each symbol. */
1839 FOR_EACH_REFERENCED_VAR (var, rvi)
1841 if (is_gimple_reg (var))
1845 MTAG_ALIASES (var) = NULL;
1847 /* Memory partition information will be computed from scratch. */
1848 if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
1849 MPT_SYMBOLS (var) = NULL;
1851 /* Since we are about to re-discover call-clobbered
1852 variables, clear the call-clobbered flag. Variables that
1853 are intrinsically call-clobbered (globals, local statics,
1854 etc) will not be marked by the aliasing code, so we can't
1855 remove them from CALL_CLOBBERED_VARS.
1857 NB: STRUCT_FIELDS are still call clobbered if they are
1858 for a global variable, so we *don't* clear their call
1859 clobberedness just because they are tags, though we will
1860 clear it if they aren't for global variables. */
1861 if (TREE_CODE (var) == NAME_MEMORY_TAG
1862 || TREE_CODE (var) == SYMBOL_MEMORY_TAG
1863 || TREE_CODE (var) == MEMORY_PARTITION_TAG
1864 || !is_global_var (var))
1865 clear_call_clobbered (var);
1868 /* Clear flow-sensitive points-to information from each SSA name. */
1869 for (i = 1; i < num_ssa_names; i++)
1871 tree name = ssa_name (i);
1873 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
1876 if (SSA_NAME_PTR_INFO (name))
1878 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
1880 /* Clear all the flags but keep the name tag to
1881 avoid creating new temporaries unnecessarily. If
1882 this pointer is found to point to a subset or
1883 superset of its former points-to set, then a new
1884 tag will need to be created in create_name_tags. */
1885 pi->pt_anything = 0;
1887 pi->value_escapes_p = 0;
1888 pi->is_dereferenced = 0;
1890 bitmap_clear (pi->pt_vars);
1896 /* If this is the first time we compute aliasing information,
1897 every non-register symbol will need to be put into SSA form
1898 (the initial SSA form only operates on GIMPLE registers). */
1899 FOR_EACH_REFERENCED_VAR (var, rvi)
1900 if (!is_gimple_reg (var))
1901 mark_sym_for_renaming (var);
1904 /* Next time, we will need to reset alias information. */
1905 cfun->gimple_df->aliases_computed_p = true;
1906 bitmap_obstack_initialize (&alias_bitmap_obstack);
1912 /* Deallocate memory used by alias analysis.
   Releases everything allocated in init_alias_info and
   setup_pointers_and_addressables, plus the points-to sets.  */
1915 delete_alias_info (struct alias_info *ai)
1919 sbitmap_free (ai->ssa_names_visited);
1921 VEC_free (tree, heap, ai->processed_ptrs);
1923 for (i = 0; i < ai->num_addressable_vars; i++)
1924 free (ai->addressable_vars[i]);
1925 free (ai->addressable_vars);
1927 for (i = 0; i < ai->num_pointers; i++)
1928 free (ai->pointers[i]);
1929 free (ai->pointers);
1931 pointer_set_destroy (ai->written_vars);
1932 pointer_set_destroy (ai->dereferenced_ptrs_store);
1933 pointer_set_destroy (ai->dereferenced_ptrs_load);
1936 delete_points_to_sets ();
1940 /* Used for hashing to identify pointer infos with identical
   points-to sets.  Equality callback for the PTR_HASH table used in
   create_name_tags: two ptr_info_defs are equal iff their PT_VARS
   bitmaps contain the same bits.  */
1944 eq_ptr_info (const void *p1, const void *p2)
1946 const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
1947 const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
1948 return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
/* Hash callback paired with eq_ptr_info: hash a ptr_info_def by its
   points-to bitmap.  */
1952 ptr_info_hash (const void *p)
1954 const struct ptr_info_def *n = (const struct ptr_info_def *) p;
1955 return bitmap_hash (n->pt_vars);
1959 /* Create name tags for all the pointers that have been dereferenced.
1960 We only create a name tag for a pointer P if P is found to point to
1961 a set of variables (so that we can alias them to *P) or if it is
1962 the result of a call to malloc (which means that P cannot point to
1963 anything else nor alias any other variable).
1965 If two pointers P and Q point to the same set of variables, they
1966 are assigned the same name tag. */
1969 create_name_tags (void)
1972 VEC (tree, heap) *with_ptvars = NULL;
1976 /* Collect the list of pointers with a non-empty points to set. */
1977 for (i = 1; i < num_ssa_names; i++)
1979 tree ptr = ssa_name (i);
1980 struct ptr_info_def *pi;
1983 || !POINTER_TYPE_P (TREE_TYPE (ptr))
1984 || !SSA_NAME_PTR_INFO (ptr))
1987 pi = SSA_NAME_PTR_INFO (ptr);
1989 if (pi->pt_anything || !pi->is_dereferenced)
1991 /* No name tags for pointers that have not been
1992 dereferenced or point to an arbitrary location. */
1993 pi->name_mem_tag = NULL_TREE;
1997 /* Set pt_anything on the pointers without pt_vars filled in so
1998 that they are assigned a symbol tag. */
1999 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
2000 VEC_safe_push (tree, heap, with_ptvars, ptr);
2002 set_pt_anything (ptr);
2005 /* If we didn't find any pointers with pt_vars set, we're done. */
/* Hash table keyed by points-to set, used to share one name tag among
   pointers with identical PT_VARS (see eq_ptr_info/ptr_info_hash).  */
2009 ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);
2011 /* Now go through the pointers with pt_vars, and find a name tag
2012 with the same pt_vars as this pointer, or create one if one
2014 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
2016 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2017 tree old_name_tag = pi->name_mem_tag;
2018 struct ptr_info_def **slot;
2020 /* If PTR points to a set of variables, check if we don't
2021 have another pointer Q with the same points-to set before
2022 creating a tag. If so, use Q's tag instead of creating a
2025 This is important for not creating unnecessary symbols
2026 and also for copy propagation. If we ever need to
2027 propagate PTR into Q or vice-versa, we would run into
2028 problems if they both had different name tags because
2029 they would have different SSA version numbers (which
2030 would force us to take the name tags in and out of SSA). */
2031 slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
2033 pi->name_mem_tag = (*slot)->name_mem_tag;
2037 /* If we didn't find a pointer with the same points-to set
2038 as PTR, create a new name tag if needed. */
2039 if (pi->name_mem_tag == NULL_TREE)
2040 pi->name_mem_tag = get_nmt_for (ptr);
2043 /* If the new name tag computed for PTR is different than
2044 the old name tag that it used to have, then the old tag
2045 needs to be removed from the IL, so we mark it for
2047 if (old_name_tag && old_name_tag != pi->name_mem_tag)
2048 mark_sym_for_renaming (old_name_tag);
/* Propagate volatility of the pointed-to type onto the tag.  */
2050 TREE_THIS_VOLATILE (pi->name_mem_tag)
2051 |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
2053 /* Mark the new name tag for renaming. */
2054 mark_sym_for_renaming (pi->name_mem_tag);
2057 htab_delete (ptr_hash);
2059 VEC_free (tree, heap, with_ptvars);
2063 /* Union the alias set SET into the may-aliases for TAG.
   Allocates TAG's alias bitmap on the alias obstack if it does not
   exist yet; an empty SET is a no-op.  */
2066 union_alias_set_into (tree tag, bitmap set)
2068 bitmap ma = MTAG_ALIASES (tag);
2070 if (bitmap_empty_p (set))
2074 ma = MTAG_ALIASES (tag) = BITMAP_ALLOC (&alias_bitmap_obstack);
2075 bitmap_ior_into (ma, set);
2079 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
2080 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
2081 name tag and the variables it points-to are call-clobbered. Finally, if
2082 P_i escapes and we could not determine where it points to, then all the
2083 variables in the same alias set as *P_i are marked call-clobbered. This
2084 is necessary because we must assume that P_i may take the address of any
2085 variable in the same alias set. */
2088 compute_flow_sensitive_aliasing (struct alias_info *ai)
/* First resolve the points-to set of each processed pointer; pointers
   whose set cannot be determined degrade to "points to anything".  */
2095 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2097 if (!find_what_p_points_to (ptr))
2098 set_pt_anything (ptr);
2101 create_name_tags ();
2103 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2105 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2106 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
2108 /* Set up aliasing information for PTR's name memory tag (if it has
2109 one). Note that only pointers that have been dereferenced will
2110 have a name memory tag. */
2111 if (pi->name_mem_tag && pi->pt_vars)
2113 if (!bitmap_empty_p (pi->pt_vars))
2115 union_alias_set_into (pi->name_mem_tag, pi->pt_vars);
2116 union_alias_set_into (tag, pi->pt_vars);
/* A tag must not list itself as one of its own aliases.  */
2117 bitmap_clear_bit (MTAG_ALIASES (tag), DECL_UID (tag));
2119 /* It may be the case that the tag uid was the only
2120 bit we had set in the aliases list, and in this case,
2121 we don't want to keep an empty bitmap, as this
2122 asserts in tree-ssa-operands.c . */
2123 if (bitmap_empty_p (MTAG_ALIASES (tag)))
2124 BITMAP_FREE (MTAG_ALIASES (tag))
2131 /* Return TRUE if at least one symbol in TAG2's alias set is also
2132 present in TAG1's alias set. */
2135 have_common_aliases_p (bitmap tag1aliases, bitmap tag2aliases)
2138 /* This is the old behavior of have_common_aliases_p, which is to
2139 return false if both sets are empty, or one set is and the other
2141 if ((tag1aliases == NULL && tag2aliases != NULL)
2142 || (tag2aliases == NULL && tag1aliases != NULL)
2143 || (tag1aliases == NULL && tag2aliases == NULL))
/* Both bitmaps are non-NULL here; test for a non-empty intersection.  */
2146 return bitmap_intersect_p (tag1aliases, tag2aliases);
2149 /* Compute type-based alias sets. Traverse all the pointers and
2150 addressable variables found in setup_pointers_and_addressables.
2152 For every pointer P in AI->POINTERS and addressable variable V in
2153 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
2154 memory tag (SMT) if their alias sets conflict. V is then marked as
2155 an aliased symbol so that the operand scanner knows that statements
2156 containing V have aliased operands. */
2159 compute_flow_insensitive_aliasing (struct alias_info *ai)
2163 /* For every pointer P, determine which addressable variables may alias
2164 with P's symbol memory tag. */
2165 for (i = 0; i < ai->num_pointers; i++)
2168 struct alias_map_d *p_map = ai->pointers[i];
2169 tree tag = symbol_mem_tag (p_map->var);
2172 /* Call-clobbering information is not finalized yet at this point. */
2173 if (PTR_IS_REF_ALL (p_map->var))
2176 for (j = 0; j < ai->num_addressable_vars; j++)
2178 struct alias_map_d *v_map;
2180 bool tag_stored_p, var_stored_p;
2182 v_map = ai->addressable_vars[j];
2184 v_ann = var_ann (var);
2186 /* Skip memory tags and variables that have never been
2187 written to. We also need to check if the variables are
2188 call-clobbered because they may be overwritten by
2190 tag_stored_p = pointer_set_contains (ai->written_vars, tag)
2191 || is_call_clobbered (tag);
2192 var_stored_p = pointer_set_contains (ai->written_vars, var)
2193 || is_call_clobbered (var);
2194 if (!tag_stored_p && !var_stored_p)
2197 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
2199 /* We should never have a var with subvars here, because
2200 they shouldn't get into the set of addressable vars */
2201 gcc_assert (!var_can_have_subvars (var)
2202 || get_subvars_for_var (var) == NULL);
2204 /* Add VAR to TAG's may-aliases set. */
2205 add_may_alias (tag, var);
2210 /* Since this analysis is based exclusively on symbols, it fails to
2211 handle cases where two pointers P and Q have different memory
2212 tags with conflicting alias set numbers but no aliased symbols in
2215 For example, suppose that we have two memory tags SMT.1 and SMT.2
2218 may-aliases (SMT.1) = { a }
2219 may-aliases (SMT.2) = { b }
2221 and the alias set number of SMT.1 conflicts with that of SMT.2.
2222 Since they don't have symbols in common, loads and stores from
2223 SMT.1 and SMT.2 will seem independent of each other, which will
2224 lead to the optimizers making invalid transformations (see
2225 testsuite/gcc.c-torture/execute/pr15262-[12].c).
2227 To avoid this problem, we do a final traversal of AI->POINTERS
2228 looking for pairs of pointers that have no aliased symbols in
2229 common and yet have conflicting alias set numbers. */
2230 for (i = 0; i < ai->num_pointers; i++)
2233 struct alias_map_d *p_map1 = ai->pointers[i];
2234 tree tag1 = symbol_mem_tag (p_map1->var);
2235 bitmap may_aliases1 = MTAG_ALIASES (tag1);
2237 if (PTR_IS_REF_ALL (p_map1->var))
/* Only unordered pairs matter, so start J past I.  */
2240 for (j = i + 1; j < ai->num_pointers; j++)
2242 struct alias_map_d *p_map2 = ai->pointers[j];
2243 tree tag2 = symbol_mem_tag (p_map2->var);
2244 bitmap may_aliases2 = may_aliases (tag2);
2246 if (PTR_IS_REF_ALL (p_map2->var))
2249 /* If the pointers may not point to each other, do nothing. */
2250 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
2253 /* The two pointers may alias each other. If they already have
2254 symbols in common, do nothing. */
2255 if (have_common_aliases_p (may_aliases1, may_aliases2))
2258 if (may_aliases2 && !bitmap_empty_p (may_aliases2))
2260 union_alias_set_into (tag1, may_aliases2);
2264 /* Since TAG2 does not have any aliases of its own, add
2265 TAG2 itself to the alias set of TAG1. */
2266 add_may_alias (tag1, tag2);
2274 /* Finalize may-alias information for ref-all pointers. Traverse all
2275 the addressable variables found in setup_pointers_and_addressables.
2277 If flow-sensitive alias analysis has attached a name memory tag to
2278 a ref-all pointer, we will use it for the dereferences because that
2279 will have more precise aliasing information. But if there is no
2280 name tag, we will use a special symbol tag that aliases all the
2281 call-clobbered addressable variables. */
2284 finalize_ref_all_pointers (struct alias_info *ai)
2288 /* First add the real call-clobbered variables. */
2289 for (i = 0; i < ai->num_addressable_vars; i++)
2291 tree var = ai->addressable_vars[i]->var;
2292 if (is_call_clobbered (var))
2293 add_may_alias (ai->ref_all_symbol_mem_tag, var);
2296 /* Then add the call-clobbered pointer memory tags. See
2297 compute_flow_insensitive_aliasing for the rationale. */
2298 for (i = 0; i < ai->num_pointers; i++)
2300 tree ptr = ai->pointers[i]->var, tag;
/* Ref-all pointers themselves are what we are finalizing; skip them.  */
2301 if (PTR_IS_REF_ALL (ptr))
2303 tag = symbol_mem_tag (ptr);
2304 if (is_call_clobbered (tag))
2305 add_may_alias (ai->ref_all_symbol_mem_tag, tag);
2311 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS.
   Records VAR together with its type-based alias set number, and
   bumps AI->NUM_ADDRESSABLE_VARS.  */
2314 create_alias_map_for (tree var, struct alias_info *ai)
2316 struct alias_map_d *alias_map;
2317 alias_map = XCNEW (struct alias_map_d);
2318 alias_map->var = var;
2319 alias_map->set = get_alias_set (var);
2320 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
2324 /* Create memory tags for all the dereferenced pointers and build the
2325 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
2326 sets. Based on the address escape and points-to information collected
2327 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
2328 variables whose address is not needed anymore. */
2331 setup_pointers_and_addressables (struct alias_info *ai)
2333 size_t num_addressable_vars, num_pointers;
2334 referenced_var_iterator rvi;
2336 VEC (tree, heap) *varvec = NULL;
2337 safe_referenced_var_iterator srvi;
2339 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
/* First pass: just count, so the two arrays can be allocated once.  */
2340 num_addressable_vars = num_pointers = 0;
2342 FOR_EACH_REFERENCED_VAR (var, rvi)
2344 if (may_be_aliased (var))
2345 num_addressable_vars++;
2347 if (POINTER_TYPE_P (TREE_TYPE (var)))
2349 /* Since we don't keep track of volatile variables, assume that
2350 these pointers are used in indirect store operations. */
2351 if (TREE_THIS_VOLATILE (var))
2352 pointer_set_insert (ai->dereferenced_ptrs_store, var);
2358 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
2359 always going to be slightly bigger than we actually need them
2360 because some TREE_ADDRESSABLE variables will be marked
2361 non-addressable below and only pointers with unique symbol tags are
2362 going to be added to POINTERS. */
2363 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
2364 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
2365 ai->num_addressable_vars = 0;
2366 ai->num_pointers = 0;
/* Second pass: the _SAFE iterator is used because the loop may add
   referenced vars (memory tags) while iterating.  */
2368 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
2372 /* Name memory tags already have flow-sensitive aliasing
2373 information, so they need not be processed by
2374 compute_flow_insensitive_aliasing. Similarly, symbol memory
2375 tags are already accounted for when we process their
2378 Structure fields, on the other hand, have to have some of this
2379 information processed for them, but it's pointless to mark them
2380 non-addressable (since they are fake variables anyway). */
2381 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
2384 /* Remove the ADDRESSABLE flag from every addressable variable whose
2385 address is not needed anymore. This is caused by the propagation
2386 of ADDR_EXPR constants into INDIRECT_REF expressions and the
2387 removal of dead pointer assignments done by the early scalar
2389 if (TREE_ADDRESSABLE (var))
2391 if (!bitmap_bit_p (gimple_addressable_vars (cfun), DECL_UID (var))
2392 && TREE_CODE (var) != RESULT_DECL
2393 && !is_global_var (var))
2395 bool okay_to_mark = true;
2397 /* Since VAR is now a regular GIMPLE register, we will need
2398 to rename VAR into SSA afterwards. */
2399 mark_sym_for_renaming (var);
2401 /* If VAR can have sub-variables, and any of its
2402 sub-variables has its address taken, then we cannot
2403 remove the addressable flag from VAR. */
2404 if (var_can_have_subvars (var)
2405 && (svars = get_subvars_for_var (var)))
2409 for (sv = svars; sv; sv = sv->next)
2411 if (bitmap_bit_p (gimple_addressable_vars (cfun),
2412 DECL_UID (sv->var)))
2413 okay_to_mark = false;
2414 mark_sym_for_renaming (sv->var);
2418 /* The address of VAR is not needed, remove the
2419 addressable bit, so that it can be optimized as a
2420 regular variable. */
2423 /* The memory partition holding VAR will no longer
2424 contain VAR, and statements referencing it will need
2426 if (memory_partition (var))
2427 mark_sym_for_renaming (memory_partition (var));
/* NOTE(review): lines between 2420 and 2429 are elided here; the
   mark_non_addressable call below is presumably guarded by
   okay_to_mark in the full source -- confirm.  */
2429 mark_non_addressable (var);
2434 /* Global variables and addressable locals may be aliased. Create an
2435 entry in ADDRESSABLE_VARS for VAR. */
2436 if (may_be_aliased (var))
2438 if (!var_can_have_subvars (var)
2439 || get_subvars_for_var (var) == NULL)
2440 create_alias_map_for (var, ai)
2501 /* Determine whether to use .GLOBAL_VAR to model call clobbering
2502 semantics. If the function makes no references to global
2503 variables and contains at least one call to a non-pure function,
2504 then we need to mark the side-effects of the call using .GLOBAL_VAR
2505 to represent all possible global memory referenced by the callee. */
2508 maybe_create_global_var (void)
2510 /* No need to create it, if we have one already. */
2511 if (gimple_global_var (cfun) == NULL_TREE)
2513 struct mem_ref_stats_d *stats = gimple_mem_ref_stats (cfun);
2515 /* Create .GLOBAL_VAR if there are no call-clobbered
2516 variables and the program contains a mixture of pure/const
2517 and regular function calls. This is to avoid the problem
2518 described in PR 20115:
2521 int func_pure (void) { return X; }
2522 int func_non_pure (int a) { X += a; }
2525 int a = func_pure ();
2531 Since foo() has no call-clobbered variables, there is
2532 no relationship between the calls to func_pure and
2533 func_non_pure. Since func_pure has no side-effects, value
2534 numbering optimizations elide the second call to func_pure.
2535 So, if we have some pure/const and some regular calls in the
2536 program we create .GLOBAL_VAR to avoid missing these
2538 if (bitmap_count_bits (gimple_call_clobbered_vars (cfun)) == 0
2539 && stats->num_call_sites > 0
2540 && stats->num_pure_const_call_sites > 0
/* The last clause ensures there is at least one call site that is
   NOT pure/const, i.e. a genuine mixture of the two kinds.  */
2541 && stats->num_call_sites > stats->num_pure_const_call_sites)
2542 create_global_var ();
2547 /* Return TRUE if pointer PTR may point to variable VAR.
2549 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
2550 This is needed because when checking for type conflicts we are
2551 interested in the alias set of the memory location pointed-to by
2552 PTR. The alias set of PTR itself is irrelevant.
2554 VAR_ALIAS_SET is the alias set for VAR. */
2557 may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
2558 tree var, HOST_WIDE_INT var_alias_set,
2559 bool alias_set_only)
/* Each disambiguation tier below bumps a pair of alias_stats counters
   (queries/resolved) so dump_alias_stats can report precision.  */
2563 alias_stats.alias_queries++;
2564 alias_stats.simple_queries++;
2566 /* By convention, a variable cannot alias itself. */
2567 mem = symbol_mem_tag (ptr);
/* NOTE(review): the comparison guarded here (mem == var, per the comment
   above) is elided in this extract.  */
2570 alias_stats.alias_noalias++;
2571 alias_stats.simple_resolved++;
2575 /* If -fargument-noalias-global is > 2, pointer arguments may
2576 not point to anything else. */
2577 if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
2579 alias_stats.alias_noalias++;
2580 alias_stats.simple_resolved++;
2584 /* If -fargument-noalias-global is > 1, pointer arguments may
2585 not point to global variables. */
2586 if (flag_argument_noalias > 1 && is_global_var (var)
2587 && TREE_CODE (ptr) == PARM_DECL)
2589 alias_stats.alias_noalias++;
2590 alias_stats.simple_resolved++;
2594 /* If either MEM or VAR is a read-only global and the other one
2595 isn't, then PTR cannot point to VAR. */
2596 if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
2597 || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
2599 alias_stats.alias_noalias++;
2600 alias_stats.simple_resolved++;
2604 gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
2606 alias_stats.tbaa_queries++;
2608 /* If the alias sets don't conflict then MEM cannot alias VAR. */
2609 if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
2611 alias_stats.alias_noalias++;
2612 alias_stats.tbaa_resolved++;
2616 /* If VAR is a record or union type, PTR cannot point into VAR
2617 unless there is some explicit address operation in the
2618 program that can reference a field of the type pointed-to by PTR.
2619 This also assumes that the types of both VAR and PTR are
2620 contained within the compilation unit, and that there is no fancy
2621 addressing arithmetic associated with any of the types
2623 if (mem_alias_set != 0 && var_alias_set != 0)
2625 tree ptr_type = TREE_TYPE (ptr);
2626 tree var_type = TREE_TYPE (var);
2628 /* The star count is -1 if the type at the end of the pointer_to
2629 chain is not a record or union type. */
2630 if ((!alias_set_only) &&
2631 ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
2633 int ptr_star_count = 0;
2635 /* ipa_type_escape_star_count_of_interesting_type is a
2636 little too restrictive for the pointer type, need to
2637 allow pointers to primitive types as long as those types
2638 cannot be pointers to everything. */
2639 while (POINTER_TYPE_P (ptr_type))
2641 /* Strip the *s off. */
2642 ptr_type = TREE_TYPE (ptr_type);
/* NOTE(review): the increment of ptr_star_count inside this loop is
   elided in the extract.  */
2646 /* There does not appear to be a better test to see if the
2647 pointer type was one of the pointer to everything
2649 if (ptr_star_count > 0)
2651 alias_stats.structnoaddress_queries++;
2652 if (ipa_type_escape_field_does_not_clobber_p (var_type,
2655 alias_stats.structnoaddress_resolved++;
2656 alias_stats.alias_noalias++;
2660 else if (ptr_star_count == 0)
2662 /* If PTR_TYPE was not really a pointer to type, it cannot
2664 alias_stats.structnoaddress_queries++;
2665 alias_stats.structnoaddress_resolved++;
2666 alias_stats.alias_noalias++;
/* No disambiguation tier fired: conservatively report a may-alias.  */
2672 alias_stats.alias_mayalias++;
2677 /* Add ALIAS to the set of variables that may alias VAR. */
2680 add_may_alias (tree var, tree alias)
2682 /* Don't allow self-referential aliases. */
2683 gcc_assert (var != alias);
2685 /* ALIAS must be addressable if it's being added to an alias set. */
2687 TREE_ADDRESSABLE (alias) = 1;
2689 gcc_assert (may_be_aliased (alias));
2692 /* VAR must be a symbol or a name tag. */
2693 gcc_assert (TREE_CODE (var) == SYMBOL_MEMORY_TAG
2694 || TREE_CODE (var) == NAME_MEMORY_TAG);
/* The may-alias set is a bitmap over DECL_UIDs, lazily allocated on the
   shared alias_bitmap_obstack.  */
2696 if (MTAG_ALIASES (var) == NULL)
2697 MTAG_ALIASES (var) = BITMAP_ALLOC (&alias_bitmap_obstack);
2699 bitmap_set_bit (MTAG_ALIASES (var), DECL_UID (alias));
2703 /* Mark pointer PTR as pointing to an arbitrary memory location. */
2706 set_pt_anything (tree ptr)
2708 struct ptr_info_def *pi = get_ptr_info (ptr);
2710 pi->pt_anything = 1;
2713 /* The pointer used to have a name tag, but we now found it pointing
2714 to an arbitrary location. The name tag needs to be renamed and
2715 disassociated from PTR. */
2716 if (pi->name_mem_tag)
/* Renaming flushes the stale virtual operands that referenced the tag.  */
2718 mark_sym_for_renaming (pi->name_mem_tag);
2719 pi->name_mem_tag = NULL_TREE;
2724 /* Return true if STMT is an "escape" site from the current function. Escape
2725 sites those statements which might expose the address of a variable
2726 outside the current function. STMT is an escape site iff:
2728 1- STMT is a function call, or
2729 2- STMT is an __asm__ expression, or
2730 3- STMT is an assignment to a non-local variable, or
2731 4- STMT is a return statement.
2733 Return the type of escape site found, if we found one, or NO_ESCAPE
2737 is_escape_site (tree stmt)
2739 tree call = get_call_expr_in (stmt);
2740 if (call != NULL_TREE)
/* Pure/const calls (no side effects) are a weaker form of escape than a
   regular call; the distinction feeds the clobbering machinery.  */
2742 if (!TREE_SIDE_EFFECTS (call))
2743 return ESCAPE_TO_PURE_CONST;
2745 return ESCAPE_TO_CALL;
2747 else if (TREE_CODE (stmt) == ASM_EXPR)
2748 return ESCAPE_TO_ASM;
2749 else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
2751 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
2753 /* Get to the base of _REF nodes. */
2754 if (TREE_CODE (lhs) != SSA_NAME)
2755 lhs = get_base_address (lhs);
2757 /* If we couldn't recognize the LHS of the assignment, assume that it
2758 is a non-local store. */
2759 if (lhs == NULL_TREE)
2760 return ESCAPE_UNKNOWN;
2762 if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
2763 || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CONVERT_EXPR
2764 || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
2767 = TREE_TYPE (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0));
2768 tree to = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 1));
2770 /* If the RHS is a conversion between a pointer and an integer, the
2771 pointer escapes since we can't track the integer. */
2772 if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
2773 return ESCAPE_BAD_CAST;
2775 /* Same if the RHS is a conversion between a regular pointer and a
2776 ref-all pointer since we can't track the SMT of the former. */
2777 if (POINTER_TYPE_P (from) && !TYPE_REF_CAN_ALIAS_ALL (from)
2778 && POINTER_TYPE_P (to) && TYPE_REF_CAN_ALIAS_ALL (to))
2779 return ESCAPE_BAD_CAST;
2782 /* If the LHS is an SSA name, it can't possibly represent a non-local
2784 if (TREE_CODE (lhs) == SSA_NAME)
/* NOTE(review): the return value for the SSA_NAME case is elided in this
   extract (presumably NO_ESCAPE -- confirm against the full source).  */
2787 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
2788 local variables we cannot be sure if it will escape, because we
2789 don't have information about objects not in SSA form. Need to
2790 implement something along the lines of
2792 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
2793 Midkiff, ``Escape analysis for java,'' in Proceedings of the
2794 Conference on Object-Oriented Programming Systems, Languages, and
2795 Applications (OOPSLA), pp. 1-19, 1999. */
2796 return ESCAPE_STORED_IN_GLOBAL;
2798 else if (TREE_CODE (stmt) == RETURN_EXPR)
2799 return ESCAPE_TO_RETURN;
2804 /* Create a new memory tag of type TYPE.
2805 Does NOT push it into the current binding. */
2808 create_tag_raw (enum tree_code code, tree type, const char *prefix)
/* CODE selects the tag kind (SYMBOL_MEMORY_TAG, NAME_MEMORY_TAG or
   STRUCT_FIELD_TAG); PREFIX seeds the generated temporary name.  */
2812 tmp_var = build_decl (code, create_tmp_var_name (prefix), type);
2814 /* Make the variable writable. */
2815 TREE_READONLY (tmp_var) = 0;
2817 /* It doesn't start out global. */
2818 MTAG_GLOBAL (tmp_var) = 0;
2819 TREE_STATIC (tmp_var) = 0;
2820 TREE_USED (tmp_var) = 1;
2825 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
2826 is considered to represent all the pointers whose pointed-to types are
2827 in the same alias set class. Otherwise, the tag represents a single
2828 SSA_NAME pointer variable. */
2831 create_memory_tag (tree type, bool is_type_tag)
2833 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
2834 type, (is_type_tag) ? "SMT" : "NMT")
2852 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2853 This is used if P_i has been found to point to a specific set of
2854 variables or to a non-aliased memory location like the address returned
2855 by malloc functions. */
2858 get_nmt_for (tree ptr)
2860 struct ptr_info_def *pi = get_ptr_info (ptr);
2861 tree tag = pi->name_mem_tag;
/* Lazily create the NMT; its type is the pointed-to type of PTR.  */
2863 if (tag == NULL_TREE)
2864 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2869 /* Return the symbol memory tag associated to pointer PTR. A memory
2870 tag is an artificial variable that represents the memory location
2871 pointed-to by PTR. It is used to model the effects of pointer
2872 de-references on addressable variables.
2874 AI points to the data gathered during alias analysis. This
2875 function populates the array AI->POINTERS. */
2878 get_smt_for (tree ptr, struct alias_info *ai)
2882 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2883 HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2885 /* We use a unique memory tag for all the ref-all pointers. */
2886 if (PTR_IS_REF_ALL (ptr))
/* The single ref-all SMT is typed void so it conflicts with everything.  */
2888 if (!ai->ref_all_symbol_mem_tag)
2889 ai->ref_all_symbol_mem_tag = create_memory_tag (void_type_node, true);
2890 return ai->ref_all_symbol_mem_tag;
2893 /* To avoid creating unnecessary memory tags, only create one memory tag
2894 per alias set class. Note that it may be tempting to group
2895 memory tags based on conflicting alias sets instead of
2896 equivalence. That would be wrong because alias sets are not
2897 necessarily transitive (as demonstrated by the libstdc++ test
2898 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
2899 such that conflicts (A, B) == true and conflicts (A, C) == true,
2900 it does not necessarily follow that conflicts (B, C) == true. */
2901 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
2903 struct alias_map_d *curr = ai->pointers[i];
2904 tree curr_tag = symbol_mem_tag (curr->var);
2905 if (tag_set == curr->set)
/* NOTE(review): the body reusing CURR_TAG (and loop exit) is elided in
   this extract.  */
2912 /* If VAR cannot alias with any of the existing memory tags, create a new
2913 tag for PTR and add it to the POINTERS array. */
2914 if (tag == NULL_TREE)
2916 struct alias_map_d *alias_map;
2918 /* If PTR did not have a symbol tag already, create a new SMT.*
2919 artificial variable representing the memory location
2920 pointed-to by PTR. */
2921 tag = symbol_mem_tag (ptr);
2922 if (tag == NULL_TREE)
2923 tag = create_memory_tag (tag_type, true);
2925 /* Add PTR to the POINTERS array. Note that we are not interested in
2926 PTR's alias set. Instead, we cache the alias set for the memory that
2928 alias_map = XCNEW (struct alias_map_d);
2929 alias_map->var = ptr;
2930 alias_map->set = tag_set;
2931 ai->pointers[ai->num_pointers++] = alias_map;
2934 /* If the pointed-to type is volatile, so is the tag. */
2935 TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
2937 /* Make sure that the symbol tag has the same alias set as the
2939 gcc_assert (tag_set == get_alias_set (tag));
2945 /* Create GLOBAL_VAR, an artificial global variable to act as a
2946 representative of all the variables that may be clobbered by function
2950 create_global_var (void)
2952 tree global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2954 DECL_ARTIFICIAL (global_var) = 1;
2955 TREE_READONLY (global_var) = 0;
/* DECL_EXTERNAL + TREE_STATIC make it look like a global defined in
   another translation unit, so it is never optimized away locally.  */
2956 DECL_EXTERNAL (global_var) = 1;
2957 TREE_STATIC (global_var) = 1;
2958 TREE_USED (global_var) = 1;
2959 DECL_CONTEXT (global_var) = NULL_TREE;
2960 TREE_THIS_VOLATILE (global_var) = 0;
2961 TREE_ADDRESSABLE (global_var) = 0;
2963 create_var_ann (global_var);
2964 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
2965 add_referenced_var (global_var);
2966 mark_sym_for_renaming (global_var);
2967 cfun->gimple_df->global_var = global_var;
2971 /* Dump alias statistics on FILE. */
2974 dump_alias_stats (FILE *file)
2976 const char *funcname
2977 = lang_hooks.decl_printable_name (current_function_decl, 2);
/* Report the per-tier query/resolution counters accumulated by
   may_alias_p for the current function.  */
2978 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2979 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2980 fprintf (file, "Total alias mayalias results:\t%u\n",
2981 alias_stats.alias_mayalias);
2982 fprintf (file, "Total alias noalias results:\t%u\n",
2983 alias_stats.alias_noalias);
2984 fprintf (file, "Total simple queries:\t%u\n",
2985 alias_stats.simple_queries);
2986 fprintf (file, "Total simple resolved:\t%u\n",
2987 alias_stats.simple_resolved);
2988 fprintf (file, "Total TBAA queries:\t%u\n",
2989 alias_stats.tbaa_queries);
2990 fprintf (file, "Total TBAA resolved:\t%u\n",
2991 alias_stats.tbaa_resolved);
2992 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2993 alias_stats.structnoaddress_queries);
2994 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2995 alias_stats.structnoaddress_resolved);
2999 /* Dump alias information on FILE. */
3002 dump_alias_info (FILE *file)
3005 const char *funcname
3006 = lang_hooks.decl_printable_name (current_function_decl, 2);
3007 referenced_var_iterator rvi;
3010 fprintf (file, "\nAlias information for %s\n\n", funcname);
3012 dump_memory_partitions (file);
/* Flow-insensitive section: aliased symbols, dereferenced pointers and
   their symbol memory tags.  */
3014 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
3016 fprintf (file, "Aliased symbols\n\n");
3018 FOR_EACH_REFERENCED_VAR (var, rvi)
3020 if (may_be_aliased (var))
3021 dump_variable (file, var);
3024 fprintf (file, "\nDereferenced pointers\n\n");
3026 FOR_EACH_REFERENCED_VAR (var, rvi)
3027 if (symbol_mem_tag (var))
3028 dump_variable (file, var);
3030 fprintf (file, "\nSymbol memory tags\n\n");
3032 FOR_EACH_REFERENCED_VAR (var, rvi)
3034 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
3035 dump_variable (file, var);
/* Flow-sensitive section: per-SSA_NAME points-to info and name tags.
   SSA name 0 is unused, hence the loop starting at 1.  */
3038 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
3040 fprintf (file, "SSA_NAME pointers\n\n");
3041 for (i = 1; i < num_ssa_names; i++)
3043 tree ptr = ssa_name (i);
3044 struct ptr_info_def *pi;
3046 if (ptr == NULL_TREE)
3049 pi = SSA_NAME_PTR_INFO (ptr);
3050 if (!SSA_NAME_IN_FREE_LIST (ptr)
3052 && pi->name_mem_tag)
3053 dump_points_to_info_for (file, ptr);
3056 fprintf (file, "\nName memory tags\n\n");
3058 FOR_EACH_REFERENCED_VAR (var, rvi)
3060 if (TREE_CODE (var) == NAME_MEMORY_TAG)
3061 dump_variable (file, var);
3064 fprintf (file, "\n");
3068 /* Dump alias information on stderr. */
/* Debugger convenience wrapper around dump_alias_info.  */
3071 debug_alias_info (void)
3073 dump_alias_info (stderr);
3077 /* Return the alias information associated with pointer T. It creates a
3078 new instance if none existed. */
3080 struct ptr_info_def *
3081 get_ptr_info (tree t)
3083 struct ptr_info_def *pi;
/* Only SSA_NAMEs of pointer type carry ptr_info.  */
3085 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
3087 pi = SSA_NAME_PTR_INFO (t);
/* Lazily allocate a zeroed, GC-managed record and cache it on T.  */
3090 pi = GGC_CNEW (struct ptr_info_def);
3091 SSA_NAME_PTR_INFO (t) = pi;
3098 /* Dump points-to information for SSA_NAME PTR into FILE. */
3101 dump_points_to_info_for (FILE *file, tree ptr)
3103 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3105 print_generic_expr (file, ptr, dump_flags);
/* Each attribute of the ptr_info record is emitted as a ", ..." suffix
   on the same line; the record may be absent for some pointers.  */
3109 if (pi->name_mem_tag)
3111 fprintf (file, ", name memory tag: ");
3112 print_generic_expr (file, pi->name_mem_tag, dump_flags);
3115 if (pi->is_dereferenced)
3116 fprintf (file, ", is dereferenced (R=%ld, W=%ld)",
3117 get_mem_sym_stats_for (ptr)->num_direct_reads,
3118 get_mem_sym_stats_for (ptr)->num_direct_writes);
3120 if (pi->value_escapes_p)
3121 fprintf (file, ", its value escapes");
3123 if (pi->pt_anything)
3124 fprintf (file, ", points-to anything");
3127 fprintf (file, ", points-to NULL");
3131 fprintf (file, ", points-to vars: ");
3132 dump_decl_set (file, pi->pt_vars);
3136 fprintf (file, "\n");
3140 /* Dump points-to information for VAR into stderr. */
/* Debugger convenience wrapper around dump_points_to_info_for.  */
3143 debug_points_to_info_for (tree var)
3145 dump_points_to_info_for (stderr, var);
3149 /* Dump points-to information into FILE. NOTE: This function is slow, as
3150 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
3153 dump_points_to_info (FILE *file)
3156 block_stmt_iterator si;
3159 lang_hooks.decl_printable_name (current_function_decl, 2);
3160 referenced_var_iterator rvi;
3163 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
3165 /* First dump points-to information for the default definitions of
3166 pointer variables. This is necessary because default definitions are
3167 not part of the code. */
3168 FOR_EACH_REFERENCED_VAR (var, rvi)
3170 if (POINTER_TYPE_P (TREE_TYPE (var)))
3172 tree def = gimple_default_def (cfun, var);
3174 dump_points_to_info_for (file, def);
3178 /* Dump points-to information for every pointer defined in the program. */
/* Walk every basic block: PHI results first, then real statement defs.  */
3183 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3185 tree ptr = PHI_RESULT (phi);
3186 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
3187 dump_points_to_info_for (file, ptr);
3190 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
3192 tree stmt = bsi_stmt (si);
3194 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
3195 if (TREE_CODE (def) == SSA_NAME
3196 && POINTER_TYPE_P (TREE_TYPE (def)))
3197 dump_points_to_info_for (file, def);
3201 fprintf (file, "\n");
3205 /* Dump points-to info pointed to by PTO into STDERR. */
/* Debugger convenience wrapper around dump_points_to_info.  */
3208 debug_points_to_info (void)
3210 dump_points_to_info (stderr);
3213 /* Dump to FILE the list of variables that may be aliasing VAR. */
3216 dump_may_aliases_for (FILE *file, tree var)
3220 aliases = MTAG_ALIASES (var);
/* Print the may-alias set as "{ a b c }", mapping each DECL_UID bit in
   the bitmap back to its referenced variable.  */
3227 fprintf (file, "{ ");
3228 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
3230 al = referenced_var (i);
3231 print_generic_expr (file, al, dump_flags);
3232 fprintf (file, " ");
3234 fprintf (file, "}");
3239 /* Dump to stderr the list of variables that may be aliasing VAR. */
/* Debugger convenience wrapper around dump_may_aliases_for.  */
3242 debug_may_aliases_for (tree var)
3244 dump_may_aliases_for (stderr, var);
3248 /* Return true if VAR may be aliased. */
3251 may_be_aliased (tree var)
/* A series of increasingly specific tests; elided lines in this extract
   carry the actual return statements for several branches.  */
3254 if (TREE_ADDRESSABLE (var))
3257 /* Globally visible variables can have their addresses taken by other
3258 translation units. */
3260 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
3262 else if (!MTAG_P (var)
3263 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
3266 /* Automatic variables can't have their addresses escape any other
3267 way. This must be after the check for global variables, as
3268 extern declarations do not have TREE_STATIC set. */
3269 if (!TREE_STATIC (var))
3272 /* If we're in unit-at-a-time mode, then we must have seen all
3273 occurrences of address-of operators, and so we can trust
3274 TREE_ADDRESSABLE. Otherwise we can only be sure the variable
3275 isn't addressable if it's local to the current function. */
3276 if (flag_unit_at_a_time)
3279 if (decl_function_context (var) == current_function_decl)
3286 /* Given two symbols return TRUE if one is in the alias set of the
3290 is_aliased_with (tree tag, tree sym)
/* Checks membership in either direction: SYM in TAG's may-alias set, or
   (when SYM is itself a memory tag) TAG in SYM's set.  */
3296 aliases = MTAG_ALIASES (tag);
3298 if (aliases == NULL)
3301 return bitmap_bit_p (aliases, DECL_UID (sym));
3305 gcc_assert (MTAG_P (sym));
3306 aliases = MTAG_ALIASES (sym);
3308 if (aliases == NULL)
3311 return bitmap_bit_p (aliases, DECL_UID (tag));
3317 /* The following is based on code in add_stmt_operand to ensure that the
3318 same defs/uses/vdefs/vuses will be found after replacing a reference
3319 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
3320 is the address of var. Return a memtag for the ptr, after adding the
3321 proper may_aliases to it (which are the aliases of var, if it has any,
3325 add_may_alias_for_new_tag (tree tag, tree var)
3327 bitmap aliases = NULL;
3330 aliases = may_aliases (var);
3332 /* Case 1: |aliases| == 1 */
3333 if (aliases && bitmap_count_bits (aliases) == 1)
3335 tree ali = referenced_var (bitmap_first_set_bit (aliases));
3336 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
/* NOTE(review): the return value for the single-SMT case is elided in
   this extract (the caller's comment suggests ALI itself is returned).  */
3340 /* Case 2: |aliases| == 0 */
3341 if (aliases == NULL)
3342 add_may_alias (tag, var);
3345 /* Case 3: |aliases| > 1 */
/* VAR has several aliases: TAG inherits the whole set wholesale.  */
3346 union_alias_set_into (tag, aliases);
3351 /* Create a new symbol tag for PTR. Construct the may-alias list of this type
3352 tag so that it has the aliasing of VAR, or of the relevant subvars of VAR
3353 according to the location accessed by EXPR.
3355 Note, the set of aliases represented by the new symbol tag are not marked
3359 new_type_alias (tree ptr, tree var, tree expr)
3361 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
3364 tree ali = NULL_TREE;
3365 HOST_WIDE_INT offset, size, maxsize;
3367 VEC (tree, heap) *overlaps = NULL;
/* PTR must not already carry a symbol tag, and VAR must be a real
   variable, not an artificial memory tag.  */
3371 gcc_assert (symbol_mem_tag (ptr) == NULL_TREE);
3372 gcc_assert (!MTAG_P (var));
3374 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
3377 tag = create_memory_tag (tag_type, true);
3378 set_symbol_mem_tag (ptr, tag);
3380 /* Add VAR to the may-alias set of PTR's new symbol tag. If VAR has
3381 subvars, add the subvars to the tag instead of the actual var. */
3382 if (var_can_have_subvars (ref)
3383 && (svars = get_subvars_for_var (ref)))
3385 for (sv = svars; sv; sv = sv->next)
3389 if (overlap_subvar (offset, maxsize, sv->var, &exact))
3390 VEC_safe_push (tree, heap, overlaps, sv->var);
3392 gcc_assert (overlaps != NULL);
3394 else if (var_can_have_subvars (var)
3395 && (svars = get_subvars_for_var (var)))
3397 /* If the REF is not a direct access to VAR (e.g., it is a dereference
3398 of a pointer), we should scan the virtual operands of REF the same
3399 way as tree-ssa-operands do. At the moment, this is somewhat
3400 difficult, so we just give up and add all the subvars of VAR.
3401 On mem-ssa branch, the scanning for virtual operands have been
3402 split from the rest of tree-ssa-operands, so it should be much
3403 easier to fix this problem correctly once mem-ssa is merged. */
3404 for (sv = svars; sv; sv = sv->next)
3405 VEC_safe_push (tree, heap, overlaps, sv->var);
3407 gcc_assert (overlaps != NULL);
/* No subvars: alias the tag with VAR as a whole.  */
3410 ali = add_may_alias_for_new_tag (tag, var);
3412 len = VEC_length (tree, overlaps);
3415 if (dump_file && (dump_flags & TDF_DETAILS))
3416 fprintf (dump_file, "\nnumber of overlapping subvars = %u\n", len);
3419 ali = add_may_alias_for_new_tag (tag, VEC_index (tree, overlaps, 0));
3425 for (k = 0; VEC_iterate (tree, overlaps, k, sv_var); k++)
3427 ali = add_may_alias_for_new_tag (tag, sv_var);
3431 /* Can happen only if 'Case 1' of add_may_alias_for_new_tag
3432 took place. Since more than one svar was found, we add
3433 'ali' as one of the may_aliases of the new tag. */
3434 add_may_alias (tag, ali);
3439 VEC_free (tree, heap, overlaps);
/* Record the final tag on PTR and inherit VAR's readonly/global bits.  */
3442 set_symbol_mem_tag (ptr, ali);
3443 TREE_READONLY (tag) = TREE_READONLY (var);
3444 MTAG_GLOBAL (tag) = is_global_var (var);
3447 /* This represents the used range of a variable. */
3449 typedef struct used_part
3451 HOST_WIDE_INT minused;
3452 HOST_WIDE_INT maxused;
3453 /* True if we have an explicit use/def of some portion of this variable,
3454 even if it is all of it. i.e. a.b = 5 or temp = a.b. */
3456 /* True if we have an implicit use/def of some portion of this
3457 variable. Implicit uses occur when we can't tell what part we
3458 are referencing, and have to make conservative assumptions. */
3460 /* True if the structure is only written to or taken its address. */
3464 /* An array of used_part structures, indexed by variable uid. */
/* (Despite the comment above, this is a hash table keyed by DECL_UID;
   see up_lookup/up_insert.)  */
3466 static htab_t used_portions;
/* Hash-table entry pairing a variable's DECL_UID with its used_part.  */
3468 struct used_part_map
3477 used_part_map_eq (const void *va, const void *vb)
3479 const struct used_part_map *a = (const struct used_part_map *) va;
3480 const struct used_part_map *b = (const struct used_part_map *) vb;
3481 return (a->uid == b->uid);
3484 /* Hash a from uid in a used_part_map. */
/* The uid itself serves as the hash value.  */
3487 used_part_map_hash (const void *item)
3489 return ((const struct used_part_map *)item)->uid;
3492 /* Free a used part map element. */
/* htab destructor callback: frees the pointed-to used_part (`to'); the
   map entry itself is released by the table machinery.  */
3495 free_used_part_map (void *item)
3497 free (((struct used_part_map *)item)->to);
3501 /* Lookup a used_part structure for a UID. */
3504 up_lookup (unsigned int uid)
3506 struct used_part_map *h, in;
/* UID doubles as the hash (see used_part_map_hash), so pass it directly.  */
3508 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
3514 /* Insert the pair UID, TO into the used part hashtable. */
3517 up_insert (unsigned int uid, used_part_t to)
3519 struct used_part_map *h;
3522 h = XNEW (struct used_part_map);
/* INSERT mode: the slot is created if absent, then overwritten with the
   new entry.  */
3525 loc = htab_find_slot_with_hash (used_portions, h,
3529 *(struct used_part_map **) loc = h;
3533 /* Given a variable uid, UID, get or create the entry in the used portions
3534 table for the variable. */
3537 get_or_create_used_part_for (size_t uid)
3540 if ((up = up_lookup (uid)) == NULL)
3542 up = XCNEW (struct used_part);
/* minused starts at INT_MAX so the first recorded offset always lowers
   it; write_only starts true and is cleared on the first read.  */
3543 up->minused = INT_MAX;
3545 up->explicit_uses = false;
3546 up->implicit_uses = false;
3547 up->write_only = true;
3554 /* Create and return a structure sub-variable for field type FIELD at
3555 offset OFFSET, with size SIZE, of variable VAR. */
3558 create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
3559 unsigned HOST_WIDE_INT size)
3561 tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
3563 /* We need to copy the various flags from VAR to SUBVAR, so that
3564 they are is_global_var iff the original variable was. */
3565 DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
3566 MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
3567 TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
3568 TREE_STATIC (subvar) = TREE_STATIC (var);
3569 TREE_READONLY (subvar) = TREE_READONLY (var);
3570 TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);
3572 /* Add the new variable to REFERENCED_VARS. */
3573 set_symbol_mem_tag (subvar, NULL);
3574 add_referenced_var (subvar);
/* Link the SFT back to its parent aggregate and record the byte range
   of VAR it stands for.  */
3575 SFT_PARENT_VAR (subvar) = var;
3576 SFT_OFFSET (subvar) = offset;
3577 SFT_SIZE (subvar) = size;
3582 /* Given an aggregate VAR, create the subvariables that represent its
/* ... fields (comment continuation lost to extraction).  Walks the
   flattened field list of VAR and creates one SFT per field that
   overlaps the recorded used portion, skipping duplicates with the
   same offset/size/type as the previous field.  NOTE(review): many
   interior lines are missing throughout (signature storage class,
   braces, early-outs for up == NULL, the notokay checks, dump_file
   guards) -- numbering gaps mark each drop.  */
3586 create_overlap_variables_for (tree var)
3588 VEC(fieldoff_s,heap) *fieldstack = NULL;
3590 size_t uid = DECL_UID (var);
3592 up = up_lookup (uid);
3597 push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
3598 if (VEC_length (fieldoff_s, fieldstack) != 0)
3602 bool notokay = false;
3605 HOST_WIDE_INT lastfooffset = -1;
3606 HOST_WIDE_INT lastfosize = -1;
3607 tree lastfotype = NULL_TREE;
3609 /* Not all fields have DECL_SIZE set, and those that don't, we don't
3610 know their size, and thus, can't handle.
3611 The same is true of fields with DECL_SIZE that is not an integer
3612 constant (such as variable sized fields).
3613 Fields with offsets which are not constant will have an offset < 0
3614 We *could* handle fields that are constant sized arrays, but
3615 currently don't. Doing so would require some extra changes to
3616 tree-ssa-operands.c. */
3618 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3621 || TREE_CODE (fo->size) != INTEGER_CST
3630 /* The current heuristic we use is as follows:
3631 If the variable has no used portions in this function, no
3632 structure vars are created for it.
3634 If the variable has less than SALIAS_MAX_IMPLICIT_FIELDS,
3635 we always create structure vars for them.
3636 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
3637 some explicit uses, we create structure vars for them.
3638 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
3639 no explicit uses, we do not create structure vars for them.
3642 if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
3643 && !up->explicit_uses)
3645 if (dump_file && (dump_flags & TDF_DETAILS))
3647 fprintf (dump_file, "Variable ");
3648 print_generic_expr (dump_file, var, 0);
3649 fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
3654 /* Bail out, if we can't create overlap variables. */
3657 VEC_free (fieldoff_s, heap, fieldstack);
3661 /* Otherwise, create the variables. */
3662 subvars = lookup_subvars_for_var (var);
/* Sort so iterating from the back below visits fields in a stable,
   offset-ordered fashion.  */
3664 sort_fieldstack (fieldstack);
3666 for (i = VEC_length (fieldoff_s, fieldstack);
3667 VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
3670 HOST_WIDE_INT fosize;
3673 fosize = TREE_INT_CST_LOW (fo->size);
3674 currfotype = fo->type;
3676 /* If this field isn't in the used portion,
3677 or it has the exact same offset and size as the last
/* ... field, skip it (continuation and the `continue;` body are
   missing -- the condition below guards a skip, with SFT creation
   following for fields that survive it).  */
3680 if (((fo->offset <= up->minused
3681 && fo->offset + fosize <= up->minused)
3682 || fo->offset >= up->maxused)
3683 || (fo->offset == lastfooffset
3684 && fosize == lastfosize
3685 && currfotype == lastfotype))
/* Prepend the new subvar onto VAR's subvar list.  */
3687 sv = GGC_NEW (struct subvar);
3688 sv->next = *subvars;
3689 sv->var = create_sft (var, fo->type, fo->offset, fosize);
3693 fprintf (dump_file, "structure field tag %s created for var %s",
3694 get_name (sv->var), get_name (var));
3695 fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
3696 SFT_OFFSET (sv->var));
3697 fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
3698 SFT_SIZE (sv->var));
3699 fprintf (dump_file, "\n");
/* Remember this field so an identical successor is coalesced.  */
3702 lastfotype = currfotype;
3703 lastfooffset = fo->offset;
3704 lastfosize = fosize;
3708 /* Once we have created subvars, the original is no longer call
3709 clobbered on its own. Its call clobbered status depends
3710 completely on the call clobbered status of the subvars.
3712 add_referenced_var in the above loop will take care of
3713 marking subvars of global variables as call clobbered for us
3714 to start, since they are global as well. */
3715 clear_call_clobbered (var);
3718 VEC_free (fieldoff_s, heap, fieldstack);
3722 /* Find the conservative answer to the question of what portions of what
3723 structures are used by this statement. We assume that if we have a
3724 component ref with a known size + offset, that we only need that part
3725 of the structure. For unknown cases, or cases where we do something
3726 to the whole structure, we assume we need to create fields for the
3727 entire structure. */
/* walk_tree callback.  LHS_P is non-NULL when the current expression
   is on the left-hand side of an assignment (used to keep write_only
   accurate).  NOTE(review): the signature line, several case labels
   (component refs, ADDR_EXPR, CALL_EXPR, VAR_DECL), intermediate
   conditions, and the function's return statements are missing from
   this extraction -- every numbering gap below marks dropped lines.  */
3730 find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
3732 switch (TREE_CODE (*tp))
3734 case GIMPLE_MODIFY_STMT:
3735 /* Recurse manually here to track whether the use is in the
3736 LHS of an assignment. */
3737 find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 0), walk_subtrees, tp);
3738 return find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 1),
3739 walk_subtrees, NULL);
/* (Dropped case label: presumably component/array references.)
   Record the exact bit range touched when base, offset, and max size
   are all known.  */
3745 HOST_WIDE_INT bitsize;
3746 HOST_WIDE_INT bitmaxsize;
3747 HOST_WIDE_INT bitpos;
3749 ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
3751 && var_can_have_subvars (ref)
3752 && bitmaxsize != -1)
3754 size_t uid = DECL_UID (ref);
3757 up = get_or_create_used_part_for (uid);
/* Widen the [minused, maxused) window to cover this access.  */
3759 if (bitpos <= up->minused)
3760 up->minused = bitpos;
3761 if ((bitpos + bitmaxsize >= up->maxused))
3762 up->maxused = bitpos + bitmaxsize;
/* bitsize == bitmaxsize means the exact extent is known, i.e. an
   explicit use; otherwise it is an implicit (conservative) use.  */
3764 if (bitsize == bitmaxsize)
3765 up->explicit_uses = true;
3767 up->implicit_uses = true;
3769 up->write_only = false;
3770 up_insert (uid, up);
3777 /* This is here to make sure we mark the entire base variable as used
3778 when you take its address. Because our used portion analysis is
3779 simple, we aren't looking at casts or pointer arithmetic to see what
3780 happens when you take the address. */
3783 tree var = get_base_address (TREE_OPERAND (*tp, 0));
3788 && var_can_have_subvars (var)
3789 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3792 size_t uid = DECL_UID (var);
3794 up = get_or_create_used_part_for (uid);
/* Address taken: conservatively mark the whole variable used.  */
3797 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3798 up->implicit_uses = true;
3800 up->write_only = false;
3802 up_insert (uid, up);
/* (Dropped case label: presumably CALL_EXPR.)  Walk each argument,
   except bare ADDR_EXPR args, which the ADDR_EXPR case handles.  */
3811 int nargs = call_expr_nargs (*tp);
3812 for (i = 0; i < nargs; i++)
3814 tree *arg = &CALL_EXPR_ARG (*tp, i);
3815 if (TREE_CODE (*arg) != ADDR_EXPR)
3816 find_used_portions (arg, walk_subtrees, NULL);
/* (Dropped case label: presumably a bare VAR_DECL use, which marks
   the entire variable as implicitly used.)  */
3827 && var_can_have_subvars (var)
3828 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3831 size_t uid = DECL_UID (var);
3833 up = get_or_create_used_part_for (uid);
3836 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3837 up->implicit_uses = true;
3839 up_insert (uid, up);
3853 /* Create structure field variables for structures used in this function. */
/* Pass execute function.  Phase 1: walk every statement collecting
   used portions into USED_PORTIONS; phase 2: create overlap subvars
   for each suitable referenced variable; phase 3 (SSA only): mark
   statements mentioning split variables for operand update.
   NOTE(review): the signature line, FOR_EACH_BB loops, several
   conditions, braces, and the final return are missing from this
   extraction (see numbering gaps).  */
3856 create_structure_vars (void)
3859 safe_referenced_var_iterator rvi;
3860 VEC (tree, heap) *varvec = NULL;
3863 used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
3864 free_used_part_map);
/* Phase 1: collect used portions from every statement.  */
3868 block_stmt_iterator bsi;
3869 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3871 walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
/* Phase 2: split each aggregate that qualifies.  */
3876 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
3878 /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
3881 && var_can_have_subvars (var)
3883 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3884 create_overlap_variables_for (var)
3886 htab_delete (used_portions);
3887 VEC_free (tree, heap, varvec);
3889 /* Update SSA operands of statements mentioning variables we split. */
3890 if (gimple_in_ssa_p (cfun))
3893 block_stmt_iterator bsi;
3894 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3896 tree stmt = bsi_stmt (bsi);
3897 bool update = false;
/* A statement needs updating if any symbol it stores, loads, or
   takes the address of now has subvars.  */
3901 if (STORED_SYMS (stmt))
3902 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3904 tree sym = referenced_var_lookup (i);
3905 if (get_subvars_for_var (sym))
3912 if (LOADED_SYMS (stmt) && !update)
3913 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
3915 tree sym = referenced_var_lookup (i);
3916 if (get_subvars_for_var (sym))
3923 if (stmt_ann (stmt)->addresses_taken && !update)
3924 EXECUTE_IF_SET_IN_BITMAP (stmt_ann (stmt)->addresses_taken,
3927 tree sym = referenced_var_lookup (i);
3928 if (get_subvars_for_var (sym))
/* Gate for the salias pass: run only when -ftree-salias is enabled.
   NOTE(review): the `static bool` line and braces were dropped by
   the extraction.  */
3944 gate_structure_vars (void)
3946 return flag_tree_salias != 0;
/* Pass descriptor for the structure-variable creation ("salias")
   pass.  NOTE(review): several descriptor fields (sub/next pointers,
   tv_id, properties, letter) are missing from this extraction.  */
3949 struct tree_opt_pass pass_create_structure_vars =
3951 "salias", /* name */
3952 gate_structure_vars, /* gate */
3953 create_structure_vars, /* execute */
3956 0, /* static_pass_number */
3958 PROP_cfg, /* properties_required */
3959 0, /* properties_provided */
3960 0, /* properties_destroyed */
3961 0, /* todo_flags_start */
3962 TODO_dump_func, /* todo_flags_finish */
3966 /* Reset the call_clobbered flags on our referenced vars. In
3967 theory, this only needs to be done for globals. */
/* Pass execute function: clears call_clobbered on every referenced
   variable.  NOTE(review): the `static unsigned int` signature line,
   braces, and `return 0;` were dropped by the extraction.  */
3970 reset_cc_flags (void)
3973 referenced_var_iterator rvi;
3975 FOR_EACH_REFERENCED_VAR (var, rvi)
3976 var_ann (var)->call_clobbered = false;
/* Pass descriptor for resetting call-clobbered flags.  NOTE(review):
   the name and gate fields, sub/next pointers, tv_id, and the
   trailing letter field are missing from this extraction.  */
3980 struct tree_opt_pass pass_reset_cc_flags =
3984 reset_cc_flags, /* execute */
3987 0, /* static_pass_number */
3989 PROP_referenced_vars |PROP_cfg, /* properties_required */
3990 0, /* properties_provided */
3991 0, /* properties_destroyed */
3992 0, /* todo_flags_start */
3993 0, /* todo_flags_finish */