1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
34 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
46 #include "ipa-type-escape.h"
51 /* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
   alias analysis.  */
53 static bitmap_obstack alias_obstack;
55 /* Structure to map a variable to its alias set and keep track of the
56 virtual operands that will be needed to represent it. */
59 /* Variable and its alias set. */
63 /* Total number of virtual operands that will be needed to represent
64 all the aliases of VAR. */
65 long total_alias_vops;
67 /* Nonzero if the aliases for this memory tag have been grouped
68 already. Used in group_aliases. */
69 unsigned int grouped_p : 1;
71 /* Set of variables aliased with VAR. This is the exact same
72 information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
73 bitmap form to speed up alias grouping. */
78 /* Counters used to display statistics on alias analysis. */
81 unsigned int alias_queries;
82 unsigned int alias_mayalias;
83 unsigned int alias_noalias;
84 unsigned int simple_queries;
85 unsigned int simple_resolved;
86 unsigned int tbaa_queries;
87 unsigned int tbaa_resolved;
88 unsigned int structnoaddress_queries;
89 unsigned int structnoaddress_resolved;
93 /* Local variables. */
94 static struct alias_stats_d alias_stats;
96 /* Local functions. */
97 static void compute_flow_insensitive_aliasing (struct alias_info *);
98 static void finalize_ref_all_pointers (struct alias_info *);
99 static void dump_alias_stats (FILE *);
100 static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
101 static tree create_memory_tag (tree type, bool is_type_tag);
102 static tree get_tmt_for (tree, struct alias_info *);
103 static tree get_nmt_for (tree);
104 static void add_may_alias (tree, tree);
105 static void replace_may_alias (tree, size_t, tree);
106 static struct alias_info *init_alias_info (void);
107 static void delete_alias_info (struct alias_info *);
108 static void compute_flow_sensitive_aliasing (struct alias_info *);
109 static void setup_pointers_and_addressables (struct alias_info *);
110 static void create_global_var (void);
111 static void maybe_create_global_var (struct alias_info *ai);
112 static void group_aliases (struct alias_info *);
113 static void set_pt_anything (tree ptr);
115 /* Global declarations. */
117 /* qsort comparison function to sort type/name tags by DECL_UID. */
/* Comparator returns <0, 0, >0 per qsort's contract; DECL_UIDs are
   assumed small enough that the subtraction cannot overflow.  */
120 sort_tags_by_id (const void *pa, const void *pb)
122 tree a = *(tree *)pa;
123 tree b = *(tree *)pb;
125 return DECL_UID (a) - DECL_UID (b);
128 /* Initialize WORKLIST to contain those memory tags that are marked call
129 clobbered. Initialize WORKLIST2 to contain the reasons these
130 memory tags escaped. */
/* Seed the transitive-clobbering worklists: WORKLIST receives every
   memory tag already marked call-clobbered, and WORKLIST2 receives, in
   parallel, the escape mask recording why each tag escaped.  */
133 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
134 VEC (int, heap) **worklist2)
136 referenced_var_iterator rvi;
139 FOR_EACH_REFERENCED_VAR (curr, rvi)
141 if (MTAG_P (curr) && is_call_clobbered (curr))
143 VEC_safe_push (tree, heap, *worklist, curr);
144 VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
149 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
150 ALIAS is not already marked call clobbered, and is a memory tag.  */
/* Push ALIAS/REASON onto the parallel worklists, but only for memory
   tags not yet marked call-clobbered (others need no propagation).  */
154 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
155 VEC (int, heap) **worklist2,
158 if (MTAG_P (alias) && !is_call_clobbered (alias))
160 VEC_safe_push (tree, heap, *worklist, alias);
161 VEC_safe_push (int, heap, *worklist2, reason);
165 /* Mark aliases of TAG as call clobbered, and place any tags on the
166 alias list that were not already call clobbered on WORKLIST. */
/* Walk TAG's may-alias list; each modifiable alias is marked
   call-clobbered with TAG's escape mask and, if it is itself an
   unvisited memory tag, queued for further propagation.  */
169 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
170 VEC (int, heap) **worklist2)
175 var_ann_t ta = var_ann (tag);
179 ma = may_aliases (tag);
/* Unmodifiable variables (e.g. read-only) can never be clobbered.  */
183 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
185 if (!unmodifiable_var_p (entry))
187 add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
188 mark_call_clobbered (entry, ta->escape_mask);
193 /* Tags containing global vars need to be marked as global.
194 Tags containing call clobbered vars need to be marked as call clobbered.  */
/* Propagate "global" and "call clobbered" properties from may-alias
   entries up to their containing memory tags, iterating to a fixed
   point because tags can appear in other tags' alias lists.  */
198 compute_tag_properties (void)
200 referenced_var_iterator rvi;
203 VEC (tree, heap) *taglist = NULL;
/* Collect every memory tag except structure field tags (those are
   handled as ordinary variables elsewhere).  */
205 FOR_EACH_REFERENCED_VAR (tag, rvi)
207 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
209 VEC_safe_push (tree, heap, taglist, tag);
212 /* We sort the taglist by DECL_UID, for two reasons.
213 1. To get a sequential ordering to make the bitmap accesses
215 2. Because of the way we compute aliases, it's more likely that
216 an earlier tag is included in a later tag, and this will reduce
217 the number of iterations.
219 If we had a real tag graph, we would just topo-order it and be
221 qsort (VEC_address (tree, taglist),
222 VEC_length (tree, taglist),
226 /* Go through each tag not marked as global, and if it aliases
227 global vars, mark it global.
229 If the tag contains call clobbered vars, mark it call
232 This loop iterates because tags may appear in the may-aliases
233 list of other tags when we group. */
240 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
245 bool tagcc = is_call_clobbered (tag);
246 bool tagglobal = MTAG_GLOBAL (tag);
/* Both properties already set: nothing left to learn for this tag.  */
248 if (tagcc && tagglobal)
251 ma = may_aliases (tag);
255 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
257 /* Call clobbered entries cause the tag to be marked
259 if (!tagcc && is_call_clobbered (entry))
261 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
266 /* Global vars cause the tag to be marked global. */
267 if (!tagglobal && is_global_var (entry))
269 MTAG_GLOBAL (tag) = true;
274 /* Early exit once both global and cc are set, since the
275 loop can't do any more than that. */
276 if (tagcc && tagglobal)
281 VEC_free (tree, heap, taglist);
284 /* Set up the initial variable clobbers and globalness.
285 When this function completes, only tags whose aliases need to be
286 clobbered will be set clobbered. Tags clobbered because they
287 contain call clobbered vars are handled in compute_tag_properties. */
/* Establish the initial clobber/global facts before transitive
   propagation: globals are intrinsically clobbered, escaping pointer
   parameters taint their SSA defaults, and each processed pointer's
   tags and points-to sets are clobbered according to its escape info.  */
290 set_initial_properties (struct alias_info *ai)
293 referenced_var_iterator rvi;
/* Globals without subvariables are clobbered directly; ones with
   subvars are presumably handled via their subvars -- TODO confirm.  */
297 FOR_EACH_REFERENCED_VAR (var, rvi)
299 if (is_global_var (var)
300 && (!var_can_have_subvars (var)
301 || get_subvars_for_var (var) == NULL))
303 if (!unmodifiable_var_p (var))
304 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
/* A pointer received as a PARM_DECL escapes by definition: the
   caller retains a copy of the pointed-to address.  */
306 else if (TREE_CODE (var) == PARM_DECL
307 && gimple_default_def (cfun, var)
308 && POINTER_TYPE_P (TREE_TYPE (var)))
310 tree def = gimple_default_def (cfun, var);
311 get_ptr_info (def)->value_escapes_p = 1;
312 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
316 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
318 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
319 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
321 if (pi->value_escapes_p)
323 /* If PTR escapes then its associated memory tags and
324 pointed-to variables are call-clobbered. */
325 if (pi->name_mem_tag)
326 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
328 if (v_ann->symbol_mem_tag)
329 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
/* Every modifiable variable in PTR's points-to set escapes too.  */
335 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
336 if (!unmodifiable_var_p (referenced_var (j)))
337 mark_call_clobbered (referenced_var (j), pi->escape_mask);
341 /* If the name tag is call clobbered, so is the symbol tag
342 associated with the base VAR_DECL. */
344 && v_ann->symbol_mem_tag
345 && is_call_clobbered (pi->name_mem_tag))
346 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
348 /* Name tags and symbol tags that we don't know where they point
349 to, might point to global memory, and thus, are clobbered.
351 FIXME: This is not quite right. They should only be
352 clobbered if value_escapes_p is true, regardless of whether
353 they point to global memory or not.
354 So removing this code and fixing all the bugs would be nice.
355 It is the cause of a bunch of clobbering. */
356 if ((pi->pt_global_mem || pi->pt_anything)
357 && pi->is_dereferenced && pi->name_mem_tag)
359 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
360 MTAG_GLOBAL (pi->name_mem_tag) = true;
363 if ((pi->pt_global_mem || pi->pt_anything)
364 && pi->is_dereferenced
365 && v_ann->symbol_mem_tag)
367 mark_call_clobbered (v_ann->symbol_mem_tag, ESCAPE_IS_GLOBAL);
368 MTAG_GLOBAL (v_ann->symbol_mem_tag) = true;
373 /* Compute which variables need to be marked call clobbered because
374 their tag is call clobbered, and which tags need to be marked
375 global because they contain global variables. */
/* Driver for call-clobbering analysis: seed initial properties, then
   drain the worklist, marking each popped tag (with its recorded
   escape reason) and queueing its not-yet-clobbered aliases, until a
   fixed point is reached; finally recompute tag global/cc flags.  */
378 compute_call_clobbered (struct alias_info *ai)
380 VEC (tree, heap) *worklist = NULL;
381 VEC(int,heap) *worklist2 = NULL;
383 set_initial_properties (ai);
384 init_transitive_clobber_worklist (&worklist, &worklist2);
/* WORKLIST and WORKLIST2 are kept in lock-step: the int popped from
   WORKLIST2 is the escape reason for the tree popped from WORKLIST.  */
385 while (VEC_length (tree, worklist) != 0)
387 tree curr = VEC_pop (tree, worklist);
388 int reason = VEC_pop (int, worklist2);
390 mark_call_clobbered (curr, reason);
391 mark_aliases_call_clobbered (curr, &worklist, &worklist2);
393 VEC_free (tree, heap, worklist);
394 VEC_free (int, heap, worklist2);
395 compute_tag_properties ();
398 /* Compute may-alias information for every variable referenced in function
401 Alias analysis proceeds in 3 main phases:
403 1- Points-to and escape analysis.
405 This phase walks the use-def chains in the SSA web looking for three
408 * Assignments of the form P_i = &VAR
409 * Assignments of the form P_i = malloc()
410 * Pointers and ADDR_EXPR that escape the current function.
412 The concept of 'escaping' is the same one used in the Java world. When
413 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
414 outside of the current function. So, assignment to global variables,
415 function arguments and returning a pointer are all escape sites, as are
416 conversions between pointers and integers.
418 This is where we are currently limited. Since not everything is renamed
419 into SSA, we lose track of escape properties when a pointer is stashed
420 inside a field in a structure, for instance. In those cases, we are
421 assuming that the pointer does escape.
423 We use escape analysis to determine whether a variable is
424 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
425 is call-clobbered. If a pointer P_i escapes, then all the variables
426 pointed-to by P_i (and its memory tag) also escape.
428 2- Compute flow-sensitive aliases
430 We have two classes of memory tags. Memory tags associated with the
431 pointed-to data type of the pointers in the program. These tags are
432 called "symbol memory tag" (SMT). The other class are those associated
433 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
434 when adding operands for an INDIRECT_REF *P_i, we will first check
435 whether P_i has a name tag, if it does we use it, because that will have
436 more precise aliasing information. Otherwise, we use the standard symbol
439 In this phase, we go through all the pointers we found in points-to
440 analysis and create alias sets for the name memory tags associated with
441 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
442 it points to and its tag.
445 3- Compute flow-insensitive aliases
447 This pass will compare the alias set of every symbol memory tag and
448 every addressable variable found in the program. Given a symbol
449 memory tag SMT and an addressable variable V. If the alias sets of
450 SMT and V conflict (as computed by may_alias_p), then V is marked
451 as an alias tag and added to the alias set of SMT.
453 For instance, consider the following function:
469 After aliasing analysis has finished, the symbol memory tag for pointer
470 'p' will have two aliases, namely variables 'a' and 'b'. Every time
471 pointer 'p' is dereferenced, we want to mark the operation as a
472 potential reference to 'a' and 'b'.
482 # p_1 = PHI <p_4(1), p_6(2)>;
484 # a_7 = V_MAY_DEF <a_3>;
485 # b_8 = V_MAY_DEF <b_5>;
488 # a_9 = V_MAY_DEF <a_7>
497 In certain cases, the list of may aliases for a pointer may grow too
498 large. This may cause an explosion in the number of virtual operands
499 inserted in the code. Resulting in increased memory consumption and
502 When the number of virtual operands needed to represent aliased
503 loads and stores grows too large (configurable with @option{--param
504 max-aliased-vops}), alias sets are grouped to avoid severe
505 compile-time slow downs and memory consumption. See group_aliases. */
/* Main entry point of the alias pass: runs points-to analysis, builds
   memory tags, computes flow-insensitive and flow-sensitive aliasing,
   call-clobbering, and optional alias grouping / .GLOBAL_VAR creation
   (see the large comment block above for the three-phase overview).  */
508 compute_may_aliases (void)
510 struct alias_info *ai;
512 memset (&alias_stats, 0, sizeof (alias_stats));
514 /* Initialize aliasing information. */
515 ai = init_alias_info ();
517 /* For each pointer P_i, determine the sets of variables that P_i may
518 point-to. For every addressable variable V, determine whether the
519 address of V escapes the current function, making V call-clobbered
520 (i.e., whether &V is stored in a global variable or if its passed as a
521 function call argument). */
522 compute_points_to_sets (ai);
524 /* Collect all pointers and addressable variables, compute alias sets,
525 create memory tags for pointers and promote variables whose address is
526 not needed anymore. */
527 setup_pointers_and_addressables (ai);
529 /* Compute type-based flow-insensitive aliasing for all the type
531 compute_flow_insensitive_aliasing (ai);
533 /* Compute flow-sensitive, points-to based aliasing for all the name
535 compute_flow_sensitive_aliasing (ai);
537 /* Compute call clobbering information. */
538 compute_call_clobbered (ai);
540 /* Determine if we need to enable alias grouping. */
541 if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
544 /* If the program has too many call-clobbered variables and/or function
545 calls, create .GLOBAL_VAR and use it to model call-clobbering
546 semantics at call sites. This reduces the number of virtual operands
547 considerably, improving compile times at the expense of lost
548 aliasing precision. */
549 maybe_create_global_var (ai);
551 /* If the program contains ref-all pointers, finalize may-alias information
552 for them. This pass needs to be run after call-clobbering information
553 has been computed. */
554 if (ai->ref_all_symbol_mem_tag)
555 finalize_ref_all_pointers (ai);
557 /* Debugging dumps. */
560 dump_referenced_vars (dump_file);
561 if (dump_flags & TDF_STATS)
562 dump_alias_stats (dump_file);
563 dump_points_to_info (dump_file);
564 dump_alias_info (dump_file);
567 /* Deallocate memory used by aliasing data structures. */
568 delete_alias_info (ai);
/* NOTE(review): the loop below walks every statement to refresh
   operand caches after aliasing changed -- appears to belong to a
   follow-up cleanup over all basic blocks; confirm against full file.  */
571 block_stmt_iterator bsi;
575 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
577 update_stmt_if_modified (bsi_stmt (bsi));
/* Pass descriptor for may-alias analysis: requires CFG and SSA form,
   provides PROP_alias, and re-verifies/updates SSA afterwards.  */
585 struct tree_opt_pass pass_may_alias =
589 compute_may_aliases, /* execute */
592 0, /* static_pass_number */
593 TV_TREE_MAY_ALIAS, /* tv_id */
594 PROP_cfg | PROP_ssa, /* properties_required */
595 PROP_alias, /* properties_provided */
596 0, /* properties_destroyed */
597 0, /* todo_flags_start */
598 TODO_dump_func | TODO_update_ssa
599 | TODO_ggc_collect | TODO_verify_ssa
600 | TODO_verify_stmts, /* todo_flags_finish */
605 /* Data structure used to count the number of dereferences to PTR
606 inside an expression. */
614 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
615 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
/* walk_tree callback: bump the counter in DATA each time the walked
   expression dereferences the pointer recorded in DATA (an
   INDIRECT_REF whose operand is exactly that pointer SSA name).  */
618 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
620 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
622 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
623 pointer 'ptr' is *not* dereferenced, it is simply used to compute
624 the address of 'fld' as 'ptr + offsetof(fld)'. */
625 if (TREE_CODE (*tp) == ADDR_EXPR)
631 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
638 /* Count the number of direct and indirect uses for pointer PTR in
639 statement STMT. The two counts are stored in *NUM_USES_P and
640 *NUM_DEREFS_P respectively. *IS_STORE_P is set to 'true' if at
641 least one of those dereferences is a store operation. */
/* PTR: pointer SSA name to count; STMT: statement to scan.
   OUT: *NUM_USES_P = total uses, *NUM_DEREFS_P = dereferences,
   *IS_STORE set when a dereference appears on the LHS.  */
644 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
645 unsigned *num_derefs_p, bool *is_store)
654 /* Find out the total number of uses of PTR in STMT. */
655 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
659 /* Now count the number of indirect references to PTR. This is
660 truly awful, but we don't have much choice. There are no parent
661 pointers inside INDIRECT_REFs, so an expression like
662 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
663 find all the indirect and direct uses of x_1 inside. The only
664 shortcut we can take is the fact that GIMPLE only allows
665 INDIRECT_REFs inside the expressions below. */
666 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
667 || (TREE_CODE (stmt) == RETURN_EXPR
668 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
669 || TREE_CODE (stmt) == ASM_EXPR
670 || TREE_CODE (stmt) == CALL_EXPR)
/* Split STMT into an LHS and RHS so each side can be walked
   separately (the LHS walk distinguishes store dereferences).  */
674 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
676 lhs = GIMPLE_STMT_OPERAND (stmt, 0);
677 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
679 else if (TREE_CODE (stmt) == RETURN_EXPR)
/* 'return expr' wraps a GIMPLE_MODIFY_STMT; look through it.  */
681 tree e = TREE_OPERAND (stmt, 0);
682 lhs = GIMPLE_STMT_OPERAND (e, 0);
683 rhs = GIMPLE_STMT_OPERAND (e, 1);
685 else if (TREE_CODE (stmt) == ASM_EXPR)
687 lhs = ASM_OUTPUTS (stmt);
688 rhs = ASM_INPUTS (stmt);
696 if (lhs && (TREE_CODE (lhs) == TREE_LIST
697 || EXPR_P (lhs) || GIMPLE_STMT_P (lhs)))
699 struct count_ptr_d count;
702 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
704 *num_derefs_p = count.count;
707 if (rhs && (TREE_CODE (rhs) == TREE_LIST
708 || EXPR_P (rhs) || GIMPLE_STMT_P (rhs)))
710 struct count_ptr_d count;
713 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
714 *num_derefs_p += count.count;
/* Every dereference is also a use, so derefs can never exceed uses.  */
718 gcc_assert (*num_uses_p >= *num_derefs_p);
721 /* Initialize the data structures used for alias analysis. */
/* Allocate and return a fresh alias_info; if aliasing was computed on
   a previous run, first wipe the stale flow-insensitive (per-symbol)
   and flow-sensitive (per-SSA-name) information so it can be
   rediscovered.  Caller frees with delete_alias_info.  */
723 static struct alias_info *
724 init_alias_info (void)
726 struct alias_info *ai;
727 referenced_var_iterator rvi;
730 bitmap_obstack_initialize (&alias_obstack);
731 ai = XCNEW (struct alias_info);
732 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
733 sbitmap_zero (ai->ssa_names_visited);
734 ai->processed_ptrs = VEC_alloc (tree, heap, 50);
735 ai->written_vars = BITMAP_ALLOC (&alias_obstack);
736 ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
737 ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);
739 /* If aliases have been computed before, clear existing information. */
740 if (gimple_aliases_computed_p (cfun))
744 /* Similarly, clear the set of addressable variables. In this
745 case, we can just clear the set because addressability is
746 only computed here. */
747 bitmap_clear (gimple_addressable_vars (cfun));
749 /* Clear flow-insensitive alias information from each symbol. */
750 FOR_EACH_REFERENCED_VAR (var, rvi)
752 var_ann_t ann = var_ann (var);
755 ann->may_aliases = NULL;
756 NUM_REFERENCES_CLEAR (ann);
758 /* Since we are about to re-discover call-clobbered
759 variables, clear the call-clobbered flag. Variables that
760 are intrinsically call-clobbered (globals, local statics,
761 etc) will not be marked by the aliasing code, so we can't
762 remove them from CALL_CLOBBERED_VARS.
764 NB: STRUCT_FIELDS are still call clobbered if they are for
765 a global variable, so we *don't* clear their call clobberedness
766 just because they are tags, though we will clear it if they
767 aren't for global variables. */
768 if (TREE_CODE (var) == NAME_MEMORY_TAG
769 || TREE_CODE (var) == SYMBOL_MEMORY_TAG
770 || !is_global_var (var))
771 clear_call_clobbered (var);
774 /* Clear flow-sensitive points-to information from each SSA name. */
775 for (i = 1; i < num_ssa_names; i++)
777 tree name = ssa_name (i);
/* Only pointer-typed SSA names carry points-to information.  */
779 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
782 if (SSA_NAME_PTR_INFO (name))
784 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
786 /* Clear all the flags but keep the name tag to
787 avoid creating new temporaries unnecessarily. If
788 this pointer is found to point to a subset or
789 superset of its former points-to set, then a new
790 tag will need to be created in create_name_tags. */
793 pi->value_escapes_p = 0;
794 pi->is_dereferenced = 0;
796 bitmap_clear (pi->pt_vars);
801 /* Next time, we will need to reset alias information. */
802 cfun->gimple_df->aliases_computed_p = true;
808 /* Deallocate memory used by alias analysis. */
/* Release everything allocated by init_alias_info and the analysis:
   per-pointer/per-variable maps, obstack bitmaps, the SSA-visited
   bitmap, reference counters, and the points-to solver's state.  */
811 delete_alias_info (struct alias_info *ai)
814 referenced_var_iterator rvi;
817 sbitmap_free (ai->ssa_names_visited);
818 VEC_free (tree, heap, ai->processed_ptrs);
820 for (i = 0; i < ai->num_addressable_vars; i++)
821 free (ai->addressable_vars[i]);
/* Reference counts are only meaningful during the pass; reset them.  */
823 FOR_EACH_REFERENCED_VAR(var, rvi)
825 var_ann_t ann = var_ann (var);
826 NUM_REFERENCES_CLEAR (ann);
829 free (ai->addressable_vars);
831 for (i = 0; i < ai->num_pointers; i++)
832 free (ai->pointers[i]);
/* Releasing the obstack frees every bitmap allocated on it.  */
835 BITMAP_FREE (ai->written_vars);
836 BITMAP_FREE (ai->dereferenced_ptrs_store);
837 BITMAP_FREE (ai->dereferenced_ptrs_load);
838 bitmap_obstack_release (&alias_obstack);
841 delete_points_to_sets ();
844 /* Used for hashing to identify pointer infos with identical
847 eq_ptr_info (const void *p1, const void *p2)
849 const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
850 const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
851 return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
/* Hash a ptr_info_def by its pt_vars bitmap, consistent with
   eq_ptr_info above.  */
855 ptr_info_hash (const void *p)
857 const struct ptr_info_def *n = (const struct ptr_info_def *) p;
858 return bitmap_hash (n->pt_vars);
861 /* Create name tags for all the pointers that have been dereferenced.
862 We only create a name tag for a pointer P if P is found to point to
863 a set of variables (so that we can alias them to *P) or if it is
864 the result of a call to malloc (which means that P cannot point to
865 anything else nor alias any other variable).
867 If two pointers P and Q point to the same set of variables, they
868 are assigned the same name tag. */
/* Assign name memory tags (NMTs) to dereferenced pointers with known
   points-to sets, sharing one tag among pointers with identical sets
   (via a hash table keyed on pt_vars) to keep copy propagation sound.  */
871 create_name_tags (void)
874 VEC (tree, heap) *with_ptvars = NULL;
878 /* Collect the list of pointers with a non-empty points to set. */
879 for (i = 1; i < num_ssa_names; i++)
881 tree ptr = ssa_name (i);
882 struct ptr_info_def *pi;
885 || !POINTER_TYPE_P (TREE_TYPE (ptr))
886 || !SSA_NAME_PTR_INFO (ptr)
889 pi = SSA_NAME_PTR_INFO (ptr);
891 if (pi->pt_anything || !pi->is_dereferenced)
893 /* No name tags for pointers that have not been
894 dereferenced or point to an arbitrary location. */
895 pi->name_mem_tag = NULL_TREE;
899 /* Set pt_anything on the pointers without pt_vars filled in so
900 that they are assigned a symbol tag. */
901 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
902 VEC_safe_push (tree, heap, with_ptvars, ptr);
904 set_pt_anything (ptr);
907 /* If we didn't find any pointers with pt_vars set, we're done. */
911 ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);
912 /* Now go through the pointers with pt_vars, and find a name tag
913 with the same pt_vars as this pointer, or create one if one
915 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
917 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
918 tree old_name_tag = pi->name_mem_tag;
919 struct ptr_info_def **slot;
921 /* If PTR points to a set of variables, check if we don't
922 have another pointer Q with the same points-to set before
923 creating a tag. If so, use Q's tag instead of creating a
926 This is important for not creating unnecessary symbols
927 and also for copy propagation. If we ever need to
928 propagate PTR into Q or vice-versa, we would run into
929 problems if they both had different name tags because
930 they would have different SSA version numbers (which
931 would force us to take the name tags in and out of SSA). */
933 slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
935 pi->name_mem_tag = (*slot)->name_mem_tag;
939 /* If we didn't find a pointer with the same points-to set
940 as PTR, create a new name tag if needed. */
941 if (pi->name_mem_tag == NULL_TREE)
942 pi->name_mem_tag = get_nmt_for (ptr);
945 /* If the new name tag computed for PTR is different than
946 the old name tag that it used to have, then the old tag
947 needs to be removed from the IL, so we mark it for
949 if (old_name_tag && old_name_tag != pi->name_mem_tag)
950 mark_sym_for_renaming (old_name_tag);
/* A tag for a pointer to volatile storage is itself volatile.  */
952 TREE_THIS_VOLATILE (pi->name_mem_tag)
953 |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
955 /* Mark the new name tag for renaming. */
956 mark_sym_for_renaming (pi->name_mem_tag);
958 htab_delete (ptr_hash);
960 VEC_free (tree, heap, with_ptvars);
964 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
965 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
966 name tag and the variables it points-to are call-clobbered. Finally, if
967 P_i escapes and we could not determine where it points to, then all the
968 variables in the same alias set as *P_i are marked call-clobbered. This
969 is necessary because we must assume that P_i may take the address of any
970 variable in the same alias set. */
/* Build may-alias sets for name memory tags: resolve points-to for
   each processed pointer, then add every pointed-to variable to the
   pointer's NMT (and to its SMT, unless the variable is the SMT
   itself).  */
973 compute_flow_sensitive_aliasing (struct alias_info *ai)
/* Pointers whose points-to set cannot be determined fall back to
   "points anywhere".  */
980 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
982 if (!find_what_p_points_to (ptr))
983 set_pt_anything (ptr);
988 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
991 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
992 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
996 /* Set up aliasing information for PTR's name memory tag (if it has
997 one). Note that only pointers that have been dereferenced will
998 have a name memory tag. */
999 if (pi->name_mem_tag && pi->pt_vars)
1000 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
1002 add_may_alias (pi->name_mem_tag, referenced_var (j));
1003 if (j != DECL_UID (v_ann->symbol_mem_tag))
1004 add_may_alias (v_ann->symbol_mem_tag, referenced_var (j));
1010 /* Compute type-based alias sets. Traverse all the pointers and
1011 addressable variables found in setup_pointers_and_addressables.
1013 For every pointer P in AI->POINTERS and addressable variable V in
1014 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
1015 memory tag (SMT) if their alias sets conflict. V is then marked as
1016 an alias tag so that the operand scanner knows that statements
1017 containing V have aliased operands. */
/* Type-based (flow-insensitive) aliasing: for every pointer's SMT,
   add each conflicting addressable variable to its may-alias set,
   tracking the virtual-operand cost for the grouping heuristic; then
   patch up SMT pairs that conflict yet share no symbols.  */
1020 compute_flow_insensitive_aliasing (struct alias_info *ai)
1024 /* Initialize counter for the total number of virtual operands that
1025 aliasing will introduce. When AI->TOTAL_ALIAS_VOPS goes beyond the
1026 threshold set by --params max-alias-vops, we enable alias
1028 ai->total_alias_vops = 0;
1030 /* For every pointer P, determine which addressable variables may alias
1031 with P's symbol memory tag. */
1032 for (i = 0; i < ai->num_pointers; i++)
1035 struct alias_map_d *p_map = ai->pointers[i];
1036 tree tag = var_ann (p_map->var)->symbol_mem_tag;
1037 var_ann_t tag_ann = var_ann (tag);
1040 /* Call-clobbering information is not finalized yet at this point. */
1041 if (PTR_IS_REF_ALL (p_map->var))
1044 p_map->total_alias_vops = 0;
1045 p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
1047 /* Add any pre-existing may_aliases to the bitmap used to represent
1048 TAG's alias set in case we need to group aliases. */
1049 for (j = 0; VEC_iterate (tree, tag_ann->may_aliases, j, var); ++j)
1050 bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1052 for (j = 0; j < ai->num_addressable_vars; j++)
1054 struct alias_map_d *v_map;
1056 bool tag_stored_p, var_stored_p;
1058 v_map = ai->addressable_vars[j];
1060 v_ann = var_ann (var);
1062 /* Skip memory tags and variables that have never been
1063 written to. We also need to check if the variables are
1064 call-clobbered because they may be overwritten by
1067 Note this is effectively random accessing elements in
1068 the sparse bitset, which can be highly inefficient.
1069 So we first check the call_clobbered status of the
1070 tag and variable before querying the bitmap. */
1071 tag_stored_p = is_call_clobbered (tag)
1072 || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
1073 var_stored_p = is_call_clobbered (var)
1074 || bitmap_bit_p (ai->written_vars, DECL_UID (var));
/* Two never-written locations cannot create a visible alias.  */
1075 if (!tag_stored_p && !var_stored_p)
1078 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
1080 size_t num_tag_refs, num_var_refs;
1082 num_tag_refs = NUM_REFERENCES (tag_ann);
1083 num_var_refs = NUM_REFERENCES (v_ann);
1085 /* Add VAR to TAG's may-aliases set. */
1087 /* We should never have a var with subvars here, because
1088 they shouldn't get into the set of addressable vars */
1089 gcc_assert (!var_can_have_subvars (var)
1090 || get_subvars_for_var (var) == NULL);
1092 add_may_alias (tag, var);
1093 /* Update the bitmap used to represent TAG's alias set
1094 in case we need to group aliases. */
1095 bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1097 /* Update the total number of virtual operands due to
1098 aliasing. Since we are adding one more alias to TAG's
1099 may-aliases set, the total number of virtual operands due
1100 to aliasing will be increased by the number of references
1101 made to VAR and TAG (every reference to TAG will also
1102 count as a reference to VAR). */
1103 ai->total_alias_vops += (num_var_refs + num_tag_refs);
1104 p_map->total_alias_vops += (num_var_refs + num_tag_refs);
1111 /* Since this analysis is based exclusively on symbols, it fails to
1112 handle cases where two pointers P and Q have different memory
1113 tags with conflicting alias set numbers but no aliased symbols in
1116 For example, suppose that we have two memory tags SMT.1 and SMT.2
1119 may-aliases (SMT.1) = { a }
1120 may-aliases (SMT.2) = { b }
1122 and the alias set number of SMT.1 conflicts with that of SMT.2.
1123 Since they don't have symbols in common, loads and stores from
1124 SMT.1 and SMT.2 will seem independent of each other, which will
1125 lead to the optimizers making invalid transformations (see
1126 testsuite/gcc.c-torture/execute/pr15262-[12].c).
1128 To avoid this problem, we do a final traversal of AI->POINTERS
1129 looking for pairs of pointers that have no aliased symbols in
1130 common and yet have conflicting alias set numbers. */
1131 for (i = 0; i < ai->num_pointers; i++)
1134 struct alias_map_d *p_map1 = ai->pointers[i];
1135 tree tag1 = var_ann (p_map1->var)->symbol_mem_tag;
1136 bitmap may_aliases1 = p_map1->may_aliases;
1138 if (PTR_IS_REF_ALL (p_map1->var))
/* Each unordered pair is considered once: j always starts past i.  */
1141 for (j = i + 1; j < ai->num_pointers; j++)
1143 struct alias_map_d *p_map2 = ai->pointers[j];
1144 tree tag2 = var_ann (p_map2->var)->symbol_mem_tag;
1145 bitmap may_aliases2 = p_map2->may_aliases;
1147 if (PTR_IS_REF_ALL (p_map2->var))
1150 /* If the pointers may not point to each other, do nothing. */
1151 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
1154 /* The two pointers may alias each other. If they already have
1155 symbols in common, do nothing. */
1156 if (bitmap_intersect_p (may_aliases1, may_aliases2))
1159 if (!bitmap_empty_p (may_aliases2))
1164 /* Add all the aliases for TAG2 into TAG1's alias set.
1165 FIXME, update grouping heuristic counters. */
1166 EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
1167 add_may_alias (tag1, referenced_var (k));
1168 bitmap_ior_into (may_aliases1, may_aliases2);
1172 /* Since TAG2 does not have any aliases of its own, add
1173 TAG2 itself to the alias set of TAG1. */
1174 add_may_alias (tag1, tag2);
1175 bitmap_set_bit (may_aliases1, DECL_UID (tag2));
1181 fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
1182 get_name (current_function_decl),
1183 ai->total_alias_vops);
/* Finalize may-alias information for ref-all pointers.  Traverse all
   the addressable variables found in setup_pointers_and_addressables.

   If flow-sensitive alias analysis has attached a name memory tag to
   a ref-all pointer, we will use it for the dereferences because that
   will have more precise aliasing information.  But if there is no
   name tag, we will use a special symbol tag that aliases all the
   call-clobbered addressable variables.  */

finalize_ref_all_pointers (struct alias_info *ai)

  /* If a .GLOBAL_VAR has been created, it already represents every
     call-clobbered variable, so aliasing the ref-all tag with it is
     sufficient.  */
  if (gimple_global_var (cfun))
    add_may_alias (ai->ref_all_symbol_mem_tag, gimple_global_var (cfun));

  /* First add the real call-clobbered variables.  */
  for (i = 0; i < ai->num_addressable_vars; i++)
    /* Only variables clobbered by calls need to be reflected in the
       ref-all tag's may-alias set.  */
    tree var = ai->addressable_vars[i]->var;
    if (is_call_clobbered (var))
      add_may_alias (ai->ref_all_symbol_mem_tag, var);

  /* Then add the call-clobbered pointer memory tags.  See
     compute_flow_insensitive_aliasing for the rationale.  */
  for (i = 0; i < ai->num_pointers; i++)
    tree ptr = ai->pointers[i]->var, tag;
    /* Ref-all pointers share the special tag itself; skip them here
       to avoid making the tag alias itself.  */
    if (PTR_IS_REF_ALL (ptr))
    tag = var_ann (ptr)->symbol_mem_tag;
    if (is_call_clobbered (tag))
      add_may_alias (ai->ref_all_symbol_mem_tag, tag);
1228 /* Comparison function for qsort used in group_aliases. */
1231 total_alias_vops_cmp (const void *p, const void *q)
1233 const struct alias_map_d **p1 = (const struct alias_map_d **)p;
1234 const struct alias_map_d **p2 = (const struct alias_map_d **)q;
1235 long n1 = (*p1)->total_alias_vops;
1236 long n2 = (*p2)->total_alias_vops;
1238 /* We want to sort in descending order. */
1239 return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
/* Group all the aliases for TAG to make TAG represent all the
   variables in its alias set.  Update the total number
   of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS).  This
   function will make TAG be the unique alias tag for all the
   variables in its may-aliases.  So, given:

   	may-aliases(TAG) = { V1, V2, V3 }

   This function will group the variables into:

   	may-aliases(V1) = { TAG }
   	may-aliases(V2) = { TAG }
   	may-aliases(V3) = { TAG }  */

group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
  var_ann_t tag_ann = var_ann (tag);
  /* Cache TAG's reference count; it is used once per grouped variable
     below.  */
  size_t num_tag_refs = NUM_REFERENCES (tag_ann);

  EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
    tree var = referenced_var (i);
    var_ann_t ann = var_ann (var);

    /* Make TAG the unique alias of VAR.  */
    ann->is_aliased = 0;
    ann->may_aliases = NULL;

    /* Note that VAR and TAG may be the same if the function has no
       addressable variables (see the discussion at the end of
       setup_pointers_and_addressables).  */
    add_may_alias (var, tag);

    /* Reduce total number of virtual operands contributed
       by TAG on behalf of VAR.  Notice that the references to VAR
       itself won't be removed.  We will merely replace them with
       references to TAG.  */
    ai->total_alias_vops -= num_tag_refs;

  /* We have reduced the number of virtual operands that TAG makes on
     behalf of all the variables formerly aliased with it.  However,
     we have also "removed" all the virtual operands for TAG itself,
     so we add them back.  */
  ai->total_alias_vops += num_tag_refs;

  /* TAG no longer has any aliases.  */
  tag_ann->may_aliases = NULL;
/* Simple comparison function for qsort that sorts based on pointer
   identity, used by compact_name_tags to canonicalize may-alias
   vectors so duplicates become adjacent.  */

tree_pointer_compare (const void *pa, const void *pb)
  const tree a = *((const tree *)pa);
  const tree b = *((const tree *)pb);
  /* NOTE(review): the comparison/return statement is not visible in
     this excerpt; presumably it imposes a total order on A and B --
     confirm against the full source.  */
/* Replacing may-aliases in name tags during grouping can end up with
   the same SMT multiple times in the may_alias list.  It's quicker to
   just remove them post-hoc than it is to avoid them during
   replacement.  Thus, this routine sorts the may-alias list and
   removes duplicates.  */

compact_name_tags (void)
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    /* Only name memory tags can have picked up duplicate entries
       during alias grouping.  */
    if (TREE_CODE (var) == NAME_MEMORY_TAG)
      VEC(tree, gc) *aliases, *new_aliases;
      tree alias, last_alias;

      aliases = var_ann (var)->may_aliases;

      /* A vector with at most one element cannot contain duplicates.  */
      if (VEC_length (tree, aliases) > 1)
        bool changed = false;
        /* Sort by pointer so equal entries become adjacent and can be
           detected with a single linear scan.  */
        qsort (VEC_address (tree, aliases), VEC_length (tree, aliases),
               sizeof (tree), tree_pointer_compare);

        for (i = 0; VEC_iterate (tree, aliases, i, alias); i++)
          /* Skip an entry equal to its predecessor; it is a duplicate.  */
          if (alias == last_alias)
            VEC_safe_push (tree, gc, new_aliases, alias);

        /* Only replace the array if something has changed.  */
          VEC_free (tree, gc, aliases);
          var_ann (var)->may_aliases = new_aliases;
          /* The compacted copy was not needed; discard it.  */
          VEC_free (tree, gc, new_aliases);
/* Group may-aliases sets to reduce the number of virtual operands due
   to aliasing:

     1- Sort the list of pointers in decreasing number of contributed
	virtual operands.

     2- Take the first entry in AI->POINTERS and revert the role of
	the memory tag and its aliases.  Usually, whenever an aliased
	variable Vi is found to alias with a memory tag T, we add Vi
	to the may-aliases set for T.  Meaning that after alias
	analysis, we will have:

		may-aliases(T) = { V1, V2, V3, ..., Vn }

	This means that every statement that references T, will get 'n'
	virtual operands for each of the Vi tags.  But, when alias
	grouping is enabled, we make T an alias tag and add it to the
	alias set of all the Vi variables:

		may-aliases(V1) = { T }
		may-aliases(V2) = { T }
		may-aliases(Vn) = { T }

	This has two effects: (a) statements referencing T will only get
	a single virtual operand, and, (b) all the variables Vi will now
	appear to alias each other.  So, we lose alias precision to
	improve compile time.  But, in theory, a program with such a high
	level of aliasing should not be very optimizable in the first
	place.

     3- Since variables may be in the alias set of more than one
	memory tag, the grouping done in step (2) needs to be extended
	to all the memory tags that have a non-empty intersection with
	the may-aliases set of tag T.  For instance, if we originally
	had these may-aliases sets:

		may-aliases(T) = { V1, V2, V3 }
		may-aliases(R) = { V2, V4 }

	In step (2) we would have reverted the aliases for T as:

		may-aliases(V1) = { T }
		may-aliases(V2) = { T }
		may-aliases(V3) = { T }

	But note that now V2 is no longer aliased with R.  We could
	add R to may-aliases(V2), but we are in the process of
	grouping aliases to reduce virtual operands so what we do is
	add V4 to the grouping to obtain:

		may-aliases(V1) = { T }
		may-aliases(V2) = { T }
		may-aliases(V3) = { T }
		may-aliases(V4) = { T }

     4- If the total number of virtual operands due to aliasing is
	still above the threshold set by max-alias-vops, go back to (2).  */

group_aliases (struct alias_info *ai)

  /* Sort the POINTERS array in descending order of contributed
     virtual operands.  */
  qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
	 total_alias_vops_cmp);

  /* For every pointer in AI->POINTERS, reverse the roles of its tag
     and the tag's may-aliases set.  */
  for (i = 0; i < ai->num_pointers; i++)
      tree tag1 = var_ann (ai->pointers[i]->var)->symbol_mem_tag;
      bitmap tag1_aliases = ai->pointers[i]->may_aliases;

      /* Skip tags that have been grouped already.  */
      if (ai->pointers[i]->grouped_p)

      /* See if TAG1 had any aliases in common with other symbol tags.
	 If we find a TAG2 with common aliases with TAG1, add TAG2's
	 aliases into TAG1.  */
      for (j = i + 1; j < ai->num_pointers; j++)
	  bitmap tag2_aliases = ai->pointers[j]->may_aliases;

	  if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
	      tree tag2 = var_ann (ai->pointers[j]->var)->symbol_mem_tag;

	      bitmap_ior_into (tag1_aliases, tag2_aliases);

	      /* TAG2 does not need its aliases anymore.  */
	      bitmap_clear (tag2_aliases);
	      var_ann (tag2)->may_aliases = NULL;

	      /* TAG1 is the unique alias of TAG2.  */
	      add_may_alias (tag2, tag1);

	      ai->pointers[j]->grouped_p = true;

      /* Now group all the aliases we collected into TAG1.  */
      group_aliases_into (tag1, tag1_aliases, ai);

      /* If we've reduced total number of virtual operands below the
	 threshold, stop.  */
      if (ai->total_alias_vops < MAX_ALIASED_VOPS)

  /* Finally, all the variables that have been grouped cannot be in
     the may-alias set of name memory tags.  Suppose that we have
     grouped the aliases in this code so that may-aliases(a) = SMT.20

     	p_5 = &a;
	...
	# a_9 = V_MAY_DEF <a_8>
	p_5->field = 0
	... Several modifications to SMT.20 ...
	# VUSE <a_9>
	x_30 = p_5->field

     Since p_5 points to 'a', the optimizers will try to propagate 0
     into p_5->field, but that is wrong because there have been
     modifications to 'SMT.20' in between.  To prevent this we have to
     replace 'a' with 'SMT.20' in the name tag of p_5.  */
  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
      tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
      VEC(tree,gc) *aliases;

      /* Pointers without a name tag have nothing to fix up.  */
      if (name_tag == NULL_TREE)

      aliases = var_ann (name_tag)->may_aliases;
      for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
	  var_ann_t ann = var_ann (alias);

	  /* A non-tag (or structure field tag) that itself has a
	     may-alias entry was grouped above: its single alias is the
	     symbol tag that now represents it.  */
	  if ((!MTAG_P (alias)
	       || TREE_CODE (alias) == STRUCT_FIELD_TAG)
	      && ann->may_aliases)
	      /* Grouping leaves exactly one representative alias.  */
	      gcc_assert (VEC_length (tree, ann->may_aliases) == 1);
	      new_alias = VEC_index (tree, ann->may_aliases, 0);
	      replace_may_alias (name_tag, j, new_alias);

  /* Remove any duplicate SMTs introduced by the replacements above.  */
  compact_name_tags ();

	     "%s: Total number of aliased vops after grouping: %ld%s\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops,
	     (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
1535 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1538 create_alias_map_for (tree var, struct alias_info *ai)
1540 struct alias_map_d *alias_map;
1541 alias_map = XCNEW (struct alias_map_d);
1542 alias_map->var = var;
1543 alias_map->set = get_alias_set (var);
1544 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
/* Create memory tags for all the dereferenced pointers and build the
   ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
   sets.  Based on the address escape and points-to information collected
   earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
   variables whose address is not needed anymore.  */

setup_pointers_and_addressables (struct alias_info *ai)
  size_t n_vars, num_addressable_vars, num_pointers;
  referenced_var_iterator rvi;
  VEC (tree, heap) *varvec = NULL;
  safe_referenced_var_iterator srvi;

  /* Size up the arrays ADDRESSABLE_VARS and POINTERS.  */
  num_addressable_vars = num_pointers = 0;

  FOR_EACH_REFERENCED_VAR (var, rvi)
      if (may_be_aliased (var))
	num_addressable_vars++;

      if (POINTER_TYPE_P (TREE_TYPE (var)))
	  /* Since we don't keep track of volatile variables, assume that
	     these pointers are used in indirect store operations.  */
	  if (TREE_THIS_VOLATILE (var))
	    bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));

  /* Create ADDRESSABLE_VARS and POINTERS.  Note that these arrays are
     always going to be slightly bigger than we actually need them
     because some TREE_ADDRESSABLE variables will be marked
     non-addressable below and only pointers with unique symbol tags are
     going to be added to POINTERS.  */
  ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
  ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
  ai->num_addressable_vars = 0;
  ai->num_pointers = 0;

  /* Since we will be creating symbol memory tags within this loop,
     cache the value of NUM_REFERENCED_VARS to avoid processing the
     additional tags unnecessarily.  */
  n_vars = num_referenced_vars;

  FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
      var_ann_t v_ann = var_ann (var);

      /* Name memory tags already have flow-sensitive aliasing
	 information, so they need not be processed by
	 compute_flow_insensitive_aliasing.  Similarly, symbol memory
	 tags are already accounted for when we process their
	 associated pointer.

	 Structure fields, on the other hand, have to have some of this
	 information processed for them, but it's pointless to mark them
	 non-addressable (since they are fake variables anyway).  */
      if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)

      /* Remove the ADDRESSABLE flag from every addressable variable whose
         address is not needed anymore.  This is caused by the propagation
         of ADDR_EXPR constants into INDIRECT_REF expressions and the
         removal of dead pointer assignments done by the early scalar
         cleanup passes.  */
      if (TREE_ADDRESSABLE (var))
	  if (!bitmap_bit_p (gimple_addressable_vars (cfun), DECL_UID (var))
	      && TREE_CODE (var) != RESULT_DECL
	      && !is_global_var (var))
	      bool okay_to_mark = true;

	      /* Since VAR is now a regular GIMPLE register, we will need
		 to rename VAR into SSA afterwards.  */
	      mark_sym_for_renaming (var);

	      /* If VAR can have sub-variables, and any of its
		 sub-variables has its address taken, then we cannot
		 remove the addressable flag from VAR.  */
	      if (var_can_have_subvars (var)
		  && (svars = get_subvars_for_var (var)))
		  for (sv = svars; sv; sv = sv->next)
		      if (bitmap_bit_p (gimple_addressable_vars (cfun),
					DECL_UID (sv->var)))
			okay_to_mark = false;
		      /* Sub-variables must be renamed too.  */
		      mark_sym_for_renaming (sv->var);

	      /* The address of VAR is not needed, remove the
		 addressable bit, so that it can be optimized as a
		 regular variable.  */
		mark_non_addressable (var);

      /* Global variables and addressable locals may be aliased.  Create an
         entry in ADDRESSABLE_VARS for VAR.  */
      if (may_be_aliased (var)
	  && (!var_can_have_subvars (var)
	      || get_subvars_for_var (var) == NULL))
	  create_alias_map_for (var, ai);
	  mark_sym_for_renaming (var);

      /* Add pointer variables that have been dereferenced to the POINTERS
         array and create a symbol memory tag for them.  */
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	  if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
	       || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
	      /* If pointer VAR still doesn't have a memory tag
		 associated with it, create it now or re-use an
		 existing one.  */
	      tag = get_tmt_for (var, ai);
	      t_ann = var_ann (tag);

	      /* The symbol tag will need to be renamed into SSA
		 afterwards. Note that we cannot do this inside
		 get_tmt_for because aliasing may run multiple times
		 and we only create symbol tags the first time.  */
	      mark_sym_for_renaming (tag);

	      /* Similarly, if pointer VAR used to have another type
		 tag, we will need to process it in the renamer to
		 remove the stale virtual operands.  */
	      if (v_ann->symbol_mem_tag)
		mark_sym_for_renaming (v_ann->symbol_mem_tag);

	      /* Associate the tag with pointer VAR.  */
	      v_ann->symbol_mem_tag = tag;

	      /* If pointer VAR has been used in a store operation,
		 then its memory tag must be marked as written-to.  */
	      if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
		bitmap_set_bit (ai->written_vars, DECL_UID (tag));

	      /* All the dereferences of pointer VAR count as
		 references of TAG.  Since TAG can be associated with
		 several pointers, add the dereferences of VAR to the
		 tag's running total.  */
	      NUM_REFERENCES_SET (t_ann,
				  NUM_REFERENCES (t_ann)
				  + NUM_REFERENCES (v_ann));

	      /* The pointer has not been dereferenced.  If it had a
		 symbol memory tag, remove it and mark the old tag for
		 renaming to remove it out of the IL.  */
	      var_ann_t ann = var_ann (var);
	      tree tag = ann->symbol_mem_tag;
		  mark_sym_for_renaming (tag);
		  ann->symbol_mem_tag = NULL_TREE;

  /* Release the scratch vector used by the safe iterator.  */
  VEC_free (tree, heap, varvec);
/* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
   every call site, we need to emit V_MAY_DEF expressions to represent the
   clobbering effects of the call for variables whose address escapes the
   current function.

   One approach is to group all call-clobbered variables into a single
   representative that is used as an alias of every call-clobbered variable
   (.GLOBAL_VAR).  This works well, but it ties the optimizer hands because
   references to any call clobbered variable is a reference to .GLOBAL_VAR.

   The second approach is to emit a clobbering V_MAY_DEF for every
   call-clobbered variable at call sites.  This is the preferred way in terms
   of optimization opportunities but it may create too many V_MAY_DEF operands
   if there are many call clobbered variables and function calls in the
   function.

   To decide whether or not to use .GLOBAL_VAR we multiply the number of
   function calls found by the number of call-clobbered variables.  If that
   product is beyond a certain threshold, as determined by the parameterized
   values shown below, we use .GLOBAL_VAR.

   FIXME.  This heuristic should be improved.  One idea is to use several
   .GLOBAL_VARs of different types instead of a single one.  The thresholds
   have been derived from a typical bootstrap cycle, including all target
   libraries.  Compile times were found to increase by ~1% compared to using
   .GLOBAL_VAR.  */

maybe_create_global_var (struct alias_info *ai)
  unsigned i, n_clobbered;

  /* No need to create it, if we have one already.  */
  if (gimple_global_var (cfun) == NULL_TREE)
      /* Count all the call-clobbered variables.  */
      EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)

      /* If the number of virtual operands that would be needed to
	 model all the call-clobbered variables is larger than
	 GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.

	 Also create .GLOBAL_VAR if there are no call-clobbered
	 variables and the program contains a mixture of pure/const
	 and regular function calls.  This is to avoid the problem
	 described in PR 20115:

	      int func_pure (void) { return X; }
	      int func_non_pure (int a) { X += a; }
	      ...
	      int a = func_pure ();

	 Since foo() has no call-clobbered variables, there is
	 no relationship between the calls to func_pure and
	 func_non_pure.  Since func_pure has no side-effects, value
	 numbering optimizations elide the second call to func_pure.
	 So, if we have some pure/const and some regular calls in the
	 program we create .GLOBAL_VAR to avoid missing these
	 relationships.  */
      if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
	  || (n_clobbered == 0
	      && ai->num_calls_found > 0
	      && ai->num_pure_const_calls_found > 0
	      && ai->num_calls_found > ai->num_pure_const_calls_found))
	create_global_var ();

  /* Mark all call-clobbered symbols for renaming.  Since the initial
     rewrite into SSA ignored all call sites, we may need to rename
     .GLOBAL_VAR and the call-clobbered variables.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
      tree var = referenced_var (i);

      /* If the function has calls to clobbering functions and
	 .GLOBAL_VAR has been created, make it an alias for all
	 call-clobbered variables.  */
      if (gimple_global_var (cfun) && var != gimple_global_var (cfun))
	  add_may_alias (var, gimple_global_var (cfun));
	  /* Variables with sub-variables are not expected here.  */
	  gcc_assert (!get_subvars_for_var (var));

      mark_sym_for_renaming (var);
/* Return TRUE if pointer PTR may point to variable VAR.

   MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
	This is needed because when checking for type conflicts we are
	interested in the alias set of the memory location pointed-to by
	PTR.  The alias set of PTR itself is irrelevant.

   VAR_ALIAS_SET is the alias set for VAR.  */

may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
	     tree var, HOST_WIDE_INT var_alias_set,
	     bool alias_set_only)

  alias_stats.alias_queries++;
  alias_stats.simple_queries++;

  /* By convention, a variable cannot alias itself.  */
  mem = var_ann (ptr)->symbol_mem_tag;
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;

  /* If FLAG_ARGUMENT_NOALIAS is > 2, pointer arguments may
     not point to anything else.  */
  if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;

  /* If FLAG_ARGUMENT_NOALIAS is > 1, pointer arguments may
     not point to global variables.  */
  if (flag_argument_noalias > 1 && is_global_var (var)
      && TREE_CODE (ptr) == PARM_DECL)
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;

  /* If either MEM or VAR is a read-only global and the other one
     isn't, then PTR cannot point to VAR.  */
  if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
      || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;

  gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);

  alias_stats.tbaa_queries++;

  /* If the alias sets don't conflict then MEM cannot alias VAR.  */
  if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
      alias_stats.alias_noalias++;
      alias_stats.tbaa_resolved++;

  /* If VAR is a record or union type, PTR cannot point into VAR
     unless there is some explicit address operation in the
     program that can reference a field of the PTR's dereferenced
     type.  This also assumes that the types of both VAR and PTR are
     contained within the compilation unit, and that there is no fancy
     addressing arithmetic associated with any of the types
     involved.  */
  if ((mem_alias_set != 0) && (var_alias_set != 0))
      tree ptr_type = TREE_TYPE (ptr);
      tree var_type = TREE_TYPE (var);

      /* The star count is -1 if the type at the end of the pointer_to
	 chain is not a record or union type.  */
      if ((!alias_set_only) &&
	  ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
	  int ptr_star_count = 0;

	  /* ipa_type_escape_star_count_of_interesting_type is a little too
	     restrictive for the pointer type, need to allow pointers to
	     primitive types as long as those types cannot be pointers
	     to everything.  */
	  while (POINTER_TYPE_P (ptr_type))
	      /* Strip the *'s off.  */
	      ptr_type = TREE_TYPE (ptr_type);

	  /* There does not appear to be a better test to see if the
	     pointer type was one of the pointer to everything
	     types.  */
	  if (ptr_star_count > 0)
	      alias_stats.structnoaddress_queries++;
	      if (ipa_type_escape_field_does_not_clobber_p (var_type,
		  alias_stats.structnoaddress_resolved++;
		  alias_stats.alias_noalias++;
	  else if (ptr_star_count == 0)
	      /* If PTR_TYPE was not really a pointer to type, it cannot
		 alias.  */
	      alias_stats.structnoaddress_queries++;
	      alias_stats.structnoaddress_resolved++;
	      alias_stats.alias_noalias++;

  /* No disambiguation rule applied; conservatively assume aliasing.  */
  alias_stats.alias_mayalias++;
/* Add ALIAS to the set of variables that may alias VAR.  */

add_may_alias (tree var, tree alias)
  /* get_var_ann creates the annotation if it does not exist yet.  */
  var_ann_t v_ann = get_var_ann (var);
  var_ann_t a_ann = get_var_ann (alias);

  /* Don't allow self-referential aliases.  */
  gcc_assert (var != alias);

  /* ALIAS must be addressable if it's being added to an alias set.  */
    TREE_ADDRESSABLE (alias) = 1;
    gcc_assert (may_be_aliased (alias));

  /* Lazily allocate the may-aliases vector on first use.  */
  if (v_ann->may_aliases == NULL)
    v_ann->may_aliases = VEC_alloc (tree, gc, 2);

  /* Avoid adding duplicates.  */
  for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)

  VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
  /* Record that ALIAS now appears in some may-alias set.  */
  a_ann->is_aliased = 1;
1993 /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
1996 replace_may_alias (tree var, size_t i, tree new_alias)
1998 var_ann_t v_ann = var_ann (var);
1999 VEC_replace (tree, v_ann->may_aliases, i, new_alias);
/* Mark pointer PTR as pointing to an arbitrary memory location.  */

set_pt_anything (tree ptr)
  /* get_ptr_info creates the points-to record if PTR lacks one.  */
  struct ptr_info_def *pi = get_ptr_info (ptr);

  pi->pt_anything = 1;

  /* The pointer used to have a name tag, but we now found it pointing
     to an arbitrary location.  The name tag needs to be renamed and
     disassociated from PTR.  */
  if (pi->name_mem_tag)
      /* Renaming removes the tag's stale virtual operands from the IL.  */
      mark_sym_for_renaming (pi->name_mem_tag);
      pi->name_mem_tag = NULL_TREE;
/* Return true if STMT is an "escape" site from the current function.  Escape
   sites are those statements which might expose the address of a variable
   outside the current function.  STMT is an escape site iff:

   	1- STMT is a function call, or
	2- STMT is an __asm__ expression, or
	3- STMT is an assignment to a non-local variable, or
	4- STMT is a return statement.

   Return the type of escape site found, if we found one, or NO_ESCAPE
   otherwise.  */

is_escape_site (tree stmt)
  tree call = get_call_expr_in (stmt);
  if (call != NULL_TREE)
      /* A call without side effects (pure/const) cannot let the
	 address escape into modifiable state.  */
      if (!TREE_SIDE_EFFECTS (call))
	return ESCAPE_TO_PURE_CONST;

      return ESCAPE_TO_CALL;
  else if (TREE_CODE (stmt) == ASM_EXPR)
    return ESCAPE_TO_ASM;
  else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
      tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);

      /* Get to the base of _REF nodes.  */
      if (TREE_CODE (lhs) != SSA_NAME)
	lhs = get_base_address (lhs);

      /* If we couldn't recognize the LHS of the assignment, assume that it
	 is a non-local store.  */
      if (lhs == NULL_TREE)
	return ESCAPE_UNKNOWN;

      if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
	  || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CONVERT_EXPR
	  || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
	    = TREE_TYPE (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0));
	  tree to = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 1));

	  /* If the RHS is a conversion between a pointer and an integer, the
	     pointer escapes since we can't track the integer.  */
	  if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
	    return ESCAPE_BAD_CAST;

	  /* Same if the RHS is a conversion between a regular pointer and a
	     ref-all pointer since we can't track the SMT of the former.  */
	  if (POINTER_TYPE_P (from) && !TYPE_REF_CAN_ALIAS_ALL (from)
	      && POINTER_TYPE_P (to) && TYPE_REF_CAN_ALIAS_ALL (to))
	    return ESCAPE_BAD_CAST;

      /* If the LHS is an SSA name, it can't possibly represent a non-local
	 memory store.  */
      if (TREE_CODE (lhs) == SSA_NAME)

      /* FIXME: LHS is not an SSA_NAME.  Even if it's an assignment to a
	 local variables we cannot be sure if it will escape, because we
	 don't have information about objects not in SSA form.  Need to
	 implement something along the lines of

	 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
	 Midkiff, ``Escape analysis for java,'' in Proceedings of the
	 Conference on Object-Oriented Programming Systems, Languages, and
	 Applications (OOPSLA), pp. 1-19, 1999.  */
      return ESCAPE_STORED_IN_GLOBAL;
  else if (TREE_CODE (stmt) == RETURN_EXPR)
    return ESCAPE_TO_RETURN;
/* Create a new memory tag of type TYPE.
   Does NOT push it into the current binding.  */

create_tag_raw (enum tree_code code, tree type, const char *prefix)

  /* Make the type of the variable writable.  */
  new_type = build_type_variant (type, 0, 0);
  TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);

  tmp_var = build_decl (code, create_tmp_var_name (prefix),
  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  /* It doesn't start out global.  */
  MTAG_GLOBAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  /* Mark it used so it is not removed as dead.  */
  TREE_USED (tmp_var) = 1;
/* Create a new memory tag of type TYPE.  If IS_TYPE_TAG is true, the tag
   is considered to represent all the pointers whose pointed-to types are
   in the same alias set class.  Otherwise, the tag represents a single
   SSA_NAME pointer variable.  */

create_memory_tag (tree type, bool is_type_tag)
  /* Symbol memory tags get the "SMT" prefix, name memory tags "NMT".  */
  tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
			     type, (is_type_tag) ? "SMT" : "NMT");

  /* By default, memory tags are local variables.  Alias analysis will
     determine whether they should be considered globals.  */
  DECL_CONTEXT (tag) = current_function_decl;

  /* Memory tags are by definition addressable.  */
  TREE_ADDRESSABLE (tag) = 1;

  ann = get_var_ann (tag);
  ann->symbol_mem_tag = NULL_TREE;

  /* Add the tag to the symbol table.  */
  add_referenced_var (tag);
/* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
   This is used if P_i has been found to point to a specific set of
   variables or to a non-aliased memory location like the address returned
   by malloc functions.  */

get_nmt_for (tree ptr)
  struct ptr_info_def *pi = get_ptr_info (ptr);
  tree tag = pi->name_mem_tag;

  /* Create the tag lazily: the tag's type is the pointed-to type of
     PTR (TREE_TYPE of PTR's pointer type).  */
  if (tag == NULL_TREE)
    tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
/* Return the symbol memory tag associated to pointer PTR.  A memory
   tag is an artificial variable that represents the memory location
   pointed-to by PTR.  It is used to model the effects of pointer
   de-references on addressable variables.

   AI points to the data gathered during alias analysis.  This
   function populates the array AI->POINTERS.  */

get_tmt_for (tree ptr, struct alias_info *ai)
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  HOST_WIDE_INT tag_set = get_alias_set (tag_type);

  /* We use a unique memory tag for all the ref-all pointers.  */
  if (PTR_IS_REF_ALL (ptr))
      /* Created lazily; the tag's type is void since it stands for
	 arbitrary memory.  */
      if (!ai->ref_all_symbol_mem_tag)
	ai->ref_all_symbol_mem_tag = create_memory_tag (void_type_node, true);
      return ai->ref_all_symbol_mem_tag;

  /* To avoid creating unnecessary memory tags, only create one memory tag
     per alias set class.  Note that it may be tempting to group
     memory tags based on conflicting alias sets instead of
     equivalence.  That would be wrong because alias sets are not
     necessarily transitive (as demonstrated by the libstdc++ test
     23_containers/vector/cons/4.cc).  Given three alias sets A, B, C
     such that conflicts (A, B) == true and conflicts (A, C) == true,
     it does not necessarily follow that conflicts (B, C) == true.  */
  for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
      struct alias_map_d *curr = ai->pointers[i];
      tree curr_tag = var_ann (curr->var)->symbol_mem_tag;
      if (tag_set == curr->set)

  /* If VAR cannot alias with any of the existing memory tags, create a new
     tag for PTR and add it to the POINTERS array.  */
  if (tag == NULL_TREE)
      struct alias_map_d *alias_map;

      /* If PTR did not have a symbol tag already, create a new SMT.*
	 artificial variable representing the memory location
	 pointed-to by PTR.  */
      if (var_ann (ptr)->symbol_mem_tag == NULL_TREE)
	tag = create_memory_tag (tag_type, true);
	/* Otherwise re-use PTR's existing symbol tag.  */
	tag = var_ann (ptr)->symbol_mem_tag;

      /* Add PTR to the POINTERS array.  Note that we are not interested in
	 PTR's alias set.  Instead, we cache the alias set for the memory
	 that PTR points to.  */
      alias_map = XCNEW (struct alias_map_d);
      alias_map->var = ptr;
      alias_map->set = tag_set;
      ai->pointers[ai->num_pointers++] = alias_map;

  /* If the pointed-to type is volatile, so is the tag.  */
  TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);

  /* Make sure that the symbol tag has the same alias set as the
     pointed-to type.  */
  gcc_assert (tag_set == get_alias_set (tag));
2253 /* Create GLOBAL_VAR, an artificial global variable to act as a
2254 representative of all the variables that may be clobbered by function
2258 create_global_var (void)
2260 tree global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2262 DECL_ARTIFICIAL (global_var) = 1;
2263 TREE_READONLY (global_var) = 0;
2264 DECL_EXTERNAL (global_var) = 1;
2265 TREE_STATIC (global_var) = 1;
2266 TREE_USED (global_var) = 1;
2267 DECL_CONTEXT (global_var) = NULL_TREE;
2268 TREE_THIS_VOLATILE (global_var) = 0;
2269 TREE_ADDRESSABLE (global_var) = 0;
2271 create_var_ann (global_var);
2272 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
2273 add_referenced_var (global_var);
2274 mark_sym_for_renaming (global_var);
2275 cfun->gimple_df->global_var = global_var;
2279 /* Dump alias statistics on FILE. */
2282 dump_alias_stats (FILE *file)
2284 const char *funcname
2285 = lang_hooks.decl_printable_name (current_function_decl, 2);
2286 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2287 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2288 fprintf (file, "Total alias mayalias results:\t%u\n",
2289 alias_stats.alias_mayalias);
2290 fprintf (file, "Total alias noalias results:\t%u\n",
2291 alias_stats.alias_noalias);
2292 fprintf (file, "Total simple queries:\t%u\n",
2293 alias_stats.simple_queries);
2294 fprintf (file, "Total simple resolved:\t%u\n",
2295 alias_stats.simple_resolved);
2296 fprintf (file, "Total TBAA queries:\t%u\n",
2297 alias_stats.tbaa_queries);
2298 fprintf (file, "Total TBAA resolved:\t%u\n",
2299 alias_stats.tbaa_resolved);
2300 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2301 alias_stats.structnoaddress_queries);
2302 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2303 alias_stats.structnoaddress_resolved);
2307 /* Dump alias information on FILE. */
2310 dump_alias_info (FILE *file)
2313 const char *funcname
2314 = lang_hooks.decl_printable_name (current_function_decl, 2);
2315 referenced_var_iterator rvi;
2318 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
2320 fprintf (file, "Aliased symbols\n\n");
2322 FOR_EACH_REFERENCED_VAR (var, rvi)
2324 if (may_be_aliased (var))
2325 dump_variable (file, var);
2328 fprintf (file, "\nDereferenced pointers\n\n");
2330 FOR_EACH_REFERENCED_VAR (var, rvi)
2332 var_ann_t ann = var_ann (var);
2333 if (ann->symbol_mem_tag)
2334 dump_variable (file, var);
2337 fprintf (file, "\nSymbol memory tags\n\n");
2339 FOR_EACH_REFERENCED_VAR (var, rvi)
2341 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
2342 dump_variable (file, var);
2345 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
2347 fprintf (file, "SSA_NAME pointers\n\n");
2348 for (i = 1; i < num_ssa_names; i++)
2350 tree ptr = ssa_name (i);
2351 struct ptr_info_def *pi;
2353 if (ptr == NULL_TREE)
2356 pi = SSA_NAME_PTR_INFO (ptr);
2357 if (!SSA_NAME_IN_FREE_LIST (ptr)
2359 && pi->name_mem_tag)
2360 dump_points_to_info_for (file, ptr);
2363 fprintf (file, "\nName memory tags\n\n");
2365 FOR_EACH_REFERENCED_VAR (var, rvi)
2367 if (TREE_CODE (var) == NAME_MEMORY_TAG)
2368 dump_variable (file, var);
2371 fprintf (file, "\n");
2375 /* Dump alias information on stderr. */
2378 debug_alias_info (void)
2380 dump_alias_info (stderr);
2384 /* Return the alias information associated with pointer T. It creates a
2385 new instance if none existed. */
2387 struct ptr_info_def *
2388 get_ptr_info (tree t)
2390 struct ptr_info_def *pi;
2392 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2394 pi = SSA_NAME_PTR_INFO (t);
2397 pi = GGC_CNEW (struct ptr_info_def);
2398 SSA_NAME_PTR_INFO (t) = pi;
2405 /* Dump points-to information for SSA_NAME PTR into FILE. */
2408 dump_points_to_info_for (FILE *file, tree ptr)
2410 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2412 print_generic_expr (file, ptr, dump_flags);
2416 if (pi->name_mem_tag)
2418 fprintf (file, ", name memory tag: ");
2419 print_generic_expr (file, pi->name_mem_tag, dump_flags);
2422 if (pi->is_dereferenced)
2423 fprintf (file, ", is dereferenced");
2425 if (pi->value_escapes_p)
2426 fprintf (file, ", its value escapes");
2428 if (pi->pt_anything)
2429 fprintf (file, ", points-to anything");
2432 fprintf (file, ", points-to NULL");
2439 fprintf (file, ", points-to vars: { ");
2440 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
2442 print_generic_expr (file, referenced_var (ix), dump_flags);
2443 fprintf (file, " ");
2445 fprintf (file, "}");
2449 fprintf (file, "\n");
2453 /* Dump points-to information for VAR into stderr. */
2456 debug_points_to_info_for (tree var)
2458 dump_points_to_info_for (stderr, var);
2462 /* Dump points-to information into FILE. NOTE: This function is slow, as
2463 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
2466 dump_points_to_info (FILE *file)
2469 block_stmt_iterator si;
2472 lang_hooks.decl_printable_name (current_function_decl, 2);
2473 referenced_var_iterator rvi;
2476 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
2478 /* First dump points-to information for the default definitions of
2479 pointer variables. This is necessary because default definitions are
2480 not part of the code. */
2481 FOR_EACH_REFERENCED_VAR (var, rvi)
2483 if (POINTER_TYPE_P (TREE_TYPE (var)))
2485 tree def = gimple_default_def (cfun, var);
2487 dump_points_to_info_for (file, def);
2491 /* Dump points-to information for every pointer defined in the program. */
2496 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2498 tree ptr = PHI_RESULT (phi);
2499 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2500 dump_points_to_info_for (file, ptr);
2503 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
2505 tree stmt = bsi_stmt (si);
2507 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
2508 if (POINTER_TYPE_P (TREE_TYPE (def)))
2509 dump_points_to_info_for (file, def);
2513 fprintf (file, "\n");
2517 /* Dump points-to info pointed to by PTO into STDERR. */
2520 debug_points_to_info (void)
2522 dump_points_to_info (stderr);
2525 /* Dump to FILE the list of variables that may be aliasing VAR. */
2528 dump_may_aliases_for (FILE *file, tree var)
2530 VEC(tree, gc) *aliases;
2532 if (TREE_CODE (var) == SSA_NAME)
2533 var = SSA_NAME_VAR (var);
2535 aliases = var_ann (var)->may_aliases;
2540 fprintf (file, "{ ");
2541 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2543 print_generic_expr (file, al, dump_flags);
2544 fprintf (file, " ");
2546 fprintf (file, "}");
2551 /* Dump to stderr the list of variables that may be aliasing VAR. */
2554 debug_may_aliases_for (tree var)
2556 dump_may_aliases_for (stderr, var);
2559 /* Return true if VAR may be aliased. */
2562 may_be_aliased (tree var)
2565 if (TREE_ADDRESSABLE (var))
2568 /* Globally visible variables can have their addresses taken by other
2569 translation units. */
2572 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
2574 else if (!MTAG_P (var)
2575 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
2578 /* Automatic variables can't have their addresses escape any other way.
2579 This must be after the check for global variables, as extern declarations
2580 do not have TREE_STATIC set. */
2581 if (!TREE_STATIC (var))
2584 /* If we're in unit-at-a-time mode, then we must have seen all occurrences
2585 of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
2586 we can only be sure the variable isn't addressable if it's local to the
2587 current function. */
2588 if (flag_unit_at_a_time)
2590 if (decl_function_context (var) == current_function_decl)
2597 /* Given two symbols return TRUE if one is in the alias set of the other. */
2599 is_aliased_with (tree tag, tree sym)
2602 VEC(tree,gc) *aliases;
2605 if (var_ann (sym)->is_aliased)
2607 aliases = var_ann (tag)->may_aliases;
2609 if (aliases == NULL)
2612 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2618 aliases = var_ann (sym)->may_aliases;
2620 if (aliases == NULL)
2623 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2631 /* The following is based on code in add_stmt_operand to ensure that the
2632 same defs/uses/vdefs/vuses will be found after replacing a reference
2633 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
2634 is the address of var. Return a memtag for the ptr, after adding the
2635 proper may_aliases to it (which are the aliases of var, if it has any,
2639 add_may_alias_for_new_tag (tree tag, tree var)
2641 var_ann_t v_ann = var_ann (var);
2642 VEC(tree, gc) *aliases = v_ann->may_aliases;
2644 /* Case 1: |aliases| == 1 */
2645 if ((aliases != NULL)
2646 && (VEC_length (tree, aliases) == 1))
2648 tree ali = VEC_index (tree, aliases, 0);
2650 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
2654 /* Case 2: |aliases| == 0 */
2655 if (aliases == NULL)
2656 add_may_alias (tag, var);
2659 /* Case 3: |aliases| > 1 */
2663 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2664 add_may_alias (tag, al);
2670 /* Create a new symbol tag for PTR. Construct the may-alias list of this type
2671 tag so that it has the aliasing of VAR, or of the relevant subvars of VAR
2672 according to the location accessed by EXPR.
2674 Note, the set of aliases represented by the new symbol tag are not marked
2678 new_type_alias (tree ptr, tree var, tree expr)
2680 var_ann_t p_ann = var_ann (ptr);
2681 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2684 tree ali = NULL_TREE;
2685 HOST_WIDE_INT offset, size, maxsize;
2687 VEC (tree, heap) *overlaps = NULL;
2691 gcc_assert (p_ann->symbol_mem_tag == NULL_TREE);
2692 gcc_assert (!MTAG_P (var));
2694 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
2697 tag = create_memory_tag (tag_type, true);
2698 p_ann->symbol_mem_tag = tag;
2700 /* Add VAR to the may-alias set of PTR's new symbol tag. If VAR has
2701 subvars, add the subvars to the tag instead of the actual var. */
2702 if (var_can_have_subvars (ref)
2703 && (svars = get_subvars_for_var (ref)))
2705 for (sv = svars; sv; sv = sv->next)
2709 if (overlap_subvar (offset, maxsize, sv->var, &exact))
2710 VEC_safe_push (tree, heap, overlaps, sv->var);
2712 gcc_assert (overlaps != NULL);
2714 else if (var_can_have_subvars (var)
2715 && (svars = get_subvars_for_var (var)))
2717 /* If the REF is not a direct access to VAR (e.g., it is a dereference
2718 of a pointer), we should scan the virtual operands of REF the same
2719 way as tree-ssa-operands do. At the moment, this is somewhat
2720 difficult, so we just give up and add all the subvars of VAR.
2721 On mem-ssa branch, the scanning for virtual operands have been
2722 split from the rest of tree-ssa-operands, so it should be much
2723 easier to fix this problem correctly once mem-ssa is merged. */
2724 for (sv = svars; sv; sv = sv->next)
2725 VEC_safe_push (tree, heap, overlaps, sv->var);
2727 gcc_assert (overlaps != NULL);
2730 ali = add_may_alias_for_new_tag (tag, var);
2732 len = VEC_length (tree, overlaps);
2735 if (dump_file && (dump_flags & TDF_DETAILS))
2736 fprintf (dump_file, "\nnumber of overlapping subvars = %u\n", len);
2739 ali = add_may_alias_for_new_tag (tag, VEC_index (tree, overlaps, 0));
2745 for (k = 0; VEC_iterate (tree, overlaps, k, sv_var); k++)
2747 ali = add_may_alias_for_new_tag (tag, sv_var);
2751 /* Can happen only if 'Case 1' of add_may_alias_for_new_tag
2752 took place. Since more than one svar was found, we add
2753 'ali' as one of the may_aliases of the new tag. */
2754 add_may_alias (tag, ali);
2759 VEC_free (tree, heap, overlaps);
2762 p_ann->symbol_mem_tag = ali;
2763 TREE_READONLY (tag) = TREE_READONLY (var);
2764 MTAG_GLOBAL (tag) = is_global_var (var);
2767 /* This represents the used range of a variable. */
2769 typedef struct used_part
2771 HOST_WIDE_INT minused;
2772 HOST_WIDE_INT maxused;
2773 /* True if we have an explicit use/def of some portion of this variable,
2774 even if it is all of it. i.e. a.b = 5 or temp = a.b. */
2776 /* True if we have an implicit use/def of some portion of this
2777 variable. Implicit uses occur when we can't tell what part we
2778 are referencing, and have to make conservative assumptions. */
2780 /* True if the structure is only written to or taken its address. */
2784 /* An array of used_part structures, indexed by variable uid. */
2786 static htab_t used_portions;
2788 struct used_part_map
2794 /* Return true if the uid in the two used part maps are equal. */
2797 used_part_map_eq (const void *va, const void *vb)
2799 const struct used_part_map *a = (const struct used_part_map *) va;
2800 const struct used_part_map *b = (const struct used_part_map *) vb;
2801 return (a->uid == b->uid);
2804 /* Hash a from uid in a used_part_map. */
2807 used_part_map_hash (const void *item)
2809 return ((const struct used_part_map *)item)->uid;
2812 /* Free a used part map element. */
2815 free_used_part_map (void *item)
2817 free (((struct used_part_map *)item)->to);
2821 /* Lookup a used_part structure for a UID. */
2824 up_lookup (unsigned int uid)
2826 struct used_part_map *h, in;
2828 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
2834 /* Insert the pair UID, TO into the used part hashtable. */
2837 up_insert (unsigned int uid, used_part_t to)
2839 struct used_part_map *h;
2842 h = XNEW (struct used_part_map);
2845 loc = htab_find_slot_with_hash (used_portions, h,
2849 *(struct used_part_map **) loc = h;
2853 /* Given a variable uid, UID, get or create the entry in the used portions
2854 table for the variable. */
2857 get_or_create_used_part_for (size_t uid)
2860 if ((up = up_lookup (uid)) == NULL)
2862 up = XCNEW (struct used_part);
2863 up->minused = INT_MAX;
2865 up->explicit_uses = false;
2866 up->implicit_uses = false;
2867 up->write_only = true;
2874 /* Create and return a structure sub-variable for field type FIELD at
2875 offset OFFSET, with size SIZE, of variable VAR. */
2878 create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
2879 unsigned HOST_WIDE_INT size)
2882 tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
2884 /* We need to copy the various flags from VAR to SUBVAR, so that
2885 they are is_global_var iff the original variable was. */
2886 DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
2887 MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
2888 TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
2889 TREE_STATIC (subvar) = TREE_STATIC (var);
2890 TREE_READONLY (subvar) = TREE_READONLY (var);
2891 TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);
2893 /* Add the new variable to REFERENCED_VARS. */
2894 ann = get_var_ann (subvar);
2895 ann->symbol_mem_tag = NULL;
2896 add_referenced_var (subvar);
2897 SFT_PARENT_VAR (subvar) = var;
2898 SFT_OFFSET (subvar) = offset;
2899 SFT_SIZE (subvar) = size;
2904 /* Given an aggregate VAR, create the subvariables that represent its
2908 create_overlap_variables_for (tree var)
2910 VEC(fieldoff_s,heap) *fieldstack = NULL;
2912 size_t uid = DECL_UID (var);
2914 up = up_lookup (uid);
2919 push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
2920 if (VEC_length (fieldoff_s, fieldstack) != 0)
2924 bool notokay = false;
2927 HOST_WIDE_INT lastfooffset = -1;
2928 HOST_WIDE_INT lastfosize = -1;
2929 tree lastfotype = NULL_TREE;
2931 /* Not all fields have DECL_SIZE set, and those that don't, we don't
2932 know their size, and thus, can't handle.
2933 The same is true of fields with DECL_SIZE that is not an integer
2934 constant (such as variable sized fields).
2935 Fields with offsets which are not constant will have an offset < 0
2936 We *could* handle fields that are constant sized arrays, but
2937 currently don't. Doing so would require some extra changes to
2938 tree-ssa-operands.c. */
2940 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
2943 || TREE_CODE (fo->size) != INTEGER_CST
2952 /* The current heuristic we use is as follows:
2953 If the variable has no used portions in this function, no
2954 structure vars are created for it.
2956 If the variable has less than SALIAS_MAX_IMPLICIT_FIELDS,
2957 we always create structure vars for them.
2958 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
2959 some explicit uses, we create structure vars for them.
2960 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
2961 no explicit uses, we do not create structure vars for them.
2964 if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
2965 && !up->explicit_uses)
2967 if (dump_file && (dump_flags & TDF_DETAILS))
2969 fprintf (dump_file, "Variable ");
2970 print_generic_expr (dump_file, var, 0);
2971 fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
2976 /* Bail out, if we can't create overlap variables. */
2979 VEC_free (fieldoff_s, heap, fieldstack);
2983 /* Otherwise, create the variables. */
2984 subvars = lookup_subvars_for_var (var);
2986 sort_fieldstack (fieldstack);
2988 for (i = VEC_length (fieldoff_s, fieldstack);
2989 VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
2992 HOST_WIDE_INT fosize;
2995 fosize = TREE_INT_CST_LOW (fo->size);
2996 currfotype = fo->type;
2998 /* If this field isn't in the used portion,
2999 or it has the exact same offset and size as the last
3002 if (((fo->offset <= up->minused
3003 && fo->offset + fosize <= up->minused)
3004 || fo->offset >= up->maxused)
3005 || (fo->offset == lastfooffset
3006 && fosize == lastfosize
3007 && currfotype == lastfotype))
3009 sv = GGC_NEW (struct subvar);
3010 sv->next = *subvars;
3011 sv->var = create_sft (var, fo->type, fo->offset, fosize);
3015 fprintf (dump_file, "structure field tag %s created for var %s",
3016 get_name (sv->var), get_name (var));
3017 fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
3018 SFT_OFFSET (sv->var));
3019 fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
3020 SFT_SIZE (sv->var));
3021 fprintf (dump_file, "\n");
3024 lastfotype = currfotype;
3025 lastfooffset = fo->offset;
3026 lastfosize = fosize;
3030 /* Once we have created subvars, the original is no longer call
3031 clobbered on its own. Its call clobbered status depends
3032 completely on the call clobbered status of the subvars.
3034 add_referenced_var in the above loop will take care of
3035 marking subvars of global variables as call clobbered for us
3036 to start, since they are global as well. */
3037 clear_call_clobbered (var);
3040 VEC_free (fieldoff_s, heap, fieldstack);
3044 /* Find the conservative answer to the question of what portions of what
3045 structures are used by this statement. We assume that if we have a
3046 component ref with a known size + offset, that we only need that part
3047 of the structure. For unknown cases, or cases where we do something
3048 to the whole structure, we assume we need to create fields for the
3049 entire structure. */
3052 find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
3054 switch (TREE_CODE (*tp))
3056 case GIMPLE_MODIFY_STMT:
3057 /* Recurse manually here to track whether the use is in the
3058 LHS of an assignment. */
3059 find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 0), walk_subtrees, tp);
3060 return find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 1),
3061 walk_subtrees, NULL);
3067 HOST_WIDE_INT bitsize;
3068 HOST_WIDE_INT bitmaxsize;
3069 HOST_WIDE_INT bitpos;
3071 ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
3073 && var_can_have_subvars (ref)
3074 && bitmaxsize != -1)
3076 size_t uid = DECL_UID (ref);
3079 up = get_or_create_used_part_for (uid);
3081 if (bitpos <= up->minused)
3082 up->minused = bitpos;
3083 if ((bitpos + bitmaxsize >= up->maxused))
3084 up->maxused = bitpos + bitmaxsize;
3086 if (bitsize == bitmaxsize)
3087 up->explicit_uses = true;
3089 up->implicit_uses = true;
3091 up->write_only = false;
3092 up_insert (uid, up);
3099 /* This is here to make sure we mark the entire base variable as used
3100 when you take its address. Because our used portion analysis is
3101 simple, we aren't looking at casts or pointer arithmetic to see what
3102 happens when you take the address. */
3105 tree var = get_base_address (TREE_OPERAND (*tp, 0));
3110 && var_can_have_subvars (var)
3111 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3114 size_t uid = DECL_UID (var);
3116 up = get_or_create_used_part_for (uid);
3119 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3120 up->implicit_uses = true;
3122 up->write_only = false;
3124 up_insert (uid, up);
3133 for (arg = &TREE_OPERAND (*tp, 1); *arg; arg = &TREE_CHAIN (*arg))
3135 if (TREE_CODE (TREE_VALUE (*arg)) != ADDR_EXPR)
3136 find_used_portions (&TREE_VALUE (*arg), walk_subtrees, NULL);
3147 && var_can_have_subvars (var)
3148 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3151 size_t uid = DECL_UID (var);
3153 up = get_or_create_used_part_for (uid);
3156 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3157 up->implicit_uses = true;
3159 up_insert (uid, up);
3173 /* Create structure field variables for structures used in this function. */
3176 create_structure_vars (void)
3179 safe_referenced_var_iterator rvi;
3180 VEC (tree, heap) *varvec = NULL;
3183 used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
3184 free_used_part_map);
3188 block_stmt_iterator bsi;
3189 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3191 walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
3196 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
3198 /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
3201 && var_can_have_subvars (var)
3203 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3204 create_overlap_variables_for (var);
3206 htab_delete (used_portions);
3207 VEC_free (tree, heap, varvec);
3212 gate_structure_vars (void)
3214 return flag_tree_salias != 0;
3217 struct tree_opt_pass pass_create_structure_vars =
3219 "salias", /* name */
3220 gate_structure_vars, /* gate */
3221 create_structure_vars, /* execute */
3224 0, /* static_pass_number */
3226 PROP_cfg, /* properties_required */
3227 0, /* properties_provided */
3228 0, /* properties_destroyed */
3229 0, /* todo_flags_start */
3230 TODO_dump_func, /* todo_flags_finish */
3234 /* Reset the DECL_CALL_CLOBBERED flags on our referenced vars. In
3235 theory, this only needs to be done for globals. */
3238 reset_cc_flags (void)
3241 referenced_var_iterator rvi;
3243 FOR_EACH_REFERENCED_VAR (var, rvi)
3244 DECL_CALL_CLOBBERED (var) = false;
3248 struct tree_opt_pass pass_reset_cc_flags =
3252 reset_cc_flags, /* execute */
3255 0, /* static_pass_number */
3257 PROP_referenced_vars |PROP_cfg, /* properties_required */
3258 0, /* properties_provided */
3259 0, /* properties_destroyed */
3260 0, /* todo_flags_start */
3261 0, /* todo_flags_finish */