1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
3 2007, 2008, 2009 Free Software Foundation, Inc.
4 Contributed by John Carr (jfc@mit.edu).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
39 #include "splay-tree.h"
41 #include "langhooks.h"
46 #include "tree-pass.h"
47 #include "ipa-type-escape.h"
49 #include "tree-ssa-alias.h"
50 #include "pointer-set.h"
51 #include "tree-flow.h"
53 /* The aliasing API provided here solves related but different problems:
55 Say there exists (in c)
69 Consider the four questions:
71 Can a store to x1 interfere with px2->y1?
72 Can a store to x1 interfere with px2->z2?
74   Can a store to x1 change the value pointed to by py?
75   Can a store to x1 change the value pointed to by pz?
77 The answer to these questions can be yes, yes, yes, and maybe.
79 The first two questions can be answered with a simple examination
80 of the type system. If structure X contains a field of type Y then
81 a store thru a pointer to an X can overwrite any field that is
82 contained (recursively) in an X (unless we know that px1 != px2).
84 The last two of the questions can be solved in the same way as the
85 first two questions but this is too conservative. The observation
86   is that in some cases we can know which (if any) fields
87   are addressed and whether those addresses are used in bad ways.  This
88 analysis may be language specific. In C, arbitrary operations may
89 be applied to pointers. However, there is some indication that
90 this may be too conservative for some C++ types.
92 The pass ipa-type-escape does this analysis for the types whose
93 instances do not escape across the compilation boundary.
95 Historically in GCC, these two problems were combined and a single
96 data structure was used to represent the solution to these
97 problems. We now have two similar but different data structures,
98   The data structure used to solve the last two questions is similar to the
99   first, but does not contain the fields whose addresses are
100 never taken. For types that do escape the compilation unit, the
101 data structures will have identical information.
104 /* The alias sets assigned to MEMs assist the back-end in determining
105 which MEMs can alias which other MEMs. In general, two MEMs in
106 different alias sets cannot alias each other, with one important
107 exception. Consider something like:
109 struct S { int i; double d; };
111 a store to an `S' can alias something of either type `int' or type
112 `double'. (However, a store to an `int' cannot alias a `double'
113 and vice versa.) We indicate this via a tree structure that looks
121 (The arrows are directed and point downwards.)
122 In this situation we say the alias set for `struct S' is the
123 `superset' and that those for `int' and `double' are `subsets'.
125 To see whether two alias sets can point to the same memory, we must
126 see if either alias set is a subset of the other. We need not trace
127 past immediate descendants, however, since we propagate all
128 grandchildren up one level.
130 Alias set zero is implicitly a superset of all other alias sets.
131   However, there is no actual entry for alias set zero.  It is an
132 error to attempt to explicitly construct a subset of zero. */
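/* A minimal usage sketch of the relationships described above (not part
   of this file; the tree arguments are hypothetical).  With
   -fstrict-aliasing, the alias set of `struct S' is a superset of the
   alias sets of `int' and `double', so an access through `struct S'
   conflicts with either scalar access, while the two scalars never
   conflict with each other:

     static void
     alias_set_relation_example (tree s_type, tree int_type, tree dbl_type)
     {
       alias_set_type s_set = get_alias_set (s_type);
       alias_set_type i_set = get_alias_set (int_type);
       alias_set_type d_set = get_alias_set (dbl_type);

       gcc_assert (alias_sets_conflict_p (s_set, i_set));
       gcc_assert (alias_sets_conflict_p (s_set, d_set));
       gcc_assert (! alias_sets_conflict_p (i_set, d_set));
     }  */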
134 struct GTY(()) alias_set_entry_d {
135 /* The alias set number, as stored in MEM_ALIAS_SET. */
136 alias_set_type alias_set;
138   /* Nonzero if this would have a child of zero: this effectively makes this
139 alias set the same as alias set zero. */
142 /* The children of the alias set. These are not just the immediate
143 children, but, in fact, all descendants. So, if we have:
145 struct T { struct S s; float f; }
147 continuing our example above, the children here will be all of
148 `int', `double', `float', and `struct S'. */
149 splay_tree GTY((param1_is (int), param2_is (int))) children;
151 typedef struct alias_set_entry_d *alias_set_entry;
153 static int rtx_equal_for_memref_p (const_rtx, const_rtx);
154 static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
155 static void record_set (rtx, const_rtx, void *);
156 static int base_alias_check (rtx, rtx, enum machine_mode,
158 static rtx find_base_value (rtx);
159 static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
160 static int insert_subset_children (splay_tree_node, void*);
161 static alias_set_entry get_alias_set_entry (alias_set_type);
162 static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx, rtx, rtx,
163 bool (*) (const_rtx, bool));
164 static int aliases_everything_p (const_rtx);
165 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
166 static tree decl_for_component_ref (tree);
167 static rtx adjust_offset_for_component_ref (tree, rtx);
168 static int write_dependence_p (const_rtx, const_rtx, int);
170 static void memory_modified_1 (rtx, const_rtx, void *);
172 /* Set up all info needed to perform alias analysis on memory references. */
174 /* Returns the size in bytes of the mode of X. */
175 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
177 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
178 different alias sets. We ignore alias sets in functions making use
179 of variable arguments because the va_arg macros on some systems are
181 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
182 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
184 /* Cap the number of passes we make over the insns propagating alias
185 information through set chains. 10 is a completely arbitrary choice. */
186 #define MAX_ALIAS_LOOP_PASSES 10
188 /* reg_base_value[N] gives an address to which register N is related.
189 If all sets after the first add or subtract to the current value
190 or otherwise modify it so it does not point to a different top level
191 object, reg_base_value[N] is equal to the address part of the source
194 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
195 expressions represent certain special values: function arguments and
196 the stack, frame, and argument pointers.
198   The contents of an ADDRESS are not normally used; the mode of the
199 ADDRESS determines whether the ADDRESS is a function argument or some
200 other special value. Pointer equality, not rtx_equal_p, determines whether
201 two ADDRESS expressions refer to the same base address.
203 The only use of the contents of an ADDRESS is for determining if the
204   current function performs nonlocal memory references for the
205 purposes of marking the function as a constant function. */
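/* For example (an illustrative sketch, not taken from this file): after

     (set (reg 100) (symbol_ref "x"))
     (set (reg 100) (plus (reg 100) (const_int 4)))

   reg_base_value[100] still holds the base derived from the SYMBOL_REF
   for `x', because the second set merely offsets the current value and
   does not make the register point at a different top-level object.  */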
207 static GTY(()) VEC(rtx,gc) *reg_base_value;
208 static rtx *new_reg_base_value;
210 /* We preserve a copy of the old array to reduce the amount of garbage
211    produced.  About 8% of the garbage produced was attributed to this
213 static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;
215 /* Static hunks of RTL used by the aliasing code; these are initialized
216 once per function to avoid unnecessary RTL allocations. */
217 static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];
219 #define REG_BASE_VALUE(X) \
220 (REGNO (X) < VEC_length (rtx, reg_base_value) \
221 ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)
223 /* Vector indexed by N giving the initial (unchanging) value known for
224 pseudo-register N. This array is initialized in init_alias_analysis,
225 and does not change until end_alias_analysis is called. */
226 static GTY((length("reg_known_value_size"))) rtx *reg_known_value;
228 /* Indicates number of valid entries in reg_known_value. */
229 static GTY(()) unsigned int reg_known_value_size;
231 /* Vector recording for each reg_known_value whether it is due to a
232 REG_EQUIV note. Future passes (viz., reload) may replace the
233 pseudo with the equivalent expression and so we account for the
234 dependences that would be introduced if that happens.
236 The REG_EQUIV notes created in assign_parms may mention the arg
237 pointer, and there are explicit insns in the RTL that modify the
238 arg pointer. Thus we must ensure that such insns don't get
239 scheduled across each other because that would invalidate the
240 REG_EQUIV notes. One could argue that the REG_EQUIV notes are
241 wrong, but solving the problem in the scheduler will likely give
242 better code, so we do it here. */
243 static bool *reg_known_equiv_p;
245 /* True when scanning insns from the start of the rtl to the
246 NOTE_INSN_FUNCTION_BEG note. */
247 static bool copying_arguments;
249 DEF_VEC_P(alias_set_entry);
250 DEF_VEC_ALLOC_P(alias_set_entry,gc);
252 /* The splay-tree used to store the various alias set entries. */
253 static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
255 /* Build a decomposed reference object for querying the alias-oracle
256 from the MEM rtx and store it in *REF.
257 Returns false if MEM is not suitable for the alias-oracle. */
260 ao_ref_from_mem (ao_ref *ref, const_rtx mem)
262 tree expr = MEM_EXPR (mem);
268 ao_ref_init (ref, expr);
270 /* Get the base of the reference and see if we have to reject or
272 base = ao_ref_base (ref);
273 if (base == NULL_TREE)
276   /* If this is a pointer dereference of a non-SSA_NAME, punt.
277 ??? We could replace it with a pointer to anything. */
278 if (INDIRECT_REF_P (base)
279 && TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME)
282 /* If this is a reference based on a partitioned decl replace the
283 base with an INDIRECT_REF of the pointer representative we
284 created during stack slot partitioning. */
285 if (TREE_CODE (base) == VAR_DECL
286 && ! TREE_STATIC (base)
287 && cfun->gimple_df->decls_to_pointers != NULL)
290 namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
293 ref->base_alias_set = get_alias_set (base);
294 ref->base = build1 (INDIRECT_REF, TREE_TYPE (base), *(tree *)namep);
298 ref->ref_alias_set = MEM_ALIAS_SET (mem);
300 /* For NULL MEM_OFFSET the MEM_EXPR may have been stripped arbitrarily
301 without recording offset or extent adjustments properly. */
302 if (MEM_OFFSET (mem) == NULL_RTX)
309 ref->offset += INTVAL (MEM_OFFSET (mem)) * BITS_PER_UNIT;
312 /* NULL MEM_SIZE should not really happen with a non-NULL MEM_EXPR,
313      but just play it safe here.  The size may have been adjusted together
314 with the offset, so we need to take it if it is set and not rely
315 on MEM_EXPR here (which has the size determining parts potentially
316 stripped anyway). We lose precision for max_size which is only
317 available from the remaining MEM_EXPR. */
318 if (MEM_SIZE (mem) == NULL_RTX)
325 ref->size = INTVAL (MEM_SIZE (mem)) * BITS_PER_UNIT;
331 /* Query the alias-oracle on whether the two memory rtx X and MEM may
332    alias.  If TBAA_P is set, also apply TBAA.  Returns true if the
333 two rtxen may alias, false otherwise. */
336 rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
340 if (!ao_ref_from_mem (&ref1, x)
341 || !ao_ref_from_mem (&ref2, mem))
344 return refs_may_alias_p_1 (&ref1, &ref2, tbaa_p);
347 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
348 such an entry, or NULL otherwise. */
350 static inline alias_set_entry
351 get_alias_set_entry (alias_set_type alias_set)
353 return VEC_index (alias_set_entry, alias_sets, alias_set);
356 /* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
357 the two MEMs cannot alias each other. */
360 mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
362 /* Perform a basic sanity check. Namely, that there are no alias sets
363 if we're not using strict aliasing. This helps to catch bugs
364 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
365 where a MEM is allocated in some way other than by the use of
366 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
367 use alias sets to indicate that spilled registers cannot alias each
368 other, we might need to remove this check. */
369 gcc_assert (flag_strict_aliasing
370 || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));
372 return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
375 /* Insert the NODE into the splay tree given by DATA. Used by
376 record_alias_subset via splay_tree_foreach. */
379 insert_subset_children (splay_tree_node node, void *data)
381 splay_tree_insert ((splay_tree) data, node->key, node->value);
386 /* Return true if the first alias set is a subset of the second. */
389 alias_set_subset_of (alias_set_type set1, alias_set_type set2)
393 /* Everything is a subset of the "aliases everything" set. */
397 /* Otherwise, check if set1 is a subset of set2. */
398 ase = get_alias_set_entry (set2);
400 && ((ase->has_zero_child && set1 == 0)
401 || splay_tree_lookup (ase->children,
402 (splay_tree_key) set1)))
407 /* Return 1 if the two specified alias sets may conflict. */
410 alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
415 if (alias_sets_must_conflict_p (set1, set2))
418 /* See if the first alias set is a subset of the second. */
419 ase = get_alias_set_entry (set1);
421 && (ase->has_zero_child
422 || splay_tree_lookup (ase->children,
423 (splay_tree_key) set2)))
426 /* Now do the same, but with the alias sets reversed. */
427 ase = get_alias_set_entry (set2);
429 && (ase->has_zero_child
430 || splay_tree_lookup (ase->children,
431 (splay_tree_key) set1)))
434 /* The two alias sets are distinct and neither one is the
435 child of the other. Therefore, they cannot conflict. */
440 walk_mems_2 (rtx *x, rtx mem)
444 if (alias_sets_conflict_p (MEM_ALIAS_SET(*x), MEM_ALIAS_SET(mem)))
453 walk_mems_1 (rtx *x, rtx *pat)
457   /* Visit all MEMs in *PAT and check independence.  */
458 if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
459 /* Indicate that dependence was determined and stop traversal. */
467 /* Return 1 if the two specified instructions have MEM exprs with conflicting alias sets.  */
469 insn_alias_sets_conflict_p (rtx insn1, rtx insn2)
471 /* For each pair of MEMs in INSN1 and INSN2 check their independence. */
472 return for_each_rtx (&PATTERN (insn1), (rtx_function) walk_mems_1,
476 /* Return 1 if the two specified alias sets will always conflict. */
479 alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
481 if (set1 == 0 || set2 == 0 || set1 == set2)
487 /* Return 1 if any MEM object of type T1 will always conflict (using the
488 dependency routines in this file) with any MEM object of type T2.
489 This is used when allocating temporary storage. If T1 and/or T2 are
490 NULL_TREE, it means we know nothing about the storage. */
493 objects_must_conflict_p (tree t1, tree t2)
495 alias_set_type set1, set2;
497 /* If neither has a type specified, we don't know if they'll conflict
498 because we may be using them to store objects of various types, for
499 example the argument and local variables areas of inlined functions. */
500 if (t1 == 0 && t2 == 0)
503 /* If they are the same type, they must conflict. */
505 /* Likewise if both are volatile. */
506 || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
509 set1 = t1 ? get_alias_set (t1) : 0;
510 set2 = t2 ? get_alias_set (t2) : 0;
512 /* We can't use alias_sets_conflict_p because we must make sure
513 that every subtype of t1 will conflict with every subtype of
514 t2 for which a pair of subobjects of these respective subtypes
515 overlaps on the stack. */
516 return alias_sets_must_conflict_p (set1, set2);
519 /* Return true if all nested component references handled by
520 get_inner_reference in T are such that we should use the alias set
521 provided by the object at the heart of T.
523 This is true for non-addressable components (which don't have their
524 own alias set), as well as components of objects in alias set zero.
525    This latter point is a special case wherein we wish to override the
526 alias set used by the component, but we don't have per-FIELD_DECL
527 assignable alias sets. */
530 component_uses_parent_alias_set (const_tree t)
534 /* If we're at the end, it vacuously uses its own alias set. */
535 if (!handled_component_p (t))
538 switch (TREE_CODE (t))
541 if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
546 case ARRAY_RANGE_REF:
547 if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
556 /* Bitfields and casts are never addressable. */
560 t = TREE_OPERAND (t, 0);
561 if (get_alias_set (TREE_TYPE (t)) == 0)
566 /* Return the alias set for the memory pointed to by T, which may be
567 either a type or an expression. Return -1 if there is nothing
568 special about dereferencing T. */
570 static alias_set_type
571 get_deref_alias_set_1 (tree t)
573 /* If we're not doing any alias analysis, just assume everything
574 aliases everything else. */
575 if (!flag_strict_aliasing)
578 /* All we care about is the type. */
582 /* If we have an INDIRECT_REF via a void pointer, we don't
583 know anything about what that might alias. Likewise if the
584 pointer is marked that way. */
585 if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
586 || TYPE_REF_CAN_ALIAS_ALL (t))
592 /* Return the alias set for the memory pointed to by T, which may be
593 either a type or an expression. */
596 get_deref_alias_set (tree t)
598 alias_set_type set = get_deref_alias_set_1 (t);
600 /* Fall back to the alias-set of the pointed-to type. */
605 set = get_alias_set (TREE_TYPE (t));
611 /* Return the alias set for T, which may be either a type or an
612 expression. Call language-specific routine for help, if needed. */
615 get_alias_set (tree t)
619 /* If we're not doing any alias analysis, just assume everything
620 aliases everything else. Also return 0 if this or its type is
622 if (! flag_strict_aliasing || t == error_mark_node
624 && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
627 /* We can be passed either an expression or a type. This and the
628 language-specific routine may make mutually-recursive calls to each other
629 to figure out what to do. At each juncture, we see if this is a tree
630 that the language may need to handle specially. First handle things that
636 /* Remove any nops, then give the language a chance to do
637 something with this tree before we look at it. */
639 set = lang_hooks.get_alias_set (t);
643 /* First see if the actual object referenced is an INDIRECT_REF from a
644 restrict-qualified pointer or a "void *". */
645 while (handled_component_p (inner))
647 inner = TREE_OPERAND (inner, 0);
651 if (INDIRECT_REF_P (inner))
653 set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0));
658 /* Otherwise, pick up the outermost object that we could have a pointer
659 to, processing conversions as above. */
660 while (component_uses_parent_alias_set (t))
662 t = TREE_OPERAND (t, 0);
666 /* If we've already determined the alias set for a decl, just return
667 it. This is necessary for C++ anonymous unions, whose component
668 variables don't look like union members (boo!). */
669 if (TREE_CODE (t) == VAR_DECL
670 && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
671 return MEM_ALIAS_SET (DECL_RTL (t));
673 /* Now all we care about is the type. */
677 /* Variant qualifiers don't affect the alias set, so get the main
678 variant. Always use the canonical type as well.
679 If this is a type with a known alias set, return it. */
680 t = TYPE_MAIN_VARIANT (t);
681 if (TYPE_CANONICAL (t))
682 t = TYPE_CANONICAL (t);
683 if (TYPE_ALIAS_SET_KNOWN_P (t))
684 return TYPE_ALIAS_SET (t);
686 /* We don't want to set TYPE_ALIAS_SET for incomplete types. */
687 if (!COMPLETE_TYPE_P (t))
689 /* For arrays with unknown size the conservative answer is the
690 alias set of the element type. */
691 if (TREE_CODE (t) == ARRAY_TYPE)
692 return get_alias_set (TREE_TYPE (t));
694 /* But return zero as a conservative answer for incomplete types. */
698 /* See if the language has special handling for this type. */
699 set = lang_hooks.get_alias_set (t);
703 /* There are no objects of FUNCTION_TYPE, so there's no point in
704 using up an alias set for them. (There are, of course, pointers
705 and references to functions, but that's different.) */
706 else if (TREE_CODE (t) == FUNCTION_TYPE
707 || TREE_CODE (t) == METHOD_TYPE)
710 /* Unless the language specifies otherwise, let vector types alias
711 their components. This avoids some nasty type punning issues in
712 normal usage. And indeed lets vectors be treated more like an
714 else if (TREE_CODE (t) == VECTOR_TYPE)
715 set = get_alias_set (TREE_TYPE (t));
717 /* Unless the language specifies otherwise, treat array types the
718 same as their components. This avoids the asymmetry we get
719 through recording the components. Consider accessing a
720 character(kind=1) through a reference to a character(kind=1)[1:1].
721 Or consider if we want to assign integer(kind=4)[0:D.1387] and
722 integer(kind=4)[4] the same alias set or not.
723 Just be pragmatic here and make sure the array and its element
724 type get the same alias set assigned. */
725 else if (TREE_CODE (t) == ARRAY_TYPE
726 && !TYPE_NONALIASED_COMPONENT (t))
727 set = get_alias_set (TREE_TYPE (t));
730 /* Otherwise make a new alias set for this type. */
731 set = new_alias_set ();
733 TYPE_ALIAS_SET (t) = set;
735 /* If this is an aggregate type, we must record any component aliasing
737 if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
738 record_component_aliases (t);
743 /* Return a brand-new alias set. */
748 if (flag_strict_aliasing)
751 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
752 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
753 return VEC_length (alias_set_entry, alias_sets) - 1;
759 /* Indicate that things in SUBSET can alias things in SUPERSET, but that
760 not everything that aliases SUPERSET also aliases SUBSET. For example,
761 in C, a store to an `int' can alias a load of a structure containing an
762 `int', and vice versa. But it can't alias a load of a 'double' member
763 of the same structure. Here, the structure would be the SUPERSET and
764 `int' the SUBSET. This relationship is also described in the comment at
765 the beginning of this file.
767 This function should be called only once per SUPERSET/SUBSET pair.
769 It is illegal for SUPERSET to be zero; everything is implicitly a
770 subset of alias set zero. */
773 record_alias_subset (alias_set_type superset, alias_set_type subset)
775 alias_set_entry superset_entry;
776 alias_set_entry subset_entry;
778 /* It is possible in complex type situations for both sets to be the same,
779 in which case we can ignore this operation. */
780 if (superset == subset)
783 gcc_assert (superset);
785 superset_entry = get_alias_set_entry (superset);
786 if (superset_entry == 0)
788 /* Create an entry for the SUPERSET, so that we have a place to
789 attach the SUBSET. */
790 superset_entry = GGC_NEW (struct alias_set_entry_d);
791 superset_entry->alias_set = superset;
792 superset_entry->children
793 = splay_tree_new_ggc (splay_tree_compare_ints);
794 superset_entry->has_zero_child = 0;
795 VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
799 superset_entry->has_zero_child = 1;
802 subset_entry = get_alias_set_entry (subset);
803 /* If there is an entry for the subset, enter all of its children
804 (if they are not already present) as children of the SUPERSET. */
807 if (subset_entry->has_zero_child)
808 superset_entry->has_zero_child = 1;
810 splay_tree_foreach (subset_entry->children, insert_subset_children,
811 superset_entry->children);
814 /* Enter the SUBSET itself as a child of the SUPERSET. */
815 splay_tree_insert (superset_entry->children,
816 (splay_tree_key) subset, 0);
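/* An illustrative call (the struct_type variable is hypothetical): to
   record that an `int' member may be accessed through its containing
   structure, the structure's set is the SUPERSET and `int' the SUBSET,
   which is what record_component_aliases below does for each addressable
   field:

     record_alias_subset (get_alias_set (struct_type),
                          get_alias_set (integer_type_node));  */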
820 /* Record that component types of TYPE, if any, are part of that type for
821 aliasing purposes. For record types, we only record component types
822 for fields that are not marked non-addressable. For array types, we
823 only record the component type if it is not marked non-aliased. */
826 record_component_aliases (tree type)
828 alias_set_type superset = get_alias_set (type);
834 switch (TREE_CODE (type))
838 case QUAL_UNION_TYPE:
839 /* Recursively record aliases for the base classes, if there are any. */
840 if (TYPE_BINFO (type))
843 tree binfo, base_binfo;
845 for (binfo = TYPE_BINFO (type), i = 0;
846 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
847 record_alias_subset (superset,
848 get_alias_set (BINFO_TYPE (base_binfo)));
850 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
851 if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
852 record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
856 record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
859 /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
867 /* Allocate an alias set for use in storing and reading from the varargs
870 static GTY(()) alias_set_type varargs_set = -1;
873 get_varargs_alias_set (void)
876 /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
877 varargs alias set to an INDIRECT_REF (FIXME!), so we can't
878 consistently use the varargs alias set for loads from the varargs
879 area. So don't use it anywhere. */
882 if (varargs_set == -1)
883 varargs_set = new_alias_set ();
889 /* Likewise, but used for the fixed portions of the frame, e.g., register
892 static GTY(()) alias_set_type frame_set = -1;
895 get_frame_alias_set (void)
898 frame_set = new_alias_set ();
903 /* Inside SRC, the source of a SET, find a base address. */
906 find_base_value (rtx src)
910 #if defined (FIND_BASE_TERM)
911 /* Try machine-dependent ways to find the base term. */
912 src = FIND_BASE_TERM (src);
915 switch (GET_CODE (src))
923 /* At the start of a function, argument registers have known base
924 values which may be lost later. Returning an ADDRESS
925 expression here allows optimization based on argument values
926 even when the argument registers are used for other purposes. */
927 if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
928 return new_reg_base_value[regno];
930 /* If a pseudo has a known base value, return it. Do not do this
931 for non-fixed hard regs since it can result in a circular
932 dependency chain for registers which have values at function entry.
934 The test above is not sufficient because the scheduler may move
935 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
936 if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
937 && regno < VEC_length (rtx, reg_base_value))
939 /* If we're inside init_alias_analysis, use new_reg_base_value
940 to reduce the number of relaxation iterations. */
941 if (new_reg_base_value && new_reg_base_value[regno]
942 && DF_REG_DEF_COUNT (regno) == 1)
943 return new_reg_base_value[regno];
945 if (VEC_index (rtx, reg_base_value, regno))
946 return VEC_index (rtx, reg_base_value, regno);
952 /* Check for an argument passed in memory. Only record in the
953 copying-arguments block; it is too hard to track changes
955 if (copying_arguments
956 && (XEXP (src, 0) == arg_pointer_rtx
957 || (GET_CODE (XEXP (src, 0)) == PLUS
958 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
959 return gen_rtx_ADDRESS (VOIDmode, src);
964 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
967 /* ... fall through ... */
972 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
974 /* If either operand is a REG that is a known pointer, then it
976 if (REG_P (src_0) && REG_POINTER (src_0))
977 return find_base_value (src_0);
978 if (REG_P (src_1) && REG_POINTER (src_1))
979 return find_base_value (src_1);
981 /* If either operand is a REG, then see if we already have
982 a known value for it. */
985 temp = find_base_value (src_0);
992 temp = find_base_value (src_1);
997       /* If either base is a named object or a special address
998 (like an argument or stack reference), then use it for the
1001 && (GET_CODE (src_0) == SYMBOL_REF
1002 || GET_CODE (src_0) == LABEL_REF
1003 || (GET_CODE (src_0) == ADDRESS
1004 && GET_MODE (src_0) != VOIDmode)))
1008 && (GET_CODE (src_1) == SYMBOL_REF
1009 || GET_CODE (src_1) == LABEL_REF
1010 || (GET_CODE (src_1) == ADDRESS
1011 && GET_MODE (src_1) != VOIDmode)))
1014 /* Guess which operand is the base address:
1015 If either operand is a symbol, then it is the base. If
1016 either operand is a CONST_INT, then the other is the base. */
1017 if (CONST_INT_P (src_1) || CONSTANT_P (src_0))
1018 return find_base_value (src_0);
1019 else if (CONST_INT_P (src_0) || CONSTANT_P (src_1))
1020 return find_base_value (src_1);
1026 /* The standard form is (lo_sum reg sym) so look only at the
1028 return find_base_value (XEXP (src, 1));
1031       /* If the second operand is constant, set the base
1032 address to the first operand. */
1033 if (CONST_INT_P (XEXP (src, 1)) && INTVAL (XEXP (src, 1)) != 0)
1034 return find_base_value (XEXP (src, 0));
1038 if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
1048 return find_base_value (XEXP (src, 0));
1051 case SIGN_EXTEND: /* used for NT/Alpha pointers */
1053 rtx temp = find_base_value (XEXP (src, 0));
1055 if (temp != 0 && CONSTANT_P (temp))
1056 temp = convert_memory_address (Pmode, temp);
1068 /* Called from init_alias_analysis indirectly through note_stores. */
1070 /* While scanning insns to find base values, reg_seen[N] is nonzero if
1071 register N has been set in this function. */
1072 static char *reg_seen;
1074 /* Addresses which are known not to alias anything else are identified
1075 by a unique integer. */
1076 static int unique_id;
1079 record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
1088 regno = REGNO (dest);
1090 gcc_assert (regno < VEC_length (rtx, reg_base_value));
1092 /* If this spans multiple hard registers, then we must indicate that every
1093 register has an unusable value. */
1094 if (regno < FIRST_PSEUDO_REGISTER)
1095 n = hard_regno_nregs[regno][GET_MODE (dest)];
1102 reg_seen[regno + n] = 1;
1103 new_reg_base_value[regno + n] = 0;
1110 /* A CLOBBER wipes out any old value but does not prevent a previously
1111 unset register from acquiring a base address (i.e. reg_seen is not
1113 if (GET_CODE (set) == CLOBBER)
1115 new_reg_base_value[regno] = 0;
1118 src = SET_SRC (set);
1122 if (reg_seen[regno])
1124 new_reg_base_value[regno] = 0;
1127 reg_seen[regno] = 1;
1128 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
1129 GEN_INT (unique_id++));
1133 /* If this is not the first set of REGNO, see whether the new value
1134 is related to the old one. There are two cases of interest:
1136 (1) The register might be assigned an entirely new value
1137 that has the same base term as the original set.
1139 (2) The set might be a simple self-modification that
1140 cannot change REGNO's base value.
1142 If neither case holds, reject the original base value as invalid.
1143 Note that the following situation is not detected:
1145 extern int x, y; int *p = &x; p += (&y-&x);
1147 ANSI C does not allow computing the difference of addresses
1148 of distinct top level objects. */
1149 if (new_reg_base_value[regno] != 0
1150 && find_base_value (src) != new_reg_base_value[regno])
1151 switch (GET_CODE (src))
1155 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
1156 new_reg_base_value[regno] = 0;
1159 /* If the value we add in the PLUS is also a valid base value,
1160 this might be the actual base value, and the original value
1163 rtx other = NULL_RTX;
1165 if (XEXP (src, 0) == dest)
1166 other = XEXP (src, 1);
1167 else if (XEXP (src, 1) == dest)
1168 other = XEXP (src, 0);
1170 if (! other || find_base_value (other))
1171 new_reg_base_value[regno] = 0;
1175 if (XEXP (src, 0) != dest || !CONST_INT_P (XEXP (src, 1)))
1176 new_reg_base_value[regno] = 0;
1179 new_reg_base_value[regno] = 0;
1182 /* If this is the first set of a register, record the value. */
1183 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
1184 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
1185 new_reg_base_value[regno] = find_base_value (src);
1187 reg_seen[regno] = 1;
1190 /* If a value is known for REGNO, return it. */
1193 get_reg_known_value (unsigned int regno)
1195 if (regno >= FIRST_PSEUDO_REGISTER)
1197 regno -= FIRST_PSEUDO_REGISTER;
1198 if (regno < reg_known_value_size)
1199 return reg_known_value[regno];
1207 set_reg_known_value (unsigned int regno, rtx val)
1209 if (regno >= FIRST_PSEUDO_REGISTER)
1211 regno -= FIRST_PSEUDO_REGISTER;
1212 if (regno < reg_known_value_size)
1213 reg_known_value[regno] = val;
1217 /* Similarly for reg_known_equiv_p. */
1220 get_reg_known_equiv_p (unsigned int regno)
1222 if (regno >= FIRST_PSEUDO_REGISTER)
1224 regno -= FIRST_PSEUDO_REGISTER;
1225 if (regno < reg_known_value_size)
1226 return reg_known_equiv_p[regno];
1232 set_reg_known_equiv_p (unsigned int regno, bool val)
1234 if (regno >= FIRST_PSEUDO_REGISTER)
1236 regno -= FIRST_PSEUDO_REGISTER;
1237 if (regno < reg_known_value_size)
1238 reg_known_equiv_p[regno] = val;
1243 /* Returns a canonical version of X, from the point of view of alias
1244 analysis. (For example, if X is a MEM whose address is a register,
1245 and the register has a known value (say a SYMBOL_REF), then a MEM
1246 whose address is the SYMBOL_REF is returned.) */
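/* For instance (an illustrative sketch): if the known value of pseudo 100
   is (symbol_ref "x"), then

     canon_rtx (gen_rtx_PLUS (Pmode, regno_reg_rtx[100], GEN_INT (8)))

   substitutes the known value and returns the result of
   plus_constant ((symbol_ref "x"), 8).  */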
1251 /* Recursively look for equivalences. */
1252 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1254 rtx t = get_reg_known_value (REGNO (x));
1258 return canon_rtx (t);
1261 if (GET_CODE (x) == PLUS)
1263 rtx x0 = canon_rtx (XEXP (x, 0));
1264 rtx x1 = canon_rtx (XEXP (x, 1));
1266 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
1268 if (CONST_INT_P (x0))
1269 return plus_constant (x1, INTVAL (x0));
1270 else if (CONST_INT_P (x1))
1271 return plus_constant (x0, INTVAL (x1));
1272 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
1276 /* This gives us much better alias analysis when called from
1277 the loop optimizer. Note we want to leave the original
1278 MEM alone, but need to return the canonicalized MEM with
1279      all the flags retaining their original values.
1281 x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));
1286 /* Return 1 if X and Y are identical-looking rtx's.
1287    Expect that X and Y have already been canonicalized.
1289 We use the data in reg_known_value above to see if two registers with
1290 different numbers are, in fact, equivalent. */
1293 rtx_equal_for_memref_p (const_rtx x, const_rtx y)
1300 if (x == 0 && y == 0)
1302 if (x == 0 || y == 0)
1308 code = GET_CODE (x);
1309 /* Rtx's of different codes cannot be equal. */
1310 if (code != GET_CODE (y))
1313 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
1314 (REG:SI x) and (REG:HI x) are NOT equivalent. */
1316 if (GET_MODE (x) != GET_MODE (y))
1319 /* Some RTL can be compared without a recursive examination. */
1323 return REGNO (x) == REGNO (y);
1326 return XEXP (x, 0) == XEXP (y, 0);
1329 return XSTR (x, 0) == XSTR (y, 0);
1335 /* There's no need to compare the contents of CONST_DOUBLEs or
1336 CONST_INTs because pointer equality is a good enough
1337 comparison for these nodes. */
1344 /* canon_rtx knows how to handle plus. No need to canonicalize. */
1346 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
1347 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
1348 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
1349 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
1350   /* For commutative operations, the RTXs match if the operands match in any
1351 order. Also handle the simple binary and unary cases without a loop. */
1352 if (COMMUTATIVE_P (x))
1354 rtx xop0 = canon_rtx (XEXP (x, 0));
1355 rtx yop0 = canon_rtx (XEXP (y, 0));
1356 rtx yop1 = canon_rtx (XEXP (y, 1));
1358 return ((rtx_equal_for_memref_p (xop0, yop0)
1359 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
1360 || (rtx_equal_for_memref_p (xop0, yop1)
1361 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
1363 else if (NON_COMMUTATIVE_P (x))
1365 return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1366 canon_rtx (XEXP (y, 0)))
1367 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
1368 canon_rtx (XEXP (y, 1))));
1370 else if (UNARY_P (x))
1371 return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1372 canon_rtx (XEXP (y, 0)));
1374 /* Compare the elements. If any pair of corresponding elements
1375      fails to match, return 0 for the whole thing.
1377 Limit cases to types which actually appear in addresses. */
1379 fmt = GET_RTX_FORMAT (code);
1380 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1385 if (XINT (x, i) != XINT (y, i))
1390 /* Two vectors must have the same length. */
1391 if (XVECLEN (x, i) != XVECLEN (y, i))
1394 /* And the corresponding elements must match. */
1395 for (j = 0; j < XVECLEN (x, i); j++)
1396 if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
1397 canon_rtx (XVECEXP (y, i, j))) == 0)
1402 if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
1403 canon_rtx (XEXP (y, i))) == 0)
1407 /* This can happen for asm operands. */
1409 if (strcmp (XSTR (x, i), XSTR (y, i)))
1413 /* This can happen for an asm which clobbers memory. */
1417 /* It is believed that rtx's at this level will never
1418 contain anything but integers and other rtx's,
1419 except for within LABEL_REFs and SYMBOL_REFs. */
1428 find_base_term (rtx x)
1431 struct elt_loc_list *l;
1433 #if defined (FIND_BASE_TERM)
1434 /* Try machine-dependent ways to find the base term. */
1435 x = FIND_BASE_TERM (x);
1438 switch (GET_CODE (x))
1441 return REG_BASE_VALUE (x);
1444 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
1454 return find_base_term (XEXP (x, 0));
1457 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
1459 rtx temp = find_base_term (XEXP (x, 0));
1461 if (temp != 0 && CONSTANT_P (temp))
1462 temp = convert_memory_address (Pmode, temp);
1468 val = CSELIB_VAL_PTR (x);
1471 for (l = val->locs; l; l = l->next)
1472 if ((x = find_base_term (l->loc)) != 0)
1477 /* The standard form is (lo_sum reg sym) so look only at the
1479 return find_base_term (XEXP (x, 1));
1483 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
1489 rtx tmp1 = XEXP (x, 0);
1490 rtx tmp2 = XEXP (x, 1);
1492 /* This is a little bit tricky since we have to determine which of
1493 the two operands represents the real base address. Otherwise this
1494 routine may return the index register instead of the base register.
1496 That may cause us to believe no aliasing was possible, when in
1497 fact aliasing is possible.
1499 We use a few simple tests to guess the base register. Additional
1500 tests can certainly be added. For example, if one of the operands
1501 is a shift or multiply, then it must be the index register and the
1502 other operand is the base register. */
1504 if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
1505 return find_base_term (tmp2);
1507 /* If either operand is known to be a pointer, then use it
1508 to determine the base term. */
1509 if (REG_P (tmp1) && REG_POINTER (tmp1))
1511 rtx base = find_base_term (tmp1);
1516 if (REG_P (tmp2) && REG_POINTER (tmp2))
1518 rtx base = find_base_term (tmp2);
1523 /* Neither operand was known to be a pointer. Go ahead and find the
1524 base term for both operands. */
1525 tmp1 = find_base_term (tmp1);
1526 tmp2 = find_base_term (tmp2);
1528       /* If either base term is a named object or a special address
1529 (like an argument or stack reference), then use it for the
1532 && (GET_CODE (tmp1) == SYMBOL_REF
1533 || GET_CODE (tmp1) == LABEL_REF
1534 || (GET_CODE (tmp1) == ADDRESS
1535 && GET_MODE (tmp1) != VOIDmode)))
1539 && (GET_CODE (tmp2) == SYMBOL_REF
1540 || GET_CODE (tmp2) == LABEL_REF
1541 || (GET_CODE (tmp2) == ADDRESS
1542 && GET_MODE (tmp2) != VOIDmode)))
1545 /* We could not determine which of the two operands was the
1546 base register and which was the index. So we can determine
1547 nothing from the base alias check. */
1552 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
1553 return find_base_term (XEXP (x, 0));
1565 /* Return 0 if the addresses X and Y are known to point to different
1566 objects, 1 if they might be pointers to the same object. */
1569 base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
1570 enum machine_mode y_mode)
1572 rtx x_base = find_base_term (x);
1573 rtx y_base = find_base_term (y);
1575   /* If the address itself has no known base, see if a known equivalent
1576 value has one. If either address still has no known base, nothing
1577 is known about aliasing. */
1582 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
1585 x_base = find_base_term (x_c);
1593 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
1596 y_base = find_base_term (y_c);
1601   /* If the base addresses are equal, nothing is known about aliasing.  */
1602 if (rtx_equal_p (x_base, y_base))
1605 /* The base addresses are different expressions. If they are not accessed
1606 via AND, there is no conflict. We can bring knowledge of object
1607 alignment into play here. For example, on alpha, "char a, b;" can
1608      alias one another, though "char a; long b;" cannot.  AND addresses may
1609 implicitly alias surrounding objects; i.e. unaligned access in DImode
1610 via AND address can alias all surrounding object types except those
1611      with alignment 8 or higher.  */
1612 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
1614 if (GET_CODE (x) == AND
1615 && (!CONST_INT_P (XEXP (x, 1))
1616 || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
1618 if (GET_CODE (y) == AND
1619 && (!CONST_INT_P (XEXP (y, 1))
1620 || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
1623 /* Differing symbols not accessed via AND never alias. */
1624 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
1627   /* If one address is a stack reference, there can be no alias:
1628 stack references using different base registers do not alias,
1629 a stack reference can not alias a parameter, and a stack reference
1630 can not alias a global. */
1631 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
1632 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
1635 if (! flag_argument_noalias)
1638 if (flag_argument_noalias > 1)
1641 /* Weak noalias assertion (arguments are distinct, but may match globals). */
1642 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
1645 /* Convert the address X into something we can use. This is done by returning
1646 it unchanged unless it is a value; in the latter case we call cselib to get
1647 a more useful rtx. */
1653 struct elt_loc_list *l;
1655 if (GET_CODE (x) != VALUE)
1657 v = CSELIB_VAL_PTR (x);
1660 for (l = v->locs; l; l = l->next)
1661 if (CONSTANT_P (l->loc))
1663 for (l = v->locs; l; l = l->next)
1664 if (!REG_P (l->loc) && !MEM_P (l->loc))
1667 return v->locs->loc;
1672 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
1673 where SIZE is the size in bytes of the memory reference. If ADDR
1674 is not modified by the memory reference then ADDR is returned. */
1677 addr_side_effect_eval (rtx addr, int size, int n_refs)
1681 switch (GET_CODE (addr))
1684 offset = (n_refs + 1) * size;
1687 offset = -(n_refs + 1) * size;
1690 offset = n_refs * size;
1693 offset = -n_refs * size;
1701 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
1704 addr = XEXP (addr, 0);
1705 addr = canon_rtx (addr);
1710 /* Return nonzero if X and Y (memory addresses) could reference the
1711 same location in memory. C is an offset accumulator. When
1712 C is nonzero, we are testing aliases between X and Y + C.
1713 XSIZE is the size in bytes of the X reference,
1714 similarly YSIZE is the size in bytes for Y.
1715    Expect that canon_rtx has already been called for X and Y.
1717 If XSIZE or YSIZE is zero, we do not know the amount of memory being
1718 referenced (the reference was BLKmode), so make the most pessimistic
1721 If XSIZE or YSIZE is negative, we may access memory outside the object
1722 being referenced as a side effect. This can happen when using AND to
1723 align memory references, as is done on the Alpha.
1725    It would be nice to notice that varying addresses cannot conflict with fp if no
1726    local variables had their addresses taken, but that's too hard for now.  */
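/* A worked example (illustrative): with X = (symbol_ref "a"), XSIZE = 4,
   Y = (plus (symbol_ref "a") (const_int 4)), YSIZE = 4 and C = 0, the
   PLUS handling folds the constant into C, leaving equal addresses with
   C = 4; since C >= 0 and XSIZE is not greater than C, the references
   are disjoint and 0 is returned.  */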
1729 memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
1731 if (GET_CODE (x) == VALUE)
1733 if (GET_CODE (y) == VALUE)
1735 if (GET_CODE (x) == HIGH)
1737 else if (GET_CODE (x) == LO_SUM)
1740 x = addr_side_effect_eval (x, xsize, 0);
1741 if (GET_CODE (y) == HIGH)
1743 else if (GET_CODE (y) == LO_SUM)
1746 y = addr_side_effect_eval (y, ysize, 0);
1748 if (rtx_equal_for_memref_p (x, y))
1750 if (xsize <= 0 || ysize <= 0)
1752 if (c >= 0 && xsize > c)
1754 if (c < 0 && ysize+c > 0)
1759 /* This code used to check for conflicts involving stack references and
1760 globals but the base address alias code now handles these cases. */
1762 if (GET_CODE (x) == PLUS)
1764 /* The fact that X is canonicalized means that this
1765 PLUS rtx is canonicalized. */
1766 rtx x0 = XEXP (x, 0);
1767 rtx x1 = XEXP (x, 1);
1769 if (GET_CODE (y) == PLUS)
1771 /* The fact that Y is canonicalized means that this
1772 PLUS rtx is canonicalized. */
1773 rtx y0 = XEXP (y, 0);
1774 rtx y1 = XEXP (y, 1);
1776 if (rtx_equal_for_memref_p (x1, y1))
1777 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1778 if (rtx_equal_for_memref_p (x0, y0))
1779 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1780 if (CONST_INT_P (x1))
1782 if (CONST_INT_P (y1))
1783 return memrefs_conflict_p (xsize, x0, ysize, y0,
1784 c - INTVAL (x1) + INTVAL (y1));
1786 return memrefs_conflict_p (xsize, x0, ysize, y,
1789 else if (CONST_INT_P (y1))
1790 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1794 else if (CONST_INT_P (x1))
1795 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1797 else if (GET_CODE (y) == PLUS)
1799 /* The fact that Y is canonicalized means that this
1800 PLUS rtx is canonicalized. */
1801 rtx y0 = XEXP (y, 0);
1802 rtx y1 = XEXP (y, 1);
1804 if (CONST_INT_P (y1))
1805 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1810 if (GET_CODE (x) == GET_CODE (y))
1811 switch (GET_CODE (x))
1815 /* Handle cases where we expect the second operands to be the
1816 same, and check only whether the first operand would conflict
1819 rtx x1 = canon_rtx (XEXP (x, 1));
1820 rtx y1 = canon_rtx (XEXP (y, 1));
1821 if (! rtx_equal_for_memref_p (x1, y1))
1823 x0 = canon_rtx (XEXP (x, 0));
1824 y0 = canon_rtx (XEXP (y, 0));
1825 if (rtx_equal_for_memref_p (x0, y0))
1826 return (xsize == 0 || ysize == 0
1827 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1829 /* Can't properly adjust our sizes. */
1830 if (!CONST_INT_P (x1))
1832 xsize /= INTVAL (x1);
1833 ysize /= INTVAL (x1);
1835 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1842 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1843 as an access with indeterminate size. Assume that references
1844 besides AND are aligned, so if the size of the other reference is
1845 at least as large as the alignment, assume no other overlap. */
1846 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)))
1848 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1850 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
1852 if (GET_CODE (y) == AND && CONST_INT_P (XEXP (y, 1)))
1854 /* ??? If we are indexing far enough into the array/structure, we
1855 may yet be able to determine that we can not overlap. But we
1856 	 also need to know that we are far enough from the end not to overlap
1857 a following reference, so we do nothing with that for now. */
1858 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1860 return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
1865 if (CONST_INT_P (x) && CONST_INT_P (y))
1867 c += (INTVAL (y) - INTVAL (x));
1868 return (xsize <= 0 || ysize <= 0
1869 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1872 if (GET_CODE (x) == CONST)
1874 if (GET_CODE (y) == CONST)
1875 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1876 ysize, canon_rtx (XEXP (y, 0)), c);
1878 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1881 if (GET_CODE (y) == CONST)
1882 return memrefs_conflict_p (xsize, x, ysize,
1883 canon_rtx (XEXP (y, 0)), c);
1886 return (xsize <= 0 || ysize <= 0
1887 || (rtx_equal_for_memref_p (x, y)
1888 && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1895 /* Functions to compute memory dependencies.
1897 Since we process the insns in execution order, we can build tables
1898 to keep track of what registers are fixed (and not aliased), what registers
1899 are varying in known ways, and what registers are varying in unknown
1902 If both memory references are volatile, then there must always be a
1903 dependence between the two references, since their order can not be
1904 changed. A volatile and non-volatile reference can be interchanged
1907 A MEM_IN_STRUCT reference at a non-AND varying address can never
1908 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We
1909 also must allow AND addresses, because they may generate accesses
1910 outside the object being referenced. This is used to generate
1911 aligned addresses from unaligned addresses, for instance, the alpha
1912 storeqi_unaligned pattern. */
1914 /* Read dependence: X is read after read in MEM takes place. There can
1915 only be a dependence here if both reads are volatile. */
1918 read_dependence (const_rtx mem, const_rtx x)
1920 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1923 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1924 MEM2 is a reference to a structure at a varying address, or returns
1925 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1926    value is returned, MEM1 and MEM2 can never alias.  VARIES_P is used
1927 to decide whether or not an address may vary; it should return
1928 nonzero whenever variation is possible.
1929 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
1932 fixed_scalar_and_varying_struct_p (const_rtx mem1, const_rtx mem2, rtx mem1_addr,
1934 bool (*varies_p) (const_rtx, bool))
1936 if (! flag_strict_aliasing)
1939 if (MEM_ALIAS_SET (mem2)
1940 && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1941 && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
1942 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1946 if (MEM_ALIAS_SET (mem1)
1947 && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1948 && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
1949 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1956 /* Returns nonzero if something about the mode or address format of MEM
1957 indicates that it might well alias *anything*. */
1960 aliases_everything_p (const_rtx mem)
1962 if (GET_CODE (XEXP (mem, 0)) == AND)
1963     /* If the address is an AND, it's very hard to know what it is
1964        actually pointing at.  */
1970 /* Return true if we can determine that the fields referenced cannot
1971 overlap for any pair of objects. */
1974 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1976 const_tree fieldx, fieldy, typex, typey, orig_y;
1980 /* The comparison has to be done at a common type, since we don't
1981 know how the inheritance hierarchy works. */
1985 fieldx = TREE_OPERAND (x, 1);
1986 typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));
1991 fieldy = TREE_OPERAND (y, 1);
1992 typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));
1997 y = TREE_OPERAND (y, 0);
1999 while (y && TREE_CODE (y) == COMPONENT_REF);
2001 x = TREE_OPERAND (x, 0);
2003 while (x && TREE_CODE (x) == COMPONENT_REF);
2004 /* Never found a common type. */
2008 /* If we're left with accessing different fields of a structure,
2010 if (TREE_CODE (typex) == RECORD_TYPE
2011 && fieldx != fieldy)
2014 /* The comparison on the current field failed. If we're accessing
2015 	 a deeply nested structure, look at the next outer level.  */
2016 x = TREE_OPERAND (x, 0);
2017 y = TREE_OPERAND (y, 0);
2020 && TREE_CODE (x) == COMPONENT_REF
2021 && TREE_CODE (y) == COMPONENT_REF);
2026 /* Look at the bottom of the COMPONENT_REF list for a DECL, and return it. */
2029 decl_for_component_ref (tree x)
2033 x = TREE_OPERAND (x, 0);
2035 while (x && TREE_CODE (x) == COMPONENT_REF);
2037 return x && DECL_P (x) ? x : NULL_TREE;
2040 /* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
2041 offset of the field reference. */
2044 adjust_offset_for_component_ref (tree x, rtx offset)
2046 HOST_WIDE_INT ioffset;
2051 ioffset = INTVAL (offset);
2054 tree offset = component_ref_field_offset (x);
2055 tree field = TREE_OPERAND (x, 1);
2057 if (! host_integerp (offset, 1))
2059 ioffset += (tree_low_cst (offset, 1)
2060 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2063 x = TREE_OPERAND (x, 0);
2065 while (x && TREE_CODE (x) == COMPONENT_REF);
2067 return GEN_INT (ioffset);
2070 /* Return nonzero if we can determine the exprs corresponding to memrefs
2071 X and Y and they do not overlap. */
2074 nonoverlapping_memrefs_p (const_rtx x, const_rtx y)
2076 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
2079 rtx moffsetx, moffsety;
2080 HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;
2082 /* Unless both have exprs, we can't tell anything. */
2083 if (exprx == 0 || expry == 0)
2086 /* If both are field references, we may be able to determine something. */
2087 if (TREE_CODE (exprx) == COMPONENT_REF
2088 && TREE_CODE (expry) == COMPONENT_REF
2089 && nonoverlapping_component_refs_p (exprx, expry))
2093 /* If the field reference test failed, look at the DECLs involved. */
2094 moffsetx = MEM_OFFSET (x);
2095 if (TREE_CODE (exprx) == COMPONENT_REF)
2097 if (TREE_CODE (expry) == VAR_DECL
2098 && POINTER_TYPE_P (TREE_TYPE (expry)))
2100 tree field = TREE_OPERAND (exprx, 1);
2101 tree fieldcontext = DECL_FIELD_CONTEXT (field);
2102 if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
2107 tree t = decl_for_component_ref (exprx);
2110 moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
2114 else if (INDIRECT_REF_P (exprx))
2116 exprx = TREE_OPERAND (exprx, 0);
2117 if (flag_argument_noalias < 2
2118 || TREE_CODE (exprx) != PARM_DECL)
2122 moffsety = MEM_OFFSET (y);
2123 if (TREE_CODE (expry) == COMPONENT_REF)
2125 if (TREE_CODE (exprx) == VAR_DECL
2126 && POINTER_TYPE_P (TREE_TYPE (exprx)))
2128 tree field = TREE_OPERAND (expry, 1);
2129 tree fieldcontext = DECL_FIELD_CONTEXT (field);
2130 if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
2135 tree t = decl_for_component_ref (expry);
2138 moffsety = adjust_offset_for_component_ref (expry, moffsety);
2142 else if (INDIRECT_REF_P (expry))
2144 expry = TREE_OPERAND (expry, 0);
2145 if (flag_argument_noalias < 2
2146 || TREE_CODE (expry) != PARM_DECL)
2150 if (! DECL_P (exprx) || ! DECL_P (expry))
2153 rtlx = DECL_RTL (exprx);
2154 rtly = DECL_RTL (expry);
2156 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
2157 can't overlap unless they are the same because we never reuse that part
2158 of the stack frame used for locals for spilled pseudos. */
2159 if ((!MEM_P (rtlx) || !MEM_P (rtly))
2160 && ! rtx_equal_p (rtlx, rtly))
2163 /* Get the base and offsets of both decls. If either is a register, we
2164      know both are and are the same, so use that as the base.  The only way
2165 we can avoid overlap is if we can deduce that they are nonoverlapping
2166 pieces of that decl, which is very rare. */
2167 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
2168 if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1)))
2169 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
2171 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
2172 if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1)))
2173 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
2175 /* If the bases are different, we know they do not overlap if both
2176 are constants or if one is a constant and the other a pointer into the
2177 stack frame. Otherwise a different base means we can't tell if they
2179 if (! rtx_equal_p (basex, basey))
2180 return ((CONSTANT_P (basex) && CONSTANT_P (basey))
2181 || (CONSTANT_P (basex) && REG_P (basey)
2182 && REGNO_PTR_FRAME_P (REGNO (basey)))
2183 || (CONSTANT_P (basey) && REG_P (basex)
2184 && REGNO_PTR_FRAME_P (REGNO (basex))));
2186 sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
2187 : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
2188 : -1);
2189 sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
2190 : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly))
2191 : -1);
2193 /* If we have an offset for either memref, it can update the values computed
2194 above. */
2195 if (moffsetx)
2196 offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
2197 if (moffsety)
2198 offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);
2200 /* If a memref has both a size and an offset, we can use the smaller size.
2201 We can't do this if the offset isn't known because we must view this
2202 memref as being anywhere inside the DECL's MEM. */
2203 if (MEM_SIZE (x) && moffsetx)
2204 sizex = INTVAL (MEM_SIZE (x));
2205 if (MEM_SIZE (y) && moffsety)
2206 sizey = INTVAL (MEM_SIZE (y));
2208 /* Put the values of the memref with the lower offset in X's values. */
2209 if (offsetx > offsety)
2211 tem = offsetx, offsetx = offsety, offsety = tem;
2212 tem = sizex, sizex = sizey, sizey = tem;
2215 /* If we don't know the size of the lower-offset value, we can't tell
2216 if they conflict. Otherwise, we do the test. */
2217 return sizex >= 0 && offsety >= offsetx + sizex;
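/* An illustrative sketch of the decl/offset path (names made up):

     void f (void)
     {
       int x, y;
       x = 1;       -- MEM_EXPR is the VAR_DECL for x
       use (y);     -- MEM_EXPR is the VAR_DECL for y
     }

   Both exprs are decls, DECL_RTL of each is a frame MEM with the same
   base register, and the two [offset, offset + size) ranges are
   disjoint, so the final test returns nonzero.  Accesses such as p.a
   vs. p.b through one struct are instead caught earlier by
   nonoverlapping_component_refs_p.  */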
2220 /* True dependence: X is read after store in MEM takes place. */
2223 true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
2224 bool (*varies) (const_rtx, bool))
2226 rtx x_addr, mem_addr;
2229 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2232 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2233 This is used in epilogue deallocation functions, and in cselib. */
2234 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2236 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2238 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2239 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2242 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2245 /* Read-only memory is by definition never modified, and therefore can't
2246 conflict with anything. We don't expect to find a read-only MEM here,
2247 but stupid user tricks can produce one, so don't die. */
2248 if (MEM_READONLY_P (x))
2251 if (nonoverlapping_memrefs_p (mem, x))
2254 if (mem_mode == VOIDmode)
2255 mem_mode = GET_MODE (mem);
2257 x_addr = get_addr (XEXP (x, 0));
2258 mem_addr = get_addr (XEXP (mem, 0));
2260 base = find_base_term (x_addr);
2261 if (base && (GET_CODE (base) == LABEL_REF
2262 || (GET_CODE (base) == SYMBOL_REF
2263 && CONSTANT_POOL_ADDRESS_P (base))))
2266 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2269 x_addr = canon_rtx (x_addr);
2270 mem_addr = canon_rtx (mem_addr);
2272 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2273 SIZE_FOR_MODE (x), x_addr, 0))
2276 if (aliases_everything_p (x))
2279 /* We cannot use aliases_everything_p to test MEM, since we must look
2280 at MEM_MODE, rather than GET_MODE (MEM). */
2281 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2284 /* In true_dependence we also allow BLKmode to alias anything. Why
2285 don't we do this in anti_dependence and output_dependence? */
2286 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2289 if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
2292 return rtx_refs_may_alias_p (x, mem, true);
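/* A hedged usage sketch (store_mem, load_mem and the choice of
   rtx_varies_p as the VARIES callback are only examples):

     if (true_dependence (store_mem, GET_MODE (store_mem),
                          load_mem, rtx_varies_p))
       ...              -- the load may observe the store; keep order

   A zero result means the load cannot observe the store, so the two
   references may be reordered.  */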
2295 /* Canonical true dependence: X is read after store in MEM takes place.
2296 Variant of true_dependence which assumes MEM has already been
2297 canonicalized (hence we no longer do that here).
2298 The mem_addr argument has been added, since true_dependence computed
2299 this value prior to canonicalizing.
2300 If x_addr is non-NULL, it is used in preference to XEXP (x, 0). */
2303 canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
2304 const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool))
2306 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2309 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2310 This is used in epilogue deallocation functions. */
2311 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2313 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2315 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2316 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2319 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2322 /* Read-only memory is by definition never modified, and therefore can't
2323 conflict with anything. We don't expect to find a read-only MEM here,
2324 but stupid user tricks can produce one, so don't die. */
2325 if (MEM_READONLY_P (x))
2328 if (nonoverlapping_memrefs_p (x, mem))
2332 x_addr = get_addr (XEXP (x, 0));
2334 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2337 x_addr = canon_rtx (x_addr);
2338 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2339 SIZE_FOR_MODE (x), x_addr, 0))
2342 if (aliases_everything_p (x))
2345 /* We cannot use aliases_everything_p to test MEM, since we must look
2346 at MEM_MODE, rather than GET_MODE (MEM). */
2347 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2350 /* In true_dependence we also allow BLKmode to alias anything. Why
2351 don't we do this in anti_dependence and output_dependence? */
2352 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2355 if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
2358 return rtx_refs_may_alias_p (x, mem, true);
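/* A hedged usage sketch: a caller testing one store against many loads
   canonicalizes the store address once and reuses it (store_mem, loads
   and n are made-up locals; rtx_varies_p is only an example callback):

     rtx addr = canon_rtx (get_addr (XEXP (store_mem, 0)));
     for (i = 0; i < n; i++)
       if (canon_true_dependence (store_mem, GET_MODE (store_mem), addr,
                                  loads[i], NULL_RTX, rtx_varies_p))
         ...

   Passing NULL_RTX for x_addr lets the routine derive the load address
   itself.  */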
2361 /* Returns nonzero if a write to X might alias a previous read from
2362 (or, if WRITEP is nonzero, a write to) MEM. */
2365 write_dependence_p (const_rtx mem, const_rtx x, int writep)
2367 rtx x_addr, mem_addr;
2368 const_rtx fixed_scalar;
2371 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2374 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2375 This is used in epilogue deallocation functions. */
2376 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2378 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2380 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2381 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2384 /* A read from read-only memory can't conflict with read-write memory. */
2385 if (!writep && MEM_READONLY_P (mem))
2388 if (nonoverlapping_memrefs_p (x, mem))
2391 x_addr = get_addr (XEXP (x, 0));
2392 mem_addr = get_addr (XEXP (mem, 0));
2396 base = find_base_term (mem_addr);
2397 if (base && (GET_CODE (base) == LABEL_REF
2398 || (GET_CODE (base) == SYMBOL_REF
2399 && CONSTANT_POOL_ADDRESS_P (base))))
2403 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
2404 GET_MODE (mem)))
2407 x_addr = canon_rtx (x_addr);
2408 mem_addr = canon_rtx (mem_addr);
2410 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
2411 SIZE_FOR_MODE (x), x_addr, 0))
2414 fixed_scalar
2415 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2416 rtx_addr_varies_p);
2418 if ((fixed_scalar == mem && !aliases_everything_p (x))
2419 || (fixed_scalar == x && !aliases_everything_p (mem)))
2422 return rtx_refs_may_alias_p (x, mem, false);
2425 /* Anti dependence: X is written after read in MEM takes place. */
2428 anti_dependence (const_rtx mem, const_rtx x)
2430 return write_dependence_p (mem, x, /*writep=*/0);
2433 /* Output dependence: X is written after store in MEM takes place. */
2436 output_dependence (const_rtx mem, const_rtx x)
2438 return write_dependence_p (mem, x, /*writep=*/1);
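/* An illustration of the three dependence kinds on a made-up sequence
   (RTL abbreviated):

     i1: (set (reg r1) (mem A))      -- read of A
     i2: (set (mem A) (reg r2))      -- write of A
     i3: (set (mem A) (reg r3))      -- second write of A

   true_dependence holds from i2 to any later read of A,
   anti_dependence holds from i1 to i2 (write after read), and
   output_dependence holds from i2 to i3 (write after write).  */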
2443 init_alias_target (void)
2447 memset (static_reg_base_value, 0, sizeof static_reg_base_value);
2449 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2450 /* Check whether this register can hold an incoming pointer
2451 argument. FUNCTION_ARG_REGNO_P tests outgoing register
2452 numbers, so translate if necessary due to register windows. */
2453 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
2454 && HARD_REGNO_MODE_OK (i, Pmode))
2455 static_reg_base_value[i]
2456 = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));
2458 static_reg_base_value[STACK_POINTER_REGNUM]
2459 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
2460 static_reg_base_value[ARG_POINTER_REGNUM]
2461 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
2462 static_reg_base_value[FRAME_POINTER_REGNUM]
2463 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
2464 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2465 static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
2466 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
2467 #endif
2470 /* Set MEMORY_MODIFIED when X modifies DATA (which is assumed
2471 to be a memory reference). */
2472 static bool memory_modified;
2474 memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
2478 if (anti_dependence (x, (const_rtx)data) || output_dependence (x, (const_rtx)data))
2479 memory_modified = true;
2484 /* Return true when INSN possibly modifies the memory contents of MEM
2485 (i.e. the memory at that address may be changed). */
2487 memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
2491 memory_modified = false;
2492 note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX(mem));
2493 return memory_modified;
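/* A hedged usage sketch (from, to and mem are hypothetical locals):
   to ask whether MEM might be changed anywhere in a range of insns,
   a caller can simply scan the range:

     rtx insn;
     for (insn = from; insn != to; insn = NEXT_INSN (insn))
       if (INSN_P (insn) && memory_modified_in_insn_p (mem, insn))
         return true;
     return false;
*/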
2496 /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
2497 array. */
2500 init_alias_analysis (void)
2502 unsigned int maxreg = max_reg_num ();
2508 timevar_push (TV_ALIAS_ANALYSIS);
2510 reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
2511 reg_known_value = GGC_CNEWVEC (rtx, reg_known_value_size);
2512 reg_known_equiv_p = XCNEWVEC (bool, reg_known_value_size);
2514 /* If we have memory allocated from the previous run, use it. */
2515 if (old_reg_base_value)
2516 reg_base_value = old_reg_base_value;
2519 VEC_truncate (rtx, reg_base_value, 0);
2521 VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);
2523 new_reg_base_value = XNEWVEC (rtx, maxreg);
2524 reg_seen = XNEWVEC (char, maxreg);
2526 /* The basic idea is that each pass through this loop will use the
2527 "constant" information from the previous pass to propagate alias
2528 information through another level of assignments.
2530 This could get expensive if the assignment chains are long. Maybe
2531 we should throttle the number of iterations, possibly based on
2532 the optimization level or flag_expensive_optimizations.
2534 We could propagate more information in the first pass by making use
2535 of DF_REG_DEF_COUNT to determine immediately that the alias information
2536 for a pseudo is "constant".
2538 A program with an uninitialized variable can cause an infinite loop
2539 here. Instead of doing a full dataflow analysis to detect such problems
2540 we just cap the number of iterations for the loop.
2542 The state of the arrays for the set chain in question does not matter
2543 since the program has undefined behavior. */
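/* For illustration, a copy chain such as

     (set (reg 90) (symbol_ref x))
     (set (reg 91) (reg 90))
     (set (reg 92) (reg 91))

   may need one extra pass per link before the base of reg 92 is known,
   if a copy happens to be scanned before its source has been given a
   base; hence the outer loop below, capped at MAX_ALIAS_LOOP_PASSES.  */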
2548 /* Assume nothing will change this iteration of the loop. */
2551 /* We want to assign the same IDs on each iteration of this loop,
2552 so start counting from zero each time. */
2555 /* We're at the start of the function each iteration through the
2556 loop, so we're copying arguments. */
2557 copying_arguments = true;
2559 /* Wipe the potential alias information clean for this pass. */
2560 memset (new_reg_base_value, 0, maxreg * sizeof (rtx));
2562 /* Wipe the reg_seen array clean. */
2563 memset (reg_seen, 0, maxreg);
2565 /* Mark all hard registers which may contain an address.
2566 The stack, frame and argument pointers may contain an address.
2567 An argument register which can hold a Pmode value may contain
2568 an address even if it is not in BASE_REGS.
2570 The address expression is VOIDmode for an argument and
2571 Pmode for other registers. */
2573 memcpy (new_reg_base_value, static_reg_base_value,
2574 FIRST_PSEUDO_REGISTER * sizeof (rtx));
2576 /* Walk the insns adding values to the new_reg_base_value array. */
2577 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2583 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
2584 /* The prologue/epilogue insns are not threaded onto the
2585 insn chain until after reload has completed. Thus, until
2586 then there is no sense wasting time checking if INSN is
2587 in the prologue/epilogue. */
2588 if (reload_completed
2589 && prologue_epilogue_contains (insn))
2593 /* If this insn has a noalias note, process it. Otherwise,
2594 scan for sets. A simple set will have no side effects
2595 which could change the base value of any other register. */
2597 if (GET_CODE (PATTERN (insn)) == SET
2598 && REG_NOTES (insn) != 0
2599 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
2600 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
2601 else
2602 note_stores (PATTERN (insn), record_set, NULL);
2604 set = single_set (insn);
2606 if (set != 0
2607 && REG_P (SET_DEST (set))
2608 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
2610 unsigned int regno = REGNO (SET_DEST (set));
2611 rtx src = SET_SRC (set);
2614 note = find_reg_equal_equiv_note (insn);
2615 if (note && REG_NOTE_KIND (note) == REG_EQUAL
2616 && DF_REG_DEF_COUNT (regno) != 1)
2619 if (note != NULL_RTX
2620 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
2621 && ! rtx_varies_p (XEXP (note, 0), 1)
2622 && ! reg_overlap_mentioned_p (SET_DEST (set),
2623 XEXP (note, 0)))
2625 set_reg_known_value (regno, XEXP (note, 0));
2626 set_reg_known_equiv_p (regno,
2627 REG_NOTE_KIND (note) == REG_EQUIV);
2629 else if (DF_REG_DEF_COUNT (regno) == 1
2630 && GET_CODE (src) == PLUS
2631 && REG_P (XEXP (src, 0))
2632 && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
2633 && CONST_INT_P (XEXP (src, 1)))
2635 t = plus_constant (t, INTVAL (XEXP (src, 1)));
2636 set_reg_known_value (regno, t);
2637 set_reg_known_equiv_p (regno, 0);
2639 else if (DF_REG_DEF_COUNT (regno) == 1
2640 && ! rtx_varies_p (src, 1))
2642 set_reg_known_value (regno, src);
2643 set_reg_known_equiv_p (regno, 0);
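/* As an illustration of the cases above, assume pseudo 90 already has
   the known value (symbol_ref y).  Then for

     (set (reg 100) (plus (reg 90) (const_int 8)))

   with a single definition of pseudo 100, the PLUS case records
   plus_constant of that value and 8 as the known value of pseudo 100,
   whereas an insn with a usable REG_EQUAL or REG_EQUIV note records
   the note's value instead.  */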
2647 else if (NOTE_P (insn)
2648 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
2649 copying_arguments = false;
2652 /* Now propagate values from new_reg_base_value to reg_base_value. */
2653 gcc_assert (maxreg == (unsigned int) max_reg_num ());
2655 for (ui = 0; ui < maxreg; ui++)
2657 if (new_reg_base_value[ui]
2658 && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
2659 && ! rtx_equal_p (new_reg_base_value[ui],
2660 VEC_index (rtx, reg_base_value, ui)))
2662 VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
2667 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
2669 /* Fill in the remaining entries. */
2670 for (i = 0; i < (int)reg_known_value_size; i++)
2671 if (reg_known_value[i] == 0)
2672 reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];
2675 free (new_reg_base_value);
2676 new_reg_base_value = 0;
2679 timevar_pop (TV_ALIAS_ANALYSIS);
2683 end_alias_analysis (void)
2685 old_reg_base_value = reg_base_value;
2686 ggc_free (reg_known_value);
2687 reg_known_value = 0;
2688 reg_known_value_size = 0;
2689 free (reg_known_equiv_p);
2690 reg_known_equiv_p = 0;
2693 #include "gt-alias.h"