1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
3 2007, 2008, 2009 Free Software Foundation, Inc.
4 Contributed by John Carr (jfc@mit.edu).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
39 #include "splay-tree.h"
41 #include "langhooks.h"
46 #include "tree-pass.h"
47 #include "ipa-type-escape.h"
49 #include "tree-ssa-alias.h"
50 #include "pointer-set.h"
51 #include "tree-flow.h"
53 /* The aliasing API provided here solves related but different problems:
55 Say there exists (in c)
69 Consider the four questions:
71 Can a store to x1 interfere with px2->y1?
72 Can a store to x1 interfere with px2->z2?
74 Can a store to x1 change the value pointed to by with py?
75 Can a store to x1 change the value pointed to by with pz?
77 The answer to these questions can be yes, yes, yes, and maybe.
79 The first two questions can be answered with a simple examination
80 of the type system. If structure X contains a field of type Y then
81 a store thru a pointer to an X can overwrite any field that is
82 contained (recursively) in an X (unless we know that px1 != px2).
84 The last two of the questions can be solved in the same way as the
85 first two questions but this is too conservative. The observation
86 is that in some cases analysis we can know if which (if any) fields
87 are addressed and if those addresses are used in bad ways. This
88 analysis may be language specific. In C, arbitrary operations may
89 be applied to pointers. However, there is some indication that
90 this may be too conservative for some C++ types.
92 The pass ipa-type-escape does this analysis for the types whose
93 instances do not escape across the compilation boundary.
95 Historically in GCC, these two problems were combined and a single
96 data structure was used to represent the solution to these
97 problems. We now have two similar but different data structures,
98 The data structure to solve the last two question is similar to the
99 first, but does not contain have the fields in it whose address are
100 never taken. For types that do escape the compilation unit, the
101 data structures will have identical information.
104 /* The alias sets assigned to MEMs assist the back-end in determining
105 which MEMs can alias which other MEMs. In general, two MEMs in
106 different alias sets cannot alias each other, with one important
107 exception. Consider something like:
109 struct S { int i; double d; };
111 a store to an `S' can alias something of either type `int' or type
112 `double'. (However, a store to an `int' cannot alias a `double'
113 and vice versa.) We indicate this via a tree structure that looks
121 (The arrows are directed and point downwards.)
122 In this situation we say the alias set for `struct S' is the
123 `superset' and that those for `int' and `double' are `subsets'.
125 To see whether two alias sets can point to the same memory, we must
126 see if either alias set is a subset of the other. We need not trace
127 past immediate descendants, however, since we propagate all
128 grandchildren up one level.
130 Alias set zero is implicitly a superset of all other alias sets.
131 However, this is no actual entry for alias set zero. It is an
132 error to attempt to explicitly construct a subset of zero. */
134 struct GTY(()) alias_set_entry_d {
135 /* The alias set number, as stored in MEM_ALIAS_SET. */
136 alias_set_type alias_set;
138 /* Nonzero if would have a child of zero: this effectively makes this
139 alias set the same as alias set zero. */
142 /* The children of the alias set. These are not just the immediate
143 children, but, in fact, all descendants. So, if we have:
145 struct T { struct S s; float f; }
147 continuing our example above, the children here will be all of
148 `int', `double', `float', and `struct S'. */
149 splay_tree GTY((param1_is (int), param2_is (int))) children;
151 typedef struct alias_set_entry_d *alias_set_entry;
153 static int rtx_equal_for_memref_p (const_rtx, const_rtx);
154 static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
155 static void record_set (rtx, const_rtx, void *);
156 static int base_alias_check (rtx, rtx, enum machine_mode,
158 static rtx find_base_value (rtx);
159 static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
160 static int insert_subset_children (splay_tree_node, void*);
161 static alias_set_entry get_alias_set_entry (alias_set_type);
162 static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx, rtx, rtx,
163 bool (*) (const_rtx, bool));
164 static int aliases_everything_p (const_rtx);
165 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
166 static tree decl_for_component_ref (tree);
167 static rtx adjust_offset_for_component_ref (tree, rtx);
168 static int write_dependence_p (const_rtx, const_rtx, int);
170 static void memory_modified_1 (rtx, const_rtx, void *);
172 /* Set up all info needed to perform alias analysis on memory references. */
174 /* Returns the size in bytes of the mode of X. */
175 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
177 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
178 different alias sets. We ignore alias sets in functions making use
179 of variable arguments because the va_arg macros on some systems are
181 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
182 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
184 /* Cap the number of passes we make over the insns propagating alias
185 information through set chains. 10 is a completely arbitrary choice. */
186 #define MAX_ALIAS_LOOP_PASSES 10
188 /* reg_base_value[N] gives an address to which register N is related.
189 If all sets after the first add or subtract to the current value
190 or otherwise modify it so it does not point to a different top level
191 object, reg_base_value[N] is equal to the address part of the source
194 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
195 expressions represent certain special values: function arguments and
196 the stack, frame, and argument pointers.
198 The contents of an ADDRESS is not normally used, the mode of the
199 ADDRESS determines whether the ADDRESS is a function argument or some
200 other special value. Pointer equality, not rtx_equal_p, determines whether
201 two ADDRESS expressions refer to the same base address.
203 The only use of the contents of an ADDRESS is for determining if the
204 current function performs nonlocal memory memory references for the
205 purposes of marking the function as a constant function. */
207 static GTY(()) VEC(rtx,gc) *reg_base_value;
208 static rtx *new_reg_base_value;
210 /* We preserve the copy of old array around to avoid amount of garbage
211 produced. About 8% of garbage produced were attributed to this
213 static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;
215 /* Static hunks of RTL used by the aliasing code; these are initialized
216 once per function to avoid unnecessary RTL allocations. */
217 static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];
219 #define REG_BASE_VALUE(X) \
220 (REGNO (X) < VEC_length (rtx, reg_base_value) \
221 ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)
223 /* Vector indexed by N giving the initial (unchanging) value known for
224 pseudo-register N. This array is initialized in init_alias_analysis,
225 and does not change until end_alias_analysis is called. */
226 static GTY((length("reg_known_value_size"))) rtx *reg_known_value;
228 /* Indicates number of valid entries in reg_known_value. */
229 static GTY(()) unsigned int reg_known_value_size;
231 /* Vector recording for each reg_known_value whether it is due to a
232 REG_EQUIV note. Future passes (viz., reload) may replace the
233 pseudo with the equivalent expression and so we account for the
234 dependences that would be introduced if that happens.
236 The REG_EQUIV notes created in assign_parms may mention the arg
237 pointer, and there are explicit insns in the RTL that modify the
238 arg pointer. Thus we must ensure that such insns don't get
239 scheduled across each other because that would invalidate the
240 REG_EQUIV notes. One could argue that the REG_EQUIV notes are
241 wrong, but solving the problem in the scheduler will likely give
242 better code, so we do it here. */
243 static bool *reg_known_equiv_p;
245 /* True when scanning insns from the start of the rtl to the
246 NOTE_INSN_FUNCTION_BEG note. */
247 static bool copying_arguments;
249 DEF_VEC_P(alias_set_entry);
250 DEF_VEC_ALLOC_P(alias_set_entry,gc);
252 /* The splay-tree used to store the various alias set entries. */
253 static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
255 /* Build a decomposed reference object for querying the alias-oracle
256 from the MEM rtx and store it in *REF.
257 Returns false if MEM is not suitable for the alias-oracle. */
260 ao_ref_from_mem (ao_ref *ref, const_rtx mem)
262 tree expr = MEM_EXPR (mem);
268 /* If MEM_OFFSET or MEM_SIZE are NULL punt. */
269 if (!MEM_OFFSET (mem)
273 ao_ref_init (ref, expr);
275 /* Get the base of the reference and see if we have to reject or
277 base = ao_ref_base (ref);
278 if (base == NULL_TREE)
281 /* If this is a pointer dereference of a non-SSA_NAME punt.
282 ??? We could replace it with a pointer to anything. */
283 if (INDIRECT_REF_P (base)
284 && TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME)
287 /* The tree oracle doesn't like to have these. */
288 if (TREE_CODE (base) == FUNCTION_DECL
289 || TREE_CODE (base) == LABEL_DECL)
292 /* If this is a reference based on a partitioned decl replace the
293 base with an INDIRECT_REF of the pointer representative we
294 created during stack slot partitioning. */
295 if (TREE_CODE (base) == VAR_DECL
296 && ! TREE_STATIC (base)
297 && cfun->gimple_df->decls_to_pointers != NULL)
300 namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
303 ref->base_alias_set = get_alias_set (base);
304 ref->base = build1 (INDIRECT_REF, TREE_TYPE (base), *(tree *)namep);
308 ref->ref_alias_set = MEM_ALIAS_SET (mem);
310 /* If the base decl is a parameter we can have negative MEM_OFFSET in
311 case of promoted subregs on bigendian targets. Trust the MEM_EXPR
313 if (INTVAL (MEM_OFFSET (mem)) < 0
314 && ((INTVAL (MEM_SIZE (mem)) + INTVAL (MEM_OFFSET (mem)))
315 * BITS_PER_UNIT) == ref->size)
318 ref->offset += INTVAL (MEM_OFFSET (mem)) * BITS_PER_UNIT;
319 ref->size = INTVAL (MEM_SIZE (mem)) * BITS_PER_UNIT;
321 /* The MEM may extend into adjacent fields, so adjust max_size if
323 if (ref->max_size != -1
324 && ref->size > ref->max_size)
325 ref->max_size = ref->size;
327 /* If MEM_OFFSET and MEM_SIZE get us outside of the base object of
328 the MEM_EXPR punt. This happens for STRICT_ALIGNMENT targets a lot. */
329 if (MEM_EXPR (mem) != get_spill_slot_decl (false)
331 || (DECL_P (ref->base)
332 && (!host_integerp (DECL_SIZE (ref->base), 1)
333 || (TREE_INT_CST_LOW (DECL_SIZE ((ref->base)))
334 < (unsigned HOST_WIDE_INT)(ref->offset + ref->size))))))
340 /* Query the alias-oracle on whether the two memory rtx X and MEM may
341 alias. If TBAA_P is set also apply TBAA. Returns true if the
342 two rtxen may alias, false otherwise. */
345 rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
349 if (!ao_ref_from_mem (&ref1, x)
350 || !ao_ref_from_mem (&ref2, mem))
353 return refs_may_alias_p_1 (&ref1, &ref2, tbaa_p);
356 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
357 such an entry, or NULL otherwise. */
359 static inline alias_set_entry
360 get_alias_set_entry (alias_set_type alias_set)
362 return VEC_index (alias_set_entry, alias_sets, alias_set);
365 /* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
366 the two MEMs cannot alias each other. */
369 mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
371 /* Perform a basic sanity check. Namely, that there are no alias sets
372 if we're not using strict aliasing. This helps to catch bugs
373 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
374 where a MEM is allocated in some way other than by the use of
375 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
376 use alias sets to indicate that spilled registers cannot alias each
377 other, we might need to remove this check. */
378 gcc_assert (flag_strict_aliasing
379 || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));
381 return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
384 /* Insert the NODE into the splay tree given by DATA. Used by
385 record_alias_subset via splay_tree_foreach. */
388 insert_subset_children (splay_tree_node node, void *data)
390 splay_tree_insert ((splay_tree) data, node->key, node->value);
395 /* Return true if the first alias set is a subset of the second. */
398 alias_set_subset_of (alias_set_type set1, alias_set_type set2)
402 /* Everything is a subset of the "aliases everything" set. */
406 /* Otherwise, check if set1 is a subset of set2. */
407 ase = get_alias_set_entry (set2);
409 && ((ase->has_zero_child && set1 == 0)
410 || splay_tree_lookup (ase->children,
411 (splay_tree_key) set1)))
416 /* Return 1 if the two specified alias sets may conflict. */
419 alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
424 if (alias_sets_must_conflict_p (set1, set2))
427 /* See if the first alias set is a subset of the second. */
428 ase = get_alias_set_entry (set1);
430 && (ase->has_zero_child
431 || splay_tree_lookup (ase->children,
432 (splay_tree_key) set2)))
435 /* Now do the same, but with the alias sets reversed. */
436 ase = get_alias_set_entry (set2);
438 && (ase->has_zero_child
439 || splay_tree_lookup (ase->children,
440 (splay_tree_key) set1)))
443 /* The two alias sets are distinct and neither one is the
444 child of the other. Therefore, they cannot conflict. */
449 walk_mems_2 (rtx *x, rtx mem)
453 if (alias_sets_conflict_p (MEM_ALIAS_SET(*x), MEM_ALIAS_SET(mem)))
462 walk_mems_1 (rtx *x, rtx *pat)
466 /* Visit all MEMs in *PAT and check indepedence. */
467 if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
468 /* Indicate that dependence was determined and stop traversal. */
476 /* Return 1 if two specified instructions have mem expr with conflict alias sets*/
478 insn_alias_sets_conflict_p (rtx insn1, rtx insn2)
480 /* For each pair of MEMs in INSN1 and INSN2 check their independence. */
481 return for_each_rtx (&PATTERN (insn1), (rtx_function) walk_mems_1,
485 /* Return 1 if the two specified alias sets will always conflict. */
488 alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
490 if (set1 == 0 || set2 == 0 || set1 == set2)
496 /* Return 1 if any MEM object of type T1 will always conflict (using the
497 dependency routines in this file) with any MEM object of type T2.
498 This is used when allocating temporary storage. If T1 and/or T2 are
499 NULL_TREE, it means we know nothing about the storage. */
502 objects_must_conflict_p (tree t1, tree t2)
504 alias_set_type set1, set2;
506 /* If neither has a type specified, we don't know if they'll conflict
507 because we may be using them to store objects of various types, for
508 example the argument and local variables areas of inlined functions. */
509 if (t1 == 0 && t2 == 0)
512 /* If they are the same type, they must conflict. */
514 /* Likewise if both are volatile. */
515 || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
518 set1 = t1 ? get_alias_set (t1) : 0;
519 set2 = t2 ? get_alias_set (t2) : 0;
521 /* We can't use alias_sets_conflict_p because we must make sure
522 that every subtype of t1 will conflict with every subtype of
523 t2 for which a pair of subobjects of these respective subtypes
524 overlaps on the stack. */
525 return alias_sets_must_conflict_p (set1, set2);
528 /* Return true if all nested component references handled by
529 get_inner_reference in T are such that we should use the alias set
530 provided by the object at the heart of T.
532 This is true for non-addressable components (which don't have their
533 own alias set), as well as components of objects in alias set zero.
534 This later point is a special case wherein we wish to override the
535 alias set used by the component, but we don't have per-FIELD_DECL
536 assignable alias sets. */
539 component_uses_parent_alias_set (const_tree t)
543 /* If we're at the end, it vacuously uses its own alias set. */
544 if (!handled_component_p (t))
547 switch (TREE_CODE (t))
550 if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
555 case ARRAY_RANGE_REF:
556 if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
565 /* Bitfields and casts are never addressable. */
569 t = TREE_OPERAND (t, 0);
570 if (get_alias_set (TREE_TYPE (t)) == 0)
575 /* Return the alias set for the memory pointed to by T, which may be
576 either a type or an expression. Return -1 if there is nothing
577 special about dereferencing T. */
579 static alias_set_type
580 get_deref_alias_set_1 (tree t)
582 /* If we're not doing any alias analysis, just assume everything
583 aliases everything else. */
584 if (!flag_strict_aliasing)
587 /* All we care about is the type. */
591 /* If we have an INDIRECT_REF via a void pointer, we don't
592 know anything about what that might alias. Likewise if the
593 pointer is marked that way. */
594 if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
595 || TYPE_REF_CAN_ALIAS_ALL (t))
601 /* Return the alias set for the memory pointed to by T, which may be
602 either a type or an expression. */
605 get_deref_alias_set (tree t)
607 alias_set_type set = get_deref_alias_set_1 (t);
609 /* Fall back to the alias-set of the pointed-to type. */
614 set = get_alias_set (TREE_TYPE (t));
620 /* Return the alias set for T, which may be either a type or an
621 expression. Call language-specific routine for help, if needed. */
624 get_alias_set (tree t)
628 /* If we're not doing any alias analysis, just assume everything
629 aliases everything else. Also return 0 if this or its type is
631 if (! flag_strict_aliasing || t == error_mark_node
633 && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
636 /* We can be passed either an expression or a type. This and the
637 language-specific routine may make mutually-recursive calls to each other
638 to figure out what to do. At each juncture, we see if this is a tree
639 that the language may need to handle specially. First handle things that
645 /* Remove any nops, then give the language a chance to do
646 something with this tree before we look at it. */
648 set = lang_hooks.get_alias_set (t);
652 /* First see if the actual object referenced is an INDIRECT_REF from a
653 restrict-qualified pointer or a "void *". */
654 while (handled_component_p (inner))
656 inner = TREE_OPERAND (inner, 0);
660 if (INDIRECT_REF_P (inner))
662 set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0));
667 /* Otherwise, pick up the outermost object that we could have a pointer
668 to, processing conversions as above. */
669 while (component_uses_parent_alias_set (t))
671 t = TREE_OPERAND (t, 0);
675 /* If we've already determined the alias set for a decl, just return
676 it. This is necessary for C++ anonymous unions, whose component
677 variables don't look like union members (boo!). */
678 if (TREE_CODE (t) == VAR_DECL
679 && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
680 return MEM_ALIAS_SET (DECL_RTL (t));
682 /* Now all we care about is the type. */
686 /* Variant qualifiers don't affect the alias set, so get the main
688 t = TYPE_MAIN_VARIANT (t);
690 /* Always use the canonical type as well. If this is a type that
691 requires structural comparisons to identify compatible types
692 use alias set zero. */
693 if (TYPE_STRUCTURAL_EQUALITY_P (t))
695 t = TYPE_CANONICAL (t);
696 /* Canonical types shouldn't form a tree nor should the canonical
697 type require structural equality checks. */
698 gcc_assert (!TYPE_STRUCTURAL_EQUALITY_P (t) && TYPE_CANONICAL (t) == t);
700 /* If this is a type with a known alias set, return it. */
701 if (TYPE_ALIAS_SET_KNOWN_P (t))
702 return TYPE_ALIAS_SET (t);
704 /* We don't want to set TYPE_ALIAS_SET for incomplete types. */
705 if (!COMPLETE_TYPE_P (t))
707 /* For arrays with unknown size the conservative answer is the
708 alias set of the element type. */
709 if (TREE_CODE (t) == ARRAY_TYPE)
710 return get_alias_set (TREE_TYPE (t));
712 /* But return zero as a conservative answer for incomplete types. */
716 /* See if the language has special handling for this type. */
717 set = lang_hooks.get_alias_set (t);
721 /* There are no objects of FUNCTION_TYPE, so there's no point in
722 using up an alias set for them. (There are, of course, pointers
723 and references to functions, but that's different.) */
724 else if (TREE_CODE (t) == FUNCTION_TYPE
725 || TREE_CODE (t) == METHOD_TYPE)
728 /* Unless the language specifies otherwise, let vector types alias
729 their components. This avoids some nasty type punning issues in
730 normal usage. And indeed lets vectors be treated more like an
732 else if (TREE_CODE (t) == VECTOR_TYPE)
733 set = get_alias_set (TREE_TYPE (t));
735 /* Unless the language specifies otherwise, treat array types the
736 same as their components. This avoids the asymmetry we get
737 through recording the components. Consider accessing a
738 character(kind=1) through a reference to a character(kind=1)[1:1].
739 Or consider if we want to assign integer(kind=4)[0:D.1387] and
740 integer(kind=4)[4] the same alias set or not.
741 Just be pragmatic here and make sure the array and its element
742 type get the same alias set assigned. */
743 else if (TREE_CODE (t) == ARRAY_TYPE
744 && !TYPE_NONALIASED_COMPONENT (t))
745 set = get_alias_set (TREE_TYPE (t));
748 /* Otherwise make a new alias set for this type. */
749 set = new_alias_set ();
751 TYPE_ALIAS_SET (t) = set;
753 /* If this is an aggregate type, we must record any component aliasing
755 if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
756 record_component_aliases (t);
761 /* Return a brand-new alias set. */
766 if (flag_strict_aliasing)
769 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
770 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
771 return VEC_length (alias_set_entry, alias_sets) - 1;
777 /* Indicate that things in SUBSET can alias things in SUPERSET, but that
778 not everything that aliases SUPERSET also aliases SUBSET. For example,
779 in C, a store to an `int' can alias a load of a structure containing an
780 `int', and vice versa. But it can't alias a load of a 'double' member
781 of the same structure. Here, the structure would be the SUPERSET and
782 `int' the SUBSET. This relationship is also described in the comment at
783 the beginning of this file.
785 This function should be called only once per SUPERSET/SUBSET pair.
787 It is illegal for SUPERSET to be zero; everything is implicitly a
788 subset of alias set zero. */
791 record_alias_subset (alias_set_type superset, alias_set_type subset)
793 alias_set_entry superset_entry;
794 alias_set_entry subset_entry;
796 /* It is possible in complex type situations for both sets to be the same,
797 in which case we can ignore this operation. */
798 if (superset == subset)
801 gcc_assert (superset);
803 superset_entry = get_alias_set_entry (superset);
804 if (superset_entry == 0)
806 /* Create an entry for the SUPERSET, so that we have a place to
807 attach the SUBSET. */
808 superset_entry = GGC_NEW (struct alias_set_entry_d);
809 superset_entry->alias_set = superset;
810 superset_entry->children
811 = splay_tree_new_ggc (splay_tree_compare_ints);
812 superset_entry->has_zero_child = 0;
813 VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
817 superset_entry->has_zero_child = 1;
820 subset_entry = get_alias_set_entry (subset);
821 /* If there is an entry for the subset, enter all of its children
822 (if they are not already present) as children of the SUPERSET. */
825 if (subset_entry->has_zero_child)
826 superset_entry->has_zero_child = 1;
828 splay_tree_foreach (subset_entry->children, insert_subset_children,
829 superset_entry->children);
832 /* Enter the SUBSET itself as a child of the SUPERSET. */
833 splay_tree_insert (superset_entry->children,
834 (splay_tree_key) subset, 0);
838 /* Record that component types of TYPE, if any, are part of that type for
839 aliasing purposes. For record types, we only record component types
840 for fields that are not marked non-addressable. For array types, we
841 only record the component type if it is not marked non-aliased. */
844 record_component_aliases (tree type)
846 alias_set_type superset = get_alias_set (type);
852 switch (TREE_CODE (type))
856 case QUAL_UNION_TYPE:
857 /* Recursively record aliases for the base classes, if there are any. */
858 if (TYPE_BINFO (type))
861 tree binfo, base_binfo;
863 for (binfo = TYPE_BINFO (type), i = 0;
864 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
865 record_alias_subset (superset,
866 get_alias_set (BINFO_TYPE (base_binfo)));
868 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
869 if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
870 record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
874 record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
877 /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
885 /* Allocate an alias set for use in storing and reading from the varargs
888 static GTY(()) alias_set_type varargs_set = -1;
891 get_varargs_alias_set (void)
894 /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
895 varargs alias set to an INDIRECT_REF (FIXME!), so we can't
896 consistently use the varargs alias set for loads from the varargs
897 area. So don't use it anywhere. */
900 if (varargs_set == -1)
901 varargs_set = new_alias_set ();
907 /* Likewise, but used for the fixed portions of the frame, e.g., register
910 static GTY(()) alias_set_type frame_set = -1;
913 get_frame_alias_set (void)
916 frame_set = new_alias_set ();
921 /* Inside SRC, the source of a SET, find a base address. */
924 find_base_value (rtx src)
928 #if defined (FIND_BASE_TERM)
929 /* Try machine-dependent ways to find the base term. */
930 src = FIND_BASE_TERM (src);
933 switch (GET_CODE (src))
941 /* At the start of a function, argument registers have known base
942 values which may be lost later. Returning an ADDRESS
943 expression here allows optimization based on argument values
944 even when the argument registers are used for other purposes. */
945 if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
946 return new_reg_base_value[regno];
948 /* If a pseudo has a known base value, return it. Do not do this
949 for non-fixed hard regs since it can result in a circular
950 dependency chain for registers which have values at function entry.
952 The test above is not sufficient because the scheduler may move
953 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
954 if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
955 && regno < VEC_length (rtx, reg_base_value))
957 /* If we're inside init_alias_analysis, use new_reg_base_value
958 to reduce the number of relaxation iterations. */
959 if (new_reg_base_value && new_reg_base_value[regno]
960 && DF_REG_DEF_COUNT (regno) == 1)
961 return new_reg_base_value[regno];
963 if (VEC_index (rtx, reg_base_value, regno))
964 return VEC_index (rtx, reg_base_value, regno);
970 /* Check for an argument passed in memory. Only record in the
971 copying-arguments block; it is too hard to track changes
973 if (copying_arguments
974 && (XEXP (src, 0) == arg_pointer_rtx
975 || (GET_CODE (XEXP (src, 0)) == PLUS
976 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
977 return gen_rtx_ADDRESS (VOIDmode, src);
982 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
985 /* ... fall through ... */
990 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
992 /* If either operand is a REG that is a known pointer, then it
994 if (REG_P (src_0) && REG_POINTER (src_0))
995 return find_base_value (src_0);
996 if (REG_P (src_1) && REG_POINTER (src_1))
997 return find_base_value (src_1);
999 /* If either operand is a REG, then see if we already have
1000 a known value for it. */
1003 temp = find_base_value (src_0);
1010 temp = find_base_value (src_1);
1015 /* If either base is named object or a special address
1016 (like an argument or stack reference), then use it for the
1019 && (GET_CODE (src_0) == SYMBOL_REF
1020 || GET_CODE (src_0) == LABEL_REF
1021 || (GET_CODE (src_0) == ADDRESS
1022 && GET_MODE (src_0) != VOIDmode)))
1026 && (GET_CODE (src_1) == SYMBOL_REF
1027 || GET_CODE (src_1) == LABEL_REF
1028 || (GET_CODE (src_1) == ADDRESS
1029 && GET_MODE (src_1) != VOIDmode)))
1032 /* Guess which operand is the base address:
1033 If either operand is a symbol, then it is the base. If
1034 either operand is a CONST_INT, then the other is the base. */
1035 if (CONST_INT_P (src_1) || CONSTANT_P (src_0))
1036 return find_base_value (src_0);
1037 else if (CONST_INT_P (src_0) || CONSTANT_P (src_1))
1038 return find_base_value (src_1);
1044 /* The standard form is (lo_sum reg sym) so look only at the
1046 return find_base_value (XEXP (src, 1));
1049 /* If the second operand is constant set the base
1050 address to the first operand. */
1051 if (CONST_INT_P (XEXP (src, 1)) && INTVAL (XEXP (src, 1)) != 0)
1052 return find_base_value (XEXP (src, 0));
1056 if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
1066 return find_base_value (XEXP (src, 0));
1069 case SIGN_EXTEND: /* used for NT/Alpha pointers */
1071 rtx temp = find_base_value (XEXP (src, 0));
1073 if (temp != 0 && CONSTANT_P (temp))
1074 temp = convert_memory_address (Pmode, temp);
1086 /* Called from init_alias_analysis indirectly through note_stores. */
1088 /* While scanning insns to find base values, reg_seen[N] is nonzero if
1089 register N has been set in this function. */
1090 static char *reg_seen;
1092 /* Addresses which are known not to alias anything else are identified
1093 by a unique integer. */
1094 static int unique_id;
1097 record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
1106 regno = REGNO (dest);
1108 gcc_assert (regno < VEC_length (rtx, reg_base_value));
1110 /* If this spans multiple hard registers, then we must indicate that every
1111 register has an unusable value. */
1112 if (regno < FIRST_PSEUDO_REGISTER)
1113 n = hard_regno_nregs[regno][GET_MODE (dest)];
1120 reg_seen[regno + n] = 1;
1121 new_reg_base_value[regno + n] = 0;
1128 /* A CLOBBER wipes out any old value but does not prevent a previously
1129 unset register from acquiring a base address (i.e. reg_seen is not
1131 if (GET_CODE (set) == CLOBBER)
1133 new_reg_base_value[regno] = 0;
1136 src = SET_SRC (set);
1140 if (reg_seen[regno])
1142 new_reg_base_value[regno] = 0;
1145 reg_seen[regno] = 1;
1146 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
1147 GEN_INT (unique_id++));
1151 /* If this is not the first set of REGNO, see whether the new value
1152 is related to the old one. There are two cases of interest:
1154 (1) The register might be assigned an entirely new value
1155 that has the same base term as the original set.
1157 (2) The set might be a simple self-modification that
1158 cannot change REGNO's base value.
1160 If neither case holds, reject the original base value as invalid.
1161 Note that the following situation is not detected:
1163 extern int x, y; int *p = &x; p += (&y-&x);
1165 ANSI C does not allow computing the difference of addresses
1166 of distinct top level objects. */
1167 if (new_reg_base_value[regno] != 0
1168 && find_base_value (src) != new_reg_base_value[regno])
1169 switch (GET_CODE (src))
1173 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
1174 new_reg_base_value[regno] = 0;
1177 /* If the value we add in the PLUS is also a valid base value,
1178 this might be the actual base value, and the original value
1181 rtx other = NULL_RTX;
1183 if (XEXP (src, 0) == dest)
1184 other = XEXP (src, 1);
1185 else if (XEXP (src, 1) == dest)
1186 other = XEXP (src, 0);
1188 if (! other || find_base_value (other))
1189 new_reg_base_value[regno] = 0;
1193 if (XEXP (src, 0) != dest || !CONST_INT_P (XEXP (src, 1)))
1194 new_reg_base_value[regno] = 0;
1197 new_reg_base_value[regno] = 0;
1200 /* If this is the first set of a register, record the value. */
1201 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
1202 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
1203 new_reg_base_value[regno] = find_base_value (src);
1205 reg_seen[regno] = 1;
1208 /* If a value is known for REGNO, return it. */
1211 get_reg_known_value (unsigned int regno)
1213 if (regno >= FIRST_PSEUDO_REGISTER)
1215 regno -= FIRST_PSEUDO_REGISTER;
1216 if (regno < reg_known_value_size)
1217 return reg_known_value[regno];
1225 set_reg_known_value (unsigned int regno, rtx val)
1227 if (regno >= FIRST_PSEUDO_REGISTER)
1229 regno -= FIRST_PSEUDO_REGISTER;
1230 if (regno < reg_known_value_size)
1231 reg_known_value[regno] = val;
1235 /* Similarly for reg_known_equiv_p. */
1238 get_reg_known_equiv_p (unsigned int regno)
1240 if (regno >= FIRST_PSEUDO_REGISTER)
1242 regno -= FIRST_PSEUDO_REGISTER;
1243 if (regno < reg_known_value_size)
1244 return reg_known_equiv_p[regno];
1250 set_reg_known_equiv_p (unsigned int regno, bool val)
1252 if (regno >= FIRST_PSEUDO_REGISTER)
1254 regno -= FIRST_PSEUDO_REGISTER;
1255 if (regno < reg_known_value_size)
1256 reg_known_equiv_p[regno] = val;
1261 /* Returns a canonical version of X, from the point of view alias
1262 analysis. (For example, if X is a MEM whose address is a register,
1263 and the register has a known value (say a SYMBOL_REF), then a MEM
1264 whose address is the SYMBOL_REF is returned.) */
1269 /* Recursively look for equivalences. */
1270 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1272 rtx t = get_reg_known_value (REGNO (x));
1276 return canon_rtx (t);
1279 if (GET_CODE (x) == PLUS)
1281 rtx x0 = canon_rtx (XEXP (x, 0));
1282 rtx x1 = canon_rtx (XEXP (x, 1));
1284 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
1286 if (CONST_INT_P (x0))
1287 return plus_constant (x1, INTVAL (x0));
1288 else if (CONST_INT_P (x1))
1289 return plus_constant (x0, INTVAL (x1));
1290 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
1294 /* This gives us much better alias analysis when called from
1295 the loop optimizer. Note we want to leave the original
1296 MEM alone, but need to return the canonicalized MEM with
1297 all the flags with their original values. */
1299 x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));
1304 /* Return 1 if X and Y are identical-looking rtx's.
1305 Expect that X and Y has been already canonicalized.
1307 We use the data in reg_known_value above to see if two registers with
1308 different numbers are, in fact, equivalent. */
1311 rtx_equal_for_memref_p (const_rtx x, const_rtx y)
1318 if (x == 0 && y == 0)
1320 if (x == 0 || y == 0)
1326 code = GET_CODE (x);
1327 /* Rtx's of different codes cannot be equal. */
1328 if (code != GET_CODE (y))
1331 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
1332 (REG:SI x) and (REG:HI x) are NOT equivalent. */
1334 if (GET_MODE (x) != GET_MODE (y))
1337 /* Some RTL can be compared without a recursive examination. */
1341 return REGNO (x) == REGNO (y);
1344 return XEXP (x, 0) == XEXP (y, 0);
1347 return XSTR (x, 0) == XSTR (y, 0);
1353 /* There's no need to compare the contents of CONST_DOUBLEs or
1354 CONST_INTs because pointer equality is a good enough
1355 comparison for these nodes. */
1362 /* canon_rtx knows how to handle plus. No need to canonicalize. */
1364 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
1365 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
1366 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
1367 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
1368 /* For commutative operations, the RTX match if the operand match in any
1369 order. Also handle the simple binary and unary cases without a loop. */
1370 if (COMMUTATIVE_P (x))
1372 rtx xop0 = canon_rtx (XEXP (x, 0));
1373 rtx yop0 = canon_rtx (XEXP (y, 0));
1374 rtx yop1 = canon_rtx (XEXP (y, 1));
1376 return ((rtx_equal_for_memref_p (xop0, yop0)
1377 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
1378 || (rtx_equal_for_memref_p (xop0, yop1)
1379 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
1381 else if (NON_COMMUTATIVE_P (x))
1383 return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1384 canon_rtx (XEXP (y, 0)))
1385 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
1386 canon_rtx (XEXP (y, 1))));
1388 else if (UNARY_P (x))
1389 return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1390 canon_rtx (XEXP (y, 0)));
1392 /* Compare the elements. If any pair of corresponding elements
1393 fail to match, return 0 for the whole things.
1395 Limit cases to types which actually appear in addresses. */
1397 fmt = GET_RTX_FORMAT (code);
1398 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1403 if (XINT (x, i) != XINT (y, i))
1408 /* Two vectors must have the same length. */
1409 if (XVECLEN (x, i) != XVECLEN (y, i))
1412 /* And the corresponding elements must match. */
1413 for (j = 0; j < XVECLEN (x, i); j++)
1414 if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
1415 canon_rtx (XVECEXP (y, i, j))) == 0)
1420 if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
1421 canon_rtx (XEXP (y, i))) == 0)
1425 /* This can happen for asm operands. */
1427 if (strcmp (XSTR (x, i), XSTR (y, i)))
1431 /* This can happen for an asm which clobbers memory. */
1435 /* It is believed that rtx's at this level will never
1436 contain anything but integers and other rtx's,
1437 except for within LABEL_REFs and SYMBOL_REFs. */
1446 find_base_term (rtx x)
1449 struct elt_loc_list *l;
1451 #if defined (FIND_BASE_TERM)
1452 /* Try machine-dependent ways to find the base term. */
1453 x = FIND_BASE_TERM (x);
1456 switch (GET_CODE (x))
1459 return REG_BASE_VALUE (x);
1462 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
1472 return find_base_term (XEXP (x, 0));
1475 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
1477 rtx temp = find_base_term (XEXP (x, 0));
1479 if (temp != 0 && CONSTANT_P (temp))
1480 temp = convert_memory_address (Pmode, temp);
1486 val = CSELIB_VAL_PTR (x);
1489 for (l = val->locs; l; l = l->next)
1490 if ((x = find_base_term (l->loc)) != 0)
1495 /* The standard form is (lo_sum reg sym) so look only at the
1497 return find_base_term (XEXP (x, 1));
1501 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
1507 rtx tmp1 = XEXP (x, 0);
1508 rtx tmp2 = XEXP (x, 1);
1510 /* This is a little bit tricky since we have to determine which of
1511 the two operands represents the real base address. Otherwise this
1512 routine may return the index register instead of the base register.
1514 That may cause us to believe no aliasing was possible, when in
1515 fact aliasing is possible.
1517 We use a few simple tests to guess the base register. Additional
1518 tests can certainly be added. For example, if one of the operands
1519 is a shift or multiply, then it must be the index register and the
1520 other operand is the base register. */
1522 if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
1523 return find_base_term (tmp2);
1525 /* If either operand is known to be a pointer, then use it
1526 to determine the base term. */
1527 if (REG_P (tmp1) && REG_POINTER (tmp1))
1529 rtx base = find_base_term (tmp1);
1534 if (REG_P (tmp2) && REG_POINTER (tmp2))
1536 rtx base = find_base_term (tmp2);
1541 /* Neither operand was known to be a pointer. Go ahead and find the
1542 base term for both operands. */
1543 tmp1 = find_base_term (tmp1);
1544 tmp2 = find_base_term (tmp2);
1546 /* If either base term is named object or a special address
1547 (like an argument or stack reference), then use it for the
1550 && (GET_CODE (tmp1) == SYMBOL_REF
1551 || GET_CODE (tmp1) == LABEL_REF
1552 || (GET_CODE (tmp1) == ADDRESS
1553 && GET_MODE (tmp1) != VOIDmode)))
1557 && (GET_CODE (tmp2) == SYMBOL_REF
1558 || GET_CODE (tmp2) == LABEL_REF
1559 || (GET_CODE (tmp2) == ADDRESS
1560 && GET_MODE (tmp2) != VOIDmode)))
1563 /* We could not determine which of the two operands was the
1564 base register and which was the index. So we can determine
1565 nothing from the base alias check. */
1570 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
1571 return find_base_term (XEXP (x, 0));
1583 /* Return 0 if the addresses X and Y are known to point to different
1584 objects, 1 if they might be pointers to the same object. */
1587 base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
1588 enum machine_mode y_mode)
1590 rtx x_base = find_base_term (x);
1591 rtx y_base = find_base_term (y);
1593 /* If the address itself has no known base see if a known equivalent
1594 value has one. If either address still has no known base, nothing
1595 is known about aliasing. */
1600 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
1603 x_base = find_base_term (x_c);
1611 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
1614 y_base = find_base_term (y_c);
1619 /* If the base addresses are equal nothing is known about aliasing. */
1620 if (rtx_equal_p (x_base, y_base))
1623 /* The base addresses are different expressions. If they are not accessed
1624 via AND, there is no conflict. We can bring knowledge of object
1625 alignment into play here. For example, on alpha, "char a, b;" can
1626 alias one another, though "char a; long b;" cannot. AND addesses may
1627 implicitly alias surrounding objects; i.e. unaligned access in DImode
1628 via AND address can alias all surrounding object types except those
1629 with aligment 8 or higher. */
1630 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
1632 if (GET_CODE (x) == AND
1633 && (!CONST_INT_P (XEXP (x, 1))
1634 || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
1636 if (GET_CODE (y) == AND
1637 && (!CONST_INT_P (XEXP (y, 1))
1638 || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
1641 /* Differing symbols not accessed via AND never alias. */
1642 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
1645 /* If one address is a stack reference there can be no alias:
1646 stack references using different base registers do not alias,
1647 a stack reference can not alias a parameter, and a stack reference
1648 can not alias a global. */
1649 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
1650 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
1653 if (! flag_argument_noalias)
1656 if (flag_argument_noalias > 1)
1659 /* Weak noalias assertion (arguments are distinct, but may match globals). */
1660 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
1663 /* Convert the address X into something we can use. This is done by returning
1664 it unchanged unless it is a value; in the latter case we call cselib to get
1665 a more useful rtx. */
1671 struct elt_loc_list *l;
1673 if (GET_CODE (x) != VALUE)
1675 v = CSELIB_VAL_PTR (x);
1678 for (l = v->locs; l; l = l->next)
1679 if (CONSTANT_P (l->loc))
1681 for (l = v->locs; l; l = l->next)
1682 if (!REG_P (l->loc) && !MEM_P (l->loc))
1685 return v->locs->loc;
1690 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
1691 where SIZE is the size in bytes of the memory reference. If ADDR
1692 is not modified by the memory reference then ADDR is returned. */
1695 addr_side_effect_eval (rtx addr, int size, int n_refs)
1699 switch (GET_CODE (addr))
1702 offset = (n_refs + 1) * size;
1705 offset = -(n_refs + 1) * size;
1708 offset = n_refs * size;
1711 offset = -n_refs * size;
1719 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
1722 addr = XEXP (addr, 0);
1723 addr = canon_rtx (addr);
1728 /* Return nonzero if X and Y (memory addresses) could reference the
1729 same location in memory. C is an offset accumulator. When
1730 C is nonzero, we are testing aliases between X and Y + C.
1731 XSIZE is the size in bytes of the X reference,
1732 similarly YSIZE is the size in bytes for Y.
1733 Expect that canon_rtx has been already called for X and Y.
1735 If XSIZE or YSIZE is zero, we do not know the amount of memory being
1736 referenced (the reference was BLKmode), so make the most pessimistic
1739 If XSIZE or YSIZE is negative, we may access memory outside the object
1740 being referenced as a side effect. This can happen when using AND to
1741 align memory references, as is done on the Alpha.
1743 Nice to notice that varying addresses cannot conflict with fp if no
1744 local variables had their addresses taken, but that's too hard now. */
1747 memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
1749 if (GET_CODE (x) == VALUE)
1751 if (GET_CODE (y) == VALUE)
1753 if (GET_CODE (x) == HIGH)
1755 else if (GET_CODE (x) == LO_SUM)
1758 x = addr_side_effect_eval (x, xsize, 0);
1759 if (GET_CODE (y) == HIGH)
1761 else if (GET_CODE (y) == LO_SUM)
1764 y = addr_side_effect_eval (y, ysize, 0);
1766 if (rtx_equal_for_memref_p (x, y))
1768 if (xsize <= 0 || ysize <= 0)
1770 if (c >= 0 && xsize > c)
1772 if (c < 0 && ysize+c > 0)
1777 /* This code used to check for conflicts involving stack references and
1778 globals but the base address alias code now handles these cases. */
1780 if (GET_CODE (x) == PLUS)
1782 /* The fact that X is canonicalized means that this
1783 PLUS rtx is canonicalized. */
1784 rtx x0 = XEXP (x, 0);
1785 rtx x1 = XEXP (x, 1);
1787 if (GET_CODE (y) == PLUS)
1789 /* The fact that Y is canonicalized means that this
1790 PLUS rtx is canonicalized. */
1791 rtx y0 = XEXP (y, 0);
1792 rtx y1 = XEXP (y, 1);
1794 if (rtx_equal_for_memref_p (x1, y1))
1795 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1796 if (rtx_equal_for_memref_p (x0, y0))
1797 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1798 if (CONST_INT_P (x1))
1800 if (CONST_INT_P (y1))
1801 return memrefs_conflict_p (xsize, x0, ysize, y0,
1802 c - INTVAL (x1) + INTVAL (y1));
1804 return memrefs_conflict_p (xsize, x0, ysize, y,
1807 else if (CONST_INT_P (y1))
1808 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1812 else if (CONST_INT_P (x1))
1813 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1815 else if (GET_CODE (y) == PLUS)
1817 /* The fact that Y is canonicalized means that this
1818 PLUS rtx is canonicalized. */
1819 rtx y0 = XEXP (y, 0);
1820 rtx y1 = XEXP (y, 1);
1822 if (CONST_INT_P (y1))
1823 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1828 if (GET_CODE (x) == GET_CODE (y))
1829 switch (GET_CODE (x))
1833 /* Handle cases where we expect the second operands to be the
1834 same, and check only whether the first operand would conflict
1837 rtx x1 = canon_rtx (XEXP (x, 1));
1838 rtx y1 = canon_rtx (XEXP (y, 1));
1839 if (! rtx_equal_for_memref_p (x1, y1))
1841 x0 = canon_rtx (XEXP (x, 0));
1842 y0 = canon_rtx (XEXP (y, 0));
1843 if (rtx_equal_for_memref_p (x0, y0))
1844 return (xsize == 0 || ysize == 0
1845 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1847 /* Can't properly adjust our sizes. */
1848 if (!CONST_INT_P (x1))
1850 xsize /= INTVAL (x1);
1851 ysize /= INTVAL (x1);
1853 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1860 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1861 as an access with indeterminate size. Assume that references
1862 besides AND are aligned, so if the size of the other reference is
1863 at least as large as the alignment, assume no other overlap. */
1864 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)))
1866 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1868 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
1870 if (GET_CODE (y) == AND && CONST_INT_P (XEXP (y, 1)))
1872 /* ??? If we are indexing far enough into the array/structure, we
1873 may yet be able to determine that we can not overlap. But we
1874 also need to that we are far enough from the end not to overlap
1875 a following reference, so we do nothing with that for now. */
1876 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1878 return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
1883 if (CONST_INT_P (x) && CONST_INT_P (y))
1885 c += (INTVAL (y) - INTVAL (x));
1886 return (xsize <= 0 || ysize <= 0
1887 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1890 if (GET_CODE (x) == CONST)
1892 if (GET_CODE (y) == CONST)
1893 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1894 ysize, canon_rtx (XEXP (y, 0)), c);
1896 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1899 if (GET_CODE (y) == CONST)
1900 return memrefs_conflict_p (xsize, x, ysize,
1901 canon_rtx (XEXP (y, 0)), c);
1904 return (xsize <= 0 || ysize <= 0
1905 || (rtx_equal_for_memref_p (x, y)
1906 && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1913 /* Functions to compute memory dependencies.
1915 Since we process the insns in execution order, we can build tables
1916 to keep track of what registers are fixed (and not aliased), what registers
1917 are varying in known ways, and what registers are varying in unknown
1920 If both memory references are volatile, then there must always be a
1921 dependence between the two references, since their order can not be
1922 changed. A volatile and non-volatile reference can be interchanged
1925 A MEM_IN_STRUCT reference at a non-AND varying address can never
1926 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We
1927 also must allow AND addresses, because they may generate accesses
1928 outside the object being referenced. This is used to generate
1929 aligned addresses from unaligned addresses, for instance, the alpha
1930 storeqi_unaligned pattern. */
1932 /* Read dependence: X is read after read in MEM takes place. There can
1933 only be a dependence here if both reads are volatile. */
1936 read_dependence (const_rtx mem, const_rtx x)
1938 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1941 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1942 MEM2 is a reference to a structure at a varying address, or returns
1943 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1944 value is returned MEM1 and MEM2 can never alias. VARIES_P is used
1945 to decide whether or not an address may vary; it should return
1946 nonzero whenever variation is possible.
1947 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
1950 fixed_scalar_and_varying_struct_p (const_rtx mem1, const_rtx mem2, rtx mem1_addr,
1952 bool (*varies_p) (const_rtx, bool))
1954 if (! flag_strict_aliasing)
1957 if (MEM_ALIAS_SET (mem2)
1958 && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1959 && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
1960 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1964 if (MEM_ALIAS_SET (mem1)
1965 && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1966 && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
1967 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1974 /* Returns nonzero if something about the mode or address format MEM1
1975 indicates that it might well alias *anything*. */
1978 aliases_everything_p (const_rtx mem)
1980 if (GET_CODE (XEXP (mem, 0)) == AND)
1981 /* If the address is an AND, it's very hard to know at what it is
1982 actually pointing. */
1988 /* Return true if we can determine that the fields referenced cannot
1989 overlap for any pair of objects. */
1992 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1994 const_tree fieldx, fieldy, typex, typey, orig_y;
1996 if (!flag_strict_aliasing)
2001 /* The comparison has to be done at a common type, since we don't
2002 know how the inheritance hierarchy works. */
2006 fieldx = TREE_OPERAND (x, 1);
2007 typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));
2012 fieldy = TREE_OPERAND (y, 1);
2013 typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));
2018 y = TREE_OPERAND (y, 0);
2020 while (y && TREE_CODE (y) == COMPONENT_REF);
2022 x = TREE_OPERAND (x, 0);
2024 while (x && TREE_CODE (x) == COMPONENT_REF);
2025 /* Never found a common type. */
2029 /* If we're left with accessing different fields of a structure,
2031 if (TREE_CODE (typex) == RECORD_TYPE
2032 && fieldx != fieldy)
2035 /* The comparison on the current field failed. If we're accessing
2036 a very nested structure, look at the next outer level. */
2037 x = TREE_OPERAND (x, 0);
2038 y = TREE_OPERAND (y, 0);
2041 && TREE_CODE (x) == COMPONENT_REF
2042 && TREE_CODE (y) == COMPONENT_REF);
2047 /* Look at the bottom of the COMPONENT_REF list for a DECL, and return it. */
2050 decl_for_component_ref (tree x)
2054 x = TREE_OPERAND (x, 0);
2056 while (x && TREE_CODE (x) == COMPONENT_REF);
2058 return x && DECL_P (x) ? x : NULL_TREE;
2061 /* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
2062 offset of the field reference. */
2065 adjust_offset_for_component_ref (tree x, rtx offset)
2067 HOST_WIDE_INT ioffset;
2072 ioffset = INTVAL (offset);
2075 tree offset = component_ref_field_offset (x);
2076 tree field = TREE_OPERAND (x, 1);
2078 if (! host_integerp (offset, 1))
2080 ioffset += (tree_low_cst (offset, 1)
2081 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2084 x = TREE_OPERAND (x, 0);
2086 while (x && TREE_CODE (x) == COMPONENT_REF);
2088 return GEN_INT (ioffset);
2091 /* Return nonzero if we can determine the exprs corresponding to memrefs
2092 X and Y and they do not overlap. */
2095 nonoverlapping_memrefs_p (const_rtx x, const_rtx y)
2097 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
2100 rtx moffsetx, moffsety;
2101 HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;
2103 /* Unless both have exprs, we can't tell anything. */
2104 if (exprx == 0 || expry == 0)
2107 /* If both are field references, we may be able to determine something. */
2108 if (TREE_CODE (exprx) == COMPONENT_REF
2109 && TREE_CODE (expry) == COMPONENT_REF
2110 && nonoverlapping_component_refs_p (exprx, expry))
2114 /* If the field reference test failed, look at the DECLs involved. */
2115 moffsetx = MEM_OFFSET (x);
2116 if (TREE_CODE (exprx) == COMPONENT_REF)
2118 if (TREE_CODE (expry) == VAR_DECL
2119 && POINTER_TYPE_P (TREE_TYPE (expry)))
2121 tree field = TREE_OPERAND (exprx, 1);
2122 tree fieldcontext = DECL_FIELD_CONTEXT (field);
2123 if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
2128 tree t = decl_for_component_ref (exprx);
2131 moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
2135 else if (INDIRECT_REF_P (exprx))
2137 exprx = TREE_OPERAND (exprx, 0);
2138 if (flag_argument_noalias < 2
2139 || TREE_CODE (exprx) != PARM_DECL)
2143 moffsety = MEM_OFFSET (y);
2144 if (TREE_CODE (expry) == COMPONENT_REF)
2146 if (TREE_CODE (exprx) == VAR_DECL
2147 && POINTER_TYPE_P (TREE_TYPE (exprx)))
2149 tree field = TREE_OPERAND (expry, 1);
2150 tree fieldcontext = DECL_FIELD_CONTEXT (field);
2151 if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
2156 tree t = decl_for_component_ref (expry);
2159 moffsety = adjust_offset_for_component_ref (expry, moffsety);
2163 else if (INDIRECT_REF_P (expry))
2165 expry = TREE_OPERAND (expry, 0);
2166 if (flag_argument_noalias < 2
2167 || TREE_CODE (expry) != PARM_DECL)
2171 if (! DECL_P (exprx) || ! DECL_P (expry))
2174 rtlx = DECL_RTL (exprx);
2175 rtly = DECL_RTL (expry);
2177 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
2178 can't overlap unless they are the same because we never reuse that part
2179 of the stack frame used for locals for spilled pseudos. */
2180 if ((!MEM_P (rtlx) || !MEM_P (rtly))
2181 && ! rtx_equal_p (rtlx, rtly))
2184 /* Get the base and offsets of both decls. If either is a register, we
2185 know both are and are the same, so use that as the base. The only
2186 we can avoid overlap is if we can deduce that they are nonoverlapping
2187 pieces of that decl, which is very rare. */
2188 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
2189 if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1)))
2190 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
2192 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
2193 if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1)))
2194 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
2196 /* If the bases are different, we know they do not overlap if both
2197 are constants or if one is a constant and the other a pointer into the
2198 stack frame.  Otherwise a different base means we can't tell if they
2199 overlap or not.  */
2200 if (! rtx_equal_p (basex, basey))
2201 return ((CONSTANT_P (basex) && CONSTANT_P (basey))
2202 || (CONSTANT_P (basex) && REG_P (basey)
2203 && REGNO_PTR_FRAME_P (REGNO (basey)))
2204 || (CONSTANT_P (basey) && REG_P (basex)
2205 && REGNO_PTR_FRAME_P (REGNO (basex))));
2207 sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
2208 : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
2209 : -1);
2210 sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
2211 : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly)) :
2212 -1);
2214 /* If we have an offset for either memref, it can update the values computed
2215 above.  */
2216 if (moffsetx)
2217 offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
2218 if (moffsety)
2219 offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);
2221 /* If a memref has both a size and an offset, we can use the smaller size.
2222 We can't do this if the offset isn't known because we must view this
2223 memref as being anywhere inside the DECL's MEM. */
2224 if (MEM_SIZE (x) && moffsetx)
2225 sizex = INTVAL (MEM_SIZE (x));
2226 if (MEM_SIZE (y) && moffsety)
2227 sizey = INTVAL (MEM_SIZE (y));
2229 /* Put the values of the memref with the lower offset in X's values. */
2230 if (offsetx > offsety)
2232 tem = offsetx, offsetx = offsety, offsety = tem;
2233 tem = sizex, sizex = sizey, sizey = tem;
2236 /* If we don't know the size of the lower-offset value, we can't tell
2237 if they conflict. Otherwise, we do the test. */
2238 return sizex >= 0 && offsety >= offsetx + sizex;
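/* Illustrative sketch only (hypothetical variables, not part of the pass):
   two references such as

     struct S { int a; int b; } s;
     s.a = 1;     <- store, a COMPONENT_REF of the decl `s'
     t = s.b;     <- load, another COMPONENT_REF of the decl `s'

   can be resolved either by the nonoverlapping_component_refs_p test above
   (distinct fields of the same structure) or, failing that, by tracing both
   MEM_EXPRs back to the decl `s' and comparing the offset/size ranges of
   the two accesses, which here are disjoint.  */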
2241 /* True dependence: X is read after store in MEM takes place. */
2244 true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
2245 bool (*varies) (const_rtx, bool))
2247 rtx x_addr, mem_addr;
2248 rtx base;
2250 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2253 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2254 This is used in epilogue deallocation functions, and in cselib. */
2255 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2257 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2259 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2260 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2263 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2266 /* Read-only memory is by definition never modified, and therefore can't
2267 conflict with anything.  We don't expect to find the read-only flag set
2268 on MEM, but stupid user tricks can produce it, so don't die. */
2269 if (MEM_READONLY_P (x))
2272 if (nonoverlapping_memrefs_p (mem, x))
2275 if (mem_mode == VOIDmode)
2276 mem_mode = GET_MODE (mem);
2278 x_addr = get_addr (XEXP (x, 0));
2279 mem_addr = get_addr (XEXP (mem, 0));
2281 base = find_base_term (x_addr);
2282 if (base && (GET_CODE (base) == LABEL_REF
2283 || (GET_CODE (base) == SYMBOL_REF
2284 && CONSTANT_POOL_ADDRESS_P (base))))
2287 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2290 x_addr = canon_rtx (x_addr);
2291 mem_addr = canon_rtx (mem_addr);
2293 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2294 SIZE_FOR_MODE (x), x_addr, 0))
2297 if (aliases_everything_p (x))
2300 /* We cannot use aliases_everything_p to test MEM, since we must look
2301 at MEM_MODE, rather than GET_MODE (MEM). */
2302 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2305 /* In true_dependence we also allow BLKmode to alias anything. Why
2306 don't we do this in anti_dependence and output_dependence? */
2307 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2310 if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
2313 return rtx_refs_may_alias_p (x, mem, true);
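/* Hedged usage sketch (store_mem and load_mem are hypothetical MEMs; this
   is not a call site in this file): a pass that must keep a load after an
   earlier store when the two may conflict can ask

     if (true_dependence (store_mem, GET_MODE (store_mem), load_mem,
                          rtx_varies_p))
       ...treat the load as dependent on the store...

   passing rtx_varies_p (or rtx_addr_varies_p) as the VARIES callback.  */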
2316 /* Canonical true dependence: X is read after store in MEM takes place.
2317 Variant of true_dependence which assumes MEM has already been
2318 canonicalized (hence we no longer do that here).
2319 The mem_addr argument has been added, since true_dependence computed
2320 this value prior to canonicalizing.
2321 If x_addr is non-NULL, it is used in preference to XEXP (x, 0). */
2324 canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
2325 const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool))
2327 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2330 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2331 This is used in epilogue deallocation functions. */
2332 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2334 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2336 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2337 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2340 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2343 /* Read-only memory is by definition never modified, and therefore can't
2344 conflict with anything.  We don't expect to find the read-only flag set
2345 on MEM, but stupid user tricks can produce it, so don't die. */
2346 if (MEM_READONLY_P (x))
2349 if (nonoverlapping_memrefs_p (x, mem))
2353 x_addr = get_addr (XEXP (x, 0));
2355 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2358 x_addr = canon_rtx (x_addr);
2359 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2360 SIZE_FOR_MODE (x), x_addr, 0))
2363 if (aliases_everything_p (x))
2366 /* We cannot use aliases_everything_p to test MEM, since we must look
2367 at MEM_MODE, rather than GET_MODE (MEM). */
2368 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2371 /* In true_dependence we also allow BLKmode to alias anything. Why
2372 don't we do this in anti_dependence and output_dependence? */
2373 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2376 if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
2379 return rtx_refs_may_alias_p (x, mem, true);
2382 /* Returns nonzero if a write to X might alias a previous read from
2383 (or, if WRITEP is nonzero, a write to) MEM. */
2386 write_dependence_p (const_rtx mem, const_rtx x, int writep)
2388 rtx x_addr, mem_addr;
2389 const_rtx fixed_scalar;
2390 rtx base;
2392 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2395 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2396 This is used in epilogue deallocation functions. */
2397 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2399 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2401 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2402 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2405 /* A read from read-only memory can't conflict with read-write memory. */
2406 if (!writep && MEM_READONLY_P (mem))
2409 if (nonoverlapping_memrefs_p (x, mem))
2412 x_addr = get_addr (XEXP (x, 0));
2413 mem_addr = get_addr (XEXP (mem, 0));
2417 base = find_base_term (mem_addr);
2418 if (base && (GET_CODE (base) == LABEL_REF
2419 || (GET_CODE (base) == SYMBOL_REF
2420 && CONSTANT_POOL_ADDRESS_P (base))))
2424 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
2425 GET_MODE (mem)))
2426 return 0;
2428 x_addr = canon_rtx (x_addr);
2429 mem_addr = canon_rtx (mem_addr);
2431 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
2432 SIZE_FOR_MODE (x), x_addr, 0))
2435 fixed_scalar
2436 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2437 rtx_addr_varies_p);
2439 if ((fixed_scalar == mem && !aliases_everything_p (x))
2440 || (fixed_scalar == x && !aliases_everything_p (mem)))
2443 return rtx_refs_may_alias_p (x, mem, false);
2446 /* Anti dependence: X is written after read in MEM takes place. */
2449 anti_dependence (const_rtx mem, const_rtx x)
2451 return write_dependence_p (mem, x, /*writep=*/0);
2454 /* Output dependence: X is written after store in MEM takes place. */
2457 output_dependence (const_rtx mem, const_rtx x)
2459 return write_dependence_p (mem, x, /*writep=*/1);
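/* Hedged summary sketch (the pointers below are hypothetical and this is
   not part of the pass): for the straight-line sequence

     *p = 1;    (A)
     t = *q;    (B)
     *r = 2;    (C)

   true_dependence asks whether (B) may read what (A) wrote,
   anti_dependence asks whether (C) may overwrite what (B) read, and
   output_dependence asks whether (C) may overwrite what (A) wrote.  */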
2464 init_alias_target (void)
2468 memset (static_reg_base_value, 0, sizeof static_reg_base_value);
2470 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2471 /* Check whether this register can hold an incoming pointer
2472 argument. FUNCTION_ARG_REGNO_P tests outgoing register
2473 numbers, so translate if necessary due to register windows. */
2474 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
2475 && HARD_REGNO_MODE_OK (i, Pmode))
2476 static_reg_base_value[i]
2477 = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));
2479 static_reg_base_value[STACK_POINTER_REGNUM]
2480 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
2481 static_reg_base_value[ARG_POINTER_REGNUM]
2482 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
2483 static_reg_base_value[FRAME_POINTER_REGNUM]
2484 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
2485 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2486 static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
2487 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
2491 /* Set MEMORY_MODIFIED when X modifies DATA (which is assumed
2492 to be a memory reference).  */
2493 static bool memory_modified;
2495 memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
2499 if (anti_dependence (x, (const_rtx)data) || output_dependence (x, (const_rtx)data))
2500 memory_modified = true;
2505 /* Return true when INSN possibly modifies the memory contents of MEM
2506 (i.e. the address can be modified).  */
2508 memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
2512 memory_modified = false;
2513 note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX(mem));
2514 return memory_modified;
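/* Hedged usage sketch (mem, insn, start and end are hypothetical): a caller
   scanning forward can stop at the first insn whose pattern stores into
   something that may conflict with MEM:

     for (insn = start; insn != end; insn = NEXT_INSN (insn))
       if (INSN_P (insn) && memory_modified_in_insn_p (mem, insn))
         break;

   Only stores appearing in the insn's own pattern are considered, since the
   walk is done with note_stores.  */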
2517 /* Initialize the aliasing machinery.  Initialize the REG_KNOWN_VALUE
2518 array.  */
2521 init_alias_analysis (void)
2523 unsigned int maxreg = max_reg_num ();
2524 int changed, pass;
2525 int i;
2526 unsigned int ui;
2527 rtx insn;
2529 timevar_push (TV_ALIAS_ANALYSIS);
2531 reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
2532 reg_known_value = GGC_CNEWVEC (rtx, reg_known_value_size);
2533 reg_known_equiv_p = XCNEWVEC (bool, reg_known_value_size);
2535 /* If we have memory allocated from the previous run, use it. */
2536 if (old_reg_base_value)
2537 reg_base_value = old_reg_base_value;
2540 VEC_truncate (rtx, reg_base_value, 0);
2542 VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);
2544 new_reg_base_value = XNEWVEC (rtx, maxreg);
2545 reg_seen = XNEWVEC (char, maxreg);
2547 /* The basic idea is that each pass through this loop will use the
2548 "constant" information from the previous pass to propagate alias
2549 information through another level of assignments.
2551 This could get expensive if the assignment chains are long. Maybe
2552 we should throttle the number of iterations, possibly based on
2553 the optimization level or flag_expensive_optimizations.
2555 We could propagate more information in the first pass by making use
2556 of DF_REG_DEF_COUNT to determine immediately that the alias information
2557 for a pseudo is "constant".
2559 A program with an uninitialized variable can cause an infinite loop
2560 here. Instead of doing a full dataflow analysis to detect such problems
2561 we just cap the number of iterations for the loop.
2563 The state of the arrays for the set chain in question does not matter
2564 since the program has undefined behavior. */
2569 /* Assume nothing will change this iteration of the loop. */
2572 /* We want to assign the same IDs each iteration of this loop, so
2573 restart the count from zero on every pass. */
2576 /* We're at the start of the function each iteration through the
2577 loop, so we're copying arguments. */
2578 copying_arguments = true;
2580 /* Wipe the potential alias information clean for this pass. */
2581 memset (new_reg_base_value, 0, maxreg * sizeof (rtx));
2583 /* Wipe the reg_seen array clean. */
2584 memset (reg_seen, 0, maxreg);
2586 /* Mark all hard registers which may contain an address.
2587 The stack, frame and argument pointers may contain an address.
2588 An argument register which can hold a Pmode value may contain
2589 an address even if it is not in BASE_REGS.
2591 The address expression is VOIDmode for an argument and
2592 Pmode for other registers. */
2594 memcpy (new_reg_base_value, static_reg_base_value,
2595 FIRST_PSEUDO_REGISTER * sizeof (rtx));
2597 /* Walk the insns adding values to the new_reg_base_value array. */
2598 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2604 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
2605 /* The prologue/epilogue insns are not threaded onto the
2606 insn chain until after reload has completed. Thus,
2607 there is no sense wasting time checking if INSN is in
2608 the prologue/epilogue until after reload has completed. */
2609 if (reload_completed
2610 && prologue_epilogue_contains (insn))
2614 /* If this insn has a noalias note, process it.  Otherwise,
2615 scan for sets.  A simple set will have no side effects
2616 which could change the base value of any other register. */
2618 if (GET_CODE (PATTERN (insn)) == SET
2619 && REG_NOTES (insn) != 0
2620 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
2621 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
2623 note_stores (PATTERN (insn), record_set, NULL);
2625 set = single_set (insn);
2627 if (set != 0
2628 && REG_P (SET_DEST (set))
2629 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
2631 unsigned int regno = REGNO (SET_DEST (set));
2632 rtx src = SET_SRC (set);
2635 note = find_reg_equal_equiv_note (insn);
2636 if (note && REG_NOTE_KIND (note) == REG_EQUAL
2637 && DF_REG_DEF_COUNT (regno) != 1)
2638 note = NULL_RTX;
2640 if (note != NULL_RTX
2641 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
2642 && ! rtx_varies_p (XEXP (note, 0), 1)
2643 && ! reg_overlap_mentioned_p (SET_DEST (set),
2644 XEXP (note, 0)))
2646 set_reg_known_value (regno, XEXP (note, 0));
2647 set_reg_known_equiv_p (regno,
2648 REG_NOTE_KIND (note) == REG_EQUIV);
2650 else if (DF_REG_DEF_COUNT (regno) == 1
2651 && GET_CODE (src) == PLUS
2652 && REG_P (XEXP (src, 0))
2653 && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
2654 && CONST_INT_P (XEXP (src, 1)))
2656 t = plus_constant (t, INTVAL (XEXP (src, 1)));
2657 set_reg_known_value (regno, t);
2658 set_reg_known_equiv_p (regno, 0);
2660 else if (DF_REG_DEF_COUNT (regno) == 1
2661 && ! rtx_varies_p (src, 1))
2663 set_reg_known_value (regno, src);
2664 set_reg_known_equiv_p (regno, 0);
2668 else if (NOTE_P (insn)
2669 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
2670 copying_arguments = false;
2673 /* Now propagate values from new_reg_base_value to reg_base_value. */
2674 gcc_assert (maxreg == (unsigned int) max_reg_num ());
2676 for (ui = 0; ui < maxreg; ui++)
2678 if (new_reg_base_value[ui]
2679 && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
2680 && ! rtx_equal_p (new_reg_base_value[ui],
2681 VEC_index (rtx, reg_base_value, ui)))
2683 VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
2688 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
2690 /* Fill in the remaining entries. */
2691 for (i = 0; i < (int)reg_known_value_size; i++)
2692 if (reg_known_value[i] == 0)
2693 reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];
2696 free (new_reg_base_value);
2697 new_reg_base_value = 0;
2698 free (reg_seen);
2699 reg_seen = 0;
2700 timevar_pop (TV_ALIAS_ANALYSIS);
2704 end_alias_analysis (void)
2706 old_reg_base_value = reg_base_value;
2707 ggc_free (reg_known_value);
2708 reg_known_value = 0;
2709 reg_known_value_size = 0;
2710 free (reg_known_equiv_p);
2711 reg_known_equiv_p = 0;
2714 #include "gt-alias.h"