1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
3 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by John Carr (jfc@mit.edu).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
37 #include "diagnostic-core.h"
40 #include "splay-tree.h"
42 #include "langhooks.h"
46 #include "tree-pass.h"
47 #include "ipa-type-escape.h"
49 #include "tree-ssa-alias.h"
50 #include "pointer-set.h"
51 #include "tree-flow.h"
53 /* The aliasing API provided here solves related but different problems:
55 Say there exists (in C)
69 Consider the four questions:
71 Can a store to x1 interfere with px2->y1?
72 Can a store to x1 interfere with px2->z2?
74 Can a store to x1 change the value pointed to by py?
75 Can a store to x1 change the value pointed to by pz?
77 The answers to these questions can be yes, yes, yes, and maybe.
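   (For concreteness -- a sketch, not the original example -- the questions
   above assume declarations along these lines:

     struct X { struct Y y1; struct Z z2; } x1, *px2;
     struct Y *py;
     struct Z *pz;

   where px2, py and pz may or may not point into x1.)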
79 The first two questions can be answered with a simple examination
80 of the type system. If structure X contains a field of type Y then
81 a store thru a pointer to an X can overwrite any field that is
82 contained (recursively) in an X (unless we know that px1 != px2).
84 The last two questions can be solved in the same way as the
85 first two, but this is too conservative.  The observation
86 is that in some cases we can know which (if any) fields
87 are addressed and whether those addresses are used in bad ways.  This
88 analysis may be language specific. In C, arbitrary operations may
89 be applied to pointers. However, there is some indication that
90 this may be too conservative for some C++ types.
92 The pass ipa-type-escape does this analysis for the types whose
93 instances do not escape across the compilation boundary.
95 Historically in GCC, these two problems were combined and a single
96 data structure was used to represent the solution to these
97 problems.  We now have two similar but different data structures.
98 The data structure used to solve the last two questions is similar to the
99 first, but does not contain the fields whose addresses are
100 never taken. For types that do escape the compilation unit, the
101 data structures will have identical information.
104 /* The alias sets assigned to MEMs assist the back-end in determining
105 which MEMs can alias which other MEMs. In general, two MEMs in
106 different alias sets cannot alias each other, with one important
107 exception. Consider something like:
109 struct S { int i; double d; };
111 a store to an `S' can alias something of either type `int' or type
112 `double'. (However, a store to an `int' cannot alias a `double'
113 and vice versa.) We indicate this via a tree structure that looks
121 (The arrows are directed and point downwards.)
122 In this situation we say the alias set for `struct S' is the
123 `superset' and that those for `int' and `double' are `subsets'.
125 To see whether two alias sets can point to the same memory, we must
126 see if either alias set is a subset of the other. We need not trace
127 past immediate descendants, however, since we propagate all
128 grandchildren up one level.
130 Alias set zero is implicitly a superset of all other alias sets.
131 However, there is no actual entry for alias set zero.  It is an
132 error to attempt to explicitly construct a subset of zero. */
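/* As a sketch (not literal code from this file; s_type stands for the
   `struct S' type node), building the alias set for `struct S' above
   conceptually amounts to:

     alias_set_type s_set = get_alias_set (s_type);
     record_alias_subset (s_set, get_alias_set (integer_type_node));
     record_alias_subset (s_set, get_alias_set (double_type_node));

   after which alias_sets_conflict_p (s_set, get_alias_set (integer_type_node))
   is nonzero, while the `int' and `double' alias sets still do not
   conflict with each other.  */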
134 struct GTY(()) alias_set_entry_d {
135 /* The alias set number, as stored in MEM_ALIAS_SET. */
136 alias_set_type alias_set;
138 /* Nonzero if this alias set would have a child of zero: this effectively makes this
139 alias set the same as alias set zero. */
142 /* The children of the alias set. These are not just the immediate
143 children, but, in fact, all descendants. So, if we have:
145 struct T { struct S s; float f; }
147 continuing our example above, the children here will be all of
148 `int', `double', `float', and `struct S'. */
149 splay_tree GTY((param1_is (int), param2_is (int))) children;
151 typedef struct alias_set_entry_d *alias_set_entry;
153 static int rtx_equal_for_memref_p (const_rtx, const_rtx);
154 static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
155 static void record_set (rtx, const_rtx, void *);
156 static int base_alias_check (rtx, rtx, enum machine_mode,
158 static rtx find_base_value (rtx);
159 static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
160 static int insert_subset_children (splay_tree_node, void*);
161 static alias_set_entry get_alias_set_entry (alias_set_type);
162 static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx, rtx, rtx,
163 bool (*) (const_rtx, bool));
164 static int aliases_everything_p (const_rtx);
165 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
166 static tree decl_for_component_ref (tree);
167 static rtx adjust_offset_for_component_ref (tree, rtx);
168 static int write_dependence_p (const_rtx, const_rtx, int);
170 static void memory_modified_1 (rtx, const_rtx, void *);
172 /* Set up all info needed to perform alias analysis on memory references. */
174 /* Returns the size in bytes of the mode of X. */
175 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
177 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
178 different alias sets. We ignore alias sets in functions making use
179 of variable arguments because the va_arg macros on some systems are
181 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
182 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
184 /* Cap the number of passes we make over the insns propagating alias
185 information through set chains. 10 is a completely arbitrary choice. */
186 #define MAX_ALIAS_LOOP_PASSES 10
188 /* reg_base_value[N] gives an address to which register N is related.
189 If all sets after the first add or subtract to the current value
190 or otherwise modify it so it does not point to a different top level
191 object, reg_base_value[N] is equal to the address part of the source
194 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
195 expressions represent certain special values: function arguments and
196 the stack, frame, and argument pointers.
198 The contents of an ADDRESS are not normally used; the mode of the
199 ADDRESS determines whether the ADDRESS is a function argument or some
200 other special value. Pointer equality, not rtx_equal_p, determines whether
201 two ADDRESS expressions refer to the same base address.
203 The only use of the contents of an ADDRESS is for determining if the
204 current function performs nonlocal memory references for the
205 purposes of marking the function as a constant function. */
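/* For example (a sketch based on the uses later in this file): the first
   set of a pseudo from an unknown value records a fresh base such as

     gen_rtx_ADDRESS (Pmode, GEN_INT (unique_id++))

   while an argument passed in memory is recorded as

     gen_rtx_ADDRESS (VOIDmode, src)

   and two such bases are considered equal only if they are the same
   ADDRESS rtx, not merely rtx_equal_p.  */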
207 static GTY(()) VEC(rtx,gc) *reg_base_value;
208 static rtx *new_reg_base_value;
210 We keep a copy of the old array around to reduce the amount of garbage
211 produced.  About 8% of the garbage produced was attributed to this
213 static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;
215 #define static_reg_base_value \
216 (this_target_rtl->x_static_reg_base_value)
218 #define REG_BASE_VALUE(X) \
219 (REGNO (X) < VEC_length (rtx, reg_base_value) \
220 ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)
222 /* Vector indexed by N giving the initial (unchanging) value known for
223 pseudo-register N. This array is initialized in init_alias_analysis,
224 and does not change until end_alias_analysis is called. */
225 static GTY((length("reg_known_value_size"))) rtx *reg_known_value;
227 /* Indicates number of valid entries in reg_known_value. */
228 static GTY(()) unsigned int reg_known_value_size;
230 /* Vector recording for each reg_known_value whether it is due to a
231 REG_EQUIV note. Future passes (viz., reload) may replace the
232 pseudo with the equivalent expression and so we account for the
233 dependences that would be introduced if that happens.
235 The REG_EQUIV notes created in assign_parms may mention the arg
236 pointer, and there are explicit insns in the RTL that modify the
237 arg pointer. Thus we must ensure that such insns don't get
238 scheduled across each other because that would invalidate the
239 REG_EQUIV notes. One could argue that the REG_EQUIV notes are
240 wrong, but solving the problem in the scheduler will likely give
241 better code, so we do it here. */
242 static bool *reg_known_equiv_p;
244 /* True when scanning insns from the start of the rtl to the
245 NOTE_INSN_FUNCTION_BEG note. */
246 static bool copying_arguments;
248 DEF_VEC_P(alias_set_entry);
249 DEF_VEC_ALLOC_P(alias_set_entry,gc);
251 /* The splay-tree used to store the various alias set entries. */
252 static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
254 /* Build a decomposed reference object for querying the alias-oracle
255 from the MEM rtx and store it in *REF.
256 Returns false if MEM is not suitable for the alias-oracle. */
259 ao_ref_from_mem (ao_ref *ref, const_rtx mem)
261 tree expr = MEM_EXPR (mem);
267 ao_ref_init (ref, expr);
269 /* Get the base of the reference and see if we have to reject or
271 base = ao_ref_base (ref);
272 if (base == NULL_TREE)
275 /* The tree oracle doesn't like to have these. */
276 if (TREE_CODE (base) == FUNCTION_DECL
277 || TREE_CODE (base) == LABEL_DECL)
280 /* If this is a pointer dereference of a non-SSA_NAME punt.
281 ??? We could replace it with a pointer to anything. */
282 if ((INDIRECT_REF_P (base)
283 || TREE_CODE (base) == MEM_REF)
284 && TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME)
287 /* If this is a reference based on a partitioned decl replace the
288 base with an INDIRECT_REF of the pointer representative we
289 created during stack slot partitioning. */
290 if (TREE_CODE (base) == VAR_DECL
291 && ! TREE_STATIC (base)
292 && cfun->gimple_df->decls_to_pointers != NULL)
295 namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
297 ref->base = build_simple_mem_ref (*(tree *)namep);
300 ref->ref_alias_set = MEM_ALIAS_SET (mem);
302 /* If MEM_OFFSET or MEM_SIZE are NULL we have to punt.
303 Keep points-to related information though. */
304 if (!MEM_OFFSET (mem)
307 ref->ref = NULL_TREE;
314 /* If the base decl is a parameter we can have negative MEM_OFFSET in
315 case of promoted subregs on big-endian targets.  Trust the MEM_EXPR
317 if (INTVAL (MEM_OFFSET (mem)) < 0
318 && ((INTVAL (MEM_SIZE (mem)) + INTVAL (MEM_OFFSET (mem)))
319 * BITS_PER_UNIT) == ref->size)
322 ref->offset += INTVAL (MEM_OFFSET (mem)) * BITS_PER_UNIT;
323 ref->size = INTVAL (MEM_SIZE (mem)) * BITS_PER_UNIT;
325 /* The MEM may extend into adjacent fields, so adjust max_size if
327 if (ref->max_size != -1
328 && ref->size > ref->max_size)
329 ref->max_size = ref->size;
331 /* If MEM_OFFSET and MEM_SIZE get us outside of the base object of
332 the MEM_EXPR punt. This happens for STRICT_ALIGNMENT targets a lot. */
333 if (MEM_EXPR (mem) != get_spill_slot_decl (false)
335 || (DECL_P (ref->base)
336 && (!host_integerp (DECL_SIZE (ref->base), 1)
337 || (TREE_INT_CST_LOW (DECL_SIZE ((ref->base)))
338 < (unsigned HOST_WIDE_INT)(ref->offset + ref->size))))))
344 /* Query the alias-oracle on whether the two memory rtx X and MEM may
345 alias. If TBAA_P is set also apply TBAA. Returns true if the
346 two rtxen may alias, false otherwise. */
349 rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
353 if (!ao_ref_from_mem (&ref1, x)
354 || !ao_ref_from_mem (&ref2, mem))
357 return refs_may_alias_p_1 (&ref1, &ref2,
359 && MEM_ALIAS_SET (x) != 0
360 && MEM_ALIAS_SET (mem) != 0);
363 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
364 such an entry, or NULL otherwise. */
366 static inline alias_set_entry
367 get_alias_set_entry (alias_set_type alias_set)
369 return VEC_index (alias_set_entry, alias_sets, alias_set);
372 /* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
373 the two MEMs cannot alias each other. */
376 mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
378 /* Perform a basic sanity check. Namely, that there are no alias sets
379 if we're not using strict aliasing. This helps to catch bugs
380 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
381 where a MEM is allocated in some way other than by the use of
382 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
383 use alias sets to indicate that spilled registers cannot alias each
384 other, we might need to remove this check. */
385 gcc_assert (flag_strict_aliasing
386 || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));
388 return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
391 /* Insert the NODE into the splay tree given by DATA. Used by
392 record_alias_subset via splay_tree_foreach. */
395 insert_subset_children (splay_tree_node node, void *data)
397 splay_tree_insert ((splay_tree) data, node->key, node->value);
402 /* Return true if the first alias set is a subset of the second. */
405 alias_set_subset_of (alias_set_type set1, alias_set_type set2)
409 /* Everything is a subset of the "aliases everything" set. */
413 /* Otherwise, check if set1 is a subset of set2. */
414 ase = get_alias_set_entry (set2);
416 && (ase->has_zero_child
417 || splay_tree_lookup (ase->children,
418 (splay_tree_key) set1)))
423 /* Return 1 if the two specified alias sets may conflict. */
426 alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
431 if (alias_sets_must_conflict_p (set1, set2))
434 /* See if the first alias set is a subset of the second. */
435 ase = get_alias_set_entry (set1);
437 && (ase->has_zero_child
438 || splay_tree_lookup (ase->children,
439 (splay_tree_key) set2)))
442 /* Now do the same, but with the alias sets reversed. */
443 ase = get_alias_set_entry (set2);
445 && (ase->has_zero_child
446 || splay_tree_lookup (ase->children,
447 (splay_tree_key) set1)))
450 /* The two alias sets are distinct and neither one is the
451 child of the other. Therefore, they cannot conflict. */
455 /* Return 1 if the two specified alias sets will always conflict. */
458 alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
460 if (set1 == 0 || set2 == 0 || set1 == set2)
466 /* Return 1 if any MEM object of type T1 will always conflict (using the
467 dependency routines in this file) with any MEM object of type T2.
468 This is used when allocating temporary storage. If T1 and/or T2 are
469 NULL_TREE, it means we know nothing about the storage. */
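/* For instance (a sketch, assuming strict aliasing is enabled):

     objects_must_conflict_p (integer_type_node, integer_type_node)  returns 1
     objects_must_conflict_p (integer_type_node, double_type_node)   returns 0
     objects_must_conflict_p (integer_type_node, NULL_TREE)          returns 1

   the last because an unknown type is treated as alias set zero, which
   always conflicts.  */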
472 objects_must_conflict_p (tree t1, tree t2)
474 alias_set_type set1, set2;
476 /* If neither has a type specified, we don't know if they'll conflict
477 because we may be using them to store objects of various types, for
478 example the argument and local variables areas of inlined functions. */
479 if (t1 == 0 && t2 == 0)
482 /* If they are the same type, they must conflict. */
484 /* Likewise if both are volatile. */
485 || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
488 set1 = t1 ? get_alias_set (t1) : 0;
489 set2 = t2 ? get_alias_set (t2) : 0;
491 /* We can't use alias_sets_conflict_p because we must make sure
492 that every subtype of t1 will conflict with every subtype of
493 t2 for which a pair of subobjects of these respective subtypes
494 overlaps on the stack. */
495 return alias_sets_must_conflict_p (set1, set2);
498 /* Return true if all nested component references handled by
499 get_inner_reference in T are such that we should use the alias set
500 provided by the object at the heart of T.
502 This is true for non-addressable components (which don't have their
503 own alias set), as well as components of objects in alias set zero.
504 This latter point is a special case wherein we wish to override the
505 alias set used by the component, but we don't have per-FIELD_DECL
506 assignable alias sets. */
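/* A sketch of how this predicate is used in get_alias_set below:

     while (component_uses_parent_alias_set (t))
       t = TREE_OPERAND (t, 0);

   i.e. for such components we keep walking towards the enclosing object
   and end up using its alias set instead.  */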
509 component_uses_parent_alias_set (const_tree t)
513 /* If we're at the end, it vacuously uses its own alias set. */
514 if (!handled_component_p (t))
517 switch (TREE_CODE (t))
520 if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
525 case ARRAY_RANGE_REF:
526 if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
535 /* Bitfields and casts are never addressable. */
539 t = TREE_OPERAND (t, 0);
540 if (get_alias_set (TREE_TYPE (t)) == 0)
545 /* Return the alias set for the memory pointed to by T, which may be
546 either a type or an expression. Return -1 if there is nothing
547 special about dereferencing T. */
549 static alias_set_type
550 get_deref_alias_set_1 (tree t)
552 /* If we're not doing any alias analysis, just assume everything
553 aliases everything else. */
554 if (!flag_strict_aliasing)
557 /* All we care about is the type. */
561 /* If we have an INDIRECT_REF via a void pointer, we don't
562 know anything about what that might alias. Likewise if the
563 pointer is marked that way. */
564 if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
565 || TYPE_REF_CAN_ALIAS_ALL (t))
571 /* Return the alias set for the memory pointed to by T, which may be
572 either a type or an expression. */
575 get_deref_alias_set (tree t)
577 alias_set_type set = get_deref_alias_set_1 (t);
579 /* Fall back to the alias-set of the pointed-to type. */
584 set = get_alias_set (TREE_TYPE (t));
590 /* Return the alias set for T, which may be either a type or an
591 expression. Call language-specific routine for help, if needed. */
594 get_alias_set (tree t)
598 /* If we're not doing any alias analysis, just assume everything
599 aliases everything else. Also return 0 if this or its type is
601 if (! flag_strict_aliasing || t == error_mark_node
603 && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
606 /* We can be passed either an expression or a type. This and the
607 language-specific routine may make mutually-recursive calls to each other
608 to figure out what to do. At each juncture, we see if this is a tree
609 that the language may need to handle specially. First handle things that
615 /* Give the language a chance to do something with this tree
616 before we look at it. */
618 set = lang_hooks.get_alias_set (t);
622 /* Retrieve the original memory reference if needed. */
623 if (TREE_CODE (t) == TARGET_MEM_REF)
624 t = TMR_ORIGINAL (t);
626 /* Get the base object of the reference. */
628 while (handled_component_p (inner))
630 /* If there is a VIEW_CONVERT_EXPR in the chain we cannot use
631 the type of any component references that wrap it to
632 determine the alias-set. */
633 if (TREE_CODE (inner) == VIEW_CONVERT_EXPR)
634 t = TREE_OPERAND (inner, 0);
635 inner = TREE_OPERAND (inner, 0);
638 /* Handle pointer dereferences here, they can override the
640 if (INDIRECT_REF_P (inner))
642 set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0));
646 else if (TREE_CODE (inner) == MEM_REF)
648 set = get_deref_alias_set_1 (TREE_OPERAND (inner, 1));
653 /* If the innermost reference is a MEM_REF that has a
654 conversion embedded treat it like a VIEW_CONVERT_EXPR above,
655 using the memory access type for determining the alias-set. */
656 if (TREE_CODE (inner) == MEM_REF
657 && TYPE_MAIN_VARIANT (TREE_TYPE (inner))
659 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (inner, 1)))))
660 return get_deref_alias_set (TREE_OPERAND (inner, 1));
662 /* Otherwise, pick up the outermost object that we could have a pointer
663 to, processing conversions as above. */
664 while (component_uses_parent_alias_set (t))
666 t = TREE_OPERAND (t, 0);
670 /* If we've already determined the alias set for a decl, just return
671 it. This is necessary for C++ anonymous unions, whose component
672 variables don't look like union members (boo!). */
673 if (TREE_CODE (t) == VAR_DECL
674 && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
675 return MEM_ALIAS_SET (DECL_RTL (t));
677 /* Now all we care about is the type. */
681 /* Variant qualifiers don't affect the alias set, so get the main
683 t = TYPE_MAIN_VARIANT (t);
685 /* Always use the canonical type as well. If this is a type that
686 requires structural comparisons to identify compatible types
687 use alias set zero. */
688 if (TYPE_STRUCTURAL_EQUALITY_P (t))
690 /* Allow the language to specify another alias set for this
692 set = lang_hooks.get_alias_set (t);
698 t = TYPE_CANONICAL (t);
700 /* Canonical types shouldn't form a tree nor should the canonical
701 type require structural equality checks. */
702 gcc_checking_assert (TYPE_CANONICAL (t) == t
703 && !TYPE_STRUCTURAL_EQUALITY_P (t));
705 /* If this is a type with a known alias set, return it. */
706 if (TYPE_ALIAS_SET_KNOWN_P (t))
707 return TYPE_ALIAS_SET (t);
709 /* We don't want to set TYPE_ALIAS_SET for incomplete types. */
710 if (!COMPLETE_TYPE_P (t))
712 /* For arrays with unknown size the conservative answer is the
713 alias set of the element type. */
714 if (TREE_CODE (t) == ARRAY_TYPE)
715 return get_alias_set (TREE_TYPE (t));
717 /* But return zero as a conservative answer for incomplete types. */
721 /* See if the language has special handling for this type. */
722 set = lang_hooks.get_alias_set (t);
726 /* There are no objects of FUNCTION_TYPE, so there's no point in
727 using up an alias set for them. (There are, of course, pointers
728 and references to functions, but that's different.) */
729 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
732 /* Unless the language specifies otherwise, let vector types alias
733 their components. This avoids some nasty type punning issues in
734 normal usage. And indeed lets vectors be treated more like an
736 else if (TREE_CODE (t) == VECTOR_TYPE)
737 set = get_alias_set (TREE_TYPE (t));
739 /* Unless the language specifies otherwise, treat array types the
740 same as their components. This avoids the asymmetry we get
741 through recording the components. Consider accessing a
742 character(kind=1) through a reference to a character(kind=1)[1:1].
743 Or consider if we want to assign integer(kind=4)[0:D.1387] and
744 integer(kind=4)[4] the same alias set or not.
745 Just be pragmatic here and make sure the array and its element
746 type get the same alias set assigned. */
747 else if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NONALIASED_COMPONENT (t))
748 set = get_alias_set (TREE_TYPE (t));
750 /* Otherwise make a new alias set for this type. */
752 set = new_alias_set ();
754 TYPE_ALIAS_SET (t) = set;
756 /* If this is an aggregate type or a complex type, we must record any
757 component aliasing information. */
758 if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
759 record_component_aliases (t);
764 /* Return a brand-new alias set. */
769 if (flag_strict_aliasing)
772 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
773 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
774 return VEC_length (alias_set_entry, alias_sets) - 1;
780 /* Indicate that things in SUBSET can alias things in SUPERSET, but that
781 not everything that aliases SUPERSET also aliases SUBSET. For example,
782 in C, a store to an `int' can alias a load of a structure containing an
783 `int', and vice versa.  But it can't alias a load of a `double' member
784 of the same structure. Here, the structure would be the SUPERSET and
785 `int' the SUBSET. This relationship is also described in the comment at
786 the beginning of this file.
788 This function should be called only once per SUPERSET/SUBSET pair.
790 It is illegal for SUPERSET to be zero; everything is implicitly a
791 subset of alias set zero. */
794 record_alias_subset (alias_set_type superset, alias_set_type subset)
796 alias_set_entry superset_entry;
797 alias_set_entry subset_entry;
799 /* It is possible in complex type situations for both sets to be the same,
800 in which case we can ignore this operation. */
801 if (superset == subset)
804 gcc_assert (superset);
806 superset_entry = get_alias_set_entry (superset);
807 if (superset_entry == 0)
809 /* Create an entry for the SUPERSET, so that we have a place to
810 attach the SUBSET. */
811 superset_entry = ggc_alloc_cleared_alias_set_entry_d ();
812 superset_entry->alias_set = superset;
813 superset_entry->children
814 = splay_tree_new_ggc (splay_tree_compare_ints,
815 ggc_alloc_splay_tree_scalar_scalar_splay_tree_s,
816 ggc_alloc_splay_tree_scalar_scalar_splay_tree_node_s);
817 superset_entry->has_zero_child = 0;
818 VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
822 superset_entry->has_zero_child = 1;
825 subset_entry = get_alias_set_entry (subset);
826 /* If there is an entry for the subset, enter all of its children
827 (if they are not already present) as children of the SUPERSET. */
830 if (subset_entry->has_zero_child)
831 superset_entry->has_zero_child = 1;
833 splay_tree_foreach (subset_entry->children, insert_subset_children,
834 superset_entry->children);
837 /* Enter the SUBSET itself as a child of the SUPERSET. */
838 splay_tree_insert (superset_entry->children,
839 (splay_tree_key) subset, 0);
843 /* Record that component types of TYPE, if any, are part of that type for
844 aliasing purposes. For record types, we only record component types
845 for fields that are not marked non-addressable. For array types, we
846 only record the component type if it is not marked non-aliased. */
849 record_component_aliases (tree type)
851 alias_set_type superset = get_alias_set (type);
857 switch (TREE_CODE (type))
861 case QUAL_UNION_TYPE:
862 /* Recursively record aliases for the base classes, if there are any. */
863 if (TYPE_BINFO (type))
866 tree binfo, base_binfo;
868 for (binfo = TYPE_BINFO (type), i = 0;
869 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
870 record_alias_subset (superset,
871 get_alias_set (BINFO_TYPE (base_binfo)));
873 for (field = TYPE_FIELDS (type); field != 0; field = DECL_CHAIN (field))
874 if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
875 record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
879 record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
882 /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
890 /* Allocate an alias set for use in storing and reading from the varargs
893 static GTY(()) alias_set_type varargs_set = -1;
896 get_varargs_alias_set (void)
899 /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
900 varargs alias set to an INDIRECT_REF (FIXME!), so we can't
901 consistently use the varargs alias set for loads from the varargs
902 area. So don't use it anywhere. */
905 if (varargs_set == -1)
906 varargs_set = new_alias_set ();
912 /* Likewise, but used for the fixed portions of the frame, e.g., register
915 static GTY(()) alias_set_type frame_set = -1;
918 get_frame_alias_set (void)
921 frame_set = new_alias_set ();
926 /* Inside SRC, the source of a SET, find a base address. */
929 find_base_value (rtx src)
933 #if defined (FIND_BASE_TERM)
934 /* Try machine-dependent ways to find the base term. */
935 src = FIND_BASE_TERM (src);
938 switch (GET_CODE (src))
946 /* At the start of a function, argument registers have known base
947 values which may be lost later. Returning an ADDRESS
948 expression here allows optimization based on argument values
949 even when the argument registers are used for other purposes. */
950 if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
951 return new_reg_base_value[regno];
953 /* If a pseudo has a known base value, return it. Do not do this
954 for non-fixed hard regs since it can result in a circular
955 dependency chain for registers which have values at function entry.
957 The test above is not sufficient because the scheduler may move
958 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
959 if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
960 && regno < VEC_length (rtx, reg_base_value))
962 /* If we're inside init_alias_analysis, use new_reg_base_value
963 to reduce the number of relaxation iterations. */
964 if (new_reg_base_value && new_reg_base_value[regno]
965 && DF_REG_DEF_COUNT (regno) == 1)
966 return new_reg_base_value[regno];
968 if (VEC_index (rtx, reg_base_value, regno))
969 return VEC_index (rtx, reg_base_value, regno);
975 /* Check for an argument passed in memory. Only record in the
976 copying-arguments block; it is too hard to track changes
978 if (copying_arguments
979 && (XEXP (src, 0) == arg_pointer_rtx
980 || (GET_CODE (XEXP (src, 0)) == PLUS
981 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
982 return gen_rtx_ADDRESS (VOIDmode, src);
987 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
990 /* ... fall through ... */
995 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
997 /* If either operand is a REG that is a known pointer, then it
999 if (REG_P (src_0) && REG_POINTER (src_0))
1000 return find_base_value (src_0);
1001 if (REG_P (src_1) && REG_POINTER (src_1))
1002 return find_base_value (src_1);
1004 /* If either operand is a REG, then see if we already have
1005 a known value for it. */
1008 temp = find_base_value (src_0);
1015 temp = find_base_value (src_1);
1020 /* If either base is a named object or a special address
1021 (like an argument or stack reference), then use it for the
1024 && (GET_CODE (src_0) == SYMBOL_REF
1025 || GET_CODE (src_0) == LABEL_REF
1026 || (GET_CODE (src_0) == ADDRESS
1027 && GET_MODE (src_0) != VOIDmode)))
1031 && (GET_CODE (src_1) == SYMBOL_REF
1032 || GET_CODE (src_1) == LABEL_REF
1033 || (GET_CODE (src_1) == ADDRESS
1034 && GET_MODE (src_1) != VOIDmode)))
1037 /* Guess which operand is the base address:
1038 If either operand is a symbol, then it is the base. If
1039 either operand is a CONST_INT, then the other is the base. */
1040 if (CONST_INT_P (src_1) || CONSTANT_P (src_0))
1041 return find_base_value (src_0);
1042 else if (CONST_INT_P (src_0) || CONSTANT_P (src_1))
1043 return find_base_value (src_1);
1049 /* The standard form is (lo_sum reg sym) so look only at the
1051 return find_base_value (XEXP (src, 1));
1054 /* If the second operand is constant set the base
1055 address to the first operand. */
1056 if (CONST_INT_P (XEXP (src, 1)) && INTVAL (XEXP (src, 1)) != 0)
1057 return find_base_value (XEXP (src, 0));
1061 /* As we do not know which address space the pointer is referring to, we can
1062 handle this only if the target does not support different pointer or
1063 address modes depending on the address space. */
1064 if (!target_default_pointer_address_modes_p ())
1066 if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
1076 return find_base_value (XEXP (src, 0));
1079 case SIGN_EXTEND: /* used for NT/Alpha pointers */
1080 /* As we do not know which address space the pointer is referring to, we can
1081 handle this only if the target does not support different pointer or
1082 address modes depending on the address space. */
1083 if (!target_default_pointer_address_modes_p ())
1087 rtx temp = find_base_value (XEXP (src, 0));
1089 if (temp != 0 && CONSTANT_P (temp))
1090 temp = convert_memory_address (Pmode, temp);
1102 /* Called from init_alias_analysis indirectly through note_stores. */
1104 /* While scanning insns to find base values, reg_seen[N] is nonzero if
1105 register N has been set in this function. */
1106 static char *reg_seen;
1108 /* Addresses which are known not to alias anything else are identified
1109 by a unique integer. */
1110 static int unique_id;
1113 record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
1122 regno = REGNO (dest);
1124 gcc_checking_assert (regno < VEC_length (rtx, reg_base_value));
1126 /* If this spans multiple hard registers, then we must indicate that every
1127 register has an unusable value. */
1128 if (regno < FIRST_PSEUDO_REGISTER)
1129 n = hard_regno_nregs[regno][GET_MODE (dest)];
1136 reg_seen[regno + n] = 1;
1137 new_reg_base_value[regno + n] = 0;
1144 /* A CLOBBER wipes out any old value but does not prevent a previously
1145 unset register from acquiring a base address (i.e. reg_seen is not
1147 if (GET_CODE (set) == CLOBBER)
1149 new_reg_base_value[regno] = 0;
1152 src = SET_SRC (set);
1156 if (reg_seen[regno])
1158 new_reg_base_value[regno] = 0;
1161 reg_seen[regno] = 1;
1162 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
1163 GEN_INT (unique_id++));
1167 /* If this is not the first set of REGNO, see whether the new value
1168 is related to the old one. There are two cases of interest:
1170 (1) The register might be assigned an entirely new value
1171 that has the same base term as the original set.
1173 (2) The set might be a simple self-modification that
1174 cannot change REGNO's base value.
1176 If neither case holds, reject the original base value as invalid.
1177 Note that the following situation is not detected:
1179 extern int x, y; int *p = &x; p += (&y-&x);
1181 ANSI C does not allow computing the difference of addresses
1182 of distinct top level objects. */
1183 if (new_reg_base_value[regno] != 0
1184 && find_base_value (src) != new_reg_base_value[regno])
1185 switch (GET_CODE (src))
1189 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
1190 new_reg_base_value[regno] = 0;
1193 /* If the value we add in the PLUS is also a valid base value,
1194 this might be the actual base value, and the original value
1197 rtx other = NULL_RTX;
1199 if (XEXP (src, 0) == dest)
1200 other = XEXP (src, 1);
1201 else if (XEXP (src, 1) == dest)
1202 other = XEXP (src, 0);
1204 if (! other || find_base_value (other))
1205 new_reg_base_value[regno] = 0;
1209 if (XEXP (src, 0) != dest || !CONST_INT_P (XEXP (src, 1)))
1210 new_reg_base_value[regno] = 0;
1213 new_reg_base_value[regno] = 0;
1216 /* If this is the first set of a register, record the value. */
1217 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
1218 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
1219 new_reg_base_value[regno] = find_base_value (src);
1221 reg_seen[regno] = 1;
1224 /* If a value is known for REGNO, return it. */
1227 get_reg_known_value (unsigned int regno)
1229 if (regno >= FIRST_PSEUDO_REGISTER)
1231 regno -= FIRST_PSEUDO_REGISTER;
1232 if (regno < reg_known_value_size)
1233 return reg_known_value[regno];
1241 set_reg_known_value (unsigned int regno, rtx val)
1243 if (regno >= FIRST_PSEUDO_REGISTER)
1245 regno -= FIRST_PSEUDO_REGISTER;
1246 if (regno < reg_known_value_size)
1247 reg_known_value[regno] = val;
1251 /* Similarly for reg_known_equiv_p. */
1254 get_reg_known_equiv_p (unsigned int regno)
1256 if (regno >= FIRST_PSEUDO_REGISTER)
1258 regno -= FIRST_PSEUDO_REGISTER;
1259 if (regno < reg_known_value_size)
1260 return reg_known_equiv_p[regno];
1266 set_reg_known_equiv_p (unsigned int regno, bool val)
1268 if (regno >= FIRST_PSEUDO_REGISTER)
1270 regno -= FIRST_PSEUDO_REGISTER;
1271 if (regno < reg_known_value_size)
1272 reg_known_equiv_p[regno] = val;
1277 /* Returns a canonical version of X, from the point of view of alias
1278 analysis. (For example, if X is a MEM whose address is a register,
1279 and the register has a known value (say a SYMBOL_REF), then a MEM
1280 whose address is the SYMBOL_REF is returned.) */
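/* For example (a sketch): if reg_known_value records that (reg 70) is
   (symbol_ref "x"), then canon_rtx maps (plus (reg 70) (const_int 4))
   to the result of plus_constant on (symbol_ref "x") and 4.  */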
1285 /* Recursively look for equivalences. */
1286 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1288 rtx t = get_reg_known_value (REGNO (x));
1292 return canon_rtx (t);
1295 if (GET_CODE (x) == PLUS)
1297 rtx x0 = canon_rtx (XEXP (x, 0));
1298 rtx x1 = canon_rtx (XEXP (x, 1));
1300 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
1302 if (CONST_INT_P (x0))
1303 return plus_constant (x1, INTVAL (x0));
1304 else if (CONST_INT_P (x1))
1305 return plus_constant (x0, INTVAL (x1));
1306 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
1310 /* This gives us much better alias analysis when called from
1311 the loop optimizer. Note we want to leave the original
1312 MEM alone, but need to return the canonicalized MEM with
1313 all the flags with their original values. */
1315 x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));
1320 /* Return 1 if X and Y are identical-looking rtx's.
1321 Expect that X and Y have already been canonicalized.
1323 We use the data in reg_known_value above to see if two registers with
1324 different numbers are, in fact, equivalent. */
1327 rtx_equal_for_memref_p (const_rtx x, const_rtx y)
1334 if (x == 0 && y == 0)
1336 if (x == 0 || y == 0)
1342 code = GET_CODE (x);
1343 /* Rtx's of different codes cannot be equal. */
1344 if (code != GET_CODE (y))
1347 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
1348 (REG:SI x) and (REG:HI x) are NOT equivalent. */
1350 if (GET_MODE (x) != GET_MODE (y))
1353 /* Some RTL can be compared without a recursive examination. */
1357 return REGNO (x) == REGNO (y);
1360 return XEXP (x, 0) == XEXP (y, 0);
1363 return XSTR (x, 0) == XSTR (y, 0);
1369 /* There's no need to compare the contents of CONST_DOUBLEs or
1370 CONST_INTs because pointer equality is a good enough
1371 comparison for these nodes. */
1378 /* canon_rtx knows how to handle plus. No need to canonicalize. */
1380 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
1381 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
1382 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
1383 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
1384 /* For commutative operations, the RTXs match if the operands match in any
1385 order. Also handle the simple binary and unary cases without a loop. */
1386 if (COMMUTATIVE_P (x))
1388 rtx xop0 = canon_rtx (XEXP (x, 0));
1389 rtx yop0 = canon_rtx (XEXP (y, 0));
1390 rtx yop1 = canon_rtx (XEXP (y, 1));
1392 return ((rtx_equal_for_memref_p (xop0, yop0)
1393 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
1394 || (rtx_equal_for_memref_p (xop0, yop1)
1395 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
1397 else if (NON_COMMUTATIVE_P (x))
1399 return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1400 canon_rtx (XEXP (y, 0)))
1401 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
1402 canon_rtx (XEXP (y, 1))));
1404 else if (UNARY_P (x))
1405 return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1406 canon_rtx (XEXP (y, 0)));
1408 /* Compare the elements. If any pair of corresponding elements
1409 fail to match, return 0 for the whole thing.
1411 Limit cases to types which actually appear in addresses. */
1413 fmt = GET_RTX_FORMAT (code);
1414 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1419 if (XINT (x, i) != XINT (y, i))
1424 /* Two vectors must have the same length. */
1425 if (XVECLEN (x, i) != XVECLEN (y, i))
1428 /* And the corresponding elements must match. */
1429 for (j = 0; j < XVECLEN (x, i); j++)
1430 if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
1431 canon_rtx (XVECEXP (y, i, j))) == 0)
1436 if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
1437 canon_rtx (XEXP (y, i))) == 0)
1441 /* This can happen for asm operands. */
1443 if (strcmp (XSTR (x, i), XSTR (y, i)))
1447 /* This can happen for an asm which clobbers memory. */
1451 /* It is believed that rtx's at this level will never
1452 contain anything but integers and other rtx's,
1453 except for within LABEL_REFs and SYMBOL_REFs. */
1462 find_base_term (rtx x)
1465 struct elt_loc_list *l;
1467 #if defined (FIND_BASE_TERM)
1468 /* Try machine-dependent ways to find the base term. */
1469 x = FIND_BASE_TERM (x);
1472 switch (GET_CODE (x))
1475 return REG_BASE_VALUE (x);
1478 /* As we do not know which address space the pointer is referring to, we can
1479 handle this only if the target does not support different pointer or
1480 address modes depending on the address space. */
1481 if (!target_default_pointer_address_modes_p ())
1483 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
1493 return find_base_term (XEXP (x, 0));
1496 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
1497 /* As we do not know which address space the pointer is referring to, we can
1498 handle this only if the target does not support different pointer or
1499 address modes depending on the address space. */
1500 if (!target_default_pointer_address_modes_p ())
1504 rtx temp = find_base_term (XEXP (x, 0));
1506 if (temp != 0 && CONSTANT_P (temp))
1507 temp = convert_memory_address (Pmode, temp);
1513 val = CSELIB_VAL_PTR (x);
1516 for (l = val->locs; l; l = l->next)
1517 if ((x = find_base_term (l->loc)) != 0)
1522 /* The standard form is (lo_sum reg sym) so look only at the
1524 return find_base_term (XEXP (x, 1));
1528 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
1534 rtx tmp1 = XEXP (x, 0);
1535 rtx tmp2 = XEXP (x, 1);
1537 /* This is a little bit tricky since we have to determine which of
1538 the two operands represents the real base address. Otherwise this
1539 routine may return the index register instead of the base register.
1541 That may cause us to believe no aliasing was possible, when in
1542 fact aliasing is possible.
1544 We use a few simple tests to guess the base register. Additional
1545 tests can certainly be added. For example, if one of the operands
1546 is a shift or multiply, then it must be the index register and the
1547 other operand is the base register. */
1549 if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
1550 return find_base_term (tmp2);
1552 /* If either operand is known to be a pointer, then use it
1553 to determine the base term. */
1554 if (REG_P (tmp1) && REG_POINTER (tmp1))
1556 rtx base = find_base_term (tmp1);
1561 if (REG_P (tmp2) && REG_POINTER (tmp2))
1563 rtx base = find_base_term (tmp2);
1568 /* Neither operand was known to be a pointer. Go ahead and find the
1569 base term for both operands. */
1570 tmp1 = find_base_term (tmp1);
1571 tmp2 = find_base_term (tmp2);
1573 /* If either base term is a named object or a special address
1574 (like an argument or stack reference), then use it for the
1577 && (GET_CODE (tmp1) == SYMBOL_REF
1578 || GET_CODE (tmp1) == LABEL_REF
1579 || (GET_CODE (tmp1) == ADDRESS
1580 && GET_MODE (tmp1) != VOIDmode)))
1584 && (GET_CODE (tmp2) == SYMBOL_REF
1585 || GET_CODE (tmp2) == LABEL_REF
1586 || (GET_CODE (tmp2) == ADDRESS
1587 && GET_MODE (tmp2) != VOIDmode)))
1590 /* We could not determine which of the two operands was the
1591 base register and which was the index. So we can determine
1592 nothing from the base alias check. */
1597 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
1598 return find_base_term (XEXP (x, 0));
1610 /* Return 0 if the addresses X and Y are known to point to different
1611 objects, 1 if they might be pointers to the same object. */
1614 base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
1615 enum machine_mode y_mode)
1617 rtx x_base = find_base_term (x);
1618 rtx y_base = find_base_term (y);
1620 /* If the address itself has no known base see if a known equivalent
1621 value has one. If either address still has no known base, nothing
1622 is known about aliasing. */
1627 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
1630 x_base = find_base_term (x_c);
1638 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
1641 y_base = find_base_term (y_c);
1646 /* If the base addresses are equal nothing is known about aliasing. */
1647 if (rtx_equal_p (x_base, y_base))
1650 /* The base addresses are different expressions. If they are not accessed
1651 via AND, there is no conflict. We can bring knowledge of object
1652 alignment into play here. For example, on alpha, "char a, b;" can
1653 alias one another, though "char a; long b;" cannot.  AND addresses may
1654 implicitly alias surrounding objects; i.e. unaligned access in DImode
1655 via AND address can alias all surrounding object types except those
1656 with alignment 8 or higher.  */
1657 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
1659 if (GET_CODE (x) == AND
1660 && (!CONST_INT_P (XEXP (x, 1))
1661 || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
1663 if (GET_CODE (y) == AND
1664 && (!CONST_INT_P (XEXP (y, 1))
1665 || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
1668 /* Differing symbols not accessed via AND never alias. */
1669 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
1672 /* If one address is a stack reference there can be no alias:
1673 stack references using different base registers do not alias,
1674 a stack reference can not alias a parameter, and a stack reference
1675 can not alias a global. */
1676 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
1677 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
1683 /* Convert the address X into something we can use. This is done by returning
1684 it unchanged unless it is a value; in the latter case we call cselib to get
1685 a more useful rtx. */
1691 struct elt_loc_list *l;
1693 if (GET_CODE (x) != VALUE)
1695 v = CSELIB_VAL_PTR (x);
1698 for (l = v->locs; l; l = l->next)
1699 if (CONSTANT_P (l->loc))
1701 for (l = v->locs; l; l = l->next)
1702 if (!REG_P (l->loc) && !MEM_P (l->loc))
1705 return v->locs->loc;
1710 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
1711 where SIZE is the size in bytes of the memory reference. If ADDR
1712 is not modified by the memory reference then ADDR is returned. */
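/* For example (a sketch): with ADDR == (post_inc (reg R)), SIZE == 4 and
   N_REFS == 1, the second reference is at (plus (reg R) (const_int 4)),
   because the first reference has already advanced R by 4 bytes.  */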
1715 addr_side_effect_eval (rtx addr, int size, int n_refs)
1719 switch (GET_CODE (addr))
1722 offset = (n_refs + 1) * size;
1725 offset = -(n_refs + 1) * size;
1728 offset = n_refs * size;
1731 offset = -n_refs * size;
1739 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
1742 addr = XEXP (addr, 0);
1743 addr = canon_rtx (addr);
1748 /* Return one if X and Y (memory addresses) reference the
1749 same location in memory or if the references overlap.
1750 Return zero if they do not overlap, else return
1751 minus one in which case they still might reference the same location.
1753 C is an offset accumulator. When
1754 C is nonzero, we are testing aliases between X and Y + C.
1755 XSIZE is the size in bytes of the X reference,
1756 similarly YSIZE is the size in bytes for Y.
1757 Expect that canon_rtx has been already called for X and Y.
1759 If XSIZE or YSIZE is zero, we do not know the amount of memory being
1760 referenced (the reference was BLKmode), so make the most pessimistic
1763 If XSIZE or YSIZE is negative, we may access memory outside the object
1764 being referenced as a side effect. This can happen when using AND to
1765 align memory references, as is done on the Alpha.
1767 Nice to notice that varying addresses cannot conflict with fp if no
1768 local variables had their addresses taken, but that's too hard now.
1770 ??? Contrary to the tree alias oracle this does not return
1771 one for X + non-constant and Y + non-constant when X and Y are equal.
1772 If that is fixed the TBAA hack for union type-punning can be removed. */
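/* A sketch of the offset arithmetic: for X == Y with XSIZE == YSIZE == 4,
   C == 4 compares the byte ranges [X, X+4) and [Y+4, Y+8), which do not
   overlap, so 0 is returned; with C == 2 the ranges [X, X+4) and
   [Y+2, Y+6) do overlap and 1 is returned.  */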
1775 memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
1777 if (GET_CODE (x) == VALUE)
1781 struct elt_loc_list *l = NULL;
1782 if (CSELIB_VAL_PTR (x))
1783 for (l = CSELIB_VAL_PTR (x)->locs; l; l = l->next)
1784 if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, y))
1791 /* Don't call get_addr if y is the same VALUE. */
1795 if (GET_CODE (y) == VALUE)
1799 struct elt_loc_list *l = NULL;
1800 if (CSELIB_VAL_PTR (y))
1801 for (l = CSELIB_VAL_PTR (y)->locs; l; l = l->next)
1802 if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, x))
1809 /* Don't call get_addr if x is the same VALUE. */
1813 if (GET_CODE (x) == HIGH)
1815 else if (GET_CODE (x) == LO_SUM)
1818 x = addr_side_effect_eval (x, xsize, 0);
1819 if (GET_CODE (y) == HIGH)
1821 else if (GET_CODE (y) == LO_SUM)
1824 y = addr_side_effect_eval (y, ysize, 0);
1826 if (rtx_equal_for_memref_p (x, y))
1828 if (xsize <= 0 || ysize <= 0)
1830 if (c >= 0 && xsize > c)
1832 if (c < 0 && ysize+c > 0)
1837 /* This code used to check for conflicts involving stack references and
1838 globals but the base address alias code now handles these cases. */
1840 if (GET_CODE (x) == PLUS)
1842 /* The fact that X is canonicalized means that this
1843 PLUS rtx is canonicalized. */
1844 rtx x0 = XEXP (x, 0);
1845 rtx x1 = XEXP (x, 1);
1847 if (GET_CODE (y) == PLUS)
1849 /* The fact that Y is canonicalized means that this
1850 PLUS rtx is canonicalized. */
1851 rtx y0 = XEXP (y, 0);
1852 rtx y1 = XEXP (y, 1);
1854 if (rtx_equal_for_memref_p (x1, y1))
1855 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1856 if (rtx_equal_for_memref_p (x0, y0))
1857 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1858 if (CONST_INT_P (x1))
1860 if (CONST_INT_P (y1))
1861 return memrefs_conflict_p (xsize, x0, ysize, y0,
1862 c - INTVAL (x1) + INTVAL (y1));
1864 return memrefs_conflict_p (xsize, x0, ysize, y,
1867 else if (CONST_INT_P (y1))
1868 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1872 else if (CONST_INT_P (x1))
1873 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1875 else if (GET_CODE (y) == PLUS)
1877 /* The fact that Y is canonicalized means that this
1878 PLUS rtx is canonicalized. */
1879 rtx y0 = XEXP (y, 0);
1880 rtx y1 = XEXP (y, 1);
1882 if (CONST_INT_P (y1))
1883 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1888 if (GET_CODE (x) == GET_CODE (y))
1889 switch (GET_CODE (x))
1893 /* Handle cases where we expect the second operands to be the
1894 same, and check only whether the first operand would conflict
1897 rtx x1 = canon_rtx (XEXP (x, 1));
1898 rtx y1 = canon_rtx (XEXP (y, 1));
1899 if (! rtx_equal_for_memref_p (x1, y1))
1901 x0 = canon_rtx (XEXP (x, 0));
1902 y0 = canon_rtx (XEXP (y, 0));
1903 if (rtx_equal_for_memref_p (x0, y0))
1904 return (xsize == 0 || ysize == 0
1905 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1907 /* Can't properly adjust our sizes. */
1908 if (!CONST_INT_P (x1))
1910 xsize /= INTVAL (x1);
1911 ysize /= INTVAL (x1);
1913 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1920 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1921 as an access with indeterminate size. Assume that references
1922 besides AND are aligned, so if the size of the other reference is
1923 at least as large as the alignment, assume no other overlap. */
1924 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)))
1926 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1928 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
1930 if (GET_CODE (y) == AND && CONST_INT_P (XEXP (y, 1)))
1932 /* ??? If we are indexing far enough into the array/structure, we
1933 may yet be able to determine that we can not overlap. But we
1934 also need to know that we are far enough from the end not to overlap
1935 a following reference, so we do nothing with that for now. */
1936 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1938 return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
1943 if (CONST_INT_P (x) && CONST_INT_P (y))
1945 c += (INTVAL (y) - INTVAL (x));
1946 return (xsize <= 0 || ysize <= 0
1947 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1950 if (GET_CODE (x) == CONST)
1952 if (GET_CODE (y) == CONST)
1953 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1954 ysize, canon_rtx (XEXP (y, 0)), c);
1956 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1959 if (GET_CODE (y) == CONST)
1960 return memrefs_conflict_p (xsize, x, ysize,
1961 canon_rtx (XEXP (y, 0)), c);
1964 return (xsize <= 0 || ysize <= 0
1965 || (rtx_equal_for_memref_p (x, y)
1966 && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1974 /* Functions to compute memory dependencies.
1976 Since we process the insns in execution order, we can build tables
1977 to keep track of what registers are fixed (and not aliased), what registers
1978 are varying in known ways, and what registers are varying in unknown
1981 If both memory references are volatile, then there must always be a
1982 dependence between the two references, since their order can not be
1983 changed. A volatile and non-volatile reference can be interchanged
1986 A MEM_IN_STRUCT reference at a non-AND varying address can never
1987 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We
1988 also must allow AND addresses, because they may generate accesses
1989 outside the object being referenced. This is used to generate
1990 aligned addresses from unaligned addresses, for instance, the alpha
1991 storeqi_unaligned pattern. */
1993 /* Read dependence: X is read after read in MEM takes place. There can
1994 only be a dependence here if both reads are volatile. */
1997 read_dependence (const_rtx mem, const_rtx x)
1999 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
2002 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
2003 MEM2 is a reference to a structure at a varying address, or returns
2004 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
2005 value is returned MEM1 and MEM2 can never alias. VARIES_P is used
2006 to decide whether or not an address may vary; it should return
2007 nonzero whenever variation is possible.
2008 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
2011 fixed_scalar_and_varying_struct_p (const_rtx mem1, const_rtx mem2, rtx mem1_addr,
2013 bool (*varies_p) (const_rtx, bool))
2015 if (! flag_strict_aliasing)
2018 if (MEM_ALIAS_SET (mem2)
2019 && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
2020 && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
2021 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
2025 if (MEM_ALIAS_SET (mem1)
2026 && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
2027 && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
2028 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
2035 /* Returns nonzero if something about the mode or address format of MEM1
2036 indicates that it might well alias *anything*. */
2039 aliases_everything_p (const_rtx mem)
2041 if (GET_CODE (XEXP (mem, 0)) == AND)
2042 /* If the address is an AND, it's very hard to know at what it is
2043 actually pointing. */
2049 /* Return true if we can determine that the fields referenced cannot
2050 overlap for any pair of objects. */
2053 nonoverlapping_component_refs_p (const_tree x, const_tree y)
2055 const_tree fieldx, fieldy, typex, typey, orig_y;
2057 if (!flag_strict_aliasing)
2062 /* The comparison has to be done at a common type, since we don't
2063 know how the inheritance hierarchy works. */
2067 fieldx = TREE_OPERAND (x, 1);
2068 typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));
2073 fieldy = TREE_OPERAND (y, 1);
2074 typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));
2079 y = TREE_OPERAND (y, 0);
2081 while (y && TREE_CODE (y) == COMPONENT_REF);
2083 x = TREE_OPERAND (x, 0);
2085 while (x && TREE_CODE (x) == COMPONENT_REF);
2086 /* Never found a common type. */
2090 /* If we're left with accessing different fields of a structure,
2092 if (TREE_CODE (typex) == RECORD_TYPE
2093 && fieldx != fieldy)
2096 /* The comparison on the current field failed. If we're accessing
2097 a very nested structure, look at the next outer level. */
2098 x = TREE_OPERAND (x, 0);
2099 y = TREE_OPERAND (y, 0);
2102 && TREE_CODE (x) == COMPONENT_REF
2103 && TREE_CODE (y) == COMPONENT_REF);
2108 /* Look at the bottom of the COMPONENT_REF list for a DECL, and return it. */
2111 decl_for_component_ref (tree x)
2115 x = TREE_OPERAND (x, 0);
2117 while (x && TREE_CODE (x) == COMPONENT_REF);
2119 return x && DECL_P (x) ? x : NULL_TREE;
2122 /* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
2123 offset of the field reference. */
2126 adjust_offset_for_component_ref (tree x, rtx offset)
2128 HOST_WIDE_INT ioffset;
2133 ioffset = INTVAL (offset);
2136 tree offset = component_ref_field_offset (x);
2137 tree field = TREE_OPERAND (x, 1);
2139 if (! host_integerp (offset, 1))
2141 ioffset += (tree_low_cst (offset, 1)
2142 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2145 x = TREE_OPERAND (x, 0);
2147 while (x && TREE_CODE (x) == COMPONENT_REF);
2149 return GEN_INT (ioffset);
2152 /* Return nonzero if we can determine the exprs corresponding to memrefs
2153 X and Y and they do not overlap.
2154 If LOOP_INVARIANT is set, skip offset-based disambiguation.  */
2157 nonoverlapping_memrefs_p (const_rtx x, const_rtx y, bool loop_invariant)
2159 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
2162 rtx moffsetx, moffsety;
2163 HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;
2165 /* Unless both have exprs, we can't tell anything. */
2166 if (exprx == 0 || expry == 0)
2169 /* For spill-slot accesses make sure we have valid offsets. */
2170 if ((exprx == get_spill_slot_decl (false)
2171 && ! MEM_OFFSET (x))
2172 || (expry == get_spill_slot_decl (false)
2173 && ! MEM_OFFSET (y)))
2176 /* If both are field references, we may be able to determine something. */
2177 if (TREE_CODE (exprx) == COMPONENT_REF
2178 && TREE_CODE (expry) == COMPONENT_REF
2179 && nonoverlapping_component_refs_p (exprx, expry))
2183 /* If the field reference test failed, look at the DECLs involved. */
2184 moffsetx = MEM_OFFSET (x);
2185 if (TREE_CODE (exprx) == COMPONENT_REF)
2187 tree t = decl_for_component_ref (exprx);
2190 moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
2194 moffsety = MEM_OFFSET (y);
2195 if (TREE_CODE (expry) == COMPONENT_REF)
2197 tree t = decl_for_component_ref (expry);
2200 moffsety = adjust_offset_for_component_ref (expry, moffsety);
2204 if (! DECL_P (exprx) || ! DECL_P (expry))
2207 /* With invalid code we can end up storing into the constant pool.
2208 Bail out to avoid ICEing when creating RTL for this.
2209 See gfortran.dg/lto/20091028-2_0.f90. */
2210 if (TREE_CODE (exprx) == CONST_DECL
2211 || TREE_CODE (expry) == CONST_DECL)
2214 rtlx = DECL_RTL (exprx);
2215 rtly = DECL_RTL (expry);
2217 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
2218 can't overlap unless they are the same because we never reuse that part
2219 of the stack frame used for locals for spilled pseudos. */
2220 if ((!MEM_P (rtlx) || !MEM_P (rtly))
2221 && ! rtx_equal_p (rtlx, rtly))
2224 /* If we have MEMs referring to different address spaces (which can
2225 potentially overlap), we cannot easily tell from the addresses
2226 whether the references overlap. */
2227 if (MEM_P (rtlx) && MEM_P (rtly)
2228 && MEM_ADDR_SPACE (rtlx) != MEM_ADDR_SPACE (rtly))
2231 /* Get the base and offsets of both decls.  If either is a register, we
2232 know both are registers and they are the same, so use that as the base.
2233 The only way we can avoid overlap is if we can deduce that they are
2234 nonoverlapping pieces of that decl, which is very rare.  */
2235 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
2236 if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1)))
2237 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
2239 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
2240 if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1)))
2241 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
2243 /* If the bases are different, we know they do not overlap if both
2244 are constants or if one is a constant and the other a pointer into the
2245 stack frame.  Otherwise a different base means we can't tell if they overlap or not.  */
2247 if (! rtx_equal_p (basex, basey))
2248 return ((CONSTANT_P (basex) && CONSTANT_P (basey))
2249 || (CONSTANT_P (basex) && REG_P (basey)
2250 && REGNO_PTR_FRAME_P (REGNO (basey)))
2251 || (CONSTANT_P (basey) && REG_P (basex)
2252 && REGNO_PTR_FRAME_P (REGNO (basex))));
2254 /* Offset-based disambiguation is not appropriate when LOOP_INVARIANT is set.  */
2258 sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
2259 : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
2261 sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
2262 : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly)) :
2265 /* If we have an offset for either memref, it can update the values computed above.  */
2268 offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
2270 offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);
2272 /* If a memref has both a size and an offset, we can use the smaller size.
2273 We can't do this if the offset isn't known because we must view this
2274 memref as being anywhere inside the DECL's MEM. */
2275 if (MEM_SIZE (x) && moffsetx)
2276 sizex = INTVAL (MEM_SIZE (x));
2277 if (MEM_SIZE (y) && moffsety)
2278 sizey = INTVAL (MEM_SIZE (y));
2280 /* Put the values of the memref with the lower offset in X's values. */
2281 if (offsetx > offsety)
2283 tem = offsetx, offsetx = offsety, offsety = tem;
2284 tem = sizex, sizex = sizey, sizey = tem;
2287 /* If we don't know the size of the lower-offset value, we can't tell
2288 if they conflict. Otherwise, we do the test. */
2289 return sizex >= 0 && offsety >= offsetx + sizex;
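/* A worked instance of the final test (illustrative): suppose both
   memrefs record the same local array `a' as their MEM_EXPR, one with
   byte offset 0 and the other with byte offset 8, each 4 bytes wide.
   They share the base of `a', so after the swap above X holds
   (offset 0, size 4) and Y holds offset 8; since 8 >= 0 + 4, the
   references are known not to overlap.  */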
2292 /* Helper for true_dependence and canon_true_dependence.
2293 Checks for true dependence: X is read after store in MEM takes place.
2295 VARIES is the function that should be used as the rtx_varies function.
2297 If MEM_CANONICALIZED is FALSE, then X_ADDR and MEM_ADDR should be
2298 NULL_RTX, and the canonical addresses of MEM and X are both computed
2299 here. If MEM_CANONICALIZED, then MEM must be already canonicalized.
2301 If X_ADDR is non-NULL, it is used in preference to XEXP (x, 0).
2303 Returns 1 if there is a true dependence, 0 otherwise. */
2306 true_dependence_1 (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
2307 const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool),
2308 bool mem_canonicalized)
2313 gcc_checking_assert (mem_canonicalized ? (mem_addr != NULL_RTX)
2314 : (mem_addr == NULL_RTX && x_addr == NULL_RTX));
2316 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2319 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2320 This is used in epilogue deallocation functions, and in cselib. */
2321 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2323 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2325 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2326 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2329 /* Read-only memory is by definition never modified, and therefore can't
2330 conflict with anything.  We don't expect to find the read-only flag set
2331 on MEM, but stupid user tricks can produce such MEMs, so don't die.  */
2332 if (MEM_READONLY_P (x))
2335 /* If we have MEMs referring to different address spaces (which can
2336 potentially overlap), we cannot easily tell from the addresses
2337 whether the references overlap. */
2338 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
2343 mem_addr = XEXP (mem, 0);
2344 if (mem_mode == VOIDmode)
2345 mem_mode = GET_MODE (mem);
2350 x_addr = XEXP (x, 0);
2351 if (!((GET_CODE (x_addr) == VALUE
2352 && GET_CODE (mem_addr) != VALUE
2353 && reg_mentioned_p (x_addr, mem_addr))
2354 || (GET_CODE (x_addr) != VALUE
2355 && GET_CODE (mem_addr) == VALUE
2356 && reg_mentioned_p (mem_addr, x_addr))))
2358 x_addr = get_addr (x_addr);
2359 if (! mem_canonicalized)
2360 mem_addr = get_addr (mem_addr);
2364 base = find_base_term (x_addr);
2365 if (base && (GET_CODE (base) == LABEL_REF
2366 || (GET_CODE (base) == SYMBOL_REF
2367 && CONSTANT_POOL_ADDRESS_P (base))))
2370 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2373 x_addr = canon_rtx (x_addr);
2374 if (!mem_canonicalized)
2375 mem_addr = canon_rtx (mem_addr);
2377 if ((ret = memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2378 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
2381 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2384 if (nonoverlapping_memrefs_p (mem, x, false))
2387 if (aliases_everything_p (x))
2390 /* We cannot use aliases_everything_p to test MEM, since we must look
2391 at MEM_ADDR, rather than XEXP (mem, 0). */
2392 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2395 /* ??? In true_dependence we also allow BLKmode to alias anything. Why
2396 don't we do this in anti_dependence and output_dependence? */
2397 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2400 if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
2403 return rtx_refs_may_alias_p (x, mem, true);
2406 /* True dependence: X is read after store in MEM takes place. */
2409 true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
2410 bool (*varies) (const_rtx, bool))
2412 return true_dependence_1 (mem, mem_mode, NULL_RTX,
2413 x, NULL_RTX, varies,
2414 /*mem_canonicalized=*/false);
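/* Illustration (not part of the compiler): in a sequence such as

       *p = 1;      store through MEM
       t  = *q;     later read through X

   true_dependence asks whether the read may observe the store; unless
   the analysis above can prove that *p and *q never refer to the same
   memory, it must conservatively return 1.  */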
2417 /* Canonical true dependence: X is read after store in MEM takes place.
2418 Variant of true_dependence which assumes MEM has already been
2419 canonicalized (hence we no longer do that here).
2420 The mem_addr argument has been added, since true_dependence_1 computed
2421 this value prior to canonicalizing. */
2424 canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
2425 const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool))
2427 return true_dependence_1 (mem, mem_mode, mem_addr,
2429 /*mem_canonicalized=*/true);
2432 /* Returns nonzero if a write to X might alias a previous read from
2433 (or, if WRITEP is nonzero, a write to) MEM. */
2436 write_dependence_p (const_rtx mem, const_rtx x, int writep)
2438 rtx x_addr, mem_addr;
2439 const_rtx fixed_scalar;
2443 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2446 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2447 This is used in epilogue deallocation functions. */
2448 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2450 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2452 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2453 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2456 /* A read from read-only memory can't conflict with read-write memory. */
2457 if (!writep && MEM_READONLY_P (mem))
2460 /* If we have MEMs referring to different address spaces (which can
2461 potentially overlap), we cannot easily tell from the addresses
2462 whether the references overlap. */
2463 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
2466 x_addr = XEXP (x, 0);
2467 mem_addr = XEXP (mem, 0);
2468 if (!((GET_CODE (x_addr) == VALUE
2469 && GET_CODE (mem_addr) != VALUE
2470 && reg_mentioned_p (x_addr, mem_addr))
2471 || (GET_CODE (x_addr) != VALUE
2472 && GET_CODE (mem_addr) == VALUE
2473 && reg_mentioned_p (mem_addr, x_addr))))
2475 x_addr = get_addr (x_addr);
2476 mem_addr = get_addr (mem_addr);
2481 base = find_base_term (mem_addr);
2482 if (base && (GET_CODE (base) == LABEL_REF
2483 || (GET_CODE (base) == SYMBOL_REF
2484 && CONSTANT_POOL_ADDRESS_P (base))))
2488 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
2492 x_addr = canon_rtx (x_addr);
2493 mem_addr = canon_rtx (mem_addr);
2495 if ((ret = memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
2496 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
2499 if (nonoverlapping_memrefs_p (x, mem, false))
2503 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2506 if ((fixed_scalar == mem && !aliases_everything_p (x))
2507 || (fixed_scalar == x && !aliases_everything_p (mem)))
2510 return rtx_refs_may_alias_p (x, mem, false);
2513 /* Anti dependence: X is written after read in MEM takes place. */
2516 anti_dependence (const_rtx mem, const_rtx x)
2518 return write_dependence_p (mem, x, /*writep=*/0);
2521 /* Output dependence: X is written after store in MEM takes place. */
2524 output_dependence (const_rtx mem, const_rtx x)
2526 return write_dependence_p (mem, x, /*writep=*/1);
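/* The three dependence tests in this file, illustrated on straight-line
   code (purely for exposition):

       t  = *p;     read
       *p = 1;      write after the read   (anti_dependence)
       *p = 2;      write after the write  (output_dependence)
       t2 = *p;     read after the write   (true_dependence)  */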
2531 /* Check whether X may be aliased with MEM. Don't do offset-based
2532 memory disambiguation or TBAA.  */
2534 may_alias_p (const_rtx mem, const_rtx x)
2536 rtx x_addr, mem_addr;
2538 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2541 /* ??? In true_dependence we also allow BLKmode to alias anything. */
2542 if (GET_MODE (mem) == BLKmode || GET_MODE (x) == BLKmode)
2545 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2546 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2549 /* Read-only memory is by definition never modified, and therefore can't
2550 conflict with anything.  We don't expect to find the read-only flag set
2551 on MEM, but stupid user tricks can produce such MEMs, so don't die.  */
2552 if (MEM_READONLY_P (x))
2555 /* If we have MEMs referring to different address spaces (which can
2556 potentially overlap), we cannot easily tell from the addresses
2557 whether the references overlap. */
2558 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
2561 x_addr = XEXP (x, 0);
2562 mem_addr = XEXP (mem, 0);
2563 if (!((GET_CODE (x_addr) == VALUE
2564 && GET_CODE (mem_addr) != VALUE
2565 && reg_mentioned_p (x_addr, mem_addr))
2566 || (GET_CODE (x_addr) != VALUE
2567 && GET_CODE (mem_addr) == VALUE
2568 && reg_mentioned_p (mem_addr, x_addr))))
2570 x_addr = get_addr (x_addr);
2571 mem_addr = get_addr (mem_addr);
2574 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), GET_MODE (mem_addr)))
2577 x_addr = canon_rtx (x_addr);
2578 mem_addr = canon_rtx (mem_addr);
2580 if (nonoverlapping_memrefs_p (mem, x, true))
2583 if (aliases_everything_p (x))
2586 /* We cannot use aliases_everything_p to test MEM, since we must look
2587 at MEM_ADDR, rather than XEXP (mem, 0). */
2588 if (GET_MODE (mem) == QImode || GET_CODE (mem_addr) == AND)
2591 if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2595 /* TBAA is not valid for loop-invariant disambiguation.  */
2596 return rtx_refs_may_alias_p (x, mem, false);
2600 init_alias_target (void)
2604 memset (static_reg_base_value, 0, sizeof static_reg_base_value);
2606 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2607 /* Check whether this register can hold an incoming pointer
2608 argument. FUNCTION_ARG_REGNO_P tests outgoing register
2609 numbers, so translate if necessary due to register windows. */
2610 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
2611 && HARD_REGNO_MODE_OK (i, Pmode))
2612 static_reg_base_value[i]
2613 = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));
2615 static_reg_base_value[STACK_POINTER_REGNUM]
2616 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
2617 static_reg_base_value[ARG_POINTER_REGNUM]
2618 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
2619 static_reg_base_value[FRAME_POINTER_REGNUM]
2620 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
2621 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2622 static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
2623 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
2627 /* Set MEMORY_MODIFIED when X modifies DATA (which is assumed
2628 to be a memory reference).  */
2629 static bool memory_modified;
2631 memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
2635 if (anti_dependence (x, (const_rtx)data) || output_dependence (x, (const_rtx)data))
2636 memory_modified = true;
2641 /* Return true when INSN possibly modifies the memory contents of MEM
2642 (i.e. the memory at that address can be modified).  */
2644 memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
2648 memory_modified = false;
2649 note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX(mem));
2650 return memory_modified;
2653 /* Initialize the aliasing machinery.  Initialize the REG_KNOWN_VALUE array.  */
2657 init_alias_analysis (void)
2659 unsigned int maxreg = max_reg_num ();
2665 timevar_push (TV_ALIAS_ANALYSIS);
2667 reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
2668 reg_known_value = ggc_alloc_cleared_vec_rtx (reg_known_value_size);
2669 reg_known_equiv_p = XCNEWVEC (bool, reg_known_value_size);
2671 /* If we have memory allocated from the previous run, use it. */
2672 if (old_reg_base_value)
2673 reg_base_value = old_reg_base_value;
2676 VEC_truncate (rtx, reg_base_value, 0);
2678 VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);
2680 new_reg_base_value = XNEWVEC (rtx, maxreg);
2681 reg_seen = XNEWVEC (char, maxreg);
2683 /* The basic idea is that each pass through this loop will use the
2684 "constant" information from the previous pass to propagate alias
2685 information through another level of assignments.
2687 This could get expensive if the assignment chains are long. Maybe
2688 we should throttle the number of iterations, possibly based on
2689 the optimization level or flag_expensive_optimizations.
2691 We could propagate more information in the first pass by making use
2692 of DF_REG_DEF_COUNT to determine immediately that the alias information
2693 for a pseudo is "constant".
2695 A program with an uninitialized variable can cause an infinite loop
2696 here. Instead of doing a full dataflow analysis to detect such problems
2697 we just cap the number of iterations for the loop.
2699 The state of the arrays for the set chain in question does not matter
2700 since the program has undefined behavior. */
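/* As a sketch of why iteration helps (illustrative RTL, not taken from
   any real dump): with a chain of copies such as

       (set (reg 101) (symbol_ref "a"))
       (set (reg 102) (reg 101))
       (set (reg 103) (reg 102))

   knowledge that reg 101 is based on `a' may need additional passes to
   reach reg 103 when the fast single-definition lookup does not apply,
   which is why the loop below repeats until no base value changes or
   MAX_ALIAS_LOOP_PASSES is reached.  */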
2705 /* Assume nothing will change this iteration of the loop. */
2708 /* We want to assign the same IDs each iteration of this loop, so
2709 start counting from zero each iteration of the loop. */
2712 /* We're at the start of the function each iteration through the
2713 loop, so we're copying arguments. */
2714 copying_arguments = true;
2716 /* Wipe the potential alias information clean for this pass. */
2717 memset (new_reg_base_value, 0, maxreg * sizeof (rtx));
2719 /* Wipe the reg_seen array clean. */
2720 memset (reg_seen, 0, maxreg);
2722 /* Mark all hard registers which may contain an address.
2723 The stack, frame and argument pointers may contain an address.
2724 An argument register which can hold a Pmode value may contain
2725 an address even if it is not in BASE_REGS.
2727 The address expression is VOIDmode for an argument and
2728 Pmode for other registers. */
2730 memcpy (new_reg_base_value, static_reg_base_value,
2731 FIRST_PSEUDO_REGISTER * sizeof (rtx));
2733 /* Walk the insns adding values to the new_reg_base_value array. */
2734 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2740 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
2741 /* The prologue/epilogue insns are not threaded onto the
2742 insn chain until after reload has completed. Thus,
2743 there is no sense wasting time checking if INSN is in
2744 the prologue/epilogue until after reload has completed. */
2745 if (reload_completed
2746 && prologue_epilogue_contains (insn))
2750 /* If this insn has a noalias note, process it.  Otherwise,
2751 scan for sets. A simple set will have no side effects
2752 which could change the base value of any other register. */
2754 if (GET_CODE (PATTERN (insn)) == SET
2755 && REG_NOTES (insn) != 0
2756 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
2757 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
2759 note_stores (PATTERN (insn), record_set, NULL);
2761 set = single_set (insn);
2764 && REG_P (SET_DEST (set))
2765 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
2767 unsigned int regno = REGNO (SET_DEST (set));
2768 rtx src = SET_SRC (set);
2771 note = find_reg_equal_equiv_note (insn);
2772 if (note && REG_NOTE_KIND (note) == REG_EQUAL
2773 && DF_REG_DEF_COUNT (regno) != 1)
2776 if (note != NULL_RTX
2777 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
2778 && ! rtx_varies_p (XEXP (note, 0), 1)
2779 && ! reg_overlap_mentioned_p (SET_DEST (set),
2782 set_reg_known_value (regno, XEXP (note, 0));
2783 set_reg_known_equiv_p (regno,
2784 REG_NOTE_KIND (note) == REG_EQUIV);
2786 else if (DF_REG_DEF_COUNT (regno) == 1
2787 && GET_CODE (src) == PLUS
2788 && REG_P (XEXP (src, 0))
2789 && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
2790 && CONST_INT_P (XEXP (src, 1)))
2792 t = plus_constant (t, INTVAL (XEXP (src, 1)));
2793 set_reg_known_value (regno, t);
2794 set_reg_known_equiv_p (regno, 0);
2796 else if (DF_REG_DEF_COUNT (regno) == 1
2797 && ! rtx_varies_p (src, 1))
2799 set_reg_known_value (regno, src);
2800 set_reg_known_equiv_p (regno, 0);
2804 else if (NOTE_P (insn)
2805 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
2806 copying_arguments = false;
2809 /* Now propagate values from new_reg_base_value to reg_base_value. */
2810 gcc_assert (maxreg == (unsigned int) max_reg_num ());
2812 for (ui = 0; ui < maxreg; ui++)
2814 if (new_reg_base_value[ui]
2815 && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
2816 && ! rtx_equal_p (new_reg_base_value[ui],
2817 VEC_index (rtx, reg_base_value, ui)))
2819 VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
2824 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
2826 /* Fill in the remaining entries. */
2827 for (i = 0; i < (int)reg_known_value_size; i++)
2828 if (reg_known_value[i] == 0)
2829 reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];
2832 free (new_reg_base_value);
2833 new_reg_base_value = 0;
2836 timevar_pop (TV_ALIAS_ANALYSIS);
2840 end_alias_analysis (void)
2842 old_reg_base_value = reg_base_value;
2843 ggc_free (reg_known_value);
2844 reg_known_value = 0;
2845 reg_known_value_size = 0;
2846 free (reg_known_equiv_p);
2847 reg_known_equiv_p = 0;
2850 #include "gt-alias.h"