/* Alias analysis for GNU C
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
   2007, 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "splay-tree.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "ipa-type-escape.h"
#include "tree-ssa-alias.h"
#include "pointer-set.h"
#include "tree-flow.h"
/* The aliasing API provided here solves related but different problems:

   Say there exists (in C)

     struct X {
       struct Y y1;
       struct Z z2;
     } x1, *px1, *px2;

     struct Y y2, *py;
     struct Z z2, *pz;

     py = &x1.y1;
     px2 = &x1;

   Consider the four questions:

   Can a store to x1 interfere with px2->y1?
   Can a store to x1 interfere with px2->z2?
   Can a store to x1 change the value pointed to by py?
   Can a store to x1 change the value pointed to by pz?

   The answer to these questions can be yes, yes, yes, and maybe.

   The first two questions can be answered with a simple examination
   of the type system.  If structure X contains a field of type Y then
   a store through a pointer to an X can overwrite any field that is
   contained (recursively) in an X (unless we know that px1 != px2).

   The last two questions can be solved in the same way as the first
   two questions, but this is too conservative.  The observation is
   that in some cases we can know which (if any) fields are addressed
   and whether those addresses are used in bad ways.  This analysis
   may be language specific.  In C, arbitrary operations may be
   applied to pointers.  However, there is some indication that this
   may be too conservative for some C++ types.

   The pass ipa-type-escape does this analysis for the types whose
   instances do not escape across the compilation boundary.

   Historically in GCC, these two problems were combined and a single
   data structure was used to represent the solution to these
   problems.  We now have two similar but different data structures:
   the data structure used to solve the last two questions is similar
   to the first, but does not contain the fields whose addresses are
   never taken.  For types that do escape the compilation unit, the
   data structures will have identical information.  */
/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:

	   struct S
	    /   \
	   /     \
	 |/_     _\|
	 int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */
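
/* As an illustrative sketch (user-level C, not part of this file's
   API): under the subset relation above, the first two stores below
   may alias *PS, while the third cannot.

     struct S { int i; double d; };

     void f (struct S *ps, int *pi, double *pd, float *pf)
     {
       *pi = 1;      // may alias ps->i: `int' is a subset of `struct S'
       *pd = 2.0;    // may alias ps->d: likewise for `double'
       *pf = 3.0f;   // disjoint alias sets: `float' is not a member
		     // type of `struct S'
     }  */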
struct GTY(()) alias_set_entry_d {
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  alias_set_type alias_set;

  /* Nonzero if this alias set would have a child of zero: this
     effectively makes this alias set the same as alias set zero.  */
  int has_zero_child;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree GTY((param1_is (int), param2_is (int))) children;
};
typedef struct alias_set_entry_d *alias_set_entry;
static int rtx_equal_for_memref_p (const_rtx, const_rtx);
static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
static void record_set (rtx, const_rtx, void *);
static int base_alias_check (rtx, rtx, enum machine_mode,
			     enum machine_mode);
static rtx find_base_value (rtx);
static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
static int insert_subset_children (splay_tree_node, void*);
static alias_set_entry get_alias_set_entry (alias_set_type);
static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx, rtx, rtx,
						    bool (*) (const_rtx, bool));
static int aliases_everything_p (const_rtx);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);
static tree decl_for_component_ref (tree);
static rtx adjust_offset_for_component_ref (tree, rtx);
static int write_dependence_p (const_rtx, const_rtx, int);

static void memory_modified_1 (rtx, const_rtx, void *);
/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems are
   not well defined.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.  10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10
/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS is not normally used; the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines whether
   two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the
   purposes of marking the function as a constant function.  */
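
/* For example (a sketch; the register number is invented), after

     (set (reg 100) (symbol_ref "x"))
     (set (reg 100) (plus (reg 100) (const_int 4)))

   reg_base_value[100] is still the SYMBOL_REF for `x': the second set
   only offsets the current value, so it does not make the register
   point to a different top level object.  */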
static GTY(()) VEC(rtx,gc) *reg_base_value;
static rtx *new_reg_base_value;

/* We preserve a copy of the old array around to reduce the amount of
   garbage produced.  About 8% of the garbage produced was attributed
   to this array.  */
static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;

#define static_reg_base_value \
  (this_target_rtl->x_static_reg_base_value)

#define REG_BASE_VALUE(X) \
  (REGNO (X) < VEC_length (rtx, reg_base_value) \
   ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)
/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This array is initialized in init_alias_analysis,
   and does not change until end_alias_analysis is called.  */
static GTY((length("reg_known_value_size"))) rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static GTY(()) unsigned int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
static bool *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static bool copying_arguments;

DEF_VEC_P(alias_set_entry);
DEF_VEC_ALLOC_P(alias_set_entry,gc);

/* The splay-tree used to store the various alias set entries.  */
static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
/* Build a decomposed reference object for querying the alias-oracle
   from the MEM rtx and store it in *REF.
   Returns false if MEM is not suitable for the alias-oracle.  */

static bool
ao_ref_from_mem (ao_ref *ref, const_rtx mem)
{
  tree expr = MEM_EXPR (mem);
  tree base;

  if (!expr)
    return false;

  ao_ref_init (ref, expr);

  /* Get the base of the reference and see if we have to reject or
     adjust it.  */
  base = ao_ref_base (ref);
  if (base == NULL_TREE)
    return false;

  /* The tree oracle doesn't like to have these.  */
  if (TREE_CODE (base) == FUNCTION_DECL
      || TREE_CODE (base) == LABEL_DECL)
    return false;

  /* If this is a pointer dereference of a non-SSA_NAME punt.
     ??? We could replace it with a pointer to anything.  */
  if ((INDIRECT_REF_P (base)
       || TREE_CODE (base) == MEM_REF)
      && TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME)
    return false;

  /* If this is a reference based on a partitioned decl replace the
     base with an INDIRECT_REF of the pointer representative we
     created during stack slot partitioning.  */
  if (TREE_CODE (base) == VAR_DECL
      && ! TREE_STATIC (base)
      && cfun->gimple_df->decls_to_pointers != NULL)
    {
      void *namep;
      namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
      if (namep)
	ref->base = build_simple_mem_ref (*(tree *)namep);
    }

  ref->ref_alias_set = MEM_ALIAS_SET (mem);

  /* If MEM_OFFSET or MEM_SIZE are NULL we have to punt.
     Keep points-to related information though.  */
  if (!MEM_OFFSET (mem)
      || !MEM_SIZE (mem))
    {
      ref->ref = NULL_TREE;
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  /* If the base decl is a parameter we can have negative MEM_OFFSET in
     case of promoted subregs on bigendian targets.  Trust the MEM_EXPR
     here.  */
  if (INTVAL (MEM_OFFSET (mem)) < 0
      && ((INTVAL (MEM_SIZE (mem)) + INTVAL (MEM_OFFSET (mem)))
	  * BITS_PER_UNIT) == ref->size)
    return true;

  ref->offset += INTVAL (MEM_OFFSET (mem)) * BITS_PER_UNIT;
  ref->size = INTVAL (MEM_SIZE (mem)) * BITS_PER_UNIT;

  /* The MEM may extend into adjacent fields, so adjust max_size if
     necessary.  */
  if (ref->max_size != -1
      && ref->size > ref->max_size)
    ref->max_size = ref->size;

  /* If MEM_OFFSET and MEM_SIZE get us outside of the base object of
     the MEM_EXPR punt.  This happens for STRICT_ALIGNMENT targets a lot.  */
  if (MEM_EXPR (mem) != get_spill_slot_decl (false)
      && (ref->offset < 0
	  || (DECL_P (ref->base)
	      && (!host_integerp (DECL_SIZE (ref->base), 1)
		  || (TREE_INT_CST_LOW (DECL_SIZE ((ref->base)))
		      < (unsigned HOST_WIDE_INT)(ref->offset + ref->size))))))
    return false;

  return true;
}
/* Query the alias-oracle on whether the two memory rtx X and MEM may
   alias.  If TBAA_P is set also apply TBAA.  Returns true if the
   two rtxen may alias, false otherwise.  */

static bool
rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
{
  ao_ref ref1, ref2;

  if (!ao_ref_from_mem (&ref1, x)
      || !ao_ref_from_mem (&ref2, mem))
    return true;

  return refs_may_alias_p_1 (&ref1, &ref2,
			     tbaa_p
			     && MEM_ALIAS_SET (x) != 0
			     && MEM_ALIAS_SET (mem) != 0);
}
/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static inline alias_set_entry
get_alias_set_entry (alias_set_type alias_set)
{
  return VEC_index (alias_set_entry, alias_sets, alias_set);
}
/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static inline int
mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
{
  /* Perform a basic sanity check.  Namely, that there are no alias sets
     if we're not using strict aliasing.  This helps to catch bugs
     whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
     where a MEM is allocated in some way other than by the use of
     gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
     use alias sets to indicate that spilled registers cannot alias each
     other, we might need to remove this check.  */
  gcc_assert (flag_strict_aliasing
	      || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));

  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}
/* Insert the NODE into the splay tree given by DATA.  Used by
   record_alias_subset via splay_tree_foreach.  */

static int
insert_subset_children (splay_tree_node node, void *data)
{
  splay_tree_insert ((splay_tree) data, node->key, node->value);

  return 0;
}
/* Return true if the first alias set is a subset of the second.  */

bool
alias_set_subset_of (alias_set_type set1, alias_set_type set2)
{
  alias_set_entry ase;

  /* Everything is a subset of the "aliases everything" set.  */
  if (set2 == 0)
    return true;

  /* Otherwise, check if set1 is a subset of set2.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return true;
  return false;
}
/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
{
  alias_set_entry ase;

  /* The easy case.  */
  if (alias_sets_must_conflict_p (set1, set2))
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (set1);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set2)))
    return 1;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return 1;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot conflict.  */
  return 0;
}
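
/* A usage sketch (the set numbers are invented): if SET_S is the alias
   set of `struct S' above and SET_INT that of `int', then SET_INT is
   recorded as a child of SET_S, so

     alias_sets_conflict_p (SET_S, SET_INT)      => 1  (subset lookup)
     alias_sets_conflict_p (SET_INT, SET_DOUBLE) => 0  (distinct, unrelated)
     alias_sets_conflict_p (0, SET_INT)          => 1  (set zero always
							conflicts)  */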
static int
walk_mems_2 (rtx *x, rtx mem)
{
  if (MEM_P (*x))
    {
      if (alias_sets_conflict_p (MEM_ALIAS_SET (*x), MEM_ALIAS_SET (mem)))
	return 1;

      return -1;
    }
  return 0;
}

static int
walk_mems_1 (rtx *x, rtx *pat)
{
  if (MEM_P (*x))
    {
      /* Visit all MEMs in *PAT and check independence.  */
      if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
	/* Indicate that dependence was determined and stop traversal.  */
	return 1;

      return -1;
    }
  return 0;
}

/* Return 1 if the two specified instructions have memory expressions with
   conflicting alias sets.  */
bool
insn_alias_sets_conflict_p (rtx insn1, rtx insn2)
{
  /* For each pair of MEMs in INSN1 and INSN2 check their independence.  */
  return for_each_rtx (&PATTERN (insn1), (rtx_function) walk_mems_1,
		       &PATTERN (insn2));
}
/* Return 1 if the two specified alias sets will always conflict.  */

int
alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
{
  if (set1 == 0 || set2 == 0 || set1 == set2)
    return 1;

  return 0;
}
/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (tree t1, tree t2)
{
  alias_set_type set1, set2;

  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variables areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2
      /* Likewise if both are volatile.  */
      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
    return 1;

  set1 = t1 ? get_alias_set (t1) : 0;
  set2 = t2 ? get_alias_set (t2) : 0;

  /* We can't use alias_sets_conflict_p because we must make sure
     that every subtype of t1 will conflict with every subtype of
     t2 for which a pair of subobjects of these respective subtypes
     overlaps on the stack.  */
  return alias_sets_must_conflict_p (set1, set2);
}
/* Return true if all nested component references handled by
   get_inner_reference in T are such that we should use the alias set
   provided by the object at the heart of T.

   This is true for non-addressable components (which don't have their
   own alias set), as well as components of objects in alias set zero.
   This latter point is a special case wherein we wish to override the
   alias set used by the component, but we don't have per-FIELD_DECL
   assignable alias sets.  */

bool
component_uses_parent_alias_set (const_tree t)
{
  while (1)
    {
      /* If we're at the end, it vacuously uses its own alias set.  */
      if (!handled_component_p (t))
	return false;

      switch (TREE_CODE (t))
	{
	case COMPONENT_REF:
	  if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
	    return true;
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
	    return true;
	  break;

	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  break;

	default:
	  /* Bitfields and casts are never addressable.  */
	  return true;
	}

      t = TREE_OPERAND (t, 0);
      if (get_alias_set (TREE_TYPE (t)) == 0)
	return true;
    }
}
/* Return the alias set for the memory pointed to by T, which may be
   either a type or an expression.  Return -1 if there is nothing
   special about dereferencing T.  */

static alias_set_type
get_deref_alias_set_1 (tree t)
{
  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  */
  if (!flag_strict_aliasing)
    return 0;

  /* All we care about is the type.  */
  if (! TYPE_P (t))
    t = TREE_TYPE (t);

  /* If we have an INDIRECT_REF via a void pointer, we don't
     know anything about what that might alias.  Likewise if the
     pointer is marked that way.  */
  if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
      || TYPE_REF_CAN_ALIAS_ALL (t))
    return 0;

  return -1;
}
/* Return the alias set for the memory pointed to by T, which may be
   either a type or an expression.  */

alias_set_type
get_deref_alias_set (tree t)
{
  alias_set_type set = get_deref_alias_set_1 (t);

  /* Fall back to the alias-set of the pointed-to type.  */
  if (set == -1)
    {
      if (! TYPE_P (t))
	t = TREE_TYPE (t);
      set = get_alias_set (TREE_TYPE (t));
    }

  return set;
}
/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

alias_set_type
get_alias_set (tree t)
{
  alias_set_type set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      tree inner;

      /* Give the language a chance to do something with this tree
	 before we look at it.  */
      STRIP_NOPS (t);
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;

      /* Retrieve the original memory reference if needed.  */
      if (TREE_CODE (t) == TARGET_MEM_REF)
	t = TMR_ORIGINAL (t);

      /* Get the base object of the reference.  */
      inner = t;
      while (handled_component_p (inner))
	{
	  /* If there is a VIEW_CONVERT_EXPR in the chain we cannot use
	     the type of any component references that wrap it to
	     determine the alias-set.  */
	  if (TREE_CODE (inner) == VIEW_CONVERT_EXPR)
	    t = TREE_OPERAND (inner, 0);
	  inner = TREE_OPERAND (inner, 0);
	}

      /* Handle pointer dereferences here, they can override the
	 alias-set.  */
      if (INDIRECT_REF_P (inner))
	{
	  set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0));
	  if (set != -1)
	    return set;
	}
      else if (TREE_CODE (inner) == MEM_REF)
	{
	  set = get_deref_alias_set_1 (TREE_OPERAND (inner, 1));
	  if (set != -1)
	    return set;
	}

      /* If the innermost reference is a MEM_REF that has a
	 conversion embedded treat it like a VIEW_CONVERT_EXPR above,
	 using the memory access type for determining the alias-set.  */
      if (TREE_CODE (inner) == MEM_REF
	  && TYPE_MAIN_VARIANT (TREE_TYPE (inner))
	     != TYPE_MAIN_VARIANT
		  (TREE_TYPE (TREE_TYPE (TREE_OPERAND (inner, 1)))))
	return get_deref_alias_set (TREE_OPERAND (inner, 1));

      /* Otherwise, pick up the outermost object that we could have a pointer
	 to, processing conversions as above.  */
      while (component_uses_parent_alias_set (t))
	{
	  t = TREE_OPERAND (t, 0);
	  STRIP_NOPS (t);
	}

      /* If we've already determined the alias set for a decl, just return
	 it.  This is necessary for C++ anonymous unions, whose component
	 variables don't look like union members (boo!).  */
      if (TREE_CODE (t) == VAR_DECL
	  && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
	return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  */
  t = TYPE_MAIN_VARIANT (t);

  /* Always use the canonical type as well.  If this is a type that
     requires structural comparisons to identify compatible types
     use alias set zero.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (t))
    {
      /* Allow the language to specify another alias set for this
	 type.  */
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;
      return 0;
    }

  t = TYPE_CANONICAL (t);

  /* Canonical types shouldn't form a tree nor should the canonical
     type require structural equality checks.  */
  gcc_checking_assert (TYPE_CANONICAL (t) == t
		       && !TYPE_STRUCTURAL_EQUALITY_P (t));

  /* If this is a type with a known alias set, return it.  */
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* We don't want to set TYPE_ALIAS_SET for incomplete types.  */
  if (!COMPLETE_TYPE_P (t))
    {
      /* For arrays with unknown size the conservative answer is the
	 alias set of the element type.  */
      if (TREE_CODE (t) == ARRAY_TYPE)
	return get_alias_set (TREE_TYPE (t));

      /* But return zero as a conservative answer for incomplete types.  */
      return 0;
    }

  /* See if the language has special handling for this type.  */
  set = lang_hooks.get_alias_set (t);
  if (set != -1)
    ;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    set = 0;

  /* Unless the language specifies otherwise, let vector types alias
     their components.  This avoids some nasty type punning issues in
     normal usage.  And indeed lets vectors be treated more like an
     array slice.  */
  else if (TREE_CODE (t) == VECTOR_TYPE)
    set = get_alias_set (TREE_TYPE (t));

  /* Unless the language specifies otherwise, treat array types the
     same as their components.  This avoids the asymmetry we get
     through recording the components.  Consider accessing a
     character(kind=1) through a reference to a character(kind=1)[1:1].
     Or consider if we want to assign integer(kind=4)[0:D.1387] and
     integer(kind=4)[4] the same alias set or not.
     Just be pragmatic here and make sure the array and its element
     type get the same alias set assigned.  */
  else if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NONALIASED_COMPONENT (t))
    set = get_alias_set (TREE_TYPE (t));

  /* Otherwise make a new alias set for this type.  */
  else
    set = new_alias_set ();

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type or a complex type, we must record any
     component aliasing information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}
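
/* An illustrative consequence of the TYPE_MAIN_VARIANT handling above
   (a sketch, not code from this file): qualified and unqualified
   variants share one alias set, e.g.

     get_alias_set (int_type_node)
       == get_alias_set (build_qualified_type (int_type_node,
					       TYPE_QUAL_CONST))

   while the ARRAY_TYPE case above forces an array type and its element
   type to share a set.  */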
/* Return a brand-new alias set.  */

alias_set_type
new_alias_set (void)
{
  if (flag_strict_aliasing)
    {
      if (alias_sets == 0)
	VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
      VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
      return VEC_length (alias_set_entry, alias_sets) - 1;
    }
  else
    return 0;
}
/* Indicate that things in SUBSET can alias things in SUPERSET, but that
   not everything that aliases SUPERSET also aliases SUBSET.  For example,
   in C, a store to an `int' can alias a load of a structure containing an
   `int', and vice versa.  But it can't alias a load of a 'double' member
   of the same structure.  Here, the structure would be the SUPERSET and
   `int' the SUBSET.  This relationship is also described in the comment at
   the beginning of this file.

   This function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */
void
record_alias_subset (alias_set_type superset, alias_set_type subset)
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  gcc_assert (superset);

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry = ggc_alloc_cleared_alias_set_entry_d ();
      superset_entry->alias_set = superset;
      superset_entry->children
	= splay_tree_new_ggc (splay_tree_compare_ints,
			      ggc_alloc_splay_tree_scalar_scalar_splay_tree_s,
			      ggc_alloc_splay_tree_scalar_scalar_splay_tree_node_s);
      superset_entry->has_zero_child = 0;
      VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      /* If there is an entry for the subset, enter all of its children
	 (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
	{
	  if (subset_entry->has_zero_child)
	    superset_entry->has_zero_child = 1;

	  splay_tree_foreach (subset_entry->children, insert_subset_children,
			      superset_entry->children);
	}

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      splay_tree_insert (superset_entry->children,
			 (splay_tree_key) subset, 0);
    }
}
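
/* Illustrative sketch (hypothetical type names, not code from this
   file): for `struct S { int i; double d; };' the routine below
   effectively performs

     record_alias_subset (get_alias_set (S_type), get_alias_set (int_type));
     record_alias_subset (get_alias_set (S_type), get_alias_set (double_type));

   after which both scalar sets appear in the children splay tree of the
   entry for `struct S'.  */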
/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are not marked non-addressable.  For array types, we
   only record the component type if it is not marked non-aliased.  */

void
record_component_aliases (tree type)
{
  alias_set_type superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Recursively record aliases for the base classes, if there are any.  */
      if (TYPE_BINFO (type))
	{
	  int i;
	  tree binfo, base_binfo;

	  for (binfo = TYPE_BINFO (type), i = 0;
	       BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    record_alias_subset (superset,
				 get_alias_set (BINFO_TYPE (base_binfo)));
	}
      for (field = TYPE_FIELDS (type); field != 0; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
	  record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
       element type.  */

    default:
      break;
    }
}
/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

static GTY(()) alias_set_type varargs_set = -1;

alias_set_type
get_varargs_alias_set (void)
{
#if 1
  /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
     varargs alias set to an INDIRECT_REF (FIXME!), so we can't
     consistently use the varargs alias set for loads from the varargs
     area.  So don't use it anywhere.  */
  return 0;
#else
  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
#endif
}
/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

static GTY(()) alias_set_type frame_set = -1;

alias_set_type
get_frame_alias_set (void)
{
  if (frame_set == -1)
    frame_set = new_alias_set ();

  return frame_set;
}
/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (rtx src)
{
  unsigned int regno;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  src = FIND_BASE_TERM (src);
#endif

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for non-fixed hard regs since it can result in a circular
	 dependency chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
	  && regno < VEC_length (rtx, reg_base_value))
	{
	  /* If we're inside init_alias_analysis, use new_reg_base_value
	     to reduce the number of relaxation iterations.  */
	  if (new_reg_base_value && new_reg_base_value[regno]
	      && DF_REG_DEF_COUNT (regno) == 1)
	    return new_reg_base_value[regno];

	  if (VEC_index (rtx, reg_base_value, regno))
	    return VEC_index (rtx, reg_base_value, regno);
	}

      return 0;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (REG_P (src_0) && REG_POINTER (src_0))
	  return find_base_value (src_0);
	if (REG_P (src_1) && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (REG_P (src_0))
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }

	if (REG_P (src_1))
	  {
	    temp = find_base_value (src_1);
	    if (temp != 0)
	      src_1 = temp;
	  }

	/* If either base is named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (src_0 != 0
	    && (GET_CODE (src_0) == SYMBOL_REF
		|| GET_CODE (src_0) == LABEL_REF
		|| (GET_CODE (src_0) == ADDRESS
		    && GET_MODE (src_0) != VOIDmode)))
	  return src_0;

	if (src_1 != 0
	    && (GET_CODE (src_1) == SYMBOL_REF
		|| GET_CODE (src_1) == LABEL_REF
		|| (GET_CODE (src_1) == ADDRESS
		    && GET_MODE (src_1) != VOIDmode)))
	  return src_1;

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (CONST_INT_P (src_1) || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (CONST_INT_P (src_0) || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant set the base
	 address to the first operand.  */
      if (CONST_INT_P (XEXP (src, 1)) && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      /* As we do not know which address space the pointer is referring to, we
	 can handle this only if the target does not support different pointer
	 or address modes depending on the address space.  */
      if (!target_default_pointer_address_modes_p ())
	break;
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
	break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* used for NT/Alpha pointers */
      /* As we do not know which address space the pointer is referring to, we
	 can handle this only if the target does not support different pointer
	 or address modes depending on the address space.  */
      if (!target_default_pointer_address_modes_p ())
	break;

      {
	rtx temp = find_base_value (XEXP (src, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    default:
      break;
    }

  return 0;
}
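
/* Example (a sketch; the register numbers are invented): given

     (set (reg 100) (symbol_ref "a"))
     (set (reg 101) (plus (reg 100) (reg 102)))

   find_base_value on the PLUS first substitutes the known base of
   reg 100, then returns the SYMBOL_REF for `a' as the base, provided
   reg 102 does not itself look like a base address.  */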
/* Called from init_alias_analysis indirectly through note_stores.  */

/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

static void
record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  unsigned regno;
  rtx src;
  int n;

  if (!REG_P (dest))
    return;

  regno = REGNO (dest);

  gcc_checking_assert (regno < VEC_length (rtx, reg_base_value));

  /* If this spans multiple hard registers, then we must indicate that every
     register has an unusable value.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    n = hard_regno_nregs[regno][GET_MODE (dest)];
  else
    n = 1;
  if (n != 1)
    {
      while (--n >= 0)
	{
	  reg_seen[regno + n] = 1;
	  new_reg_base_value[regno + n] = 0;
	}
      return;
    }

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      if (reg_seen[regno])
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
						   GEN_INT (unique_id++));
      return;
    }
  /* If this is not the first set of REGNO, see whether the new value
     is related to the old one.  There are two cases of interest:

	(1) The register might be assigned an entirely new value
	    that has the same base term as the original set.

	(2) The set might be a simple self-modification that
	    cannot change REGNO's base value.

     If neither case holds, reject the original base value as invalid.
     Note that the following situation is not detected:

	 extern int x, y;  int *p = &x; p += (&y-&x);

     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno] != 0
      && find_base_value (src) != new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  new_reg_base_value[regno] = 0;
	break;
      case PLUS:
	/* If the value we add in the PLUS is also a valid base value,
	   this might be the actual base value, and the original value
	   an index.  */
	{
	  rtx other = NULL_RTX;

	  if (XEXP (src, 0) == dest)
	    other = XEXP (src, 1);
	  else if (XEXP (src, 1) == dest)
	    other = XEXP (src, 0);

	  if (! other || find_base_value (other))
	    new_reg_base_value[regno] = 0;
	  break;
	}
      case AND:
	if (XEXP (src, 0) != dest || !CONST_INT_P (XEXP (src, 1)))
	  new_reg_base_value[regno] = 0;
	break;
      default:
	new_reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}
/* If a value is known for REGNO, return it.  */

rtx
get_reg_known_value (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_value[regno];
    }
  return NULL;
}

/* Set it.  */

static void
set_reg_known_value (unsigned int regno, rtx val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_value[regno] = val;
    }
}

/* Similarly for reg_known_equiv_p.  */

bool
get_reg_known_equiv_p (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_equiv_p[regno];
    }
  return false;
}

static void
set_reg_known_equiv_p (unsigned int regno, bool val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_equiv_p[regno] = val;
    }
}
/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */

rtx
canon_rtx (rtx x)
{
  /* Recursively look for equivalences.  */
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    {
      rtx t = get_reg_known_value (REGNO (x));
      if (t == x)
	return x;
      if (t)
	return canon_rtx (t);
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
	{
	  if (CONST_INT_P (x0))
	    return plus_constant (x1, INTVAL (x0));
	  else if (CONST_INT_P (x1))
	    return plus_constant (x0, INTVAL (x1));
	  return gen_rtx_PLUS (GET_MODE (x), x0, x1);
	}
    }

  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags with their original values.  */
  else if (MEM_P (x))
    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));

  return x;
}
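
/* Example (a sketch; the pseudo number is invented): if reg_known_value
   records that pseudo 101 is equivalent to (symbol_ref "a"), then

     canon_rtx ((plus:SI (reg:SI 101) (const_int 4)))

   yields the result of plus_constant on the SYMBOL_REF, typically a
   CONST wrapping (plus (symbol_ref "a") (const_int 4)).  */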
/* Return 1 if X and Y are identical-looking rtx's.
   Expect that X and Y have already been canonicalized.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (const_rtx x, const_rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
      /* There's no need to compare the contents of CONST_DOUBLEs or
	 CONST_INTs because pointer equality is a good enough
	 comparison for these nodes.  */
      return 0;

    default:
      break;
    }

  /* canon_rtx knows how to handle plus.  No need to canonicalize.  */
  if (code == PLUS)
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  /* For commutative operations, the RTXs match if the operands match in
     any order.  Also handle the simple binary and unary cases without a
     loop.  */
  if (COMMUTATIVE_P (x))
    {
      rtx xop0 = canon_rtx (XEXP (x, 0));
      rtx yop0 = canon_rtx (XEXP (y, 0));
      rtx yop1 = canon_rtx (XEXP (y, 1));

      return ((rtx_equal_for_memref_p (xop0, yop0)
	       && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
	      || (rtx_equal_for_memref_p (xop0, yop1)
		  && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
    }
  else if (NON_COMMUTATIVE_P (x))
    {
      return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				      canon_rtx (XEXP (y, 0)))
	      && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
					 canon_rtx (XEXP (y, 1))));
    }
  else if (UNARY_P (x))
    return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				   canon_rtx (XEXP (y, 0)));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
					canon_rtx (XVECEXP (y, i, j))) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
				      canon_rtx (XEXP (y, i))) == 0)
	    return 0;
	  break;

	  /* This can happen for asm operands.  */
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	/* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
static rtx
find_base_term (rtx x)
{
  cselib_val *val;
  struct elt_loc_list *l;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case TRUNCATE:
      /* As we do not know which address space the pointer is referring to, we
	 can handle this only if the target does not support different pointer
	 or address modes depending on the address space.  */
      if (!target_default_pointer_address_modes_p ())
	return 0;
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
	return 0;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_term (XEXP (x, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers */
      /* As we do not know which address space the pointer is referring to, we
	 can handle this only if the target does not support different pointer
	 or address modes depending on the address space.  */
      if (!target_default_pointer_address_modes_p ())
	return 0;

      {
	rtx temp = find_base_term (XEXP (x, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    case VALUE:
      val = CSELIB_VAL_PTR (x);

      if (!val)
	return 0;

      for (l = val->locs; l; l = l->next)
	if ((x = find_base_term (l->loc)) != 0)
	  return x;

      return 0;

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_term (XEXP (x, 1));

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* Fall through.  */

    case PLUS:
    case MINUS:
      {
	rtx tmp1 = XEXP (x, 0);
	rtx tmp2 = XEXP (x, 1);

	/* This is a little bit tricky since we have to determine which of
	   the two operands represents the real base address.  Otherwise this
	   routine may return the index register instead of the base register.

	   That may cause us to believe no aliasing was possible, when in
	   fact aliasing is possible.

	   We use a few simple tests to guess the base register.  Additional
	   tests can certainly be added.  For example, if one of the operands
	   is a shift or multiply, then it must be the index register and the
	   other operand is the base register.  */

	if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
	  return find_base_term (tmp2);

	/* If either operand is known to be a pointer, then use it
	   to determine the base term.  */
	if (REG_P (tmp1) && REG_POINTER (tmp1))
	  {
	    rtx base = find_base_term (tmp1);
	    if (base)
	      return base;
	  }

	if (REG_P (tmp2) && REG_POINTER (tmp2))
	  {
	    rtx base = find_base_term (tmp2);
	    if (base)
	      return base;
	  }

	/* Neither operand was known to be a pointer.  Go ahead and find the
	   base term for both operands.  */
	tmp1 = find_base_term (tmp1);
	tmp2 = find_base_term (tmp2);

	/* If either base term is named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (tmp1 != 0
	    && (GET_CODE (tmp1) == SYMBOL_REF
		|| GET_CODE (tmp1) == LABEL_REF
		|| (GET_CODE (tmp1) == ADDRESS
		    && GET_MODE (tmp1) != VOIDmode)))
	  return tmp1;

	if (tmp2 != 0
	    && (GET_CODE (tmp2) == SYMBOL_REF
		|| GET_CODE (tmp2) == LABEL_REF
		|| (GET_CODE (tmp2) == ADDRESS
		    && GET_MODE (tmp2) != VOIDmode)))
	  return tmp2;

	/* We could not determine which of the two operands was the
	   base register and which was the index.  So we can determine
	   nothing from the base alias check.  */
	return 0;
      }

    case AND:
      if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
	return find_base_term (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    default:
      return 0;
    }
}
/* Return 0 if the addresses X and Y are known to point to different
   objects, 1 if they might be pointers to the same object.  */

static int
base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
		  enum machine_mode y_mode)
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If the address itself has no known base see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;

      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
	return 1;

      x_base = find_base_term (x_c);
      if (x_base == 0)
	return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;

      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
	return 1;

      y_base = find_base_term (y_c);
      if (y_base == 0)
	return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses are different expressions.  If they are not accessed
     via AND, there is no conflict.  We can bring knowledge of object
     alignment into play here.  For example, on alpha, "char a, b;" can
     alias one another, though "char a; long b;" cannot.  AND addresses may
     implicitly alias surrounding objects; i.e. unaligned access in DImode
     via AND address can alias all surrounding object types except those
     with alignment 8 or higher.  */
  if (GET_CODE (x) == AND && GET_CODE (y) == AND)
    return 1;
  if (GET_CODE (x) == AND
      && (!CONST_INT_P (XEXP (x, 1))
	  || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
    return 1;
  if (GET_CODE (y) == AND
      && (!CONST_INT_P (XEXP (y, 1))
	  || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
    return 1;

  /* Differing symbols not accessed via AND never alias.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    return 0;

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference can not alias a parameter, and a stack reference
     can not alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  return 1;
}
/* Convert the address X into something we can use.  This is done by returning
   it unchanged unless it is a value; in the latter case we call cselib to get
   a more useful rtx.  */

rtx
get_addr (rtx x)
{
  cselib_val *v;
  struct elt_loc_list *l;

  if (GET_CODE (x) != VALUE)
    return x;
  v = CSELIB_VAL_PTR (x);
  if (v)
    {
      for (l = v->locs; l; l = l->next)
	if (CONSTANT_P (l->loc))
	  return l->loc;
      for (l = v->locs; l; l = l->next)
	if (!REG_P (l->loc) && !MEM_P (l->loc))
	  return l->loc;
      if (v->locs)
	return v->locs->loc;
    }
  return x;
}
/* Return the address of the (N_REFS + 1)th memory reference to ADDR
   where SIZE is the size in bytes of the memory reference.  If ADDR
   is not modified by the memory reference then ADDR is returned.  */

static rtx
addr_side_effect_eval (rtx addr, int size, int n_refs)
{
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case PRE_INC:
      offset = (n_refs + 1) * size;
      break;
    case PRE_DEC:
      offset = -(n_refs + 1) * size;
      break;
    case POST_INC:
      offset = n_refs * size;
      break;
    case POST_DEC:
      offset = -n_refs * size;
      break;

    default:
      return addr;
    }

  if (offset)
    addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
			 GEN_INT (offset));
  else
    addr = XEXP (addr, 0);
  addr = canon_rtx (addr);

  return addr;
}
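
/* Worked example (a sketch, assuming the PRE_INC/POST_INC case mapping
   above): for a 4-byte reference through (pre_inc (reg 100)) with
   N_REFS == 0, the offset is (0 + 1) * 4, so the effective address is
   canon_rtx of (plus (reg 100) (const_int 4)); for (post_inc (reg 100))
   the offset is 0 * 4 and the address is just (reg 100).  */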
/* Return one if X and Y (memory addresses) reference the
   same location in memory or if the references overlap.
   Return zero if they do not overlap, else return
   minus one in which case they still might reference the same location.

   C is an offset accumulator.  When
   C is nonzero, we are testing aliases between X and Y + C.
   XSIZE is the size in bytes of the X reference,
   similarly YSIZE is the size in bytes for Y.
   Expect that canon_rtx has been already called for X and Y.

   If XSIZE or YSIZE is zero, we do not know the amount of memory being
   referenced (the reference was BLKmode), so make the most pessimistic
   assumptions.

   If XSIZE or YSIZE is negative, we may access memory outside the object
   being referenced as a side effect.  This can happen when using AND to
   align memory references, as is done on the Alpha.

   Nice to notice that varying addresses cannot conflict with fp if no
   local variables had their addresses taken, but that's too hard now.

   ??? Contrary to the tree alias oracle this does not return
   one for X + non-constant and Y + non-constant when X and Y are equal.
   If that is fixed the TBAA hack for union type-punning can be removed.  */
static int
memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
{
  if (GET_CODE (x) == VALUE)
    {
      if (REG_P (y))
	{
	  struct elt_loc_list *l = NULL;
	  if (CSELIB_VAL_PTR (x))
	    for (l = CSELIB_VAL_PTR (x)->locs; l; l = l->next)
	      if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, y))
		break;
	  if (l)
	    x = y;
	  else
	    x = get_addr (x);
	}
      /* Don't call get_addr if y is the same VALUE.  */
      else if (x != y)
	x = get_addr (x);
    }
  if (GET_CODE (y) == VALUE)
    {
      if (REG_P (x))
	{
	  struct elt_loc_list *l = NULL;
	  if (CSELIB_VAL_PTR (y))
	    for (l = CSELIB_VAL_PTR (y)->locs; l; l = l->next)
	      if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, x))
		break;
	  if (l)
	    y = x;
	  else
	    y = get_addr (y);
	}
      /* Don't call get_addr if x is the same VALUE.  */
      else if (y != x)
	y = get_addr (y);
    }
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = addr_side_effect_eval (x, xsize, 0);
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = addr_side_effect_eval (y, ysize, 0);

  if (rtx_equal_for_memref_p (x, y))
    {
      if (xsize <= 0 || ysize <= 0)
	return 1;
      if (c >= 0 && xsize > c)
	return 1;
      if (c < 0 && ysize+c > 0)
	return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
	{
	  /* The fact that Y is canonicalized means that this
	     PLUS rtx is canonicalized.  */
	  rtx y0 = XEXP (y, 0);
	  rtx y1 = XEXP (y, 1);

	  if (rtx_equal_for_memref_p (x1, y1))
	    return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	  if (rtx_equal_for_memref_p (x0, y0))
	    return memrefs_conflict_p (xsize, x1, ysize, y1, c);
	  if (CONST_INT_P (x1))
	    {
	      if (CONST_INT_P (y1))
		return memrefs_conflict_p (xsize, x0, ysize, y0,
					   c - INTVAL (x1) + INTVAL (y1));
	      else
		return memrefs_conflict_p (xsize, x0, ysize, y,
					   c - INTVAL (x1));
	    }
	  else if (CONST_INT_P (y1))
	    return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

	  return 1;
	}
      else if (CONST_INT_P (x1))
	return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (CONST_INT_P (y1))
	return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
	return 1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
	{
	  /* Handle cases where we expect the second operands to be the
	     same, and check only whether the first operand would conflict
	     or not.  */
	  rtx x0, y0;
	  rtx x1 = canon_rtx (XEXP (x, 1));
	  rtx y1 = canon_rtx (XEXP (y, 1));
	  if (! rtx_equal_for_memref_p (x1, y1))
	    return 1;
	  x0 = canon_rtx (XEXP (x, 0));
	  y0 = canon_rtx (XEXP (y, 0));
	  if (rtx_equal_for_memref_p (x0, y0))
	    return (xsize == 0 || ysize == 0
		    || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));

	  /* Can't properly adjust our sizes.  */
	  if (!CONST_INT_P (x1))
	    return 1;
	  xsize /= INTVAL (x1);
	  ysize /= INTVAL (x1);
	  c /= INTVAL (x1);
	  return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	}

      default:
	break;
      }

  /* Treat an access through an AND (e.g. a subword access on an Alpha)
     as an access with indeterminate size.  Assume that references
     besides AND are aligned, so if the size of the other reference is
     at least as large as the alignment, assume no other overlap.  */
  if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)))
    {
      if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
	xsize = -1;
      return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
    }
  if (GET_CODE (y) == AND && CONST_INT_P (XEXP (y, 1)))
    {
      /* ??? If we are indexing far enough into the array/structure, we
	 may yet be able to determine that we can not overlap.  But we
	 also need to know that we are far enough from the end not to
	 overlap a following reference, so we do nothing with that for
	 now.  */
      if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
	ysize = -1;
      return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
    }

  if (CONSTANT_P (x))
    {
      if (CONST_INT_P (x) && CONST_INT_P (y))
	{
	  c += (INTVAL (y) - INTVAL (x));
	  return (xsize <= 0 || ysize <= 0
		  || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
	}

      if (GET_CODE (x) == CONST)
	{
	  if (GET_CODE (y) == CONST)
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, canon_rtx (XEXP (y, 0)), c);
	  else
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, y, c);
	}
      if (GET_CODE (y) == CONST)
	return memrefs_conflict_p (xsize, x, ysize,
				   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
	return (xsize <= 0 || ysize <= 0
		|| (rtx_equal_for_memref_p (x, y)
		    && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));

      return 1;
    }

  return 1;
}
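
/* Worked example of the offset accumulator (a sketch; register number
   invented): deciding whether a 4-byte reference at
   (plus (reg 100) (const_int 8)) overlaps a 4-byte reference at
   (plus (reg 100) (const_int 4)) proceeds as

     memrefs_conflict_p (4, (plus r100 8), 4, (plus r100 4), 0)
       -> recurse with c = 0 - 8 + 4 = -4; both addresses reduce
	  to (reg 100)
       -> rtx_equal_for_memref_p succeeds, c < 0 and ysize + c == 0

   so the routine returns 0: the references cannot overlap.  */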
/* Functions to compute memory dependencies.

   Since we process the insns in execution order, we can build tables
   to keep track of what registers are fixed (and not aliased), what registers
   are varying in known ways, and what registers are varying in unknown
   ways.

   If both memory references are volatile, then there must always be a
   dependence between the two references, since their order can not be
   changed.  A volatile and non-volatile reference can be interchanged
   though.

   A MEM_IN_STRUCT reference at a non-AND varying address can never
   conflict with a non-MEM_IN_STRUCT reference at a fixed address.  We
   also must allow AND addresses, because they may generate accesses
   outside the object being referenced.  This is used to generate
   aligned addresses from unaligned addresses, for instance, the alpha
   storeqi_unaligned pattern.  */
/* Read dependence: X is read after read in MEM takes place.  There can
   only be a dependence here if both reads are volatile.  */

int
read_dependence (const_rtx mem, const_rtx x)
{
  return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
}
/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
   MEM2 is a reference to a structure at a varying address, or returns
   MEM2 if vice versa.  Otherwise, returns NULL_RTX.  If a non-NULL
   value is returned MEM1 and MEM2 can never alias.  VARIES_P is used
   to decide whether or not an address may vary; it should return
   nonzero whenever variation is possible.
   MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2.  */

static const_rtx
fixed_scalar_and_varying_struct_p (const_rtx mem1, const_rtx mem2,
				   rtx mem1_addr, rtx mem2_addr,
				   bool (*varies_p) (const_rtx, bool))
{
  if (! flag_strict_aliasing)
    return NULL_RTX;

  if (MEM_ALIAS_SET (mem2)
      && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
      && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
    /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
       varying address.  */
    return mem1;

  if (MEM_ALIAS_SET (mem1)
      && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
      && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
    /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
       varying address.  */
    return mem2;

  return NULL_RTX;
}
/* Returns nonzero if something about the mode or address format MEM1
   indicates that it might well alias *anything*.  */

static int
aliases_everything_p (const_rtx mem)
{
  if (GET_CODE (XEXP (mem, 0)) == AND)
    /* If the address is an AND, it's very hard to know what it is
       actually pointing at.  */
    return 1;

  return 0;
}
/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  const_tree fieldx, fieldy, typex, typey, orig_y;

  if (!flag_strict_aliasing)
    return false;

  do
    {
      /* The comparison has to be done at a common type, since we don't
	 know how the inheritance hierarchy works.  */
      orig_y = y;
      do
	{
	  fieldx = TREE_OPERAND (x, 1);
	  typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));

	  y = orig_y;
	  do
	    {
	      fieldy = TREE_OPERAND (y, 1);
	      typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));

	      if (typex == typey)
		goto found;

	      y = TREE_OPERAND (y, 0);
	    }
	  while (y && TREE_CODE (y) == COMPONENT_REF);

	  x = TREE_OPERAND (x, 0);
	}
      while (x && TREE_CODE (x) == COMPONENT_REF);
      /* Never found a common type.  */
      return false;

    found:
      /* If we're left with accessing different fields of a structure,
	 then no overlap.  */
      if (TREE_CODE (typex) == RECORD_TYPE
	  && fieldx != fieldy)
	return true;

      /* The comparison on the current field failed.  If we're accessing
	 a very nested structure, look at the next outer level.  */
      x = TREE_OPERAND (x, 0);
      y = TREE_OPERAND (y, 0);
    }
  while (x && y
	 && TREE_CODE (x) == COMPONENT_REF
	 && TREE_CODE (y) == COMPONENT_REF);

  return false;
}
/* Look at the bottom of the COMPONENT_REF list for a DECL, and return it.  */

static tree
decl_for_component_ref (tree x)
{
  do
    {
      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return x && DECL_P (x) ? x : NULL_TREE;
}
/* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
   offset of the field reference.  */

static rtx
adjust_offset_for_component_ref (tree x, rtx offset)
{
  HOST_WIDE_INT ioffset;

  if (! offset)
    return NULL_RTX;

  ioffset = INTVAL (offset);
  do
    {
      tree offset = component_ref_field_offset (x);
      tree field = TREE_OPERAND (x, 1);

      if (! host_integerp (offset, 1))
	return NULL_RTX;
      ioffset += (tree_low_cst (offset, 1)
		  + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
		     / BITS_PER_UNIT));

      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return GEN_INT (ioffset);
}
/* Return nonzero if we can determine the exprs corresponding to memrefs
   X and Y and they do not overlap.  */

static int
nonoverlapping_memrefs_p (const_rtx x, const_rtx y)
{
  tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
  rtx rtlx, rtly;
  rtx basex, basey;
  rtx moffsetx, moffsety;
  HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;

  /* Unless both have exprs, we can't tell anything.  */
  if (exprx == 0 || expry == 0)
    return 0;

  /* For spill-slot accesses make sure we have valid offsets.  */
  if ((exprx == get_spill_slot_decl (false)
       && ! MEM_OFFSET (x))
      || (expry == get_spill_slot_decl (false)
          && ! MEM_OFFSET (y)))
    return 0;

  /* If both are field references, we may be able to determine something.  */
  if (TREE_CODE (exprx) == COMPONENT_REF
      && TREE_CODE (expry) == COMPONENT_REF
      && nonoverlapping_component_refs_p (exprx, expry))
    return 1;

  /* If the field reference test failed, look at the DECLs involved.  */
  moffsetx = MEM_OFFSET (x);
  if (TREE_CODE (exprx) == COMPONENT_REF)
    {
      tree t = decl_for_component_ref (exprx);
      if (! t)
        return 0;
      moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
      exprx = t;
    }

  moffsety = MEM_OFFSET (y);
  if (TREE_CODE (expry) == COMPONENT_REF)
    {
      tree t = decl_for_component_ref (expry);
      if (! t)
        return 0;
      moffsety = adjust_offset_for_component_ref (expry, moffsety);
      expry = t;
    }

  if (! DECL_P (exprx) || ! DECL_P (expry))
    return 0;

  /* With invalid code we can end up storing into the constant pool.
     Bail out to avoid ICEing when creating RTL for this.
     See gfortran.dg/lto/20091028-2_0.f90.  */
  if (TREE_CODE (exprx) == CONST_DECL
      || TREE_CODE (expry) == CONST_DECL)
    return 1;

  rtlx = DECL_RTL (exprx);
  rtly = DECL_RTL (expry);

  /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
     can't overlap unless they are the same because we never reuse that part
     of the stack frame used for locals for spilled pseudos.  */
  if ((!MEM_P (rtlx) || !MEM_P (rtly))
      && ! rtx_equal_p (rtlx, rtly))
    return 1;

  /* If we have MEMs referring to different address spaces (which can
     potentially overlap), we cannot easily tell from the addresses
     whether the references overlap.  */
  if (MEM_P (rtlx) && MEM_P (rtly)
      && MEM_ADDR_SPACE (rtlx) != MEM_ADDR_SPACE (rtly))
    return 0;

  /* Get the base and offsets of both decls.  If either is a register, we
     know both are and are the same, so use that as the base.  The only
     way we can avoid overlap is if we can deduce that they are
     nonoverlapping pieces of that decl, which is very rare.  */
  basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
  if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1)))
    offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);

  basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
  if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1)))
    offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);

  /* If the bases are different, we know they do not overlap if both
     are constants or if one is a constant and the other a pointer into the
     stack frame.  Otherwise a different base means we can't tell if they
     overlap or not.  */
  if (! rtx_equal_p (basex, basey))
    return ((CONSTANT_P (basex) && CONSTANT_P (basey))
            || (CONSTANT_P (basex) && REG_P (basey)
                && REGNO_PTR_FRAME_P (REGNO (basey)))
            || (CONSTANT_P (basey) && REG_P (basex)
                && REGNO_PTR_FRAME_P (REGNO (basex))));

  sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
           : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
           : -1);
  sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
           : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly))
           : -1);

  /* If we have an offset for either memref, it can update the values computed
     above.  */
  if (moffsetx)
    offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
  if (moffsety)
    offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);

  /* If a memref has both a size and an offset, we can use the smaller size.
     We can't do this if the offset isn't known because we must view this
     memref as being anywhere inside the DECL's MEM.  */
  if (MEM_SIZE (x) && moffsetx)
    sizex = INTVAL (MEM_SIZE (x));
  if (MEM_SIZE (y) && moffsety)
    sizey = INTVAL (MEM_SIZE (y));

  /* Put the values of the memref with the lower offset in X's values.  */
  if (offsetx > offsety)
    {
      tem = offsetx, offsetx = offsety, offsety = tem;
      tem = sizex, sizex = sizey, sizey = tem;
    }

  /* If we don't know the size of the lower-offset value, we can't tell
     if they conflict.  Otherwise, we do the test.  */
  return sizex >= 0 && offsety >= offsetx + sizex;
}
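/* As an illustration of the final test: two references into the same
   decl at (offset 0, size 4) and (offset 8, size 4) are disjoint
   because, after ordering, offsety (8) >= offsetx (0) + sizex (4).
   With (0, 4) and (2, 4) the test fails (2 < 4), so overlap must be
   assumed.  */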
/* Helper for true_dependence and canon_true_dependence.
   Checks for true dependence: X is read after store in MEM takes place.

   VARIES is the function that should be used as rtx_varies function.

   If MEM_CANONICALIZED is FALSE, then X_ADDR and MEM_ADDR should be
   NULL_RTX, and the canonical addresses of MEM and X are both computed
   here.  If MEM_CANONICALIZED, then MEM must be already canonicalized.

   If X_ADDR is non-NULL, it is used in preference to XEXP (x, 0).

   Returns 1 if there is a true dependence, 0 otherwise.  */

static int
true_dependence_1 (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
                   const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool),
                   bool mem_canonicalized)
{
  rtx base;
  int ret;

  gcc_checking_assert (mem_canonicalized ? (mem_addr != NULL_RTX)
                       : (mem_addr == NULL_RTX && x_addr == NULL_RTX));

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions, and in cselib.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  We don't expect to find read-only set on MEM,
     but stupid user tricks can produce them, so don't die.  */
  if (MEM_READONLY_P (x))
    return 0;

  /* If we have MEMs referring to different address spaces (which can
     potentially overlap), we cannot easily tell from the addresses
     whether the references overlap.  */
  if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
    return 1;

  if (! mem_addr)
    {
      mem_addr = XEXP (mem, 0);
      if (mem_mode == VOIDmode)
        mem_mode = GET_MODE (mem);
    }

  if (! x_addr)
    {
      x_addr = XEXP (x, 0);
      if (!((GET_CODE (x_addr) == VALUE
             && GET_CODE (mem_addr) != VALUE
             && reg_mentioned_p (x_addr, mem_addr))
            || (GET_CODE (x_addr) != VALUE
                && GET_CODE (mem_addr) == VALUE
                && reg_mentioned_p (mem_addr, x_addr))))
        {
          x_addr = get_addr (x_addr);
          if (! mem_canonicalized)
            mem_addr = get_addr (mem_addr);
        }
    }

  base = find_base_term (x_addr);
  if (base && (GET_CODE (base) == LABEL_REF
               || (GET_CODE (base) == SYMBOL_REF
                   && CONSTANT_POOL_ADDRESS_P (base))))
    return 0;

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  if (!mem_canonicalized)
    mem_addr = canon_rtx (mem_addr);

  if ((ret = memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
                                 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
    return ret;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  if (nonoverlapping_memrefs_p (mem, x))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_ADDR, rather than XEXP (mem, 0).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* ??? In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
    return 0;

  return rtx_refs_may_alias_p (x, mem, true);
}
/* True dependence: X is read after store in MEM takes place.  */

int
true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
                 bool (*varies) (const_rtx, bool))
{
  return true_dependence_1 (mem, mem_mode, NULL_RTX,
                            x, NULL_RTX, varies,
                            /*mem_canonicalized=*/false);
}
/* Canonical true dependence: X is read after store in MEM takes place.
   Variant of true_dependence which assumes MEM has already been
   canonicalized (hence we no longer do that here).
   The mem_addr argument has been added, since true_dependence_1 computed
   this value prior to canonicalizing.  */

int
canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
                       const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool))
{
  return true_dependence_1 (mem, mem_mode, mem_addr,
                            x, x_addr, varies,
                            /*mem_canonicalized=*/true);
}
/* Returns nonzero if a write to X might alias a previous read from
   (or, if WRITEP is nonzero, a write to) MEM.  */

static int
write_dependence_p (const_rtx mem, const_rtx x, int writep)
{
  rtx x_addr, mem_addr;
  const_rtx fixed_scalar;
  rtx base;
  int ret;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  /* A read from read-only memory can't conflict with read-write memory.  */
  if (!writep && MEM_READONLY_P (mem))
    return 0;

  /* If we have MEMs referring to different address spaces (which can
     potentially overlap), we cannot easily tell from the addresses
     whether the references overlap.  */
  if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
    return 1;

  x_addr = XEXP (x, 0);
  mem_addr = XEXP (mem, 0);
  if (!((GET_CODE (x_addr) == VALUE
         && GET_CODE (mem_addr) != VALUE
         && reg_mentioned_p (x_addr, mem_addr))
        || (GET_CODE (x_addr) != VALUE
            && GET_CODE (mem_addr) == VALUE
            && reg_mentioned_p (mem_addr, x_addr))))
    {
      x_addr = get_addr (x_addr);
      mem_addr = get_addr (mem_addr);
    }

  if (! writep)
    {
      base = find_base_term (mem_addr);
      if (base && (GET_CODE (base) == LABEL_REF
                   || (GET_CODE (base) == SYMBOL_REF
                       && CONSTANT_POOL_ADDRESS_P (base))))
        return 0;
    }

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
                          GET_MODE (mem)))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if ((ret = memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
                                 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
    return ret;

  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  fixed_scalar
    = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                         rtx_addr_varies_p);

  if ((fixed_scalar == mem && !aliases_everything_p (x))
      || (fixed_scalar == x && !aliases_everything_p (mem)))
    return 0;

  return rtx_refs_may_alias_p (x, mem, false);
}
/* Anti dependence: X is written after read in MEM takes place.  */

int
anti_dependence (const_rtx mem, const_rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/0);
}
/* Output dependence: X is written after store in MEM takes place.  */

int
output_dependence (const_rtx mem, const_rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/1);
}
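/* As an illustration of the three dependence kinds on one location:

     x = a;     store (1)
     b = x;     read (2): read after write, true dependence on (1)
     x = c;     store (3): write after read, anti dependence on (2),
                and write after write, output dependence on (1)

   Here `x' stands for some MEM; the names are only for the example.  */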
void
init_alias_target (void)
{
  int i;

  memset (static_reg_base_value, 0, sizeof static_reg_base_value);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
        && HARD_REGNO_MODE_OK (i, Pmode))
      static_reg_base_value[i]
        = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));

  static_reg_base_value[STACK_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
  static_reg_base_value[ARG_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
  static_reg_base_value[FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
#endif
}
/* Set MEMORY_MODIFIED when X modifies DATA (which is assumed
   to be a memory reference).  */
static bool memory_modified;
static void
memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  if (MEM_P (x))
    {
      if (anti_dependence (x, (const_rtx)data) || output_dependence (x, (const_rtx)data))
        memory_modified = true;
    }
}

/* Return true when INSN possibly modifies the memory contents of MEM
   (i.e. the contents at MEM's address may be changed).  */

bool
memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
{
  if (!INSN_P (insn))
    return false;
  memory_modified = false;
  note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX (mem));
  return memory_modified;
}
/* Initialize the aliasing machinery.  Initialize the REG_KNOWN_VALUE
   array.  */

void
init_alias_analysis (void)
{
  unsigned int maxreg = max_reg_num ();
  int changed, pass;
  int i;
  unsigned int ui;
  rtx insn;

  timevar_push (TV_ALIAS_ANALYSIS);

  reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
  reg_known_value = ggc_alloc_cleared_vec_rtx (reg_known_value_size);
  reg_known_equiv_p = XCNEWVEC (bool, reg_known_value_size);
  /* If we have memory allocated from the previous run, use it.  */
  if (old_reg_base_value)
    reg_base_value = old_reg_base_value;

  if (reg_base_value)
    VEC_truncate (rtx, reg_base_value, 0);

  VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);

  new_reg_base_value = XNEWVEC (rtx, maxreg);
  reg_seen = XNEWVEC (char, maxreg);
  /* The basic idea is that each pass through this loop will use the
     "constant" information from the previous pass to propagate alias
     information through another level of assignments.

     This could get expensive if the assignment chains are long.  Maybe
     we should throttle the number of iterations, possibly based on
     the optimization level or flag_expensive_optimizations.

     We could propagate more information in the first pass by making use
     of DF_REG_DEF_COUNT to determine immediately that the alias information
     for a pseudo is "constant".

     A program with an uninitialized variable can cause an infinite loop
     here.  Instead of doing a full dataflow analysis to detect such problems
     we just cap the number of iterations for the loop.

     The state of the arrays for the set chain in question does not matter
     since the program has undefined behavior.  */
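  /* As an illustration of why iteration helps: in a chain such as

       p1 = &x;  p2 = p1;  p3 = p2;

     the first pass only learns a base value for p1; the second pass
     propagates it to p2, and the third to p3.  (The register names are
     only for the example.)  */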
  pass = 0;
  do
    {
      /* Assume nothing will change this iteration of the loop.  */
      changed = 0;

      /* We want to assign the same IDs each iteration of this loop, so
         start counting from zero each iteration of the loop.  */
      unique_id = 0;

      /* We're at the start of the function each iteration through the
         loop, so we're copying arguments.  */
      copying_arguments = true;

      /* Wipe the potential alias information clean for this pass.  */
      memset (new_reg_base_value, 0, maxreg * sizeof (rtx));

      /* Wipe the reg_seen array clean.  */
      memset (reg_seen, 0, maxreg);
      /* Mark all hard registers which may contain an address.
         The stack, frame and argument pointers may contain an address.
         An argument register which can hold a Pmode value may contain
         an address even if it is not in BASE_REGS.

         The address expression is VOIDmode for an argument and
         Pmode for other registers.  */

      memcpy (new_reg_base_value, static_reg_base_value,
              FIRST_PSEUDO_REGISTER * sizeof (rtx));
      /* Walk the insns adding values to the new_reg_base_value array.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn))
            {
              rtx note, set;

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
              /* The prologue/epilogue insns are not threaded onto the
                 insn chain until after reload has completed.  Thus,
                 there is no sense wasting time checking if INSN is in
                 the prologue/epilogue until after reload has completed.  */
              if (reload_completed
                  && prologue_epilogue_contains (insn))
                continue;
#endif

              /* If this insn has a noalias note, process it.  Otherwise,
                 scan for sets.  A simple set will have no side effects
                 which could change the base value of any other register.  */

              if (GET_CODE (PATTERN (insn)) == SET
                  && REG_NOTES (insn) != 0
                  && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
                record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
              else
                note_stores (PATTERN (insn), record_set, NULL);

              set = single_set (insn);

              if (set != 0
                  && REG_P (SET_DEST (set))
                  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
                {
                  unsigned int regno = REGNO (SET_DEST (set));
                  rtx src = SET_SRC (set);
                  rtx t;

                  note = find_reg_equal_equiv_note (insn);
                  if (note && REG_NOTE_KIND (note) == REG_EQUAL
                      && DF_REG_DEF_COUNT (regno) != 1)
                    note = NULL_RTX;

                  if (note != NULL_RTX
                      && GET_CODE (XEXP (note, 0)) != EXPR_LIST
                      && ! rtx_varies_p (XEXP (note, 0), 1)
                      && ! reg_overlap_mentioned_p (SET_DEST (set),
                                                    XEXP (note, 0)))
                    {
                      set_reg_known_value (regno, XEXP (note, 0));
                      set_reg_known_equiv_p (regno,
                                             REG_NOTE_KIND (note) == REG_EQUIV);
                    }
                  else if (DF_REG_DEF_COUNT (regno) == 1
                           && GET_CODE (src) == PLUS
                           && REG_P (XEXP (src, 0))
                           && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
                           && CONST_INT_P (XEXP (src, 1)))
                    {
                      t = plus_constant (t, INTVAL (XEXP (src, 1)));
                      set_reg_known_value (regno, t);
                      set_reg_known_equiv_p (regno, 0);
                    }
                  else if (DF_REG_DEF_COUNT (regno) == 1
                           && ! rtx_varies_p (src, 1))
                    {
                      set_reg_known_value (regno, src);
                      set_reg_known_equiv_p (regno, 0);
                    }
                }
            }
          else if (NOTE_P (insn)
                   && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
            copying_arguments = false;
        }
      /* Now propagate values from new_reg_base_value to reg_base_value.  */
      gcc_assert (maxreg == (unsigned int) max_reg_num ());

      for (ui = 0; ui < maxreg; ui++)
        {
          if (new_reg_base_value[ui]
              && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
              && ! rtx_equal_p (new_reg_base_value[ui],
                                VEC_index (rtx, reg_base_value, ui)))
            {
              VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
              changed = 1;
            }
        }
    }
  while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
  /* Fill in the remaining entries.  */
  for (i = 0; i < (int)reg_known_value_size; i++)
    if (reg_known_value[i] == 0)
      reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];

  /* Clean up.  */
  free (new_reg_base_value);
  new_reg_base_value = 0;
  free (reg_seen);
  reg_seen = 0;
  timevar_pop (TV_ALIAS_ANALYSIS);
}
void
end_alias_analysis (void)
{
  old_reg_base_value = reg_base_value;
  ggc_free (reg_known_value);
  reg_known_value = 0;
  reg_known_value_size = 0;
  free (reg_known_equiv_p);
  reg_known_equiv_p = 0;
}
#include "gt-alias.h"