1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999, 2000 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
28 #include "insn-flags.h"
31 #include "hard-reg-set.h"
36 #include "splay-tree.h"
39 /* The alias sets assigned to MEMs assist the back-end in determining
40 which MEMs can alias which other MEMs. In general, two MEMs in
41 different alias sets cannot alias each other, with one important
42 exception. Consider something like:
44 struct S {int i; double d; };
46 a store to an `S' can alias something of either type `int' or type
47 `double'. (However, a store to an `int' cannot alias a `double'
48 and vice versa.) We indicate this via a tree structure that looks
56 (The arrows are directed and point downwards.)
57 In this situation we say the alias set for `struct S' is the
58 `superset' and that those for `int' and `double' are `subsets'.
60 To see whether two alias sets can point to the same memory, we must go
61 down the list of descendants of each and see if there is some alias set
62 in common. We need not trace past immediate descendants, however, since
63 we propagate all grandchildren up one level.
65 Alias set zero is implicitly a superset of all other alias sets.
66 However, there is no actual entry for alias set zero. It is an
67 error to attempt to explicitly construct a subset of zero. */
/* Per-alias-set record: the set's number plus a splay tree holding every
   descendant alias set (see the file-header comment above).
   NOTE(review): the numbered dump omits original lines 70, 72-73, 76, 78
   and the lines following 80 — the struct braces, the two field
   declarations, and the closing `} *alias_set_entry;'.  Confirm against
   the full source before editing. */
69 typedef struct alias_set_entry
71 /* The alias set number, as stored in MEM_ALIAS_SET. */
74 /* The children of the alias set. These are not just the immediate
75 children, but, in fact, all descendants. So, if we have:
77 struct T { struct S s; float f; }
79 continuing our example above, the children here will be all of
80 `int', `double', `float', and `struct S'. */
/* Forward declarations of this file's static helpers.
   NOTE(review): continuation lines of several PARAMS lists (original
   lines 88, 92 and 98) are missing from this dump, so the prototypes for
   memrefs_conflict_p, base_alias_check and
   fixed_scalar_and_varying_struct_p appear truncated here. */
84 static int rtx_equal_for_memref_p PARAMS ((rtx, rtx));
85 static rtx find_symbolic_term PARAMS ((rtx));
86 static rtx get_addr PARAMS ((rtx));
87 static int memrefs_conflict_p PARAMS ((int, rtx, int, rtx,
89 static void record_set PARAMS ((rtx, rtx, void *));
90 static rtx find_base_term PARAMS ((rtx));
91 static int base_alias_check PARAMS ((rtx, rtx, enum machine_mode,
93 static rtx find_base_value PARAMS ((rtx));
94 static int mems_in_disjoint_alias_sets_p PARAMS ((rtx, rtx));
95 static int insert_subset_children PARAMS ((splay_tree_node, void*));
96 static alias_set_entry get_alias_set_entry PARAMS ((int));
97 static rtx fixed_scalar_and_varying_struct_p PARAMS ((rtx, rtx, rtx, rtx,
99 static int aliases_everything_p PARAMS ((rtx));
100 static int write_dependence_p PARAMS ((rtx, rtx, int));
101 static int nonlocal_reference_p PARAMS ((rtx));
/* Convenience macros and file-scope state shared by the alias-analysis
   routines below.  NOTE(review): the dump drops scattered lines in this
   region (e.g. originals 104, 107, 111, 114, 118, and several inside the
   long comments), so some comments below end mid-sentence. */
103 /* Set up all info needed to perform alias analysis on memory references. */
105 /* Returns the size in bytes of the mode of X. */
106 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
108 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
109 different alias sets. We ignore alias sets in functions making use
110 of variable arguments because the va_arg macros on some systems are
112 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
113 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
115 /* Cap the number of passes we make over the insns propagating alias
116 information through set chains. 10 is a completely arbitrary choice. */
117 #define MAX_ALIAS_LOOP_PASSES 10
119 /* reg_base_value[N] gives an address to which register N is related.
120 If all sets after the first add or subtract to the current value
121 or otherwise modify it so it does not point to a different top level
122 object, reg_base_value[N] is equal to the address part of the source
125 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
126 expressions represent certain special values: function arguments and
127 the stack, frame, and argument pointers.
129 The contents of an ADDRESS is not normally used, the mode of the
130 ADDRESS determines whether the ADDRESS is a function argument or some
131 other special value. Pointer equality, not rtx_equal_p, determines whether
132 two ADDRESS expressions refer to the same base address.
134 The only use of the contents of an ADDRESS is for determining if the
135 current function performs nonlocal memory references for the
136 purposes of marking the function as a constant function. */
138 static rtx *reg_base_value;
139 static rtx *new_reg_base_value;
140 static unsigned int reg_base_value_size; /* size of reg_base_value array */
142 #define REG_BASE_VALUE(X) \
143 ((unsigned) REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
145 /* Vector of known invariant relationships between registers. Set in
146 loop unrolling. Indexed by register number, if nonzero the value
147 is an expression describing this register in terms of another.
149 The length of this array is REG_BASE_VALUE_SIZE.
151 Because this array contains only pseudo registers it has no effect
153 static rtx *alias_invariant;
155 /* Vector indexed by N giving the initial (unchanging) value known for
156 pseudo-register N. This array is initialized in
157 init_alias_analysis, and does not change until end_alias_analysis
159 rtx *reg_known_value;
161 /* Indicates number of valid entries in reg_known_value. */
162 static unsigned int reg_known_value_size;
164 /* Vector recording for each reg_known_value whether it is due to a
165 REG_EQUIV note. Future passes (viz., reload) may replace the
166 pseudo with the equivalent expression and so we account for the
167 dependences that would be introduced if that happens.
169 The REG_EQUIV notes created in assign_parms may mention the arg
170 pointer, and there are explicit insns in the RTL that modify the
171 arg pointer. Thus we must ensure that such insns don't get
172 scheduled across each other because that would invalidate the
173 REG_EQUIV notes. One could argue that the REG_EQUIV notes are
174 wrong, but solving the problem in the scheduler will likely give
175 better code, so we do it here. */
176 char *reg_known_equiv_p;
178 /* True when scanning insns from the start of the rtl to the
179 NOTE_INSN_FUNCTION_BEG note. */
180 static int copying_arguments;
182 /* The splay-tree used to store the various alias set entries. */
183 static splay_tree alias_sets;
185 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
186 such an entry, or NULL otherwise. */
/* Looks ALIAS_SET up in the global `alias_sets' splay tree.
   NOTE(review): the dump omits original lines 190-192 and 194 — the
   parameter declaration, opening brace, and the declaration of `sn'
   (a splay_tree_node) that lines 193/195 use. */
188 static alias_set_entry
189 get_alias_set_entry (alias_set)
193 = splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);
195 return sn != 0 ? ((alias_set_entry) sn->value) : 0;
198 /* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
199 the two MEMs cannot alias each other. */
/* NOTE(review): the dump omits the `static int' line, parameter
   declarations, braces, the abort() under ENABLE_CHECKING, the `#endif',
   and every `return' statement between the tests below — only the test
   conditions survive here. */
202 mems_in_disjoint_alias_sets_p (mem1, mem2)
208 #ifdef ENABLE_CHECKING
209 /* Perform a basic sanity check. Namely, that there are no alias sets
210 if we're not using strict aliasing. This helps to catch bugs
211 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
212 where a MEM is allocated in some way other than by the use of
213 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
214 use alias sets to indicate that spilled registers cannot alias each
215 other, we might need to remove this check. */
216 if (! flag_strict_aliasing
217 && (MEM_ALIAS_SET (mem1) != 0 || MEM_ALIAS_SET (mem2) != 0))
221 /* The code used in varargs macros is often not conforming ANSI C,
222 which can trick the compiler into making incorrect aliasing
223 assumptions in these functions. So, we don't use alias sets in
224 such a function. FIXME: This should be moved into the front-end;
225 it is a language-dependent notion, and there's no reason not to
226 still use these checks to handle globals. */
227 if (current_function_stdarg || current_function_varargs)
230 /* If we have no alias set information for one of the MEMs, we have to assume
231 it can alias anything. */
232 if (MEM_ALIAS_SET (mem1) == 0 || MEM_ALIAS_SET (mem2) == 0)
235 /* If the two alias sets are the same, they may alias. */
236 if (MEM_ALIAS_SET (mem1) == MEM_ALIAS_SET (mem2))
239 /* Iterate through each of the children of the first alias set,
240 comparing it with the second alias set. */
241 ase = get_alias_set_entry (MEM_ALIAS_SET (mem1));
242 if (ase != 0 && splay_tree_lookup (ase->children,
243 (splay_tree_key) MEM_ALIAS_SET (mem2)))
246 /* Now do the same, but with the alias sets reversed. */
247 ase = get_alias_set_entry (MEM_ALIAS_SET (mem2));
248 if (ase != 0 && splay_tree_lookup (ase->children,
249 (splay_tree_key) MEM_ALIAS_SET (mem1)))
252 /* The two MEMs are in distinct alias sets, and neither one is the
253 child of the other. Therefore, they cannot alias. */
257 /* Insert the NODE into the splay tree given by DATA. Used by
258 record_alias_subset via splay_tree_foreach. */
/* NOTE(review): the dump omits the `static int' line, the declaration of
   the `data' parameter, braces, and the `return 0;' a splay_tree_foreach
   callback must make to continue iteration. */
261 insert_subset_children (node, data)
262 splay_tree_node node;
265 splay_tree_insert ((splay_tree) data, node->key, node->value);
270 /* Indicate that things in SUBSET can alias things in SUPERSET, but
271 not vice versa. For example, in C, a store to an `int' can alias a
272 structure containing an `int', but not vice versa. Here, the
273 structure would be the SUPERSET and `int' the SUBSET. This
274 function should be called only once per SUPERSET/SUBSET pair. At
275 present any given alias set may only be a subset of one superset.
277 It is illegal for SUPERSET to be zero; everything is implicitly a
278 subset of alias set zero. */
/* NOTE(review): the dump omits the return-type/parameter lines, braces,
   the zero-SUPERSET check around original line 289, and the
   `if (subset_entry)' guard at original line 310 that governs the
   splay_tree_foreach call below. */
281 record_alias_subset (superset, subset)
285 alias_set_entry superset_entry;
286 alias_set_entry subset_entry;
291 superset_entry = get_alias_set_entry (superset);
292 if (superset_entry == 0)
294 /* Create an entry for the SUPERSET, so that we have a place to
295 attach the SUBSET. */
297 = (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
298 superset_entry->alias_set = superset;
299 superset_entry->children
300 = splay_tree_new (splay_tree_compare_ints, 0, 0);
301 splay_tree_insert (alias_sets, (splay_tree_key) superset,
302 (splay_tree_value) superset_entry);
306 subset_entry = get_alias_set_entry (subset);
308 /* If there is an entry for the subset, enter all of its children
309 (if they are not already present) as children of the SUPERSET. */
311 splay_tree_foreach (subset_entry->children,
312 insert_subset_children,
313 superset_entry->children);
315 /* Enter the SUBSET itself as a child of the SUPERSET. */
316 splay_tree_insert (superset_entry->children,
317 (splay_tree_key) subset, 0);
320 /* Inside SRC, the source of a SET, find a base address. */
/* Returns a base address (REG's base value, an ADDRESS, SYMBOL_REF, or
   the like) for SRC, recursing through PLUS/MINUS/AND/HIGH/LO_SUM and
   extension codes; presumably returns 0 for unhandled codes — the
   default/return lines are missing from this dump.
   NOTE(review): the dump omits the function header, the `case' labels of
   the switch, intervening `break'/`return' lines, and the closing
   braces. */
323 find_base_value (src)
326 switch (GET_CODE (src))
333 /* At the start of a function, argument registers have known base
334 values which may be lost later. Returning an ADDRESS
335 expression here allows optimization based on argument values
336 even when the argument registers are used for other purposes. */
337 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
338 return new_reg_base_value[REGNO (src)];
340 /* If a pseudo has a known base value, return it. Do not do this
341 for hard regs since it can result in a circular dependency
342 chain for registers which have values at function entry.
344 The test above is not sufficient because the scheduler may move
345 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
346 if (REGNO (src) >= FIRST_PSEUDO_REGISTER
347 && (unsigned) REGNO (src) < reg_base_value_size
348 && reg_base_value[REGNO (src)])
349 return reg_base_value[REGNO (src)];
354 /* Check for an argument passed in memory. Only record in the
355 copying-arguments block; it is too hard to track changes
357 if (copying_arguments
358 && (XEXP (src, 0) == arg_pointer_rtx
359 || (GET_CODE (XEXP (src, 0)) == PLUS
360 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
361 return gen_rtx_ADDRESS (VOIDmode, src);
366 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
369 /* ... fall through ... */
374 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
376 /* If either operand is a REG, then see if we already have
377 a known value for it. */
378 if (GET_CODE (src_0) == REG)
380 temp = find_base_value (src_0);
385 if (GET_CODE (src_1) == REG)
387 temp = find_base_value (src_1);
392 /* Guess which operand is the base address:
393 If either operand is a symbol, then it is the base. If
394 either operand is a CONST_INT, then the other is the base. */
395 if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
396 return find_base_value (src_0);
397 else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
398 return find_base_value (src_1);
400 /* This might not be necessary anymore:
401 If either operand is a REG that is a known pointer, then it
403 else if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
404 return find_base_value (src_0);
405 else if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
406 return find_base_value (src_1);
412 /* The standard form is (lo_sum reg sym) so look only at the
414 return find_base_value (XEXP (src, 1));
417 /* If the second operand is constant set the base
418 address to the first operand. */
419 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
420 return find_base_value (XEXP (src, 0));
424 case SIGN_EXTEND: /* used for NT/Alpha pointers */
426 return find_base_value (XEXP (src, 0));
435 /* Called from init_alias_analysis indirectly through note_stores. */
437 /* While scanning insns to find base values, reg_seen[N] is nonzero if
438 register N has been set in this function. */
439 static char *reg_seen;
441 /* Addresses which are known not to alias anything else are identified
442 by a unique integer. */
443 static int unique_id;
/* note_stores callback: update new_reg_base_value[] for a SET or CLOBBER
   of register DEST.  SET is the SET/CLOBBER rtx (the `src' used below is
   presumably extracted from it on an omitted line).
   NOTE(review): the dump omits the `static void' header, the `dest'/`set'
   parameter declarations, the `src' declaration, early `return's, the
   reg_seen[] bookkeeping lines, and the case labels of the switch. */
446 record_set (dest, set, data)
448 void *data ATTRIBUTE_UNUSED;
450 register unsigned regno;
453 if (GET_CODE (dest) != REG)
456 regno = REGNO (dest);
458 if (regno >= reg_base_value_size)
463 /* A CLOBBER wipes out any old value but does not prevent a previously
464 unset register from acquiring a base address (i.e. reg_seen is not
466 if (GET_CODE (set) == CLOBBER)
468 new_reg_base_value[regno] = 0;
477 new_reg_base_value[regno] = 0;
481 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
482 GEN_INT (unique_id++));
486 /* This is not the first set. If the new value is not related to the
487 old value, forget the base value. Note that the following code is
489 extern int x, y; int *p = &x; p += (&y-&x);
490 ANSI C does not allow computing the difference of addresses
491 of distinct top level objects. */
492 if (new_reg_base_value[regno])
493 switch (GET_CODE (src))
498 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
499 new_reg_base_value[regno] = 0;
502 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
503 new_reg_base_value[regno] = 0;
506 new_reg_base_value[regno] = 0;
509 /* If this is the first set of a register, record the value. */
510 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
511 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
512 new_reg_base_value[regno] = find_base_value (src);
517 /* Called from loop optimization when a new pseudo-register is
518 created. It indicates that REGNO is being set to VAL. If INVARIANT
519 is true then this value also describes an invariant relationship
520 which can be used to deduce that two registers with unknown values
/* NOTE(review): the dump omits the return type, the parameter
   declarations, braces, the early `return' after the bounds check, and
   the `return' lines between the REG and non-REG cases. */
524 record_base_value (regno, val, invariant)
529 if (regno >= reg_base_value_size)
532 if (invariant && alias_invariant)
533 alias_invariant[regno] = val;
535 if (GET_CODE (val) == REG)
537 if (REGNO (val) < reg_base_value_size)
538 reg_base_value[regno] = reg_base_value[REGNO (val)];
543 reg_base_value[regno] = find_base_value (val);
546 /* Returns a canonical version of X, from the point of view alias
547 analysis. (For example, if X is a MEM whose address is a register,
548 and the register has a known value (say a SYMBOL_REF), then a MEM
549 whose address is the SYMBOL_REF is returned.) */
/* NOTE(review): the dump omits the `canon_rtx' signature lines
   (originals 550-554), the braces, and the trailing `return' of the new
   or unchanged rtx. */
555 /* Recursively look for equivalences. */
556 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
557 && REGNO (x) < reg_known_value_size)
558 return reg_known_value[REGNO (x)] == x
559 ? x : canon_rtx (reg_known_value[REGNO (x)]);
560 else if (GET_CODE (x) == PLUS)
562 rtx x0 = canon_rtx (XEXP (x, 0));
563 rtx x1 = canon_rtx (XEXP (x, 1));
565 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
567 /* We can tolerate LO_SUMs being offset here; these
568 rtl are used for nothing other than comparisons. */
569 if (GET_CODE (x0) == CONST_INT)
570 return plus_constant_for_output (x1, INTVAL (x0));
571 else if (GET_CODE (x1) == CONST_INT)
572 return plus_constant_for_output (x0, INTVAL (x1));
573 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
577 /* This gives us much better alias analysis when called from
578 the loop optimizer. Note we want to leave the original
579 MEM alone, but need to return the canonicalized MEM with
580 all the flags with their original values. */
581 else if (GET_CODE (x) == MEM)
583 rtx addr = canon_rtx (XEXP (x, 0));
585 if (addr != XEXP (x, 0))
587 rtx new = gen_rtx_MEM (GET_MODE (x), addr);
589 MEM_COPY_ATTRIBUTES (new, x);
596 /* Return 1 if X and Y are identical-looking rtx's.
598 We use the data in reg_known_value above to see if two registers with
599 different numbers are, in fact, equivalent. */
/* NOTE(review): the dump omits the `static int' header, the `i'/`j'
   declarations, `code = GET_CODE (x)', the `case' labels of both
   switches, most `return 1'/`return 0' lines, and the closing braces —
   only the comparison logic survives here. */
602 rtx_equal_for_memref_p (x, y)
607 register enum rtx_code code;
608 register const char *fmt;
610 if (x == 0 && y == 0)
612 if (x == 0 || y == 0)
622 /* Rtx's of different codes cannot be equal. */
623 if (code != GET_CODE (y))
626 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
627 (REG:SI x) and (REG:HI x) are NOT equivalent. */
629 if (GET_MODE (x) != GET_MODE (y))
632 /* Some RTL can be compared without a recursive examination. */
636 return REGNO (x) == REGNO (y);
639 return XEXP (x, 0) == XEXP (y, 0);
642 return XSTR (x, 0) == XSTR (y, 0);
646 /* There's no need to compare the contents of CONST_DOUBLEs or
647 CONST_INTs because pointer equality is a good enough
648 comparison for these nodes. */
652 return (REGNO (XEXP (x, 0)) == REGNO (XEXP (y, 0))
653 && XINT (x, 1) == XINT (y, 1));
659 /* For commutative operations, the RTX match if the operand match in any
660 order. Also handle the simple binary and unary cases without a loop. */
661 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
662 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
663 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
664 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
665 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
666 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
667 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
668 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
669 else if (GET_RTX_CLASS (code) == '1')
670 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
672 /* Compare the elements. If any pair of corresponding elements
673 fail to match, return 0 for the whole things.
675 Limit cases to types which actually appear in addresses. */
677 fmt = GET_RTX_FORMAT (code);
678 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
683 if (XINT (x, i) != XINT (y, i))
688 /* Two vectors must have the same length. */
689 if (XVECLEN (x, i) != XVECLEN (y, i))
692 /* And the corresponding elements must match. */
693 for (j = 0; j < XVECLEN (x, i); j++)
694 if (rtx_equal_for_memref_p (XVECEXP (x, i, j),
695 XVECEXP (y, i, j)) == 0)
700 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
704 /* This can happen for an asm which clobbers memory. */
708 /* It is believed that rtx's at this level will never
709 contain anything but integers and other rtx's,
710 except for within LABEL_REFs and SYMBOL_REFs. */
718 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
719 X and return it, or return 0 if none found. */
/* NOTE(review): the dump omits the `static rtx' header, the declarations
   of `i' and `t', `code = GET_CODE (x)', the `return x'/`return 0'
   lines, and the body of the vector ('E') branch. */
722 find_symbolic_term (x)
726 register enum rtx_code code;
727 register const char *fmt;
730 if (code == SYMBOL_REF || code == LABEL_REF)
732 if (GET_RTX_CLASS (code) == 'o')
735 fmt = GET_RTX_FORMAT (code);
736 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
742 t = find_symbolic_term (XEXP (x, i));
746 else if (fmt[i] == 'E')
/* Body fragment of find_base_term: walk an address X and return the rtx
   (REG base value, SYMBOL_REF, LABEL_REF, or ADDRESS) that names the
   object it is based on.
   NOTE(review): the dump omits the `find_base_term' header (around
   original lines 753-756), the `case' labels of the switch, the `val'
   declaration, several `return' lines (including the returns of
   tmp1/tmp2 after the guessing logic), and the closing braces. */
757 struct elt_loc_list *l;
759 switch (GET_CODE (x))
762 return REG_BASE_VALUE (x);
765 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
771 return find_base_term (XEXP (x, 0));
774 val = CSELIB_VAL_PTR (x);
775 for (l = val->locs; l; l = l->next)
776 if ((x = find_base_term (l->loc)) != 0)
782 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
789 rtx tmp1 = XEXP (x, 0);
790 rtx tmp2 = XEXP (x, 1);
792 /* This is a little bit tricky since we have to determine which of
793 the two operands represents the real base address. Otherwise this
794 routine may return the index register instead of the base register.
796 That may cause us to believe no aliasing was possible, when in
797 fact aliasing is possible.
799 We use a few simple tests to guess the base register. Additional
800 tests can certainly be added. For example, if one of the operands
801 is a shift or multiply, then it must be the index register and the
802 other operand is the base register. */
804 /* If either operand is known to be a pointer, then use it
805 to determine the base term. */
806 if (REG_P (tmp1) && REGNO_POINTER_FLAG (REGNO (tmp1)))
807 return find_base_term (tmp1);
809 if (REG_P (tmp2) && REGNO_POINTER_FLAG (REGNO (tmp2)))
810 return find_base_term (tmp2);
812 /* Neither operand was known to be a pointer. Go ahead and find the
813 base term for both operands. */
814 tmp1 = find_base_term (tmp1);
815 tmp2 = find_base_term (tmp2);
817 /* If either base term is named object or a special address
818 (like an argument or stack reference), then use it for the
821 && (GET_CODE (tmp1) == SYMBOL_REF
822 || GET_CODE (tmp1) == LABEL_REF
823 || (GET_CODE (tmp1) == ADDRESS
824 && GET_MODE (tmp1) != VOIDmode)))
828 && (GET_CODE (tmp2) == SYMBOL_REF
829 || GET_CODE (tmp2) == LABEL_REF
830 || (GET_CODE (tmp2) == ADDRESS
831 && GET_MODE (tmp2) != VOIDmode)))
834 /* We could not determine which of the two operands was the
835 base register and which was the index. So we can determine
836 nothing from the base alias check. */
841 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
842 return REG_BASE_VALUE (XEXP (x, 0));
854 /* Return 0 if the addresses X and Y are known to point to different
855 objects, 1 if they might be pointers to the same object. */
/* NOTE(review): the dump omits the `static int' header, the parameter
   declarations for x/y, braces, the declarations of `x_c'/`y_c', and the
   `return 0'/`return 1' lines between most of the tests below. */
858 base_alias_check (x, y, x_mode, y_mode)
860 enum machine_mode x_mode, y_mode;
862 rtx x_base = find_base_term (x);
863 rtx y_base = find_base_term (y);
865 /* If the address itself has no known base see if a known equivalent
866 value has one. If either address still has no known base, nothing
867 is known about aliasing. */
872 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
875 x_base = find_base_term (x_c);
883 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
886 y_base = find_base_term (y_c);
891 /* If the base addresses are equal nothing is known about aliasing. */
892 if (rtx_equal_p (x_base, y_base))
895 /* The base addresses of the read and write are different expressions.
896 If they are both symbols and they are not accessed via AND, there is
897 no conflict. We can bring knowledge of object alignment into play
898 here. For example, on alpha, "char a, b;" can alias one another,
899 though "char a; long b;" cannot. */
900 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
902 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
904 if (GET_CODE (x) == AND
905 && (GET_CODE (XEXP (x, 1)) != CONST_INT
906 || GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
908 if (GET_CODE (y) == AND
909 && (GET_CODE (XEXP (y, 1)) != CONST_INT
910 || GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
912 /* Differing symbols never alias. */
916 /* If one address is a stack reference there can be no alias:
917 stack references using different base registers do not alias,
918 a stack reference can not alias a parameter, and a stack reference
919 can not alias a global. */
920 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
921 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
924 if (! flag_argument_noalias)
927 if (flag_argument_noalias > 1)
930 /* Weak noalias assertion (arguments are distinct, but may match globals). */
931 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
934 /* Convert the address X into something we can use. This is done by returning
935 it unchanged unless it is a value; in the latter case we call cselib to get
936 a more useful rtx. */
/* NOTE(review): the dump omits the `static rtx get_addr (x)' header, the
   declaration of `v', the `return x' lines inside both loops, and the
   final fallback return. */
942 struct elt_loc_list *l;
944 if (GET_CODE (x) != VALUE)
946 v = CSELIB_VAL_PTR (x);
947 for (l = v->locs; l; l = l->next)
948 if (CONSTANT_P (l->loc))
950 for (l = v->locs; l; l = l->next)
951 if (GET_CODE (l->loc) != REG && GET_CODE (l->loc) != MEM)
958 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
959 where SIZE is the size in bytes of the memory reference. If ADDR
960 is not modified by the memory reference then ADDR is returned. */
/* NOTE(review): the dump omits the header/parameter lines, the `offset'
   declaration, the {PRE,POST}_{INC,DEC} case labels of the switch, the
   default `return addr', and the `if (offset)' guard around the PLUS
   construction. */
963 addr_side_effect_eval (addr, size, n_refs)
970 switch (GET_CODE (addr))
973 offset = (n_refs + 1) * size;
976 offset = -(n_refs + 1) * size;
979 offset = n_refs * size;
982 offset = -n_refs * size;
990 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), GEN_INT (offset));
992 addr = XEXP (addr, 0);
997 /* Return nonzero if X and Y (memory addresses) could reference the
998 same location in memory. C is an offset accumulator. When
999 C is nonzero, we are testing aliases between X and Y + C.
1000 XSIZE is the size in bytes of the X reference,
1001 similarly YSIZE is the size in bytes for Y.
1003 If XSIZE or YSIZE is zero, we do not know the amount of memory being
1004 referenced (the reference was BLKmode), so make the most pessimistic
1007 If XSIZE or YSIZE is negative, we may access memory outside the object
1008 being referenced as a side effect. This can happen when using AND to
1009 align memory references, as is done on the Alpha.
1011 Nice to notice that varying addresses cannot conflict with fp if no
1012 local variables had their addresses taken, but that's too hard now. */
/* NOTE(review): the dump omits the `static int' header, parameter
   declarations, several `return 1'/`return 0' lines between tests, the
   case labels (MULT, REG) of the switch, the division-by-zero guard
   around INTVAL (x1), and assorted braces.  The recursive structure is
   otherwise visible below. */
1015 memrefs_conflict_p (xsize, x, ysize, y, c)
1020 if (GET_CODE (x) == VALUE)
1022 if (GET_CODE (y) == VALUE)
1024 if (GET_CODE (x) == HIGH)
1026 else if (GET_CODE (x) == LO_SUM)
1029 x = canon_rtx (addr_side_effect_eval (x, xsize, 0));
1030 if (GET_CODE (y) == HIGH)
1032 else if (GET_CODE (y) == LO_SUM)
1035 y = canon_rtx (addr_side_effect_eval (y, ysize, 0));
1037 if (rtx_equal_for_memref_p (x, y))
1039 if (xsize <= 0 || ysize <= 0)
1041 if (c >= 0 && xsize > c)
1043 if (c < 0 && ysize+c > 0)
1048 /* This code used to check for conflicts involving stack references and
1049 globals but the base address alias code now handles these cases. */
1051 if (GET_CODE (x) == PLUS)
1053 /* The fact that X is canonicalized means that this
1054 PLUS rtx is canonicalized. */
1055 rtx x0 = XEXP (x, 0);
1056 rtx x1 = XEXP (x, 1);
1058 if (GET_CODE (y) == PLUS)
1060 /* The fact that Y is canonicalized means that this
1061 PLUS rtx is canonicalized. */
1062 rtx y0 = XEXP (y, 0);
1063 rtx y1 = XEXP (y, 1);
1065 if (rtx_equal_for_memref_p (x1, y1))
1066 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1067 if (rtx_equal_for_memref_p (x0, y0))
1068 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1069 if (GET_CODE (x1) == CONST_INT)
1071 if (GET_CODE (y1) == CONST_INT)
1072 return memrefs_conflict_p (xsize, x0, ysize, y0,
1073 c - INTVAL (x1) + INTVAL (y1));
1075 return memrefs_conflict_p (xsize, x0, ysize, y,
1078 else if (GET_CODE (y1) == CONST_INT)
1079 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1083 else if (GET_CODE (x1) == CONST_INT)
1084 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1086 else if (GET_CODE (y) == PLUS)
1088 /* The fact that Y is canonicalized means that this
1089 PLUS rtx is canonicalized. */
1090 rtx y0 = XEXP (y, 0);
1091 rtx y1 = XEXP (y, 1);
1093 if (GET_CODE (y1) == CONST_INT)
1094 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1099 if (GET_CODE (x) == GET_CODE (y))
1100 switch (GET_CODE (x))
1104 /* Handle cases where we expect the second operands to be the
1105 same, and check only whether the first operand would conflict
1108 rtx x1 = canon_rtx (XEXP (x, 1));
1109 rtx y1 = canon_rtx (XEXP (y, 1));
1110 if (! rtx_equal_for_memref_p (x1, y1))
1112 x0 = canon_rtx (XEXP (x, 0));
1113 y0 = canon_rtx (XEXP (y, 0));
1114 if (rtx_equal_for_memref_p (x0, y0))
1115 return (xsize == 0 || ysize == 0
1116 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1118 /* Can't properly adjust our sizes. */
1119 if (GET_CODE (x1) != CONST_INT)
1121 xsize /= INTVAL (x1);
1122 ysize /= INTVAL (x1);
1124 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1128 /* Are these registers known not to be equal? */
1129 if (alias_invariant)
1131 unsigned int r_x = REGNO (x), r_y = REGNO (y);
1132 rtx i_x, i_y; /* invariant relationships of X and Y */
1134 i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
1135 i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];
1137 if (i_x == 0 && i_y == 0)
1140 if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
1141 ysize, i_y ? i_y : y, c))
1150 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1151 as an access with indeterminate size. Assume that references
1152 besides AND are aligned, so if the size of the other reference is
1153 at least as large as the alignment, assume no other overlap. */
1154 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
1156 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1158 return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
1160 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
1162 /* ??? If we are indexing far enough into the array/structure, we
1163 may yet be able to determine that we can not overlap. But we
1164 also need to know that we are far enough from the end not to overlap
1165 a following reference, so we do nothing with that for now. */
1166 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1168 return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
1173 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
1175 c += (INTVAL (y) - INTVAL (x));
1176 return (xsize <= 0 || ysize <= 0
1177 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1180 if (GET_CODE (x) == CONST)
1182 if (GET_CODE (y) == CONST)
1183 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1184 ysize, canon_rtx (XEXP (y, 0)), c);
1186 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1189 if (GET_CODE (y) == CONST)
1190 return memrefs_conflict_p (xsize, x, ysize,
1191 canon_rtx (XEXP (y, 0)), c);
1194 return (xsize < 0 || ysize < 0
1195 || (rtx_equal_for_memref_p (x, y)
1196 && (xsize == 0 || ysize == 0
1197 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1204 /* Functions to compute memory dependencies.
1206 Since we process the insns in execution order, we can build tables
1207 to keep track of what registers are fixed (and not aliased), what registers
1208 are varying in known ways, and what registers are varying in unknown
1211 If both memory references are volatile, then there must always be a
1212 dependence between the two references, since their order can not be
1213 changed. A volatile and non-volatile reference can be interchanged
1216 A MEM_IN_STRUCT reference at a non-AND varying address can never
1217 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We
1218 also must allow AND addresses, because they may generate accesses
1219 outside the object being referenced. This is used to generate
1220 aligned addresses from unaligned addresses, for instance, the alpha
1221 storeqi_unaligned pattern. */
1223 /* Read dependence: X is read after read in MEM takes place. There can
1224 only be a dependence here if both reads are volatile. */
1227 read_dependence (mem, x)
/* MEM, X: the two MEM rtxes being compared (the K&R parameter
   declarations are elided from this excerpt).  Two reads only have an
   ordering constraint when both are volatile, since volatile accesses
   may not be reordered relative to each other.  */
1231 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1234 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1235 MEM2 is a reference to a structure at a varying address, or returns
1236 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1237 value is returned MEM1 and MEM2 can never alias. VARIES_P is used
1238 to decide whether or not an address may vary; it should return
1239 nonzero whenever variation is possible.
1240 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
1243 fixed_scalar_and_varying_struct_p (mem1, mem2, mem1_addr, mem2_addr, varies_p)
1245 rtx mem1_addr, mem2_addr;
1246 int (*varies_p) PARAMS ((rtx));
/* Case 1: MEM1 is the fixed scalar, MEM2 the varying struct reference.
   (The return statements for both cases are elided from this excerpt.)  */
1248 if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1249 && !varies_p (mem1_addr) && varies_p (mem2_addr))
1250 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
/* Case 2: the mirror image of case 1, with the roles swapped.  */
1254 if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1255 && varies_p (mem1_addr) && !varies_p (mem2_addr))
1256 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1263 /* Returns nonzero if something about the mode or address format MEM
1264 indicates that it might well alias *anything*. */
1267 aliases_everything_p (mem)
/* An AND address (used e.g. to force alignment, as in the Alpha
   storeqi_unaligned pattern) hides the real target, so we must assume
   it can touch anything.  */
1270 if (GET_CODE (XEXP (mem, 0)) == AND)
1271 /* If the address is an AND, it's very hard to know at what it is
1272 actually pointing. */
1278 /* True dependence: X is read after store in MEM takes place. */
1281 true_dependence (mem, mem_mode, x, varies)
1283 enum machine_mode mem_mode;
1285 int (*varies) PARAMS ((rtx));
1287 register rtx x_addr, mem_addr;
/* Two volatile references always conflict: their order is fixed.
   (Several early-return statements in this function are elided from
   this excerpt.)  */
1289 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
/* Disjoint alias sets can never conflict (modulo set 0; see the
   comment at the top of the file).  */
1292 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1295 /* If X is an unchanging read, then it can't possibly conflict with any
1296 non-unchanging store. It may conflict with an unchanging write though,
1297 because there may be a single store to this address to initialize it.
1298 Just fall through to the code below to resolve the case where we have
1299 both an unchanging read and an unchanging write. This won't handle all
1300 cases optimally, but the possible performance loss should be
1302 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
/* Callers may pass VOIDmode when MEM's own mode should be used.  */
1305 if (mem_mode == VOIDmode)
1306 mem_mode = GET_MODE (mem);
1308 x_addr = get_addr (XEXP (x, 0));
1309 mem_addr = get_addr (XEXP (mem, 0));
/* If the base terms of the two addresses prove disjointness, done.  */
1311 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
1314 x_addr = canon_rtx (x_addr);
1315 mem_addr = canon_rtx (mem_addr);
/* Offset/size-based overlap test on the canonicalized addresses.  */
1317 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
1318 SIZE_FOR_MODE (x), x_addr, 0))
1321 if (aliases_everything_p (x))
1324 /* We cannot use aliases_everything_p to test MEM, since we must look
1325 at MEM_MODE, rather than GET_MODE (MEM). */
1326 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
1329 /* In true_dependence we also allow BLKmode to alias anything. Why
1330 don't we do this in anti_dependence and output_dependence? */
1331 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
/* Last resort: a fixed-address scalar cannot conflict with a
   varying-address struct reference (and vice versa).  */
1334 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
1338 /* Returns non-zero if a write to X might alias a previous read from
1339 (or, if WRITEP is non-zero, a write to) MEM. */
1342 write_dependence_p (mem, x, writep)
1347 rtx x_addr, mem_addr;
/* Volatile references always conflict.  (Several early-return
   statements in this function are elided from this excerpt.)  */
1350 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
1353 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1356 /* If MEM is an unchanging read, then it can't possibly conflict with
1357 the store to X, because there is at most one store to MEM, and it must
1358 have occurred somewhere before MEM. */
1359 if (!writep && RTX_UNCHANGING_P (mem))
1362 x_addr = get_addr (XEXP (x, 0));
1363 mem_addr = get_addr (XEXP (mem, 0));
/* Cheap base-term disjointness check before the full overlap test.  */
1365 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
1369 x_addr = canon_rtx (x_addr);
1370 mem_addr = canon_rtx (mem_addr);
1372 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
1373 SIZE_FOR_MODE (x), x_addr, 0))
/* If one reference is a fixed-address scalar and the other a
   varying-address struct, they conflict only when the other side can
   alias everything (e.g. an AND address).  */
1377 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
1380 return (!(fixed_scalar == mem && !aliases_everything_p (x))
1381 && !(fixed_scalar == x && !aliases_everything_p (mem)));
1384 /* Anti dependence: X is written after read in MEM takes place. */
1387 anti_dependence (mem, x)
/* Thin wrapper: an anti-dependence is a write-after-read check.  */
1391 return write_dependence_p (mem, x, /*writep=*/0);
1394 /* Output dependence: X is written after store in MEM takes place. */
1397 output_dependence (mem, x)
/* Thin wrapper: an output dependence is a write-after-write check.  */
1401 return write_dependence_p (mem, x, /*writep=*/1);
1404 /* Returns non-zero if X might refer to something which is not
1405 local to the function and is not constant. */
1408 nonlocal_reference_p (x)
1412 register RTX_CODE code;
/* (Several return statements and case labels of the switch in this
   function are elided from this excerpt.)  */
1415 code = GET_CODE (x);
/* For an insn, examine only its pattern / usage, not the insn shell.  */
1417 if (GET_RTX_CLASS (code) == 'i')
1419 /* Constant functions can be constant if they don't use
1420 scratch memory used to mark function w/o side effects. */
1421 if (code == CALL_INSN && CONST_CALL_P (x))
1423 x = CALL_INSN_FUNCTION_USAGE (x);
1429 code = GET_CODE (x);
/* SUBREG of a hard register: check the underlying hard register.  */
1435 if (GET_CODE (SUBREG_REG (x)) == REG)
1437 /* Global registers are not local. */
1438 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
1439 && global_regs[REGNO (SUBREG_REG (x)) + SUBREG_WORD (x)])
1447 /* Global registers are not local. */
1448 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
1462 /* Constants in the function's constants pool are constant. */
1463 if (CONSTANT_POOL_ADDRESS_P (x))
1468 /* Recursion introduces no additional considerations. */
/* A call to the current function itself (matched by assembler name)
   is treated as local.  */
1469 if (GET_CODE (XEXP (x, 0)) == MEM
1470 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1471 && strcmp(XSTR (XEXP (XEXP (x, 0), 0), 0),
1472 IDENTIFIER_POINTER (
1473 DECL_ASSEMBLER_NAME (current_function_decl))) == 0)
1478 /* Be overly conservative and consider any volatile memory
1479 reference as not local. */
1480 if (MEM_VOLATILE_P (x))
1482 base = find_base_term (XEXP (x, 0));
1485 /* A Pmode ADDRESS could be a reference via the structure value
1486 address or static chain. Such memory references are nonlocal.
1488 Thus, we have to examine the contents of the ADDRESS to find
1489 out if this is a local reference or not. */
1490 if (GET_CODE (base) == ADDRESS
1491 && GET_MODE (base) == Pmode
1492 && (XEXP (base, 0) == stack_pointer_rtx
1493 || XEXP (base, 0) == arg_pointer_rtx
1494 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1495 || XEXP (base, 0) == hard_frame_pointer_rtx
1497 || XEXP (base, 0) == frame_pointer_rtx))
1499 /* Constants in the function's constant pool are constant. */
1500 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
1513 /* Recursively scan the operands of this expression. */
1516 register const char *fmt = GET_RTX_FORMAT (code);
/* Walk 'e' (single expression) and 'E' (expression vector) operands;
   any nonlocal operand makes the whole expression nonlocal.  */
1519 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1521 if (fmt[i] == 'e' && XEXP (x, i))
1523 if (nonlocal_reference_p (XEXP (x, i)))
1526 else if (fmt[i] == 'E')
1529 for (j = 0; j < XVECLEN (x, i); j++)
1530 if (nonlocal_reference_p (XVECEXP (x, i, j)))
1539 /* Mark the function if it is constant. */
1542 mark_constant_function ()
/* Bail out for functions that cannot be (or are already known to be)
   constant: public functions, already-readonly ones, volatile
   (noreturn-style) ones, and those returning void.  */
1546 if (TREE_PUBLIC (current_function_decl)
1547 || TREE_READONLY (current_function_decl)
1548 || TREE_THIS_VOLATILE (current_function_decl)
1549 || TYPE_MODE (TREE_TYPE (current_function_decl)) == VOIDmode)
1552 /* Determine if this is a constant function. */
/* Any real insn that touches nonlocal, non-constant state disqualifies
   the function.  */
1554 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1555 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1556 && nonlocal_reference_p (insn))
1559 /* Mark the function. */
1561 TREE_READONLY (current_function_decl) = 1;
/* Set of hard registers that may carry incoming pointer arguments;
   computed once below and consulted each pass of init_alias_analysis.  */
1565 static HARD_REG_SET argument_registers;
/* NOTE(review): the function header for the code below is elided from
   this excerpt; it appears to be a one-time initialization routine
   (presumably init_alias_once) -- confirm against the full file.  */
1572 #ifndef OUTGOING_REGNO
1573 #define OUTGOING_REGNO(N) N
1575 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1576 /* Check whether this register can hold an incoming pointer
1577 argument. FUNCTION_ARG_REGNO_P tests outgoing register
1578 numbers, so translate if necessary due to register windows. */
1579 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
1580 && HARD_REGNO_MODE_OK (i, Pmode))
1581 SET_HARD_REG_BIT (argument_registers, i);
/* Splay tree mapping alias-set numbers to their records; see the
   commentary at the top of the file.  */
1583 alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
1586 /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
1590 init_alias_analysis ()
1592 int maxreg = max_reg_num ();
1595 register unsigned int ui;
/* reg_known_value / reg_known_equiv_p are indexed by pseudo register
   number; both pointers are biased by FIRST_PSEUDO_REGISTER so that
   indexing starts at the first pseudo.  (The assignment left-hand
   sides are elided from this excerpt.)  */
1598 reg_known_value_size = maxreg;
1601 = (rtx *) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (rtx))
1602 - FIRST_PSEUDO_REGISTER;
1604 = (char*) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (char))
1605 - FIRST_PSEUDO_REGISTER;
1607 /* Overallocate reg_base_value to allow some growth during loop
1608 optimization. Loop unrolling can create a large number of
1610 reg_base_value_size = maxreg * 2;
1611 reg_base_value = (rtx *) xcalloc (reg_base_value_size, sizeof (rtx));
/* Register the array with the garbage collector so its rtxes stay live.  */
1613 ggc_add_rtx_root (reg_base_value, reg_base_value_size);
1615 new_reg_base_value = (rtx *) xmalloc (reg_base_value_size * sizeof (rtx));
1616 reg_seen = (char *) xmalloc (reg_base_value_size);
1617 if (! reload_completed && flag_unroll_loops)
1619 /* ??? Why are we realloc'ing if we're just going to zero it? */
1620 alias_invariant = (rtx *)xrealloc (alias_invariant,
1621 reg_base_value_size * sizeof (rtx));
1622 bzero ((char *)alias_invariant, reg_base_value_size * sizeof (rtx));
1626 /* The basic idea is that each pass through this loop will use the
1627 "constant" information from the previous pass to propagate alias
1628 information through another level of assignments.
1630 This could get expensive if the assignment chains are long. Maybe
1631 we should throttle the number of iterations, possibly based on
1632 the optimization level or flag_expensive_optimizations.
1634 We could propagate more information in the first pass by making use
1635 of REG_N_SETS to determine immediately that the alias information
1636 for a pseudo is "constant".
1638 A program with an uninitialized variable can cause an infinite loop
1639 here. Instead of doing a full dataflow analysis to detect such problems
1640 we just cap the number of iterations for the loop.
1642 The state of the arrays for the set chain in question does not matter
1643 since the program has undefined behavior. */
1648 /* Assume nothing will change this iteration of the loop. */
1651 /* We want to assign the same IDs each iteration of this loop, so
1652 start counting from zero each iteration of the loop. */
1655 /* We're at the start of the function each iteration through the
1656 loop, so we're copying arguments. */
1657 copying_arguments = 1;
1659 /* Wipe the potential alias information clean for this pass. */
1660 bzero ((char *) new_reg_base_value, reg_base_value_size * sizeof (rtx));
1662 /* Wipe the reg_seen array clean. */
1663 bzero ((char *) reg_seen, reg_base_value_size);
1665 /* Mark all hard registers which may contain an address.
1666 The stack, frame and argument pointers may contain an address.
1667 An argument register which can hold a Pmode value may contain
1668 an address even if it is not in BASE_REGS.
1670 The address expression is VOIDmode for an argument and
1671 Pmode for other registers. */
1673 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1674 if (TEST_HARD_REG_BIT (argument_registers, i))
1675 new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
1676 gen_rtx_REG (Pmode, i));
1678 new_reg_base_value[STACK_POINTER_REGNUM]
1679 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
1680 new_reg_base_value[ARG_POINTER_REGNUM]
1681 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
1682 new_reg_base_value[FRAME_POINTER_REGNUM]
1683 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
1684 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1685 new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
1686 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
/* The incoming struct-value and static-chain registers also carry
   addresses when they are hard registers.  */
1688 if (struct_value_incoming_rtx
1689 && GET_CODE (struct_value_incoming_rtx) == REG)
1690 new_reg_base_value[REGNO (struct_value_incoming_rtx)]
1691 = gen_rtx_ADDRESS (Pmode, struct_value_incoming_rtx);
1693 if (static_chain_rtx
1694 && GET_CODE (static_chain_rtx) == REG)
1695 new_reg_base_value[REGNO (static_chain_rtx)]
1696 = gen_rtx_ADDRESS (Pmode, static_chain_rtx);
1698 /* Walk the insns adding values to the new_reg_base_value array. */
1699 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1701 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
/* Skip prologue/epilogue insns; their stack adjustments would
   pollute the base values.  */
1705 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
1706 if (prologue_epilogue_contains (insn))
1710 /* If this insn has a noalias note, process it.  Otherwise,
1711 scan for sets. A simple set will have no side effects
1712 which could change the base value of any other register. */
1714 if (GET_CODE (PATTERN (insn)) == SET
1715 && REG_NOTES (insn) != 0
1716 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
1717 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
1719 note_stores (PATTERN (insn), record_set, NULL);
/* Record known-value information from REG_EQUAL/REG_EQUIV notes on
   single sets of pseudos.  REG_EQUAL is only trusted when the pseudo
   is set exactly once; REG_EQUIV is valid throughout the function.  */
1721 set = single_set (insn);
1724 && GET_CODE (SET_DEST (set)) == REG
1725 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1726 && REG_NOTES (insn) != 0
1727 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1728 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
1729 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1730 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
1731 && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
1733 int regno = REGNO (SET_DEST (set));
1734 reg_known_value[regno] = XEXP (note, 0);
1735 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
/* Once past the FUNCTION_BEG note, argument copying is over.  */
1738 else if (GET_CODE (insn) == NOTE
1739 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1740 copying_arguments = 0;
1743 /* Now propagate values from new_reg_base_value to reg_base_value. */
1744 for (ui = 0; ui < reg_base_value_size; ui++)
1746 if (new_reg_base_value[ui]
1747 && new_reg_base_value[ui] != reg_base_value[ui]
1748 && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
1750 reg_base_value[ui] = new_reg_base_value[ui];
/* Iterate to a fixed point, capped to guard against pathological
   (undefined-behavior) input; see the big comment above.  */
1755 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
1757 /* Fill in the remaining entries. */
1758 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
1759 if (reg_known_value[i] == 0)
1760 reg_known_value[i] = regno_reg_rtx[i];
1762 /* Simplify the reg_base_value array so that no register refers to
1763 another register, except to special registers indirectly through
1764 ADDRESS expressions.
1766 In theory this loop can take as long as O(registers^2), but unless
1767 there are very long dependency chains it will run in close to linear
1770 This loop may not be needed any longer now that the main loop does
1771 a better job at propagating alias information. */
1777 for (ui = 0; ui < reg_base_value_size; ui++)
1779 rtx base = reg_base_value[ui];
1780 if (base && GET_CODE (base) == REG)
1782 unsigned int base_regno = REGNO (base);
1783 if (base_regno == ui) /* register set from itself */
1784 reg_base_value[ui] = 0;
1786 reg_base_value[ui] = reg_base_value[base_regno];
1791 while (changed && pass < MAX_ALIAS_LOOP_PASSES);
/* new_reg_base_value is per-invocation scratch; release it now.  */
1794 free (new_reg_base_value);
1795 new_reg_base_value = 0;
/* Tear down the per-function alias-analysis state allocated by
   init_alias_analysis, undoing the FIRST_PSEUDO_REGISTER bias on the
   reg_known_* pointers before freeing them.  */
1801 end_alias_analysis ()
1803 free (reg_known_value + FIRST_PSEUDO_REGISTER);
1804 reg_known_value = 0;
1805 reg_known_value_size = 0;
1806 free (reg_known_equiv_p + FIRST_PSEUDO_REGISTER);
1807 reg_known_equiv_p = 0;
/* Unregister from the garbage collector before freeing the array.  */
1811 ggc_del_root (reg_base_value);
1812 free (reg_base_value);
1815 reg_base_value_size = 0;
1816 if (alias_invariant)
1818 free (alias_invariant);
1819 alias_invariant = 0;