1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
33 #include "splay-tree.h"
35 /* The alias sets assigned to MEMs assist the back-end in determining
36 which MEMs can alias which other MEMs. In general, two MEMs in
37 different alias sets do not alias each other. There is one
38 exception, however. Consider something like:
40 struct S {int i; double d; };
42 a store to an `S' can alias something of either type `int' or type
43 `double'. (However, a store to an `int' cannot alias a `double'
44 and vice versa.) We indicate this via a tree structure that looks
52 (The arrows are directed and point downwards.) If, when comparing
53 two alias sets, we can hold one set fixed, and trace the other set
54 downwards, and at some point find the first set, the two MEMs can
55 alias one another. In this situation we say the alias set for
56 `struct S' is the `superset' and that those for `int' and `double'
59 Alias set zero is implicitly a superset of all other alias sets.
60 However, there is no actual entry for alias set zero. It is an
61 error to attempt to explicitly construct a subset of zero. */
/* One node of the alias-set hierarchy: an alias set number together
 with the full (transitive) collection of its subset children, which
 is consulted by mems_in_disjoint_alias_sets_p below.  NOTE(review):
 the field declarations and closing brace are not visible in this
 extraction.  */
63 typedef struct alias_set_entry {
64 /* The alias set number, as stored in MEM_ALIAS_SET. */
67 /* The children of the alias set. These are not just the immediate
68 children, but, in fact, all children. So, if we have:
70 struct T { struct S s; float f; }
72 continuing our example above, the children here will be all of
73 `int', `double', `float', and `struct S'. */
77 static rtx canon_rtx PROTO((rtx));
78 static int rtx_equal_for_memref_p PROTO((rtx, rtx));
79 static rtx find_symbolic_term PROTO((rtx));
80 static int memrefs_conflict_p PROTO((int, rtx, int, rtx,
82 static void record_set PROTO((rtx, rtx));
83 static rtx find_base_term PROTO((rtx));
84 static int base_alias_check PROTO((rtx, rtx, enum machine_mode,
86 static rtx find_base_value PROTO((rtx));
87 static int mems_in_disjoint_alias_sets_p PROTO((rtx, rtx));
88 static int insert_subset_children PROTO((splay_tree_node,
90 static alias_set_entry get_alias_set_entry PROTO((int));
91 static rtx fixed_scalar_and_varying_struct_p PROTO((rtx, rtx, int (*)(rtx)));
92 static int aliases_everything_p PROTO((rtx));
93 static int write_dependence_p PROTO((rtx, rtx, int));
95 /* Set up all info needed to perform alias analysis on memory references. */
97 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
99 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
100 different alias sets. We ignore alias sets in functions making use
101 of variable arguments because the va_arg macros on some systems are
103 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
104 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
106 /* Cap the number of passes we make over the insns propagating alias
107 information through set chains.
109 10 is a completely arbitrary choice. */
110 #define MAX_ALIAS_LOOP_PASSES 10
112 /* reg_base_value[N] gives an address to which register N is related.
113 If all sets after the first add or subtract to the current value
114 or otherwise modify it so it does not point to a different top level
115 object, reg_base_value[N] is equal to the address part of the source
118 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
119 expressions represent certain special values: function arguments and
120 the stack, frame, and argument pointers. The contents of an address
121 expression are not used (but they are descriptive for debugging);
122 only the address and mode matter. Pointer equality, not rtx_equal_p,
123 determines whether two ADDRESS expressions refer to the same base
124 address. The mode determines whether it is a function argument or
125 other special value. */
128 rtx *new_reg_base_value;
129 unsigned int reg_base_value_size; /* size of reg_base_value array */
130 #define REG_BASE_VALUE(X) \
131 ((unsigned) REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
133 /* Vector of known invariant relationships between registers. Set in
134 loop unrolling. Indexed by register number, if nonzero the value
135 is an expression describing this register in terms of another.
137 The length of this array is REG_BASE_VALUE_SIZE.
139 Because this array contains only pseudo registers it has no effect
141 static rtx *alias_invariant;
143 /* Vector indexed by N giving the initial (unchanging) value known
144 for pseudo-register N. */
145 rtx *reg_known_value;
147 /* Indicates number of valid entries in reg_known_value. */
148 static int reg_known_value_size;
150 /* Vector recording for each reg_known_value whether it is due to a
151 REG_EQUIV note. Future passes (viz., reload) may replace the
152 pseudo with the equivalent expression and so we account for the
153 dependences that would be introduced if that happens. */
154 /* ??? This is a problem only on the Convex. The REG_EQUIV notes created in
155 assign_parms mention the arg pointer, and there are explicit insns in the
156 RTL that modify the arg pointer. Thus we must ensure that such insns don't
157 get scheduled across each other because that would invalidate the REG_EQUIV
158 notes. One could argue that the REG_EQUIV notes are wrong, but solving
159 the problem in the scheduler will likely give better code, so we do it
161 char *reg_known_equiv_p;
163 /* True when scanning insns from the start of the rtl to the
164 NOTE_INSN_FUNCTION_BEG note. */
166 static int copying_arguments;
168 /* The splay-tree used to store the various alias set entries. */
170 static splay_tree alias_sets;
172 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
173 such an entry, or NULL otherwise. */
175 static alias_set_entry
176 get_alias_set_entry (alias_set)
/* Look ALIAS_SET up in the global `alias_sets' splay tree.
 NOTE(review): `sn' is presumably the splay_tree_node returned by the
 lookup; its declaration is not visible in this extraction.  */
180 splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);
/* The entry pointer is stored in the node's value slot; a miss yields
 a null entry.  */
182 return sn ? ((alias_set_entry) sn->value) : ((alias_set_entry) 0);
185 /* Returns nonzero value if the alias sets for MEM1 and MEM2 are such
186 that the two MEMs cannot alias each other. */
189 mems_in_disjoint_alias_sets_p (mem1, mem2)
195 #ifdef ENABLE_CHECKING
196 /* Perform a basic sanity check. Namely, that there are no alias sets
197 if we're not using strict aliasing. This helps to catch bugs
198 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
199 where a MEM is allocated in some way other than by the use of
200 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
201 use alias sets to indicate that spilled registers cannot alias each
202 other, we might need to remove this check. */
203 if (!flag_strict_aliasing &&
204 (MEM_ALIAS_SET (mem1) || MEM_ALIAS_SET (mem2)))
208 /* The code used in varargs macros is often not conforming ANSI C,
209 which can trick the compiler into making incorrect aliasing
210 assumptions in these functions. So, we don't use alias sets in
211 such a function. FIXME: This should be moved into the front-end;
212 it is a language-dependent notion, and there's no reason not to
213 still use these checks to handle globals. */
214 if (current_function_stdarg || current_function_varargs)
217 if (!MEM_ALIAS_SET (mem1) || !MEM_ALIAS_SET (mem2))
218 /* We have no alias set information for one of the MEMs, so we
219 have to assume it can alias anything. */
222 if (MEM_ALIAS_SET (mem1) == MEM_ALIAS_SET (mem2))
223 /* The two alias sets are the same, so they may alias. */
226 /* Iterate through each of the children of the first alias set,
227 comparing it with the second alias set. */
228 ase = get_alias_set_entry (MEM_ALIAS_SET (mem1));
229 if (ase && splay_tree_lookup (ase->children,
230 (splay_tree_key) MEM_ALIAS_SET (mem2)))
233 /* Now do the same, but with the alias sets reversed. */
234 ase = get_alias_set_entry (MEM_ALIAS_SET (mem2));
235 if (ase && splay_tree_lookup (ase->children,
236 (splay_tree_key) MEM_ALIAS_SET (mem1)))
/* NOTE(review): the return statements for the branches above are not
 visible in this extraction; presumably each early exit returns the
 "may alias" answer and the final fall-through returns "disjoint".  */
239 /* The two MEMs are in distinct alias sets, and neither one is the
240 child of the other. Therefore, they cannot alias. */
244 /* Insert the NODE into the splay tree given by DATA. Used by
245 record_alias_subset via splay_tree_foreach. */
248 insert_subset_children (node, data)
249 splay_tree_node node;
/* Copy NODE's key (and presumably its value; the remaining argument
 lines are not visible here) into the destination children tree.  */
252 splay_tree_insert ((splay_tree) data,
259 /* Indicate that things in SUBSET can alias things in SUPERSET, but
260 not vice versa. For example, in C, a store to an `int' can alias a
261 structure containing an `int', but not vice versa. Here, the
262 structure would be the SUPERSET and `int' the SUBSET. This
263 function should be called only once per SUPERSET/SUBSET pair. At
264 present any given alias set may only be a subset of one superset.
266 It is illegal for SUPERSET to be zero; everything is implicitly a
267 subset of alias set zero. */
270 record_alias_subset (superset, subset)
274 alias_set_entry superset_entry;
275 alias_set_entry subset_entry;
/* Lazily create the superset's entry on first use.  */
280 superset_entry = get_alias_set_entry (superset);
283 /* Create an entry for the SUPERSET, so that we have a place to
284 attach the SUBSET. */
286 (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
287 superset_entry->alias_set = superset;
288 superset_entry->children
289 = splay_tree_new (splay_tree_compare_ints, 0, 0);
290 splay_tree_insert (alias_sets,
291 (splay_tree_key) superset,
292 (splay_tree_value) superset_entry);
/* If the subset already has children of its own, fold them all into
 the superset so the children tree stays transitively closed.  */
296 subset_entry = get_alias_set_entry (subset);
298 /* There is an entry for the subset. Enter all of its children
299 (if they are not already present) as children of the SUPERSET. */
300 splay_tree_foreach (subset_entry->children,
301 insert_subset_children,
302 superset_entry->children);
304 /* Enter the SUBSET itself as a child of the SUPERSET. */
305 splay_tree_insert (superset_entry->children,
306 (splay_tree_key) subset,
310 /* Inside SRC, the source of a SET, find a base address.
 Returns an ADDRESS, SYMBOL_REF or LABEL_REF rtx when a base is
 known; presumably returns 0 when none can be determined — the
 fall-through returns and most case labels of the switch are not
 visible in this extraction (TODO confirm).  */
313 find_base_value (src)
316 switch (GET_CODE (src))
323 /* At the start of a function argument registers have known base
324 values which may be lost later. Returning an ADDRESS
325 expression here allows optimization based on argument values
326 even when the argument registers are used for other purposes. */
327 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
328 return new_reg_base_value[REGNO (src)];
330 /* If a pseudo has a known base value, return it. Do not do this
331 for hard regs since it can result in a circular dependency
332 chain for registers which have values at function entry.
334 The test above is not sufficient because the scheduler may move
335 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
336 if (REGNO (src) >= FIRST_PSEUDO_REGISTER
337 && (unsigned) REGNO (src) < reg_base_value_size
338 && reg_base_value[REGNO (src)])
339 return reg_base_value[REGNO (src)];
344 /* Check for an argument passed in memory. Only record in the
345 copying-arguments block; it is too hard to track changes
347 if (copying_arguments
348 && (XEXP (src, 0) == arg_pointer_rtx
349 || (GET_CODE (XEXP (src, 0)) == PLUS
350 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
351 return gen_rtx_ADDRESS (VOIDmode, src);
356 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
363 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
365 /* If either operand is a REG, then see if we already have
366 a known value for it. */
367 if (GET_CODE (src_0) == REG)
369 temp = find_base_value (src_0);
374 if (GET_CODE (src_1) == REG)
376 temp = find_base_value (src_1);
381 /* Guess which operand is the base address.
383 If either operand is a symbol, then it is the base. If
384 either operand is a CONST_INT, then the other is the base. */
386 if (GET_CODE (src_1) == CONST_INT
387 || GET_CODE (src_0) == SYMBOL_REF
388 || GET_CODE (src_0) == LABEL_REF
389 || GET_CODE (src_0) == CONST)
390 return find_base_value (src_0);
392 if (GET_CODE (src_0) == CONST_INT
393 || GET_CODE (src_1) == SYMBOL_REF
394 || GET_CODE (src_1) == LABEL_REF
395 || GET_CODE (src_1) == CONST)
396 return find_base_value (src_1);
398 /* This might not be necessary anymore.
400 If either operand is a REG that is a known pointer, then it
402 if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
403 return find_base_value (src_0);
405 if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
406 return find_base_value (src_1);
412 /* The standard form is (lo_sum reg sym) so look only at the
414 return find_base_value (XEXP (src, 1));
417 /* If the second operand is constant set the base
418 address to the first operand. */
419 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
420 return find_base_value (XEXP (src, 0));
424 case SIGN_EXTEND: /* used for NT/Alpha pointers */
426 return find_base_value (XEXP (src, 0));
435 /* Called from init_alias_analysis indirectly through note_stores. */
437 /* while scanning insns to find base values, reg_seen[N] is nonzero if
438 register N has been set in this function. */
439 static char *reg_seen;
441 /* Addresses which are known not to alias anything else are identified
442 by a unique integer. */
443 static int unique_id;
/* Record the base value implied by a SET (or CLOBBER) of DEST.
 Updates new_reg_base_value[] for register destinations only.
 NOTE(review): several lines (local declarations, case labels of the
 switch, and the extraction of `src') are not visible here.  */
446 record_set (dest, set)
452 if (GET_CODE (dest) != REG)
455 regno = REGNO (dest);
459 /* A CLOBBER wipes out any old value but does not prevent a previously
460 unset register from acquiring a base address (i.e. reg_seen is not
462 if (GET_CODE (set) == CLOBBER)
464 new_reg_base_value[regno] = 0;
/* Registers set more than once get a fresh unique ADDRESS so they are
 treated as pointing to a distinct top-level object.  */
473 new_reg_base_value[regno] = 0;
477 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
478 GEN_INT (unique_id++));
482 /* This is not the first set. If the new value is not related to the
483 old value, forget the base value. Note that the following code is
485 extern int x, y; int *p = &x; p += (&y-&x);
486 ANSI C does not allow computing the difference of addresses
487 of distinct top level objects. */
488 if (new_reg_base_value[regno])
489 switch (GET_CODE (src))
494 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
495 new_reg_base_value[regno] = 0;
498 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
499 new_reg_base_value[regno] = 0;
502 new_reg_base_value[regno] = 0;
505 /* If this is the first set of a register, record the value. */
506 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
507 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
508 new_reg_base_value[regno] = find_base_value (src);
513 /* Called from loop optimization when a new pseudo-register is created.
 Records VAL as the base value of register REGNO; when INVARIANT is
 nonzero, also records VAL in alias_invariant[] so that two registers
 with unknown values can later be proved distinct.  */
515 record_base_value (regno, val, invariant)
/* Out-of-range register numbers are ignored.  */
520 if ((unsigned) regno >= reg_base_value_size)
523 /* If INVARIANT is true then this value also describes an invariant
524 relationship which can be used to deduce that two registers with
525 unknown values are different. */
526 if (invariant && alias_invariant)
527 alias_invariant[regno] = val;
/* A plain register copy inherits the source register's base value;
 anything else is analyzed with find_base_value.  */
529 if (GET_CODE (val) == REG)
531 if ((unsigned) REGNO (val) < reg_base_value_size)
533 reg_base_value[regno] = reg_base_value[REGNO (val)];
537 reg_base_value[regno] = find_base_value (val);
/* NOTE(review): this is the body of canon_rtx — its header line is not
 visible in this extraction.  It returns a canonical form of X: known
 register equivalences are substituted, constant PLUS operands are
 folded, and MEM addresses are canonicalized without mutating the
 original MEM.  */
544 /* Recursively look for equivalences. */
545 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
546 && REGNO (x) < reg_known_value_size)
547 return reg_known_value[REGNO (x)] == x
548 ? x : canon_rtx (reg_known_value[REGNO (x)]);
549 else if (GET_CODE (x) == PLUS)
551 rtx x0 = canon_rtx (XEXP (x, 0));
552 rtx x1 = canon_rtx (XEXP (x, 1));
/* Only rebuild the PLUS when canonicalization changed an operand.  */
554 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
556 /* We can tolerate LO_SUMs being offset here; these
557 rtl are used for nothing other than comparisons. */
558 if (GET_CODE (x0) == CONST_INT)
559 return plus_constant_for_output (x1, INTVAL (x0));
560 else if (GET_CODE (x1) == CONST_INT)
561 return plus_constant_for_output (x0, INTVAL (x1));
562 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
565 /* This gives us much better alias analysis when called from
566 the loop optimizer. Note we want to leave the original
567 MEM alone, but need to return the canonicalized MEM with
568 all the flags with their original values. */
569 else if (GET_CODE (x) == MEM)
571 rtx addr = canon_rtx (XEXP (x, 0));
572 if (addr != XEXP (x, 0))
574 rtx new = gen_rtx_MEM (GET_MODE (x), addr);
575 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
576 MEM_COPY_ATTRIBUTES (new, x);
577 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
584 /* Return 1 if X and Y are identical-looking rtx's.
586 We use the data in reg_known_value above to see if two registers with
587 different numbers are, in fact, equivalent. */
590 rtx_equal_for_memref_p (x, y)
595 register enum rtx_code code;
/* Two null pointers compare equal; one null and one non-null do not.  */
598 if (x == 0 && y == 0)
600 if (x == 0 || y == 0)
609 /* Rtx's of different codes cannot be equal. */
610 if (code != GET_CODE (y))
613 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
614 (REG:SI x) and (REG:HI x) are NOT equivalent. */
616 if (GET_MODE (x) != GET_MODE (y))
619 /* REG, LABEL_REF, and SYMBOL_REF can be compared nonrecursively. */
622 return REGNO (x) == REGNO (y);
623 if (code == LABEL_REF)
624 return XEXP (x, 0) == XEXP (y, 0);
625 if (code == SYMBOL_REF)
626 return XSTR (x, 0) == XSTR (y, 0);
627 if (code == CONST_INT)
628 return INTVAL (x) == INTVAL (y);
629 if (code == ADDRESSOF)
630 return REGNO (XEXP (x, 0)) == REGNO (XEXP (y, 0)) && XINT (x, 1) == XINT (y, 1);
632 /* For commutative operations, the RTXs match if the operands match in
633 either order. Also handle the simple binary and unary cases without a loop. */
634 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
635 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
636 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
637 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
638 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
639 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
640 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
641 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
642 else if (GET_RTX_CLASS (code) == '1')
643 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
645 /* Compare the elements. If any pair of corresponding elements
646 fail to match, return 0 for the whole thing.
648 Limit cases to types which actually appear in addresses. */
650 fmt = GET_RTX_FORMAT (code);
651 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
656 if (XINT (x, i) != XINT (y, i))
661 /* Two vectors must have the same length. */
662 if (XVECLEN (x, i) != XVECLEN (y, i))
665 /* And the corresponding elements must match. */
666 for (j = 0; j < XVECLEN (x, i); j++)
667 if (rtx_equal_for_memref_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
672 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
676 /* This can happen for an asm which clobbers memory. */
680 /* It is believed that rtx's at this level will never
681 contain anything but integers and other rtx's,
682 except for within LABEL_REFs and SYMBOL_REFs. */
690 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
691 X and return it, or return 0 if none found. */
694 find_symbolic_term (x)
698 register enum rtx_code code;
/* A symbol or label is itself the answer.  */
702 if (code == SYMBOL_REF || code == LABEL_REF)
/* Other object-class rtx's (class 'o') cannot contain one.  */
704 if (GET_RTX_CLASS (code) == 'o')
/* Otherwise scan the operands, recursing into sub-expressions.  */
707 fmt = GET_RTX_FORMAT (code);
708 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
714 t = find_symbolic_term (XEXP (x, i));
718 else if (fmt[i] == 'E')
/* NOTE(review): this is the body of find_base_term — the function
 header and several case labels are not visible in this extraction.
 It returns the base term (REG base value, SYMBOL_REF, LABEL_REF or
 ADDRESS) underlying address X, or presumably 0 when the base cannot
 be determined.  */
728 switch (GET_CODE (x))
731 return REG_BASE_VALUE (x);
734 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
740 return find_base_term (XEXP (x, 0));
744 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
751 rtx tmp1 = XEXP (x, 0);
752 rtx tmp2 = XEXP (x, 1);
754 /* This is a little bit tricky since we have to determine which of
755 the two operands represents the real base address. Otherwise this
756 routine may return the index register instead of the base register.
758 That may cause us to believe no aliasing was possible, when in
759 fact aliasing is possible.
761 We use a few simple tests to guess the base register. Additional
762 tests can certainly be added. For example, if one of the operands
763 is a shift or multiply, then it must be the index register and the
764 other operand is the base register. */
766 /* If either operand is known to be a pointer, then use it
767 to determine the base term. */
768 if (REG_P (tmp1) && REGNO_POINTER_FLAG (REGNO (tmp1)))
769 return find_base_term (tmp1);
771 if (REG_P (tmp2) && REGNO_POINTER_FLAG (REGNO (tmp2)))
772 return find_base_term (tmp2);
774 /* Neither operand was known to be a pointer. Go ahead and find the
775 base term for both operands. */
776 tmp1 = find_base_term (tmp1);
777 tmp2 = find_base_term (tmp2);
779 /* If either base term is a named object or a special address
780 (like an argument or stack reference), then use it for the
783 && (GET_CODE (tmp1) == SYMBOL_REF
784 || GET_CODE (tmp1) == LABEL_REF
785 || (GET_CODE (tmp1) == ADDRESS
786 && GET_MODE (tmp1) != VOIDmode)))
790 && (GET_CODE (tmp2) == SYMBOL_REF
791 || GET_CODE (tmp2) == LABEL_REF
792 || (GET_CODE (tmp2) == ADDRESS
793 && GET_MODE (tmp2) != VOIDmode)))
796 /* We could not determine which of the two operands was the
797 base register and which was the index. So we can determine
798 nothing from the base alias check. */
803 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
804 return REG_BASE_VALUE (XEXP (x, 0));
816 /* Return 0 if the addresses X and Y are known to point to different
817 objects, 1 if they might be pointers to the same object. */
820 base_alias_check (x, y, x_mode, y_mode)
822 enum machine_mode x_mode, y_mode;
824 rtx x_base = find_base_term (x);
825 rtx y_base = find_base_term (y);
827 /* If the address itself has no known base see if a known equivalent
828 value has one. If either address still has no known base, nothing
829 is known about aliasing. */
833 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
835 x_base = find_base_term (x_c);
843 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
845 y_base = find_base_term (y_c);
850 /* If the base addresses are equal nothing is known about aliasing. */
851 if (rtx_equal_p (x_base, y_base))
854 /* The base addresses of the read and write are different expressions.
855 If they are both symbols and they are not accessed via AND, there is
856 no conflict. We can bring knowledge of object alignment into play
857 here. For example, on alpha, "char a, b;" can alias one another,
858 though "char a; long b;" cannot. */
859 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
861 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
/* An AND address may reach outside its object; only safe when the
 other reference is at least as large as the masked alignment.  */
863 if (GET_CODE (x) == AND
864 && (GET_CODE (XEXP (x, 1)) != CONST_INT
865 || GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
867 if (GET_CODE (y) == AND
868 && (GET_CODE (XEXP (y, 1)) != CONST_INT
869 || GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
871 /* Differing symbols never alias. */
875 /* If one address is a stack reference there can be no alias:
876 stack references using different base registers do not alias,
877 a stack reference can not alias a parameter, and a stack reference
878 can not alias a global. */
879 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
880 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
/* flag_argument_noalias controls how aggressively arguments are
 assumed distinct from other objects.  */
883 if (! flag_argument_noalias)
886 if (flag_argument_noalias > 1)
889 /* Weak noalias assertion (arguments are distinct, but may match globals). */
890 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
893 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
894 where SIZE is the size in bytes of the memory reference. If ADDR
895 is not modified by the memory reference then ADDR is returned. */
898 addr_side_effect_eval (addr, size, n_refs)
/* NOTE(review): the case labels of this switch are not visible in the
 extraction; from the offsets, the four arms presumably correspond to
 pre-increment, pre-decrement, post-increment and post-decrement
 addressing — TODO confirm.  */
905 switch (GET_CODE (addr))
908 offset = (n_refs + 1) * size;
911 offset = -(n_refs + 1) * size;
914 offset = n_refs * size;
917 offset = -n_refs * size;
/* Materialize the accumulated offset as a PLUS over the inner
 address; a zero offset just strips the side-effect wrapper.  */
925 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), GEN_INT (offset));
927 addr = XEXP (addr, 0);
932 /* Return nonzero if X and Y (memory addresses) could reference the
933 same location in memory. C is an offset accumulator. When
934 C is nonzero, we are testing aliases between X and Y + C.
935 XSIZE is the size in bytes of the X reference,
936 similarly YSIZE is the size in bytes for Y.
938 If XSIZE or YSIZE is zero, we do not know the amount of memory being
939 referenced (the reference was BLKmode), so make the most pessimistic
942 If XSIZE or YSIZE is negative, we may access memory outside the object
943 being referenced as a side effect. This can happen when using AND to
944 align memory references, as is done on the Alpha.
946 Nice to notice that varying addresses cannot conflict with fp if no
947 local variables had their addresses taken, but that's too hard now. */
951 memrefs_conflict_p (xsize, x, ysize, y, c)
/* Canonicalize both addresses, folding auto-increment side effects.  */
956 if (GET_CODE (x) == HIGH)
958 else if (GET_CODE (x) == LO_SUM)
961 x = canon_rtx (addr_side_effect_eval (x, xsize, 0));
962 if (GET_CODE (y) == HIGH)
964 else if (GET_CODE (y) == LO_SUM)
967 y = canon_rtx (addr_side_effect_eval (y, ysize, 0));
/* Identical addresses conflict exactly when the accessed ranges,
 offset by C, overlap.  */
969 if (rtx_equal_for_memref_p (x, y))
971 if (xsize <= 0 || ysize <= 0)
973 if (c >= 0 && xsize > c)
975 if (c < 0 && ysize+c > 0)
980 /* This code used to check for conflicts involving stack references and
981 globals but the base address alias code now handles these cases. */
983 if (GET_CODE (x) == PLUS)
985 /* The fact that X is canonicalized means that this
986 PLUS rtx is canonicalized. */
987 rtx x0 = XEXP (x, 0);
988 rtx x1 = XEXP (x, 1);
990 if (GET_CODE (y) == PLUS)
992 /* The fact that Y is canonicalized means that this
993 PLUS rtx is canonicalized. */
994 rtx y0 = XEXP (y, 0);
995 rtx y1 = XEXP (y, 1);
/* Cancel common terms from both sides before recursing, folding
 constant operands into the offset accumulator C.  */
997 if (rtx_equal_for_memref_p (x1, y1))
998 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
999 if (rtx_equal_for_memref_p (x0, y0))
1000 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1001 if (GET_CODE (x1) == CONST_INT)
1003 if (GET_CODE (y1) == CONST_INT)
1004 return memrefs_conflict_p (xsize, x0, ysize, y0,
1005 c - INTVAL (x1) + INTVAL (y1));
1007 return memrefs_conflict_p (xsize, x0, ysize, y,
1010 else if (GET_CODE (y1) == CONST_INT)
1011 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1015 else if (GET_CODE (x1) == CONST_INT)
1016 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1018 else if (GET_CODE (y) == PLUS)
1020 /* The fact that Y is canonicalized means that this
1021 PLUS rtx is canonicalized. */
1022 rtx y0 = XEXP (y, 0);
1023 rtx y1 = XEXP (y, 1);
1025 if (GET_CODE (y1) == CONST_INT)
1026 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
/* When both addresses share a top-level code, handle a few codes
 specially (case labels not visible in this extraction).  */
1031 if (GET_CODE (x) == GET_CODE (y))
1032 switch (GET_CODE (x))
1036 /* Handle cases where we expect the second operands to be the
1037 same, and check only whether the first operand would conflict
1040 rtx x1 = canon_rtx (XEXP (x, 1));
1041 rtx y1 = canon_rtx (XEXP (y, 1));
1042 if (! rtx_equal_for_memref_p (x1, y1))
1044 x0 = canon_rtx (XEXP (x, 0));
1045 y0 = canon_rtx (XEXP (y, 0));
1046 if (rtx_equal_for_memref_p (x0, y0))
1047 return (xsize == 0 || ysize == 0
1048 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1050 /* Can't properly adjust our sizes. */
1051 if (GET_CODE (x1) != CONST_INT)
/* Scale the sizes down by the common multiplier before recursing.  */
1053 xsize /= INTVAL (x1);
1054 ysize /= INTVAL (x1);
1056 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1060 /* Are these registers known not to be equal? */
1061 if (alias_invariant)
1063 unsigned int r_x = REGNO (x), r_y = REGNO (y);
1064 rtx i_x, i_y; /* invariant relationships of X and Y */
1066 i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
1067 i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];
1069 if (i_x == 0 && i_y == 0)
1072 if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
1073 ysize, i_y ? i_y : y, c))
1082 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1083 as an access with indeterminate size. Assume that references
1084 besides AND are aligned, so if the size of the other reference is
1085 at least as large as the alignment, assume no other overlap. */
1086 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
1088 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1090 return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
1092 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
1094 /* ??? If we are indexing far enough into the array/structure, we
1095 may yet be able to determine that we can not overlap. But we
1096 also need to know that we are far enough from the end not to overlap
1097 a following reference, so we do nothing with that for now. */
1098 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1100 return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
/* Two constant addresses: fold their difference into C and do the
 plain range-overlap test.  */
1105 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
1107 c += (INTVAL (y) - INTVAL (x));
1108 return (xsize <= 0 || ysize <= 0
1109 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1112 if (GET_CODE (x) == CONST)
1114 if (GET_CODE (y) == CONST)
1115 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1116 ysize, canon_rtx (XEXP (y, 0)), c);
1118 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1121 if (GET_CODE (y) == CONST)
1122 return memrefs_conflict_p (xsize, x, ysize,
1123 canon_rtx (XEXP (y, 0)), c);
/* Fallback: conflict unless the addresses are provably equal and the
 ranges provably disjoint.  */
1126 return (xsize < 0 || ysize < 0
1127 || (rtx_equal_for_memref_p (x, y)
1128 && (xsize == 0 || ysize == 0
1129 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1136 /* Functions to compute memory dependencies.
1138 Since we process the insns in execution order, we can build tables
1139 to keep track of what registers are fixed (and not aliased), what registers
1140 are varying in known ways, and what registers are varying in unknown
1143 If both memory references are volatile, then there must always be a
1144 dependence between the two references, since their order can not be
1145 changed. A volatile and non-volatile reference can be interchanged
1148 A MEM_IN_STRUCT reference at a non-QImode non-AND varying address can never
1149 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We must
1150 allow QImode aliasing because the ANSI C standard allows character
1151 pointers to alias anything. We are assuming that characters are
1152 always QImode here. We also must allow AND addresses, because they may
1153 generate accesses outside the object being referenced. This is used to
1154 generate aligned addresses from unaligned addresses, for instance, the
1155 alpha storeqi_unaligned pattern. */
1157 /* Read dependence: X is read after read in MEM takes place. There can
1158 only be a dependence here if both reads are volatile. */
1161 read_dependence (mem, x)
/* Reads never conflict unless both are volatile (their order is then
 fixed).  */
1165 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1168 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1169 MEM2 is a reference to a structure at a varying address, or returns
1170 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1171 value is returned MEM1 and MEM2 can never alias. VARIES_P is used
1172 to decide whether or not an address may vary; it should return
1173 nonzero whenever variation is possible. */
1176 fixed_scalar_and_varying_struct_p (mem1, mem2, varies_p)
1179 int (*varies_p) PROTO((rtx));
1181 rtx mem1_addr = XEXP (mem1, 0);
1182 rtx mem2_addr = XEXP (mem2, 0);
1184 if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1185 && !varies_p (mem1_addr) && varies_p (mem2_addr))
1186 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1190 if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1191 && varies_p (mem1_addr) && !varies_p (mem2_addr))
1192 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1199 /* Returns nonzero if something about the mode or address format MEM1
1200 indicates that it might well alias *anything*. */
/* NOTE(review): the `return' statements for each test are missing from
   this sampled extract; only the two guard conditions are visible.  */
1203 aliases_everything_p (mem)
1206 if (GET_MODE (mem) == QImode)
1207 /* ANSI C says that a `char*' can point to anything. */
1210 if (GET_CODE (XEXP (mem, 0)) == AND)
1211 /* If the address is an AND, it's very hard to know at what it is
1212 actually pointing. */
1218 /* True dependence: X is read after store in MEM takes place. */
/* NOTE(review): this extract omits interleaved lines (braces, the early
   `return' statements after each guard, and some declarations); the
   tokens below are preserved verbatim from the original.  */
1221 true_dependence (mem, mem_mode, x, varies)
1223 enum machine_mode mem_mode;
1225 int (*varies) PROTO((rtx));
1227 register rtx x_addr, mem_addr;
/* Two volatile references always conflict -- their order is fixed.  */
1229 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
/* Distinct alias sets cannot conflict.  */
1232 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1235 /* If X is an unchanging read, then it can't possibly conflict with any
1236 non-unchanging store. It may conflict with an unchanging write though,
1237 because there may be a single store to this address to initialize it.
1238 Just fall through to the code below to resolve the case where we have
1239 both an unchanging read and an unchanging write. This won't handle all
1240 cases optimally, but the possible performance loss should be
1242 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
/* VOIDmode means "use MEM's own mode" for the size computations below.  */
1245 if (mem_mode == VOIDmode)
1246 mem_mode = GET_MODE (mem);
1248 if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0), GET_MODE (x), mem_mode))
/* Canonicalize both addresses before the offset/size overlap test.  */
1251 x_addr = canon_rtx (XEXP (x, 0));
1252 mem_addr = canon_rtx (XEXP (mem, 0));
1254 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
1255 SIZE_FOR_MODE (x), x_addr, 0))
1258 if (aliases_everything_p (x))
1261 /* We cannot use aliases_everything_p to test MEM, since we must look
1262 at MEM_MODE, rather than GET_MODE (MEM). */
1263 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
1266 /* In true_dependence we also allow BLKmode to alias anything. Why
1267 don't we do this in anti_dependence and output_dependence? */
1268 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
/* No conflict iff one ref is a fixed scalar and the other a varying
   struct reference (see fixed_scalar_and_varying_struct_p above).  */
1271 return !fixed_scalar_and_varying_struct_p (mem, x, varies);
1274 /* Returns non-zero if a write to X might alias a previous read from
1275 (or, if WRITEP is non-zero, a write to) MEM. */
/* NOTE(review): sampled extract -- braces, early `return' statements, and
   the declaration of `fixed_scalar' (assigned at line 1313 below) are
   missing.  Shared worker for anti_dependence and output_dependence.  */
1278 write_dependence_p (mem, x, writep)
1283 rtx x_addr, mem_addr;
1286 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
1289 /* If MEM is an unchanging read, then it can't possibly conflict with
1290 the store to X, because there is at most one store to MEM, and it must
1291 have occurred somewhere before MEM. */
1292 if (!writep && RTX_UNCHANGING_P (mem))
1295 if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0), GET_MODE (x),
1300 mem = canon_rtx (mem);
1302 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1305 x_addr = XEXP (x, 0);
1306 mem_addr = XEXP (mem, 0);
1308 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
1309 SIZE_FOR_MODE (x), x_addr, 0))
1313 = fixed_scalar_and_varying_struct_p (mem, x, rtx_addr_varies_p);
/* Conflict unless exactly one side is the fixed scalar and the other
   side does not alias everything.  */
1315 return (!(fixed_scalar == mem && !aliases_everything_p (x))
1316 && !(fixed_scalar == x && !aliases_everything_p (mem)));
1319 /* Anti dependence: X is written after read in MEM takes place.
   Thin wrapper around write_dependence_p with WRITEP == 0 (MEM was
   read, not written).  */
1322 anti_dependence (mem, x)
1326 return write_dependence_p (mem, x, /*writep=*/0);
1329 /* Output dependence: X is written after store in MEM takes place.
   Thin wrapper around write_dependence_p with WRITEP == 1 (MEM was
   also a write).  */
1332 output_dependence (mem, x)
1336 return write_dependence_p (mem, x, /*writep=*/1);
1339 /* Returns non-zero if X might refer to something which is not
1340 local to the function and is not constant. */
/* NOTE(review): sampled extract -- the switch statement this dispatches
   through, several `return' statements, and intermediate braces are
   missing; tokens below are verbatim.  Used by mark_constant_function
   to decide whether the current function can be marked const.  */
1343 nonlocal_reference_p (x)
1347 register RTX_CODE code;
1350 code = GET_CODE (x);
/* For an insn, examine its pattern rather than the insn itself.  */
1352 if (GET_RTX_CLASS (code) == 'i')
1354 /* Constant functions are constant. */
1355 if (code == CALL_INSN && CONST_CALL_P (x))
1358 code = GET_CODE (x);
1364 if (GET_CODE (SUBREG_REG (x)) == REG)
1366 /* Global registers are not local. */
1367 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
1368 && global_regs[REGNO (SUBREG_REG (x)) + SUBREG_WORD (x)])
1376 /* Global registers are not local. */
1377 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
1391 /* Constants in the function's constants pool are constant. */
1392 if (CONSTANT_POOL_ADDRESS_P (x))
1397 /* Recursion introduces no additional considerations.
   (A CALL whose target symbol matches the current function's own
   assembler name is a self-call.)  */
1398 if (GET_CODE (XEXP (x, 0)) == MEM
1399 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1400 && strcmp(XSTR (XEXP (XEXP (x, 0), 0), 0),
1401 IDENTIFIER_POINTER (
1402 DECL_ASSEMBLER_NAME (current_function_decl))) == 0)
1407 /* Be overly conservative and consider any volatile memory
1408 reference as not local. */
1409 if (MEM_VOLATILE_P (x))
1411 base = find_base_term (XEXP (x, 0));
1414 /* Stack references are local. */
1415 if (GET_CODE (base) == ADDRESS && GET_MODE (base) == Pmode)
1417 /* Constants in the function's constant pool are constant. */
1418 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
1431 /* Recursively scan the operands of this expression. */
1434 register char *fmt = GET_RTX_FORMAT (code);
1437 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* 'e' operands recurse directly; 'E' operands are vectors.  */
1441 if (nonlocal_reference_p (XEXP (x, i)))
1447 for (j = 0; j < XVECLEN (x, i); j++)
1448 if (nonlocal_reference_p (XVECEXP (x, i, j)))
1457 /* Mark the function if it is constant.
   Sets TREE_READONLY on the current function's decl when no insn in the
   function references non-local, non-constant memory.  Bails out early
   for public, already-readonly, volatile, or VOIDmode functions.  */
1460 mark_constant_function ()
1464 if (TREE_PUBLIC (current_function_decl)
1465 || TREE_READONLY (current_function_decl)
1466 || TREE_THIS_VOLATILE (current_function_decl)
1467 || TYPE_MODE (TREE_TYPE (current_function_decl)) == VOIDmode)
1470 /* Determine if this is a constant function. */
1472 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1473 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1474 && nonlocal_reference_p (insn))
1477 /* Mark the function. */
1479 TREE_READONLY (current_function_decl) = 1;
/* Hard registers that may carry incoming pointer arguments; filled in by
   the one-time initialization code below and consulted each pass of
   init_alias_analysis.  */
1483 static HARD_REG_SET argument_registers;
/* NOTE(review): the header line of the enclosing function is missing from
   this extract -- presumably a once-per-compilation init routine; verify
   against the full file.  */
1490 #ifndef OUTGOING_REGNO
1491 #define OUTGOING_REGNO(N) N
1493 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1494 /* Check whether this register can hold an incoming pointer
1495 argument. FUNCTION_ARG_REGNO_P tests outgoing register
1496 numbers, so translate if necessary due to register windows. */
1497 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
1498 && HARD_REGNO_MODE_OK (i, Pmode))
1499 SET_HARD_REG_BIT (argument_registers, i);
/* Create the alias-set splay tree keyed by integer set numbers.  */
1501 alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
/* Set up the per-function alias-analysis tables: reg_known_value,
   reg_known_equiv_p, and reg_base_value.  Iterates over the insns up to
   MAX_ALIAS_LOOP_PASSES times, propagating base-value information until
   it reaches a fixed point.
   NOTE(review): sampled extract -- braces, some declarations (e.g. `set',
   `note', `changed', `pass', `base'), and a few statements are missing;
   all tokens below are verbatim.  */
1505 init_alias_analysis ()
1507 int maxreg = max_reg_num ();
1510 register unsigned int ui;
1513 reg_known_value_size = maxreg;
/* The arrays are biased by FIRST_PSEUDO_REGISTER so they can be indexed
   directly by pseudo register number.  */
1516 = (rtx *) oballoc ((maxreg - FIRST_PSEUDO_REGISTER) * sizeof (rtx))
1517 - FIRST_PSEUDO_REGISTER;
1519 oballoc (maxreg - FIRST_PSEUDO_REGISTER) - FIRST_PSEUDO_REGISTER;
1520 bzero ((char *) (reg_known_value + FIRST_PSEUDO_REGISTER),
1521 (maxreg-FIRST_PSEUDO_REGISTER) * sizeof (rtx));
1522 bzero (reg_known_equiv_p + FIRST_PSEUDO_REGISTER,
1523 (maxreg - FIRST_PSEUDO_REGISTER) * sizeof (char));
1525 /* Overallocate reg_base_value to allow some growth during loop
1526 optimization. Loop unrolling can create a large number of
1528 reg_base_value_size = maxreg * 2;
1529 reg_base_value = (rtx *)oballoc (reg_base_value_size * sizeof (rtx));
1530 new_reg_base_value = (rtx *)alloca (reg_base_value_size * sizeof (rtx));
1531 reg_seen = (char *)alloca (reg_base_value_size);
1532 bzero ((char *) reg_base_value, reg_base_value_size * sizeof (rtx));
1533 if (! reload_completed && flag_unroll_loops)
1535 alias_invariant = (rtx *)xrealloc (alias_invariant,
1536 reg_base_value_size * sizeof (rtx));
1537 bzero ((char *)alias_invariant, reg_base_value_size * sizeof (rtx));
1541 /* The basic idea is that each pass through this loop will use the
1542 "constant" information from the previous pass to propagate alias
1543 information through another level of assignments.
1545 This could get expensive if the assignment chains are long. Maybe
1546 we should throttle the number of iterations, possibly based on
1547 the optimization level or flag_expensive_optimizations.
1549 We could propagate more information in the first pass by making use
1550 of REG_N_SETS to determine immediately that the alias information
1551 for a pseudo is "constant".
1553 A program with an uninitialized variable can cause an infinite loop
1554 here. Instead of doing a full dataflow analysis to detect such problems
1555 we just cap the number of iterations for the loop.
1557 The state of the arrays for the set chain in question does not matter
1558 since the program has undefined behavior. */
1563 /* Assume nothing will change this iteration of the loop. */
1566 /* We want to assign the same IDs each iteration of this loop, so
1567 start counting from zero each iteration of the loop. */
1570 /* We're at the start of the function each iteration through the
1571 loop, so we're copying arguments. */
1572 copying_arguments = 1;
1574 /* Wipe the potential alias information clean for this pass. */
1575 bzero ((char *) new_reg_base_value, reg_base_value_size * sizeof (rtx));
1577 /* Wipe the reg_seen array clean. */
1578 bzero ((char *) reg_seen, reg_base_value_size);
1580 /* Mark all hard registers which may contain an address.
1581 The stack, frame and argument pointers may contain an address.
1582 An argument register which can hold a Pmode value may contain
1583 an address even if it is not in BASE_REGS.
1585 The address expression is VOIDmode for an argument and
1586 Pmode for other registers. */
1588 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1589 if (TEST_HARD_REG_BIT (argument_registers, i))
1590 new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
1591 gen_rtx_REG (Pmode, i));
1593 new_reg_base_value[STACK_POINTER_REGNUM]
1594 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
1595 new_reg_base_value[ARG_POINTER_REGNUM]
1596 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
1597 new_reg_base_value[FRAME_POINTER_REGNUM]
1598 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
1599 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1600 new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
1601 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
/* The incoming struct-return pointer and the static chain, when they
   live in hard registers, are also address roots.  */
1603 if (struct_value_incoming_rtx
1604 && GET_CODE (struct_value_incoming_rtx) == REG)
1605 new_reg_base_value[REGNO (struct_value_incoming_rtx)]
1606 = gen_rtx_ADDRESS (Pmode, struct_value_incoming_rtx);
1608 if (static_chain_rtx
1609 && GET_CODE (static_chain_rtx) == REG)
1610 new_reg_base_value[REGNO (static_chain_rtx)]
1611 = gen_rtx_ADDRESS (Pmode, static_chain_rtx);
1613 /* Walk the insns adding values to the new_reg_base_value array. */
1614 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1616 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
1617 if (prologue_epilogue_contains (insn))
1620 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1623 /* If this insn has a noalias note, process it, Otherwise,
1624 scan for sets. A simple set will have no side effects
1625 which could change the base value of any other register. */
1627 if (GET_CODE (PATTERN (insn)) == SET
1628 && (find_reg_note (insn, REG_NOALIAS, NULL_RTX)))
1629 record_set (SET_DEST (PATTERN (insn)), NULL_RTX);
1631 note_stores (PATTERN (insn), record_set);
/* Record REG_EQUAL/REG_EQUIV notes for pseudos as known values.
   REG_EQUAL is only trusted when the pseudo is set exactly once.  */
1633 set = single_set (insn);
1636 && GET_CODE (SET_DEST (set)) == REG
1637 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1638 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1639 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
1640 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1641 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
1642 && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
1644 int regno = REGNO (SET_DEST (set));
1645 reg_known_value[regno] = XEXP (note, 0);
1646 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
1649 else if (GET_CODE (insn) == NOTE
1650 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1651 copying_arguments = 0;
1654 /* Now propagate values from new_reg_base_value to reg_base_value. */
1655 for (ui = 0; ui < reg_base_value_size; ui++)
1657 if (new_reg_base_value[ui]
1658 && new_reg_base_value[ui] != reg_base_value[ui]
1659 && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
1661 reg_base_value[ui] = new_reg_base_value[ui];
/* Keep iterating until a pass makes no changes or the pass cap hits.  */
1666 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
1668 /* Fill in the remaining entries. */
1669 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
1670 if (reg_known_value[i] == 0)
1671 reg_known_value[i] = regno_reg_rtx[i];
1673 /* Simplify the reg_base_value array so that no register refers to
1674 another register, except to special registers indirectly through
1675 ADDRESS expressions.
1677 In theory this loop can take as long as O(registers^2), but unless
1678 there are very long dependency chains it will run in close to linear
1681 This loop may not be needed any longer now that the main loop does
1682 a better job at propagating alias information. */
1688 for (ui = 0; ui < reg_base_value_size; ui++)
1690 rtx base = reg_base_value[ui];
1691 if (base && GET_CODE (base) == REG)
1693 unsigned int base_regno = REGNO (base);
1694 if (base_regno == ui) /* register set from itself */
1695 reg_base_value[ui] = 0;
1697 reg_base_value[ui] = reg_base_value[base_regno];
1702 while (changed && pass < MAX_ALIAS_LOOP_PASSES);
/* new_reg_base_value was alloca'd; drop the dangling pointer.  */
1704 new_reg_base_value = 0;
/* Tear down the per-function alias-analysis tables set up by
   init_alias_analysis.  NOTE(review): the tail of this function (closing
   brace) is beyond the end of this extract.  */
1709 end_alias_analysis ()
1711 reg_known_value = 0;
1713 reg_base_value_size = 0;
1714 if (alias_invariant)
1716 free ((char *)alias_invariant);
1717 alias_invariant = 0;