1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999, 2000 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
28 #include "insn-flags.h"
31 #include "hard-reg-set.h"
36 #include "splay-tree.h"
39 /* The alias sets assigned to MEMs assist the back-end in determining
40 which MEMs can alias which other MEMs. In general, two MEMs in
41 different alias sets cannot alias each other, with one important
42 exception. Consider something like:
44 struct S {int i; double d; };
46 a store to an `S' can alias something of either type `int' or type
47 `double'. (However, a store to an `int' cannot alias a `double'
48 and vice versa.) We indicate this via a tree structure that looks
56 (The arrows are directed and point downwards.)
57 In this situation we say the alias set for `struct S' is the
58 `superset' and that those for `int' and `double' are `subsets'.
60 To see whether two alias sets can point to the same memory, we must go
61 down the list of descendants of each and see if there is some alias set
62 in common. We need not trace past immediate descendants, however, since
63 we propagate all grandchildren up one level.
65 Alias set zero is implicitly a superset of all other alias sets.
66 However, there is no actual entry for alias set zero. It is an
67 error to attempt to explicitly construct a subset of zero. */
/* One node of the alias-set hierarchy: an alias set number plus a splay
   tree of all its descendant (subset) alias sets.
   NOTE(review): this listing is truncated; the struct's field declarations
   and closing brace are not visible here.  */
69 typedef struct alias_set_entry
71 /* The alias set number, as stored in MEM_ALIAS_SET. */
74 /* The children of the alias set. These are not just the immediate
75 children, but, in fact, all descendants. So, if we have:
77 struct T { struct S s; float f; }
79 continuing our example above, the children here will be all of
80 `int', `double', `float', and `struct S'. */
/* Forward declarations for the static helper functions defined below.
   NOTE(review): some prototypes are split across missing lines (e.g. the
   trailing parameter lists of memrefs_conflict_p and base_alias_check).  */
84 static int rtx_equal_for_memref_p PARAMS ((rtx, rtx));
85 static rtx find_symbolic_term PARAMS ((rtx));
86 static rtx get_addr PARAMS ((rtx));
87 static int memrefs_conflict_p PARAMS ((int, rtx, int, rtx,
89 static void record_set PARAMS ((rtx, rtx, void *));
90 static rtx find_base_term PARAMS ((rtx));
91 static int base_alias_check PARAMS ((rtx, rtx, enum machine_mode,
93 static rtx find_base_value PARAMS ((rtx));
94 static int mems_in_disjoint_alias_sets_p PARAMS ((rtx, rtx));
95 static int insert_subset_children PARAMS ((splay_tree_node, void*));
96 static alias_set_entry get_alias_set_entry PARAMS ((int));
97 static rtx fixed_scalar_and_varying_struct_p PARAMS ((rtx, rtx, rtx, rtx,
99 static int aliases_everything_p PARAMS ((rtx));
100 static int write_dependence_p PARAMS ((rtx, rtx, int));
101 static int nonlocal_reference_p PARAMS ((rtx));
/* Convenience macros used throughout the alias analysis code below.  */
103 /* Set up all info needed to perform alias analysis on memory references. */
105 /* Returns the size in bytes of the mode of X. */
106 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
108 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
109 different alias sets. We ignore alias sets in functions making use
110 of variable arguments because the va_arg macros on some systems are
112 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
113 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
115 /* Cap the number of passes we make over the insns propagating alias
116 information through set chains. 10 is a completely arbitrary choice. */
117 #define MAX_ALIAS_LOOP_PASSES 10
119 /* reg_base_value[N] gives an address to which register N is related.
120 If all sets after the first add or subtract to the current value
121 or otherwise modify it so it does not point to a different top level
122 object, reg_base_value[N] is equal to the address part of the source
125 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
126 expressions represent certain special values: function arguments and
127 the stack, frame, and argument pointers.
129 The contents of an ADDRESS is not normally used, the mode of the
130 ADDRESS determines whether the ADDRESS is a function argument or some
131 other special value. Pointer equality, not rtx_equal_p, determines whether
132 two ADDRESS expressions refer to the same base address.
134 The only use of the contents of an ADDRESS is for determining if the
135 current function performs nonlocal memory references for the
136 purposes of marking the function as a constant function. */
138 static rtx *reg_base_value;
/* In-progress table updated by record_set while scanning insns.  */
139 static rtx *new_reg_base_value;
140 static unsigned int reg_base_value_size; /* size of reg_base_value array */
/* Safe accessor: yields 0 for register numbers beyond the table.  */
142 #define REG_BASE_VALUE(X) \
143 ((unsigned) REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
/* File-scope state shared by the alias-analysis routines below.  */
145 /* Vector of known invariant relationships between registers. Set in
146 loop unrolling. Indexed by register number, if nonzero the value
147 is an expression describing this register in terms of another.
149 The length of this array is REG_BASE_VALUE_SIZE.
151 Because this array contains only pseudo registers it has no effect
153 static rtx *alias_invariant;
155 /* Vector indexed by N giving the initial (unchanging) value known for
156 pseudo-register N. This array is initialized in
157 init_alias_analysis, and does not change until end_alias_analysis
159 rtx *reg_known_value;
161 /* Indicates number of valid entries in reg_known_value. */
162 static unsigned int reg_known_value_size;
164 /* Vector recording for each reg_known_value whether it is due to a
165 REG_EQUIV note. Future passes (viz., reload) may replace the
166 pseudo with the equivalent expression and so we account for the
167 dependences that would be introduced if that happens.
169 The REG_EQUIV notes created in assign_parms may mention the arg
170 pointer, and there are explicit insns in the RTL that modify the
171 arg pointer. Thus we must ensure that such insns don't get
172 scheduled across each other because that would invalidate the
173 REG_EQUIV notes. One could argue that the REG_EQUIV notes are
174 wrong, but solving the problem in the scheduler will likely give
175 better code, so we do it here. */
176 char *reg_known_equiv_p;
178 /* True when scanning insns from the start of the rtl to the
179 NOTE_INSN_FUNCTION_BEG note. */
180 static int copying_arguments;
182 /* The splay-tree used to store the various alias set entries. */
183 static splay_tree alias_sets;
185 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
186 such an entry, or NULL otherwise. */
188 static alias_set_entry
189 get_alias_set_entry (alias_set)
/* Look ALIAS_SET up in the global alias_sets splay tree; the node's
   value field holds the alias_set_entry.  */
193 = splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);
195 return sn != 0 ? ((alias_set_entry) sn->value) : 0;
198 /* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
199 the two MEMs cannot alias each other. */
202 mems_in_disjoint_alias_sets_p (mem1, mem2)
208 #ifdef ENABLE_CHECKING
209 /* Perform a basic sanity check. Namely, that there are no alias sets
210 if we're not using strict aliasing. This helps to catch bugs
211 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
212 where a MEM is allocated in some way other than by the use of
213 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
214 use alias sets to indicate that spilled registers cannot alias each
215 other, we might need to remove this check. */
216 if (! flag_strict_aliasing
217 && (MEM_ALIAS_SET (mem1) != 0 || MEM_ALIAS_SET (mem2) != 0))
221 /* The code used in varargs macros is often not conforming ANSI C,
222 which can trick the compiler into making incorrect aliasing
223 assumptions in these functions. So, we don't use alias sets in
224 such a function. FIXME: This should be moved into the front-end;
225 it is a language-dependent notion, and there's no reason not to
226 still use these checks to handle globals. */
227 if (current_function_stdarg || current_function_varargs)
230 /* If we have no alias set information for one of the MEMs, we have to assume
231 it can alias anything. */
232 if (MEM_ALIAS_SET (mem1) == 0 || MEM_ALIAS_SET (mem2) == 0)
235 /* If the two alias sets are the same, they may alias. */
236 if (MEM_ALIAS_SET (mem1) == MEM_ALIAS_SET (mem2))
239 /* Iterate through each of the children of the first alias set,
240 comparing it with the second alias set. */
241 ase = get_alias_set_entry (MEM_ALIAS_SET (mem1));
242 if (ase != 0 && splay_tree_lookup (ase->children,
243 (splay_tree_key) MEM_ALIAS_SET (mem2)))
246 /* Now do the same, but with the alias sets reversed. */
247 ase = get_alias_set_entry (MEM_ALIAS_SET (mem2));
248 if (ase != 0 && splay_tree_lookup (ase->children,
249 (splay_tree_key) MEM_ALIAS_SET (mem1)))
252 /* The two MEMs are in distinct alias sets, and neither one is the
253 child of the other. Therefore, they cannot alias. */
257 /* Insert the NODE into the splay tree given by DATA. Used by
258 record_alias_subset via splay_tree_foreach. */
/* DATA is really the superset entry's children splay tree (see the
   splay_tree_foreach call in record_alias_subset).  */
261 insert_subset_children (node, data)
262 splay_tree_node node;
265 splay_tree_insert ((splay_tree) data, node->key, node->value);
270 /* Indicate that things in SUBSET can alias things in SUPERSET, but
271 not vice versa. For example, in C, a store to an `int' can alias a
272 structure containing an `int', but not vice versa. Here, the
273 structure would be the SUPERSET and `int' the SUBSET. This
274 function should be called only once per SUPERSET/SUBSET pair.
276 It is illegal for SUPERSET to be zero; everything is implicitly a
277 subset of alias set zero. */
280 record_alias_subset (superset, subset)
284 alias_set_entry superset_entry;
285 alias_set_entry subset_entry;
290 superset_entry = get_alias_set_entry (superset);
291 if (superset_entry == 0)
293 /* Create an entry for the SUPERSET, so that we have a place to
294 attach the SUBSET. */
296 = (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
297 superset_entry->alias_set = superset;
298 superset_entry->children
299 = splay_tree_new (splay_tree_compare_ints, 0, 0);
300 splay_tree_insert (alias_sets, (splay_tree_key) superset,
301 (splay_tree_value) superset_entry);
305 subset_entry = get_alias_set_entry (subset);
307 /* If there is an entry for the subset, enter all of its children
308 (if they are not already present) as children of the SUPERSET. */
/* This flattening is what lets mems_in_disjoint_alias_sets_p stop at
   immediate children: all grandchildren are propagated up here.  */
310 splay_tree_foreach (subset_entry->children,
311 insert_subset_children,
312 superset_entry->children);
314 /* Enter the SUBSET itself as a child of the SUPERSET. */
315 splay_tree_insert (superset_entry->children,
316 (splay_tree_key) subset, 0);
319 /* Record that component types of TYPE, if any, are part of that type for
320 aliasing purposes. For record types, we only record component types
321 for fields that are marked addressable. For array types, we always
322 record the component types, so the front end should not call this
323 function if the individual components aren't addressable. */
326 record_component_aliases (type)
329 int superset = get_alias_set (type);
336 switch (TREE_CODE (type))
340 subset = get_alias_set (TREE_TYPE (type));
342 record_alias_subset (superset, subset);
347 case QUAL_UNION_TYPE:
/* Walk every field; only addressable fields with a distinct, nonzero
   alias set become subsets of this type's alias set.  */
348 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
350 subset = get_alias_set (TREE_TYPE (field));
351 if (TREE_ADDRESSABLE (field) && subset != 0 && subset != superset)
352 record_alias_subset (superset, subset);
361 /* Inside SRC, the source of a SET, find a base address. */
/* Recursive walk over SRC guessing which sub-expression carries the
   base address (symbol, argument, stack slot, etc.).  */
364 find_base_value (src)
367 switch (GET_CODE (src))
374 /* At the start of a function, argument registers have known base
375 values which may be lost later. Returning an ADDRESS
376 expression here allows optimization based on argument values
377 even when the argument registers are used for other purposes. */
378 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
379 return new_reg_base_value[REGNO (src)];
381 /* If a pseudo has a known base value, return it. Do not do this
382 for hard regs since it can result in a circular dependency
383 chain for registers which have values at function entry.
385 The test above is not sufficient because the scheduler may move
386 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
387 if (REGNO (src) >= FIRST_PSEUDO_REGISTER
388 && (unsigned) REGNO (src) < reg_base_value_size
389 && reg_base_value[REGNO (src)])
390 return reg_base_value[REGNO (src)];
395 /* Check for an argument passed in memory. Only record in the
396 copying-arguments block; it is too hard to track changes
398 if (copying_arguments
399 && (XEXP (src, 0) == arg_pointer_rtx
400 || (GET_CODE (XEXP (src, 0)) == PLUS
401 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
402 return gen_rtx_ADDRESS (VOIDmode, src);
407 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
410 /* ... fall through ... */
415 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
417 /* If either operand is a REG, then see if we already have
418 a known value for it. */
419 if (GET_CODE (src_0) == REG)
421 temp = find_base_value (src_0);
426 if (GET_CODE (src_1) == REG)
428 temp = find_base_value (src_1);
433 /* Guess which operand is the base address:
434 If either operand is a symbol, then it is the base. If
435 either operand is a CONST_INT, then the other is the base. */
436 if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
437 return find_base_value (src_0);
438 else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
439 return find_base_value (src_1);
441 /* This might not be necessary anymore:
442 If either operand is a REG that is a known pointer, then it
444 else if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
445 return find_base_value (src_0);
446 else if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
447 return find_base_value (src_1);
453 /* The standard form is (lo_sum reg sym) so look only at the
455 return find_base_value (XEXP (src, 1));
458 /* If the second operand is constant set the base
459 address to the first operand. */
460 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
461 return find_base_value (XEXP (src, 0));
465 case SIGN_EXTEND: /* used for NT/Alpha pointers */
467 return find_base_value (XEXP (src, 0));
476 /* Called from init_alias_analysis indirectly through note_stores. */
478 /* While scanning insns to find base values, reg_seen[N] is nonzero if
479 register N has been set in this function. */
480 static char *reg_seen;
482 /* Addresses which are known not to alias anything else are identified
483 by a unique integer. */
484 static int unique_id;
/* note_stores callback: update new_reg_base_value[] for the register
   set by DEST.  SET is the SET or CLOBBER rtx; DATA is unused.  */
487 record_set (dest, set, data)
489 void *data ATTRIBUTE_UNUSED;
491 register unsigned regno;
494 if (GET_CODE (dest) != REG)
497 regno = REGNO (dest);
499 if (regno >= reg_base_value_size)
504 /* A CLOBBER wipes out any old value but does not prevent a previously
505 unset register from acquiring a base address (i.e. reg_seen is not
507 if (GET_CODE (set) == CLOBBER)
509 new_reg_base_value[regno] = 0;
518 new_reg_base_value[regno] = 0;
/* An address known to alias nothing else: tag it with a fresh id.  */
522 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
523 GEN_INT (unique_id++));
527 /* This is not the first set. If the new value is not related to the
528 old value, forget the base value. Note that the following code is
530 extern int x, y; int *p = &x; p += (&y-&x);
531 ANSI C does not allow computing the difference of addresses
532 of distinct top level objects. */
533 if (new_reg_base_value[regno])
534 switch (GET_CODE (src))
539 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
540 new_reg_base_value[regno] = 0;
543 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
544 new_reg_base_value[regno] = 0;
547 new_reg_base_value[regno] = 0;
550 /* If this is the first set of a register, record the value. */
551 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
552 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
553 new_reg_base_value[regno] = find_base_value (src);
558 /* Called from loop optimization when a new pseudo-register is
559 created. It indicates that REGNO is being set to VAL. If INVARIANT
560 is true then this value also describes an invariant relationship
561 which can be used to deduce that two registers with unknown values
565 record_base_value (regno, val, invariant)
570 if (regno >= reg_base_value_size)
573 if (invariant && alias_invariant)
574 alias_invariant[regno] = val;
/* If VAL is itself a register, just copy its known base value.  */
576 if (GET_CODE (val) == REG)
578 if (REGNO (val) < reg_base_value_size)
579 reg_base_value[regno] = reg_base_value[REGNO (val)];
584 reg_base_value[regno] = find_base_value (val);
587 /* Returns a canonical version of X, from the point of view of alias
588 analysis. (For example, if X is a MEM whose address is a register,
589 and the register has a known value (say a SYMBOL_REF), then a MEM
590 whose address is the SYMBOL_REF is returned.) */
596 /* Recursively look for equivalences. */
597 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
598 && REGNO (x) < reg_known_value_size)
599 return reg_known_value[REGNO (x)] == x
600 ? x : canon_rtx (reg_known_value[REGNO (x)]);
601 else if (GET_CODE (x) == PLUS)
603 rtx x0 = canon_rtx (XEXP (x, 0));
604 rtx x1 = canon_rtx (XEXP (x, 1));
/* Only rebuild the PLUS if canonicalization changed an operand.  */
606 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
608 /* We can tolerate LO_SUMs being offset here; these
609 rtl are used for nothing other than comparisons. */
610 if (GET_CODE (x0) == CONST_INT)
611 return plus_constant_for_output (x1, INTVAL (x0));
612 else if (GET_CODE (x1) == CONST_INT)
613 return plus_constant_for_output (x0, INTVAL (x1));
614 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
618 /* This gives us much better alias analysis when called from
619 the loop optimizer. Note we want to leave the original
620 MEM alone, but need to return the canonicalized MEM with
621 all the flags with their original values. */
622 else if (GET_CODE (x) == MEM)
624 rtx addr = canon_rtx (XEXP (x, 0));
626 if (addr != XEXP (x, 0))
628 rtx new = gen_rtx_MEM (GET_MODE (x), addr);
630 MEM_COPY_ATTRIBUTES (new, x);
637 /* Return 1 if X and Y are identical-looking rtx's.
639 We use the data in reg_known_value above to see if two registers with
640 different numbers are, in fact, equivalent. */
643 rtx_equal_for_memref_p (x, y)
648 register enum rtx_code code;
649 register const char *fmt;
651 if (x == 0 && y == 0)
653 if (x == 0 || y == 0)
663 /* Rtx's of different codes cannot be equal. */
664 if (code != GET_CODE (y))
667 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
668 (REG:SI x) and (REG:HI x) are NOT equivalent. */
670 if (GET_MODE (x) != GET_MODE (y))
673 /* Some RTL can be compared without a recursive examination. */
677 return REGNO (x) == REGNO (y);
680 return XEXP (x, 0) == XEXP (y, 0);
683 return XSTR (x, 0) == XSTR (y, 0);
687 /* There's no need to compare the contents of CONST_DOUBLEs or
688 CONST_INTs because pointer equality is a good enough
689 comparison for these nodes. */
693 return (REGNO (XEXP (x, 0)) == REGNO (XEXP (y, 0))
694 && XINT (x, 1) == XINT (y, 1));
700 /* For commutative operations, the RTXs match if the operands match in any
701 order. Also handle the simple binary and unary cases without a loop. */
702 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
703 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
704 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
705 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
706 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
707 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
708 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
709 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
710 else if (GET_RTX_CLASS (code) == '1')
711 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
713 /* Compare the elements. If any pair of corresponding elements
714 fail to match, return 0 for the whole thing.
716 Limit cases to types which actually appear in addresses. */
718 fmt = GET_RTX_FORMAT (code);
719 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
724 if (XINT (x, i) != XINT (y, i))
729 /* Two vectors must have the same length. */
730 if (XVECLEN (x, i) != XVECLEN (y, i))
733 /* And the corresponding elements must match. */
734 for (j = 0; j < XVECLEN (x, i); j++)
735 if (rtx_equal_for_memref_p (XVECEXP (x, i, j),
736 XVECEXP (y, i, j)) == 0)
741 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
745 /* This can happen for an asm which clobbers memory. */
749 /* It is believed that rtx's at this level will never
750 contain anything but integers and other rtx's,
751 except for within LABEL_REFs and SYMBOL_REFs. */
759 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
760 X and return it, or return 0 if none found. */
763 find_symbolic_term (x)
767 register enum rtx_code code;
768 register const char *fmt;
771 if (code == SYMBOL_REF || code == LABEL_REF)
/* Other "object" rtx (class 'o') cannot contain a symbolic term.  */
773 if (GET_RTX_CLASS (code) == 'o')
/* Recurse over the operands, scanning 'e' and 'E' format slots.  */
776 fmt = GET_RTX_FORMAT (code);
777 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
783 t = find_symbolic_term (XEXP (x, i));
787 else if (fmt[i] == 'E')
/* NOTE(review): body of find_base_term; the function header line is
   missing from this truncated listing.  Returns the base (SYMBOL_REF,
   LABEL_REF, or ADDRESS) that address X is relative to, if known.  */
798 struct elt_loc_list *l;
800 switch (GET_CODE (x))
803 return REG_BASE_VALUE (x);
806 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
812 return find_base_term (XEXP (x, 0));
/* For a cselib VALUE, try each known location until one yields a base.  */
815 val = CSELIB_VAL_PTR (x);
816 for (l = val->locs; l; l = l->next)
817 if ((x = find_base_term (l->loc)) != 0)
823 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
830 rtx tmp1 = XEXP (x, 0);
831 rtx tmp2 = XEXP (x, 1);
833 /* This is a little bit tricky since we have to determine which of
834 the two operands represents the real base address. Otherwise this
835 routine may return the index register instead of the base register.
837 That may cause us to believe no aliasing was possible, when in
838 fact aliasing is possible.
840 We use a few simple tests to guess the base register. Additional
841 tests can certainly be added. For example, if one of the operands
842 is a shift or multiply, then it must be the index register and the
843 other operand is the base register. */
845 /* If either operand is known to be a pointer, then use it
846 to determine the base term. */
847 if (REG_P (tmp1) && REGNO_POINTER_FLAG (REGNO (tmp1)))
848 return find_base_term (tmp1);
850 if (REG_P (tmp2) && REGNO_POINTER_FLAG (REGNO (tmp2)))
851 return find_base_term (tmp2);
853 /* Neither operand was known to be a pointer. Go ahead and find the
854 base term for both operands. */
855 tmp1 = find_base_term (tmp1);
856 tmp2 = find_base_term (tmp2);
858 /* If either base term is named object or a special address
859 (like an argument or stack reference), then use it for the
862 && (GET_CODE (tmp1) == SYMBOL_REF
863 || GET_CODE (tmp1) == LABEL_REF
864 || (GET_CODE (tmp1) == ADDRESS
865 && GET_MODE (tmp1) != VOIDmode)))
869 && (GET_CODE (tmp2) == SYMBOL_REF
870 || GET_CODE (tmp2) == LABEL_REF
871 || (GET_CODE (tmp2) == ADDRESS
872 && GET_MODE (tmp2) != VOIDmode)))
875 /* We could not determine which of the two operands was the
876 base register and which was the index. So we can determine
877 nothing from the base alias check. */
882 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
883 return REG_BASE_VALUE (XEXP (x, 0));
895 /* Return 0 if the addresses X and Y are known to point to different
896 objects, 1 if they might be pointers to the same object. */
899 base_alias_check (x, y, x_mode, y_mode)
901 enum machine_mode x_mode, y_mode;
903 rtx x_base = find_base_term (x);
904 rtx y_base = find_base_term (y);
906 /* If the address itself has no known base see if a known equivalent
907 value has one. If either address still has no known base, nothing
908 is known about aliasing. */
913 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
916 x_base = find_base_term (x_c);
924 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
927 y_base = find_base_term (y_c);
932 /* If the base addresses are equal nothing is known about aliasing. */
933 if (rtx_equal_p (x_base, y_base))
936 /* The base addresses of the read and write are different expressions.
937 If they are both symbols and they are not accessed via AND, there is
938 no conflict. We can bring knowledge of object alignment into play
939 here. For example, on alpha, "char a, b;" can alias one another,
940 though "char a; long b;" cannot. */
941 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
943 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
/* An AND address may reach outside the nominal object; only safe when
   the other reference is at least as large as the alignment mask.  */
945 if (GET_CODE (x) == AND
946 && (GET_CODE (XEXP (x, 1)) != CONST_INT
947 || GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
949 if (GET_CODE (y) == AND
950 && (GET_CODE (XEXP (y, 1)) != CONST_INT
951 || GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
953 /* Differing symbols never alias. */
957 /* If one address is a stack reference there can be no alias:
958 stack references using different base registers do not alias,
959 a stack reference can not alias a parameter, and a stack reference
960 can not alias a global. */
961 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
962 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
965 if (! flag_argument_noalias)
968 if (flag_argument_noalias > 1)
971 /* Weak noalias assertion (arguments are distinct, but may match globals). */
972 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
975 /* Convert the address X into something we can use. This is done by returning
976 it unchanged unless it is a value; in the latter case we call cselib to get
977 a more useful rtx. */
983 struct elt_loc_list *l;
985 if (GET_CODE (x) != VALUE)
987 v = CSELIB_VAL_PTR (x);
/* Prefer a constant location; failing that, any location that is
   neither a REG nor a MEM.  */
988 for (l = v->locs; l; l = l->next)
989 if (CONSTANT_P (l->loc))
991 for (l = v->locs; l; l = l->next)
992 if (GET_CODE (l->loc) != REG && GET_CODE (l->loc) != MEM)
999 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
1000 where SIZE is the size in bytes of the memory reference. If ADDR
1001 is not modified by the memory reference then ADDR is returned. */
1004 addr_side_effect_eval (addr, size, n_refs)
/* Pre-modify codes count the access itself (hence n_refs + 1);
   post-modify codes apply only the preceding n_refs accesses.  */
1011 switch (GET_CODE (addr))
1014 offset = (n_refs + 1) * size;
1017 offset = -(n_refs + 1) * size;
1020 offset = n_refs * size;
1023 offset = -n_refs * size;
1031 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), GEN_INT (offset));
1033 addr = XEXP (addr, 0);
1038 /* Return nonzero if X and Y (memory addresses) could reference the
1039 same location in memory. C is an offset accumulator. When
1040 C is nonzero, we are testing aliases between X and Y + C.
1041 XSIZE is the size in bytes of the X reference,
1042 similarly YSIZE is the size in bytes for Y.
1044 If XSIZE or YSIZE is zero, we do not know the amount of memory being
1045 referenced (the reference was BLKmode), so make the most pessimistic
1048 If XSIZE or YSIZE is negative, we may access memory outside the object
1049 being referenced as a side effect. This can happen when using AND to
1050 align memory references, as is done on the Alpha.
1052 Nice to notice that varying addresses cannot conflict with fp if no
1053 local variables had their addresses taken, but that's too hard now. */
1056 memrefs_conflict_p (xsize, x, ysize, y, c)
/* First resolve cselib VALUEs and strip HIGH/LO_SUM wrappers, then
   canonicalize both addresses, folding in auto-inc side effects.  */
1061 if (GET_CODE (x) == VALUE)
1063 if (GET_CODE (y) == VALUE)
1065 if (GET_CODE (x) == HIGH)
1067 else if (GET_CODE (x) == LO_SUM)
1070 x = canon_rtx (addr_side_effect_eval (x, xsize, 0));
1071 if (GET_CODE (y) == HIGH)
1073 else if (GET_CODE (y) == LO_SUM)
1076 y = canon_rtx (addr_side_effect_eval (y, ysize, 0));
1078 if (rtx_equal_for_memref_p (x, y))
1080 if (xsize <= 0 || ysize <= 0)
1082 if (c >= 0 && xsize > c)
1084 if (c < 0 && ysize+c > 0)
1089 /* This code used to check for conflicts involving stack references and
1090 globals but the base address alias code now handles these cases. */
1092 if (GET_CODE (x) == PLUS)
1094 /* The fact that X is canonicalized means that this
1095 PLUS rtx is canonicalized. */
1096 rtx x0 = XEXP (x, 0);
1097 rtx x1 = XEXP (x, 1);
1099 if (GET_CODE (y) == PLUS)
1101 /* The fact that Y is canonicalized means that this
1102 PLUS rtx is canonicalized. */
1103 rtx y0 = XEXP (y, 0);
1104 rtx y1 = XEXP (y, 1);
1106 if (rtx_equal_for_memref_p (x1, y1))
1107 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1108 if (rtx_equal_for_memref_p (x0, y0))
1109 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1110 if (GET_CODE (x1) == CONST_INT)
1112 if (GET_CODE (y1) == CONST_INT)
1113 return memrefs_conflict_p (xsize, x0, ysize, y0,
1114 c - INTVAL (x1) + INTVAL (y1));
1116 return memrefs_conflict_p (xsize, x0, ysize, y,
1119 else if (GET_CODE (y1) == CONST_INT)
1120 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1124 else if (GET_CODE (x1) == CONST_INT)
1125 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1127 else if (GET_CODE (y) == PLUS)
1129 /* The fact that Y is canonicalized means that this
1130 PLUS rtx is canonicalized. */
1131 rtx y0 = XEXP (y, 0);
1132 rtx y1 = XEXP (y, 1);
1134 if (GET_CODE (y1) == CONST_INT)
1135 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1140 if (GET_CODE (x) == GET_CODE (y))
1141 switch (GET_CODE (x))
1145 /* Handle cases where we expect the second operands to be the
1146 same, and check only whether the first operand would conflict
1149 rtx x1 = canon_rtx (XEXP (x, 1));
1150 rtx y1 = canon_rtx (XEXP (y, 1));
1151 if (! rtx_equal_for_memref_p (x1, y1))
1153 x0 = canon_rtx (XEXP (x, 0));
1154 y0 = canon_rtx (XEXP (y, 0));
1155 if (rtx_equal_for_memref_p (x0, y0))
1156 return (xsize == 0 || ysize == 0
1157 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1159 /* Can't properly adjust our sizes. */
1160 if (GET_CODE (x1) != CONST_INT)
1162 xsize /= INTVAL (x1);
1163 ysize /= INTVAL (x1);
1165 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1169 /* Are these registers known not to be equal? */
1170 if (alias_invariant)
1172 unsigned int r_x = REGNO (x), r_y = REGNO (y);
1173 rtx i_x, i_y; /* invariant relationships of X and Y */
1175 i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
1176 i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];
1178 if (i_x == 0 && i_y == 0)
1181 if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
1182 ysize, i_y ? i_y : y, c))
1191 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1192 as an access with indeterminate size. Assume that references
1193 besides AND are aligned, so if the size of the other reference is
1194 at least as large as the alignment, assume no other overlap. */
1195 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
1197 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1199 return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
1201 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
1203 /* ??? If we are indexing far enough into the array/structure, we
1204 may yet be able to determine that we can not overlap. But we
1205 also need to know that we are far enough from the end not to overlap
1206 a following reference, so we do nothing with that for now. */
1207 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1209 return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
1214 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
1216 c += (INTVAL (y) - INTVAL (x));
1217 return (xsize <= 0 || ysize <= 0
1218 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1221 if (GET_CODE (x) == CONST)
1223 if (GET_CODE (y) == CONST)
1224 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1225 ysize, canon_rtx (XEXP (y, 0)), c);
1227 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1230 if (GET_CODE (y) == CONST)
1231 return memrefs_conflict_p (xsize, x, ysize,
1232 canon_rtx (XEXP (y, 0)), c);
1235 return (xsize < 0 || ysize < 0
1236 || (rtx_equal_for_memref_p (x, y)
1237 && (xsize == 0 || ysize == 0
1238 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1245 /* Functions to compute memory dependencies.
1247 Since we process the insns in execution order, we can build tables
1248 to keep track of what registers are fixed (and not aliased), what registers
1249 are varying in known ways, and what registers are varying in unknown
1252 If both memory references are volatile, then there must always be a
1253 dependence between the two references, since their order can not be
1254 changed. A volatile and non-volatile reference can be interchanged
1257 A MEM_IN_STRUCT reference at a non-AND varying address can never
1258 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We
1259 also must allow AND addresses, because they may generate accesses
1260 outside the object being referenced. This is used to generate
1261 aligned addresses from unaligned addresses, for instance, the alpha
1262 storeqi_unaligned pattern. */
1264 /* Read dependence: X is read after read in MEM takes place. There can
1265 only be a dependence here if both reads are volatile. */
/* NOTE(review): MEM and X are both MEM rtx's.  The return type, the
   parameter declarations and the braces of this function are not visible
   in this listing; only the single body statement appears below.  */
1268 read_dependence (mem, x)
/* Two reads conflict only when both are volatile, since two
   non-volatile reads may always be reordered.  */
1272 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1275 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1276 MEM2 is a reference to a structure at a varying address, or returns
1277 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1278 value is returned MEM1 and MEM2 can never alias. VARIES_P is used
1279 to decide whether or not an address may vary; it should return
1280 nonzero whenever variation is possible.
1281 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
/* NOTE(review): the return statements of both branches and the final
   return of NULL_RTX are not visible in this listing, nor are the
   declarations of MEM1 and MEM2; the visible tests are left
   byte-identical.  */
1284 fixed_scalar_and_varying_struct_p (mem1, mem2, mem1_addr, mem2_addr, varies_p)
1286 rtx mem1_addr, mem2_addr;
1287 int (*varies_p) PARAMS ((rtx));
/* First case: MEM1 is the fixed scalar, MEM2 the varying struct.  */
1289 if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1290 && !varies_p (mem1_addr) && varies_p (mem2_addr))
1291 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
/* Symmetric case with the roles of MEM1 and MEM2 swapped.  */
1295 if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1296 && varies_p (mem1_addr) && !varies_p (mem2_addr))
1297 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1304 /* Returns nonzero if something about the mode or address format of MEM
1305 indicates that it might well alias *anything*. */
/* NOTE(review): return type, MEM's declaration, braces, and the return
   statements (including the fall-through return) are not visible in this
   listing.  */
1308 aliases_everything_p (mem)
1311 if (GET_CODE (XEXP (mem, 0)) == AND)
1312 /* If the address is an AND, it's very hard to know at what it is
1313 actually pointing. */
1319 /* True dependence: X is read after store in MEM takes place. */
/* NOTE(review): MEM and X are MEMs; MEM_MODE is MEM's mode (VOIDmode
   means "use GET_MODE (MEM)"); VARIES is a callback saying whether an
   address may vary.  The early "return 1;"/"return 0;" statements and
   the braces are not visible in this listing.  */
1322 true_dependence (mem, mem_mode, x, varies)
1324 enum machine_mode mem_mode;
1326 int (*varies) PARAMS ((rtx));
1328 register rtx x_addr, mem_addr;
/* Two volatile references always conflict: their order is fixed.  */
1330 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
/* Distinct alias sets cannot conflict (see file header comment).  */
1333 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1336 /* If X is an unchanging read, then it can't possibly conflict with any
1337 non-unchanging store. It may conflict with an unchanging write though,
1338 because there may be a single store to this address to initialize it.
1339 Just fall through to the code below to resolve the case where we have
1340 both an unchanging read and an unchanging write. This won't handle all
1341 cases optimally, but the possible performance loss should be
1343 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
1346 if (mem_mode == VOIDmode)
1347 mem_mode = GET_MODE (mem);
/* Strip address wrappers before comparing the bases.  */
1349 x_addr = get_addr (XEXP (x, 0));
1350 mem_addr = get_addr (XEXP (mem, 0));
1352 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
1355 x_addr = canon_rtx (x_addr);
1356 mem_addr = canon_rtx (mem_addr);
/* Offset/size based disambiguation of the two references.  */
1358 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
1359 SIZE_FOR_MODE (x), x_addr, 0))
1362 if (aliases_everything_p (x))
1365 /* We cannot use aliases_everything_p to test MEM, since we must look
1366 at MEM_MODE, rather than GET_MODE (MEM). */
1367 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
1370 /* In true_dependence we also allow BLKmode to alias anything. Why
1371 don't we do this in anti_dependence and output_dependence? */
1372 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
/* Last resort: a fixed scalar and a varying struct never alias.  */
1375 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
1379 /* Returns non-zero if a write to X might alias a previous read from
1380 (or, if WRITEP is non-zero, a write to) MEM. */
/* NOTE(review): shared worker for anti_dependence and output_dependence.
   Early "return 1;"/"return 0;" statements, the declaration of
   FIXED_SCALAR, and the braces are not visible in this listing.  */
1383 write_dependence_p (mem, x, writep)
1388 rtx x_addr, mem_addr;
/* Two volatile references always conflict.  */
1391 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
1394 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1397 /* If MEM is an unchanging read, then it can't possibly conflict with
1398 the store to X, because there is at most one store to MEM, and it must
1399 have occurred somewhere before MEM. */
1400 if (!writep && RTX_UNCHANGING_P (mem))
1403 x_addr = get_addr (XEXP (x, 0));
1404 mem_addr = get_addr (XEXP (mem, 0));
1406 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
1410 x_addr = canon_rtx (x_addr);
1411 mem_addr = canon_rtx (mem_addr);
1413 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
1414 SIZE_FOR_MODE (x), x_addr, 0))
1418 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
/* Conflict unless exactly one reference is a fixed scalar and the
   other does not alias everything.  */
1421 return (!(fixed_scalar == mem && !aliases_everything_p (x))
1422 && !(fixed_scalar == x && !aliases_everything_p (mem)));
1425 /* Anti dependence: X is written after read in MEM takes place. */
/* NOTE(review): thin wrapper over write_dependence_p with WRITEP == 0;
   return type, parameter declarations and braces are not visible in
   this listing.  */
1428 anti_dependence (mem, x)
1432 return write_dependence_p (mem, x, /*writep=*/0);
1435 /* Output dependence: X is written after store in MEM takes place. */
/* NOTE(review): thin wrapper over write_dependence_p with WRITEP == 1;
   return type, parameter declarations and braces are not visible in
   this listing.  */
1438 output_dependence (mem, x)
1442 return write_dependence_p (mem, x, /*writep=*/1);
1445 /* Returns non-zero if X might refer to something which is not
1446 local to the function and is not constant. */
/* NOTE(review): recursive rtx walk used by mark_constant_function.
   Many lines (the switch on CODE, "return 1;" statements, declarations
   of I, J, REGNO, BASE, and braces) are not visible in this listing;
   the visible tests are left byte-identical.  */
1449 nonlocal_reference_p (x)
1453 register RTX_CODE code;
1456 code = GET_CODE (x);
/* For insns, examine the body (and for const calls, the usage list).  */
1458 if (GET_RTX_CLASS (code) == 'i')
1460 /* Constant functions can be constant if they don't use
1461 scratch memory used to mark function w/o side effects. */
1462 if (code == CALL_INSN && CONST_CALL_P (x))
1464 x = CALL_INSN_FUNCTION_USAGE (x);
1470 code = GET_CODE (x);
1476 if (GET_CODE (SUBREG_REG (x)) == REG)
1478 /* Global registers are not local. */
1479 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
1480 && global_regs[REGNO (SUBREG_REG (x)) + SUBREG_WORD (x)])
1488 /* Global registers are not local. */
1489 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
1503 /* Constants in the function's constants pool are constant. */
1504 if (CONSTANT_POOL_ADDRESS_P (x))
1509 /* Recursion introduces no additional considerations. */
/* Compare the referenced symbol with this function's own assembler
   name to detect a self-recursive reference.  */
1510 if (GET_CODE (XEXP (x, 0)) == MEM
1511 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1512 && strcmp(XSTR (XEXP (XEXP (x, 0), 0), 0),
1513 IDENTIFIER_POINTER (
1514 DECL_ASSEMBLER_NAME (current_function_decl))) == 0)
1519 /* Be overly conservative and consider any volatile memory
1520 reference as not local. */
1521 if (MEM_VOLATILE_P (x))
1523 base = find_base_term (XEXP (x, 0));
1526 /* A Pmode ADDRESS could be a reference via the structure value
1527 address or static chain. Such memory references are nonlocal.
1529 Thus, we have to examine the contents of the ADDRESS to find
1530 out if this is a local reference or not. */
1531 if (GET_CODE (base) == ADDRESS
1532 && GET_MODE (base) == Pmode
1533 && (XEXP (base, 0) == stack_pointer_rtx
1534 || XEXP (base, 0) == arg_pointer_rtx
1535 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1536 || XEXP (base, 0) == hard_frame_pointer_rtx
1538 || XEXP (base, 0) == frame_pointer_rtx))
1540 /* Constants in the function's constant pool are constant. */
1541 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
1554 /* Recursively scan the operands of this expression. */
1557 register const char *fmt = GET_RTX_FORMAT (code);
1560 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1562 if (fmt[i] == 'e' && XEXP (x, i))
1564 if (nonlocal_reference_p (XEXP (x, i)))
1567 else if (fmt[i] == 'E')
/* 'E' operands are rtx vectors; scan every element.  */
1570 for (j = 0; j < XVECLEN (x, i); j++)
1571 if (nonlocal_reference_p (XVECEXP (x, i, j)))
1580 /* Mark the function if it is constant. */
/* NOTE(review): sets TREE_READONLY on current_function_decl when no insn
   makes a nonlocal, non-constant reference.  Declarations of INSN, the
   early return, and braces are not visible in this listing.  */
1583 mark_constant_function ()
/* Bail out for functions that cannot or need not be marked: public
   functions, already-readonly ones, volatile (noreturn) ones, and
   void-returning ones.  */
1587 if (TREE_PUBLIC (current_function_decl)
1588 || TREE_READONLY (current_function_decl)
1589 || TREE_THIS_VOLATILE (current_function_decl)
1590 || TYPE_MODE (TREE_TYPE (current_function_decl)) == VOIDmode)
1593 /* Determine if this is a constant function. */
1595 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1596 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1597 && nonlocal_reference_p (insn))
1600 /* Mark the function. */
1602 TREE_READONLY (current_function_decl) = 1;
/* Hard registers that can carry incoming pointer arguments; computed
   once below and consulted by init_alias_analysis each pass.  */
static HARD_REG_SET argument_registers;
/* NOTE(review): the function header for this once-per-run initializer
   (presumably init_alias_once) is not visible in this listing; only its
   body fragments appear below.  */
1613 #ifndef OUTGOING_REGNO
1614 #define OUTGOING_REGNO(N) N
1616 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1617 /* Check whether this register can hold an incoming pointer
1618 argument. FUNCTION_ARG_REGNO_P tests outgoing register
1619 numbers, so translate if necessary due to register windows. */
1620 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
1621 && HARD_REGNO_MODE_OK (i, Pmode))
1622 SET_HARD_REG_BIT (argument_registers, i);
/* Create the splay tree that maps alias-set numbers to their records.  */
1624 alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
1627 /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
/* NOTE(review): per-function setup of reg_known_value, reg_known_equiv_p,
   reg_base_value and (optionally) alias_invariant, followed by an
   iterative propagation loop over the insn stream.  Numerous lines
   (declarations of INSN, NOTE, SET, CHANGED, PASS; "do {"; braces;
   #endif lines) are not visible in this listing; visible code is left
   byte-identical.  */
1631 init_alias_analysis ()
1633 int maxreg = max_reg_num ();
1636 register unsigned int ui;
1639 reg_known_value_size = maxreg;
/* Both arrays are biased by FIRST_PSEUDO_REGISTER so they can be
   indexed directly by pseudo register number.  */
1642 = (rtx *) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (rtx))
1643 - FIRST_PSEUDO_REGISTER;
1645 = (char*) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (char))
1646 - FIRST_PSEUDO_REGISTER;
1648 /* Overallocate reg_base_value to allow some growth during loop
1649 optimization. Loop unrolling can create a large number of
1651 reg_base_value_size = maxreg * 2;
1652 reg_base_value = (rtx *) xcalloc (reg_base_value_size, sizeof (rtx));
/* Register reg_base_value with the garbage collector so its rtx
   entries are kept live.  */
1654 ggc_add_rtx_root (reg_base_value, reg_base_value_size);
1656 new_reg_base_value = (rtx *) xmalloc (reg_base_value_size * sizeof (rtx));
1657 reg_seen = (char *) xmalloc (reg_base_value_size);
1658 if (! reload_completed && flag_unroll_loops)
1660 /* ??? Why are we realloc'ing if we're just going to zero it? */
1661 alias_invariant = (rtx *)xrealloc (alias_invariant,
1662 reg_base_value_size * sizeof (rtx));
1663 bzero ((char *)alias_invariant, reg_base_value_size * sizeof (rtx));
1667 /* The basic idea is that each pass through this loop will use the
1668 "constant" information from the previous pass to propagate alias
1669 information through another level of assignments.
1671 This could get expensive if the assignment chains are long. Maybe
1672 we should throttle the number of iterations, possibly based on
1673 the optimization level or flag_expensive_optimizations.
1675 We could propagate more information in the first pass by making use
1676 of REG_N_SETS to determine immediately that the alias information
1677 for a pseudo is "constant".
1679 A program with an uninitialized variable can cause an infinite loop
1680 here. Instead of doing a full dataflow analysis to detect such problems
1681 we just cap the number of iterations for the loop.
1683 The state of the arrays for the set chain in question does not matter
1684 since the program has undefined behavior. */
1689 /* Assume nothing will change this iteration of the loop. */
1692 /* We want to assign the same IDs each iteration of this loop, so
1693 start counting from zero each iteration of the loop. */
1696 /* We're at the start of the function each iteration through the
1697 loop, so we're copying arguments. */
1698 copying_arguments = 1;
1700 /* Wipe the potential alias information clean for this pass. */
1701 bzero ((char *) new_reg_base_value, reg_base_value_size * sizeof (rtx));
1703 /* Wipe the reg_seen array clean. */
1704 bzero ((char *) reg_seen, reg_base_value_size);
1706 /* Mark all hard registers which may contain an address.
1707 The stack, frame and argument pointers may contain an address.
1708 An argument register which can hold a Pmode value may contain
1709 an address even if it is not in BASE_REGS.
1711 The address expression is VOIDmode for an argument and
1712 Pmode for other registers. */
1714 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1715 if (TEST_HARD_REG_BIT (argument_registers, i))
1716 new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
1717 gen_rtx_REG (Pmode, i));
1719 new_reg_base_value[STACK_POINTER_REGNUM]
1720 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
1721 new_reg_base_value[ARG_POINTER_REGNUM]
1722 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
1723 new_reg_base_value[FRAME_POINTER_REGNUM]
1724 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
1725 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1726 new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
1727 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
/* The incoming struct-value and static-chain registers also carry
   addresses, when the target uses registers for them.  */
1729 if (struct_value_incoming_rtx
1730 && GET_CODE (struct_value_incoming_rtx) == REG)
1731 new_reg_base_value[REGNO (struct_value_incoming_rtx)]
1732 = gen_rtx_ADDRESS (Pmode, struct_value_incoming_rtx);
1734 if (static_chain_rtx
1735 && GET_CODE (static_chain_rtx) == REG)
1736 new_reg_base_value[REGNO (static_chain_rtx)]
1737 = gen_rtx_ADDRESS (Pmode, static_chain_rtx);
1739 /* Walk the insns adding values to the new_reg_base_value array. */
1740 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1742 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1746 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
1747 if (prologue_epilogue_contains (insn))
1751 /* If this insn has a noalias note, process it. Otherwise,
1752 scan for sets. A simple set will have no side effects
1753 which could change the base value of any other register. */
1755 if (GET_CODE (PATTERN (insn)) == SET
1756 && REG_NOTES (insn) != 0
1757 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
1758 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
1760 note_stores (PATTERN (insn), record_set, NULL);
/* Record REG_EQUAL/REG_EQUIV notes for single-set pseudos so later
   passes can substitute the known value.  */
1762 set = single_set (insn);
1765 && GET_CODE (SET_DEST (set)) == REG
1766 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1767 && REG_NOTES (insn) != 0
1768 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1769 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
1770 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1771 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
1772 && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
1774 int regno = REGNO (SET_DEST (set));
1775 reg_known_value[regno] = XEXP (note, 0);
1776 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
1779 else if (GET_CODE (insn) == NOTE
1780 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1781 copying_arguments = 0;
1784 /* Now propagate values from new_reg_base_value to reg_base_value. */
1785 for (ui = 0; ui < reg_base_value_size; ui++)
1787 if (new_reg_base_value[ui]
1788 && new_reg_base_value[ui] != reg_base_value[ui]
1789 && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
1791 reg_base_value[ui] = new_reg_base_value[ui];
/* Iterate until the base values stabilize, but cap the pass count to
   guard against pathological (undefined-behavior) input.  */
1796 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
1798 /* Fill in the remaining entries. */
1799 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
1800 if (reg_known_value[i] == 0)
1801 reg_known_value[i] = regno_reg_rtx[i];
1803 /* Simplify the reg_base_value array so that no register refers to
1804 another register, except to special registers indirectly through
1805 ADDRESS expressions.
1807 In theory this loop can take as long as O(registers^2), but unless
1808 there are very long dependency chains it will run in close to linear
1811 This loop may not be needed any longer now that the main loop does
1812 a better job at propagating alias information. */
1818 for (ui = 0; ui < reg_base_value_size; ui++)
1820 rtx base = reg_base_value[ui];
1821 if (base && GET_CODE (base) == REG)
1823 unsigned int base_regno = REGNO (base);
1824 if (base_regno == ui) /* register set from itself */
1825 reg_base_value[ui] = 0;
1827 reg_base_value[ui] = reg_base_value[base_regno];
1832 while (changed && pass < MAX_ALIAS_LOOP_PASSES);
/* The scratch array is only needed during the passes above.  */
1835 free (new_reg_base_value);
1836 new_reg_base_value = 0;
/* Release all per-function data allocated by init_alias_analysis.
   NOTE(review): return type, braces, and some intervening statements
   (e.g. clearing reg_seen / reg_base_value pointers) are not visible in
   this listing.  */
1842 end_alias_analysis ()
/* The arrays were biased by FIRST_PSEUDO_REGISTER at allocation time,
   so un-bias them before freeing.  */
1844 free (reg_known_value + FIRST_PSEUDO_REGISTER);
1845 reg_known_value = 0;
1846 reg_known_value_size = 0;
1847 free (reg_known_equiv_p + FIRST_PSEUDO_REGISTER);
1848 reg_known_equiv_p = 0;
/* Unregister the GC root before freeing the array it protects.  */
1852 ggc_del_root (reg_base_value);
1853 free (reg_base_value);
1856 reg_base_value_size = 0;
1857 if (alias_invariant)
1859 free (alias_invariant);
1860 alias_invariant = 0;