1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999, 2000 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
28 #include "insn-flags.h"
31 #include "hard-reg-set.h"
36 #include "splay-tree.h"
39 /* The alias sets assigned to MEMs assist the back-end in determining
40 which MEMs can alias which other MEMs. In general, two MEMs in
41 different alias sets do not alias each other. There is one
42 exception, however. Consider something like:
44 struct S {int i; double d; };
46 a store to an `S' can alias something of either type `int' or type
47 `double'. (However, a store to an `int' cannot alias a `double'
48 and vice versa.) We indicate this via a tree structure that looks
56 (The arrows are directed and point downwards.) If, when comparing
57 two alias sets, we can hold one set fixed, trace the other set
58 downwards, and at some point find the first set, the two MEMs can
59 alias one another. In this situation we say the alias set for
60 `struct S' is the `superset' and that those for `int' and `double'
63 Alias set zero is implicitly a superset of all other alias sets.
64 However, there is no actual entry for alias set zero. It is an
65 error to attempt to explicitly construct a subset of zero. */
67 typedef struct alias_set_entry
69 /* The alias set number, as stored in MEM_ALIAS_SET. */
72 /* The children of the alias set. These are not just the immediate
73 children, but, in fact, all children. So, if we have:
75 struct T { struct S s; float f; }
77 continuing our example above, the children here will be all of
78 `int', `double', `float', and `struct S'. */
82 static rtx canon_rtx PARAMS ((rtx));
83 static int rtx_equal_for_memref_p PARAMS ((rtx, rtx));
84 static rtx find_symbolic_term PARAMS ((rtx));
85 static rtx get_addr PARAMS ((rtx));
86 static int memrefs_conflict_p PARAMS ((int, rtx, int, rtx,
88 static void record_set PARAMS ((rtx, rtx, void *));
89 static rtx find_base_term PARAMS ((rtx));
90 static int base_alias_check PARAMS ((rtx, rtx, enum machine_mode,
92 static rtx find_base_value PARAMS ((rtx));
93 static int mems_in_disjoint_alias_sets_p PARAMS ((rtx, rtx));
94 static int insert_subset_children PARAMS ((splay_tree_node, void*));
95 static alias_set_entry get_alias_set_entry PARAMS ((int));
96 static rtx fixed_scalar_and_varying_struct_p PARAMS ((rtx, rtx, rtx, rtx,
98 static int aliases_everything_p PARAMS ((rtx));
99 static int write_dependence_p PARAMS ((rtx, rtx, int));
100 static int nonlocal_reference_p PARAMS ((rtx));
102 /* Set up all info needed to perform alias analysis on memory references. */
104 /* Returns the size in bytes of the mode of X. */
105 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
107 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
108 different alias sets. We ignore alias sets in functions making use
109 of variable arguments because the va_arg macros on some systems are
111 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
112 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
114 /* Cap the number of passes we make over the insns propagating alias
115 information through set chains.
117 10 is a completely arbitrary choice. */
118 #define MAX_ALIAS_LOOP_PASSES 10
120 /* reg_base_value[N] gives an address to which register N is related.
121 If all sets after the first add or subtract to the current value
122 or otherwise modify it so it does not point to a different top level
123 object, reg_base_value[N] is equal to the address part of the source
126 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
127 expressions represent certain special values: function arguments and
128 the stack, frame, and argument pointers.
130 The contents of an ADDRESS is not normally used, the mode of the
131 ADDRESS determines whether the ADDRESS is a function argument or some
132 other special value. Pointer equality, not rtx_equal_p, determines whether
133 two ADDRESS expressions refer to the same base address.
135 The only use of the contents of an ADDRESS is for determining if the
136 current function performs nonlocal memory references for the
137 purposes of marking the function as a constant function. */
139 static rtx *reg_base_value;
140 static rtx *new_reg_base_value;
141 static unsigned int reg_base_value_size; /* size of reg_base_value array */
143 #define REG_BASE_VALUE(X) \
144 ((unsigned) REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
146 /* Vector of known invariant relationships between registers. Set in
147 loop unrolling. Indexed by register number, if nonzero the value
148 is an expression describing this register in terms of another.
150 The length of this array is REG_BASE_VALUE_SIZE.
152 Because this array contains only pseudo registers it has no effect
154 static rtx *alias_invariant;
156 /* Vector indexed by N giving the initial (unchanging) value known
157 for pseudo-register N. */
158 rtx *reg_known_value;
160 /* Indicates number of valid entries in reg_known_value. */
161 static unsigned int reg_known_value_size;
163 /* Vector recording for each reg_known_value whether it is due to a
164 REG_EQUIV note. Future passes (viz., reload) may replace the
165 pseudo with the equivalent expression and so we account for the
166 dependences that would be introduced if that happens. */
167 /* ??? This is a problem only on the Convex. The REG_EQUIV notes created in
168 assign_parms mention the arg pointer, and there are explicit insns in the
169 RTL that modify the arg pointer. Thus we must ensure that such insns don't
170 get scheduled across each other because that would invalidate the REG_EQUIV
171 notes. One could argue that the REG_EQUIV notes are wrong, but solving
172 the problem in the scheduler will likely give better code, so we do it
174 char *reg_known_equiv_p;
176 /* True when scanning insns from the start of the rtl to the
177 NOTE_INSN_FUNCTION_BEG note. */
179 static int copying_arguments;
181 /* The splay-tree used to store the various alias set entries. */
183 static splay_tree alias_sets;
185 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
186 such an entry, or NULL otherwise. */
188 static alias_set_entry
189 get_alias_set_entry (alias_set)
/* Look up ALIAS_SET in the global ALIAS_SETS splay tree.
   NOTE(review): the parameter declaration, braces, and the declaration
   of the splay_tree_node SN are missing from this view of the file.  */
193 = splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);
/* A null lookup result means no entry has been recorded for ALIAS_SET.  */
195 return sn != 0 ? ((alias_set_entry) sn->value) : 0;
198 /* Returns nonzero value if the alias sets for MEM1 and MEM2 are such
199 that the two MEMs cannot alias each other. */
202 mems_in_disjoint_alias_sets_p (mem1, mem2)
/* Returns nonzero iff MEM1 and MEM2 are in alias sets that provably
   cannot alias (see comment above).  NOTE(review): the return type,
   parameter declarations, braces, return statements, and the
   declaration of ASE are missing from this view of the file.  */
208 #ifdef ENABLE_CHECKING
209 /* Perform a basic sanity check. Namely, that there are no alias sets
210 if we're not using strict aliasing. This helps to catch bugs
211 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
212 where a MEM is allocated in some way other than by the use of
213 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
214 use alias sets to indicate that spilled registers cannot alias each
215 other, we might need to remove this check. */
216 if (! flag_strict_aliasing
217 && (MEM_ALIAS_SET (mem1) != 0 || MEM_ALIAS_SET (mem2) != 0))
221 /* The code used in varargs macros is often not conforming ANSI C,
222 which can trick the compiler into making incorrect aliasing
223 assumptions in these functions. So, we don't use alias sets in
224 such a function. FIXME: This should be moved into the front-end;
225 it is a language-dependent notion, and there's no reason not to
226 still use these checks to handle globals. */
227 if (current_function_stdarg || current_function_varargs)
230 /* If we have no alias set information for one of the MEMs, we have to assume
231 it can alias anything. */
232 if (MEM_ALIAS_SET (mem1) == 0 || MEM_ALIAS_SET (mem2) == 0)
235 /* If the two alias sets are the same, they may alias. */
236 if (MEM_ALIAS_SET (mem1) == MEM_ALIAS_SET (mem2))
239 /* Iterate through each of the children of the first alias set,
240 comparing it with the second alias set. */
241 ase = get_alias_set_entry (MEM_ALIAS_SET (mem1));
242 if (ase != 0 && splay_tree_lookup (ase->children,
243 (splay_tree_key) MEM_ALIAS_SET (mem2)))
246 /* Now do the same, but with the alias sets reversed. */
247 ase = get_alias_set_entry (MEM_ALIAS_SET (mem2));
248 if (ase != 0 && splay_tree_lookup (ase->children,
249 (splay_tree_key) MEM_ALIAS_SET (mem1)))
252 /* The two MEMs are in distinct alias sets, and neither one is the
253 child of the other. Therefore, they cannot alias. */
257 /* Insert the NODE into the splay tree given by DATA. Used by
258 record_alias_subset via splay_tree_foreach. */
261 insert_subset_children (node, data)
262 splay_tree_node node;
/* Copy NODE into the splay tree passed as DATA (the superset's
   children tree).  Callback for splay_tree_foreach; see comment above.
   NOTE(review): the return type, DATA's declaration, braces, and the
   return statement are missing from this view of the file.  */
265 splay_tree_insert ((splay_tree) data, node->key, node->value);
270 /* Indicate that things in SUBSET can alias things in SUPERSET, but
271 not vice versa. For example, in C, a store to an `int' can alias a
272 structure containing an `int', but not vice versa. Here, the
273 structure would be the SUPERSET and `int' the SUBSET. This
274 function should be called only once per SUPERSET/SUBSET pair. At
275 present any given alias set may only be a subset of one superset.
277 It is illegal for SUPERSET to be zero; everything is implicitly a
278 subset of alias set zero. */
281 record_alias_subset (superset, subset)
/* Record that SUBSET's contents may alias SUPERSET's (see comment
   above).  NOTE(review): the parameter declarations, braces, and the
   guard lines (e.g. the check that SUPERSET is nonzero) are missing
   from this view of the file.  */
285 alias_set_entry superset_entry;
286 alias_set_entry subset_entry;
291 superset_entry = get_alias_set_entry (superset);
292 if (superset_entry == 0)
294 /* Create an entry for the SUPERSET, so that we have a place to
295 attach the SUBSET. */
297 = (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
298 superset_entry->alias_set = superset;
299 superset_entry->children
300 = splay_tree_new (splay_tree_compare_ints, 0, 0);
301 splay_tree_insert (alias_sets, (splay_tree_key) superset,
302 (splay_tree_value) superset_entry);
306 subset_entry = get_alias_set_entry (subset);
308 /* If there is an entry for the subset, enter all of its children
309 (if they are not already present) as children of the SUPERSET. */
311 splay_tree_foreach (subset_entry->children,
312 insert_subset_children,
313 superset_entry->children);
315 /* Enter the SUBSET itself as a child of the SUPERSET. */
316 splay_tree_insert (superset_entry->children,
317 (splay_tree_key) subset, 0);
320 /* Inside SRC, the source of a SET, find a base address. */
323 find_base_value (src)
/* Walk SRC looking for a base address (SYMBOL_REF, LABEL_REF, or
   ADDRESS).  NOTE(review): the return type, parameter declaration,
   braces, several `case' labels of the switch, and some alternative
   branches are missing from this view of the file; comments below hedge
   accordingly.  */
326 switch (GET_CODE (src))
/* Presumably the REG case — case label missing from this view.  */
333 /* At the start of a function, argument registers have known base
334 values which may be lost later. Returning an ADDRESS
335 expression here allows optimization based on argument values
336 even when the argument registers are used for other purposes. */
337 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
338 return new_reg_base_value[REGNO (src)];
340 /* If a pseudo has a known base value, return it. Do not do this
341 for hard regs since it can result in a circular dependency
342 chain for registers which have values at function entry.
344 The test above is not sufficient because the scheduler may move
345 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
346 if (REGNO (src) >= FIRST_PSEUDO_REGISTER
347 && (unsigned) REGNO (src) < reg_base_value_size
348 && reg_base_value[REGNO (src)])
349 return reg_base_value[REGNO (src)];
/* Presumably the MEM case — case label missing from this view.  */
354 /* Check for an argument passed in memory. Only record in the
355 copying-arguments block; it is too hard to track changes
357 if (copying_arguments
358 && (XEXP (src, 0) == arg_pointer_rtx
359 || (GET_CODE (XEXP (src, 0)) == PLUS
360 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
361 return gen_rtx_ADDRESS (VOIDmode, src);
366 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
369 /* ... fall through ... */
374 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
376 /* If either operand is a REG, then see if we already have
377 a known value for it. */
378 if (GET_CODE (src_0) == REG)
380 temp = find_base_value (src_0);
385 if (GET_CODE (src_1) == REG)
387 temp = find_base_value (src_1);
392 /* Guess which operand is the base address:
393 If either operand is a symbol, then it is the base. If
394 either operand is a CONST_INT, then the other is the base. */
395 if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
396 return find_base_value (src_0);
397 else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
398 return find_base_value (src_1);
400 /* This might not be necessary anymore:
401 If either operand is a REG that is a known pointer, then it
403 else if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
404 return find_base_value (src_0);
405 else if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
406 return find_base_value (src_1);
/* Presumably the LO_SUM case — case label missing from this view.  */
412 /* The standard form is (lo_sum reg sym) so look only at the
414 return find_base_value (XEXP (src, 1));
/* Presumably the AND case — case label missing from this view.  */
417 /* If the second operand is constant set the base
418 address to the first operand. */
419 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
420 return find_base_value (XEXP (src, 0));
424 case SIGN_EXTEND: /* used for NT/Alpha pointers */
426 return find_base_value (XEXP (src, 0));
435 /* Called from init_alias_analysis indirectly through note_stores. */
437 /* While scanning insns to find base values, reg_seen[N] is nonzero if
438 register N has been set in this function. */
439 static char *reg_seen;
441 /* Addresses which are known not to alias anything else are identified
442 by a unique integer. */
443 static int unique_id;
446 record_set (dest, set, data)
/* note_stores callback: update NEW_REG_BASE_VALUE / REG_SEEN for a
   store to DEST.  SET is the SET or CLOBBER rtx (per the code below).
   NOTE(review): parameter declarations for DEST/SET, braces, early
   returns, and the assignment of SRC are missing from this view.  */
448 void *data ATTRIBUTE_UNUSED;
450 register unsigned regno;
/* Only register destinations are tracked.  */
453 if (GET_CODE (dest) != REG)
456 regno = REGNO (dest);
/* Ignore registers outside the tracked range.  */
458 if (regno >= reg_base_value_size)
463 /* A CLOBBER wipes out any old value but does not prevent a previously
464 unset register from acquiring a base address (i.e. reg_seen is not
466 if (GET_CODE (set) == CLOBBER)
468 new_reg_base_value[regno] = 0;
477 new_reg_base_value[regno] = 0;
/* Give this register a fresh, unique (non-aliasing) ADDRESS.  */
481 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
482 GEN_INT (unique_id++));
486 /* This is not the first set. If the new value is not related to the
487 old value, forget the base value. Note that the following code is
489 extern int x, y; int *p = &x; p += (&y-&x);
490 ANSI C does not allow computing the difference of addresses
491 of distinct top level objects. */
492 if (new_reg_base_value[regno])
493 switch (GET_CODE (src))
/* PLUS/MINUS-style updates keep the base only if DEST appears as an
   operand (case labels missing from this view).  */
498 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
499 new_reg_base_value[regno] = 0;
502 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
503 new_reg_base_value[regno] = 0;
506 new_reg_base_value[regno] = 0;
509 /* If this is the first set of a register, record the value. */
510 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
511 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
512 new_reg_base_value[regno] = find_base_value (src);
517 /* Called from loop optimization when a new pseudo-register is created. */
520 record_base_value (regno, val, invariant)
/* Record that pseudo REGNO has value VAL for base-value purposes.
   NOTE(review): the return type, parameter declarations, braces, and
   the early return are missing from this view of the file.  */
525 if ((unsigned) regno >= reg_base_value_size)
528 /* If INVARIANT is true then this value also describes an invariant
529 relationship which can be used to deduce that two registers with
530 unknown values are different. */
531 if (invariant && alias_invariant)
532 alias_invariant[regno] = val;
/* A register copy inherits the source register's base value.  */
534 if (GET_CODE (val) == REG)
536 if ((unsigned) REGNO (val) < reg_base_value_size)
537 reg_base_value[regno] = reg_base_value[REGNO (val)];
542 reg_base_value[regno] = find_base_value (val);
549 /* Recursively look for equivalences. */
550 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
551 && REGNO (x) < reg_known_value_size)
552 return reg_known_value[REGNO (x)] == x
553 ? x : canon_rtx (reg_known_value[REGNO (x)]);
554 else if (GET_CODE (x) == PLUS)
556 rtx x0 = canon_rtx (XEXP (x, 0));
557 rtx x1 = canon_rtx (XEXP (x, 1));
559 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
561 /* We can tolerate LO_SUMs being offset here; these
562 rtl are used for nothing other than comparisons. */
563 if (GET_CODE (x0) == CONST_INT)
564 return plus_constant_for_output (x1, INTVAL (x0));
565 else if (GET_CODE (x1) == CONST_INT)
566 return plus_constant_for_output (x0, INTVAL (x1));
567 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
571 /* This gives us much better alias analysis when called from
572 the loop optimizer. Note we want to leave the original
573 MEM alone, but need to return the canonicalized MEM with
574 all the flags with their original values. */
575 else if (GET_CODE (x) == MEM)
577 rtx addr = canon_rtx (XEXP (x, 0));
579 if (addr != XEXP (x, 0))
581 rtx new = gen_rtx_MEM (GET_MODE (x), addr);
583 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
584 MEM_COPY_ATTRIBUTES (new, x);
585 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
592 /* Return 1 if X and Y are identical-looking rtx's.
594 We use the data in reg_known_value above to see if two registers with
595 different numbers are, in fact, equivalent. */
598 rtx_equal_for_memref_p (x, y)
/* Structural equality of two rtx's for memory-reference comparison
   (see comment above).  NOTE(review): the return type, parameter
   declarations, braces, loop-variable declarations, the assignment of
   CODE, and several return statements are missing from this view.  */
603 register enum rtx_code code;
604 register const char *fmt;
/* Two null rtx's are equal; one null and one non-null are not.  */
606 if (x == 0 && y == 0)
608 if (x == 0 || y == 0)
618 /* Rtx's of different codes cannot be equal. */
619 if (code != GET_CODE (y))
622 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
623 (REG:SI x) and (REG:HI x) are NOT equivalent. */
625 if (GET_MODE (x) != GET_MODE (y))
628 /* REG, LABEL_REF, and SYMBOL_REF can be compared nonrecursively. */
631 return REGNO (x) == REGNO (y);
632 if (code == LABEL_REF)
633 return XEXP (x, 0) == XEXP (y, 0);
634 if (code == SYMBOL_REF)
635 return XSTR (x, 0) == XSTR (y, 0);
636 if (code == CONST_INT)
637 return INTVAL (x) == INTVAL (y);
638 /* There's no need to compare the contents of CONST_DOUBLEs because
640 if (code == CONST_DOUBLE)
642 if (code == ADDRESSOF)
643 return (REGNO (XEXP (x, 0)) == REGNO (XEXP (y, 0))
644 && XINT (x, 1) == XINT (y, 1));
646 /* For commutative operations, the RTXs match if the operands match in any
647 order. Also handle the simple binary and unary cases without a loop. */
648 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
649 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
650 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
651 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
652 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
653 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
654 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
655 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
656 else if (GET_RTX_CLASS (code) == '1')
657 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
659 /* Compare the elements. If any pair of corresponding elements
660 fail to match, return 0 for the whole thing.
662 Limit cases to types which actually appear in addresses. */
664 fmt = GET_RTX_FORMAT (code);
665 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* Format cases ('i', 'E', 'e', ...) — switch/case lines missing from
   this view.  */
670 if (XINT (x, i) != XINT (y, i))
675 /* Two vectors must have the same length. */
676 if (XVECLEN (x, i) != XVECLEN (y, i))
679 /* And the corresponding elements must match. */
680 for (j = 0; j < XVECLEN (x, i); j++)
681 if (rtx_equal_for_memref_p (XVECEXP (x, i, j),
682 XVECEXP (y, i, j)) == 0)
687 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
691 /* This can happen for an asm which clobbers memory. */
695 /* It is believed that rtx's at this level will never
696 contain anything but integers and other rtx's,
697 except for within LABEL_REFs and SYMBOL_REFs. */
705 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
706 X and return it, or return 0 if none found. */
709 find_symbolic_term (x)
713 register enum rtx_code code;
714 register const char *fmt;
717 if (code == SYMBOL_REF || code == LABEL_REF)
719 if (GET_RTX_CLASS (code) == 'o')
722 fmt = GET_RTX_FORMAT (code);
723 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
729 t = find_symbolic_term (XEXP (x, i));
733 else if (fmt[i] == 'E')
744 struct elt_loc_list *l;
746 switch (GET_CODE (x))
749 return REG_BASE_VALUE (x);
752 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
758 return find_base_term (XEXP (x, 0));
761 val = CSELIB_VAL_PTR (x);
762 for (l = val->locs; l; l = l->next)
763 if ((x = find_base_term (l->loc)) != 0)
769 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
776 rtx tmp1 = XEXP (x, 0);
777 rtx tmp2 = XEXP (x, 1);
779 /* This is a little bit tricky since we have to determine which of
780 the two operands represents the real base address. Otherwise this
781 routine may return the index register instead of the base register.
783 That may cause us to believe no aliasing was possible, when in
784 fact aliasing is possible.
786 We use a few simple tests to guess the base register. Additional
787 tests can certainly be added. For example, if one of the operands
788 is a shift or multiply, then it must be the index register and the
789 other operand is the base register. */
791 /* If either operand is known to be a pointer, then use it
792 to determine the base term. */
793 if (REG_P (tmp1) && REGNO_POINTER_FLAG (REGNO (tmp1)))
794 return find_base_term (tmp1);
796 if (REG_P (tmp2) && REGNO_POINTER_FLAG (REGNO (tmp2)))
797 return find_base_term (tmp2);
799 /* Neither operand was known to be a pointer. Go ahead and find the
800 base term for both operands. */
801 tmp1 = find_base_term (tmp1);
802 tmp2 = find_base_term (tmp2);
804 /* If either base term is named object or a special address
805 (like an argument or stack reference), then use it for the
808 && (GET_CODE (tmp1) == SYMBOL_REF
809 || GET_CODE (tmp1) == LABEL_REF
810 || (GET_CODE (tmp1) == ADDRESS
811 && GET_MODE (tmp1) != VOIDmode)))
815 && (GET_CODE (tmp2) == SYMBOL_REF
816 || GET_CODE (tmp2) == LABEL_REF
817 || (GET_CODE (tmp2) == ADDRESS
818 && GET_MODE (tmp2) != VOIDmode)))
821 /* We could not determine which of the two operands was the
822 base register and which was the index. So we can determine
823 nothing from the base alias check. */
828 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
829 return REG_BASE_VALUE (XEXP (x, 0));
841 /* Return 0 if the addresses X and Y are known to point to different
842 objects, 1 if they might be pointers to the same object. */
845 base_alias_check (x, y, x_mode, y_mode)
/* Return 0 if addresses X and Y are known to point to different
   objects, 1 if they might point to the same object (see comment
   above).  NOTE(review): the return type, the declarations of X/Y and
   of X_C/Y_C, braces, and several return statements are missing from
   this view of the file.  */
847 enum machine_mode x_mode, y_mode;
849 rtx x_base = find_base_term (x);
850 rtx y_base = find_base_term (y);
852 /* If the address itself has no known base see if a known equivalent
853 value has one. If either address still has no known base, nothing
854 is known about aliasing. */
859 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
862 x_base = find_base_term (x_c);
870 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
873 y_base = find_base_term (y_c);
878 /* If the base addresses are equal nothing is known about aliasing. */
879 if (rtx_equal_p (x_base, y_base))
882 /* The base addresses of the read and write are different expressions.
883 If they are both symbols and they are not accessed via AND, there is
884 no conflict. We can bring knowledge of object alignment into play
885 here. For example, on alpha, "char a, b;" can alias one another,
886 though "char a; long b;" cannot. */
887 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
889 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
891 if (GET_CODE (x) == AND
892 && (GET_CODE (XEXP (x, 1)) != CONST_INT
893 || GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
895 if (GET_CODE (y) == AND
896 && (GET_CODE (XEXP (y, 1)) != CONST_INT
897 || GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
899 /* Differing symbols never alias. */
903 /* If one address is a stack reference there can be no alias:
904 stack references using different base registers do not alias,
905 a stack reference can not alias a parameter, and a stack reference
906 can not alias a global. */
907 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
908 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
911 if (! flag_argument_noalias)
914 if (flag_argument_noalias > 1)
917 /* Weak noalias assertion (arguments are distinct, but may match globals). */
918 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
921 /* Convert the address X into something we can use. This is done by returning
922 it unchanged unless it is a value; in the latter case we call cselib to get
923 a more useful rtx. */
929 struct elt_loc_list *l;
931 if (GET_CODE (x) != VALUE)
933 v = CSELIB_VAL_PTR (x);
934 for (l = v->locs; l; l = l->next)
935 if (CONSTANT_P (l->loc))
937 for (l = v->locs; l; l = l->next)
938 if (GET_CODE (l->loc) != REG && GET_CODE (l->loc) != MEM)
945 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
946 where SIZE is the size in bytes of the memory reference. If ADDR
947 is not modified by the memory reference then ADDR is returned. */
950 addr_side_effect_eval (addr, size, n_refs)
/* Compute the effective address after N_REFS references of SIZE bytes
   through an auto-modify address (see comment above).  NOTE(review):
   the return type, parameter declarations, braces, case labels
   (presumably PRE_INC/PRE_DEC/POST_INC/POST_DEC — missing from this
   view), the default case, and the return statement are not visible.  */
957 switch (GET_CODE (addr))
960 offset = (n_refs + 1) * size;
963 offset = -(n_refs + 1) * size;
966 offset = n_refs * size;
969 offset = -n_refs * size;
/* Build (plus ADDR offset) for a nonzero offset; otherwise strip to
   the inner address.  */
977 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), GEN_INT (offset));
979 addr = XEXP (addr, 0);
984 /* Return nonzero if X and Y (memory addresses) could reference the
985 same location in memory. C is an offset accumulator. When
986 C is nonzero, we are testing aliases between X and Y + C.
987 XSIZE is the size in bytes of the X reference,
988 similarly YSIZE is the size in bytes for Y.
990 If XSIZE or YSIZE is zero, we do not know the amount of memory being
991 referenced (the reference was BLKmode), so make the most pessimistic
994 If XSIZE or YSIZE is negative, we may access memory outside the object
995 being referenced as a side effect. This can happen when using AND to
996 align memory references, as is done on the Alpha.
998 Nice to notice that varying addresses cannot conflict with fp if no
999 local variables had their addresses taken, but that's too hard now. */
1002 memrefs_conflict_p (xsize, x, ysize, y, c)
/* Return nonzero if the XSIZE-byte reference at X may overlap the
   YSIZE-byte reference at Y + C (see comment above).  NOTE(review):
   the return type, parameter declarations, braces, several return
   statements, and some case labels are missing from this view.  */
1007 if (GET_CODE (x) == VALUE)
1009 if (GET_CODE (y) == VALUE)
/* Strip HIGH/LO_SUM wrappers, then canonicalize, accounting for any
   auto-increment side effects on the address.  */
1011 if (GET_CODE (x) == HIGH)
1013 else if (GET_CODE (x) == LO_SUM)
1016 x = canon_rtx (addr_side_effect_eval (x, xsize, 0));
1017 if (GET_CODE (y) == HIGH)
1019 else if (GET_CODE (y) == LO_SUM)
1022 y = canon_rtx (addr_side_effect_eval (y, ysize, 0));
/* Identical addresses: overlap iff the offset C falls within either
   reference's extent (unknown sizes are pessimistic).  */
1024 if (rtx_equal_for_memref_p (x, y))
1026 if (xsize <= 0 || ysize <= 0)
1028 if (c >= 0 && xsize > c)
1030 if (c < 0 && ysize+c > 0)
1035 /* This code used to check for conflicts involving stack references and
1036 globals but the base address alias code now handles these cases. */
1038 if (GET_CODE (x) == PLUS)
1040 /* The fact that X is canonicalized means that this
1041 PLUS rtx is canonicalized. */
1042 rtx x0 = XEXP (x, 0);
1043 rtx x1 = XEXP (x, 1);
1045 if (GET_CODE (y) == PLUS)
1047 /* The fact that Y is canonicalized means that this
1048 PLUS rtx is canonicalized. */
1049 rtx y0 = XEXP (y, 0);
1050 rtx y1 = XEXP (y, 1);
/* Cancel a common term from both sides, or fold constant offsets
   into the accumulator C.  */
1052 if (rtx_equal_for_memref_p (x1, y1))
1053 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1054 if (rtx_equal_for_memref_p (x0, y0))
1055 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1056 if (GET_CODE (x1) == CONST_INT)
1058 if (GET_CODE (y1) == CONST_INT)
1059 return memrefs_conflict_p (xsize, x0, ysize, y0,
1060 c - INTVAL (x1) + INTVAL (y1));
1062 return memrefs_conflict_p (xsize, x0, ysize, y,
1065 else if (GET_CODE (y1) == CONST_INT)
1066 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1070 else if (GET_CODE (x1) == CONST_INT)
1071 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1073 else if (GET_CODE (y) == PLUS)
1075 /* The fact that Y is canonicalized means that this
1076 PLUS rtx is canonicalized. */
1077 rtx y0 = XEXP (y, 0);
1078 rtx y1 = XEXP (y, 1);
1080 if (GET_CODE (y1) == CONST_INT)
1081 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1086 if (GET_CODE (x) == GET_CODE (y))
1087 switch (GET_CODE (x))
/* Presumably the MULT case — case label missing from this view.  */
1091 /* Handle cases where we expect the second operands to be the
1092 same, and check only whether the first operand would conflict
1095 rtx x1 = canon_rtx (XEXP (x, 1));
1096 rtx y1 = canon_rtx (XEXP (y, 1));
1097 if (! rtx_equal_for_memref_p (x1, y1))
1099 x0 = canon_rtx (XEXP (x, 0));
1100 y0 = canon_rtx (XEXP (y, 0));
1101 if (rtx_equal_for_memref_p (x0, y0))
1102 return (xsize == 0 || ysize == 0
1103 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1105 /* Can't properly adjust our sizes. */
1106 if (GET_CODE (x1) != CONST_INT)
1108 xsize /= INTVAL (x1);
1109 ysize /= INTVAL (x1);
1111 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
/* Presumably the REG case — case label missing from this view.  */
1115 /* Are these registers known not to be equal? */
1116 if (alias_invariant)
1118 unsigned int r_x = REGNO (x), r_y = REGNO (y);
1119 rtx i_x, i_y; /* invariant relationships of X and Y */
1121 i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
1122 i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];
1124 if (i_x == 0 && i_y == 0)
1127 if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
1128 ysize, i_y ? i_y : y, c))
1137 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1138 as an access with indeterminate size. Assume that references
1139 besides AND are aligned, so if the size of the other reference is
1140 at least as large as the alignment, assume no other overlap. */
1141 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
1143 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1145 return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
1147 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
1149 /* ??? If we are indexing far enough into the array/structure, we
1150 may yet be able to determine that we can not overlap. But we
1151 also need to know that we are far enough from the end not to overlap
1152 a following reference, so we do nothing with that for now. */
1153 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1155 return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
1160 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
1162 c += (INTVAL (y) - INTVAL (x));
1163 return (xsize <= 0 || ysize <= 0
1164 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
/* Unwrap CONST on either side and recurse.  */
1167 if (GET_CODE (x) == CONST)
1169 if (GET_CODE (y) == CONST)
1170 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1171 ysize, canon_rtx (XEXP (y, 0)), c);
1173 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1176 if (GET_CODE (y) == CONST)
1177 return memrefs_conflict_p (xsize, x, ysize,
1178 canon_rtx (XEXP (y, 0)), c);
/* Fallback: conflict unless the addresses are provably equal and the
   offset lies outside both extents.  */
1181 return (xsize < 0 || ysize < 0
1182 || (rtx_equal_for_memref_p (x, y)
1183 && (xsize == 0 || ysize == 0
1184 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1191 /* Functions to compute memory dependencies.
1193 Since we process the insns in execution order, we can build tables
1194 to keep track of what registers are fixed (and not aliased), what registers
1195 are varying in known ways, and what registers are varying in unknown
1198 If both memory references are volatile, then there must always be a
1199 dependence between the two references, since their order can not be
1200 changed. A volatile and non-volatile reference can be interchanged
1203 A MEM_IN_STRUCT reference at a non-QImode non-AND varying address can never
1204 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We must
1205 allow QImode aliasing because the ANSI C standard allows character
1206 pointers to alias anything. We are assuming that characters are
1207 always QImode here. We also must allow AND addresses, because they may
1208 generate accesses outside the object being referenced. This is used to
1209 generate aligned addresses from unaligned addresses, for instance, the
1210 alpha storeqi_unaligned pattern. */
1212 /* Read dependence: X is read after read in MEM takes place. There can
1213 only be a dependence here if both reads are volatile. */
1216 read_dependence (mem, x)
1220 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1223 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1224 MEM2 is a reference to a structure at a varying address, or returns
1225 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1226 value is returned MEM1 and MEM2 can never alias. VARIES_P is used
1227 to decide whether or not an address may vary; it should return
1228 nonzero whenever variation is possible.
1229 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
1232 fixed_scalar_and_varying_struct_p (mem1, mem2, mem1_addr, mem2_addr, varies_p)
1234 rtx mem1_addr, mem2_addr;
1235 int (*varies_p) PARAMS ((rtx));
1237 if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1238 && !varies_p (mem1_addr) && varies_p (mem2_addr))
1239 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1243 if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1244 && varies_p (mem1_addr) && !varies_p (mem2_addr))
1245 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1252 /* Returns nonzero if something about the mode or address format MEM1
1253 indicates that it might well alias *anything*. */
1256 aliases_everything_p (mem)
1259 if (GET_MODE (mem) == QImode)
1260 /* ANSI C says that a `char*' can point to anything. */
1263 if (GET_CODE (XEXP (mem, 0)) == AND)
1264 /* If the address is an AND, its very hard to know at what it is
1265 actually pointing. */
1271 /* True dependence: X is read after store in MEM takes place. */
1274 true_dependence (mem, mem_mode, x, varies)
1276 enum machine_mode mem_mode;
1278 int (*varies) PARAMS ((rtx));
1280 register rtx x_addr, mem_addr;
/* Two volatile references always conflict, since their order can not
   be changed.  */
1282 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
/* MEMs in provably disjoint alias sets can never conflict.  */
1285 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1288 /* If X is an unchanging read, then it can't possibly conflict with any
1289 non-unchanging store. It may conflict with an unchanging write though,
1290 because there may be a single store to this address to initialize it.
1291 Just fall through to the code below to resolve the case where we have
1292 both an unchanging read and an unchanging write. This won't handle all
1293 cases optimally, but the possible performance loss should be
1295 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
/* A VOIDmode MEM_MODE means the caller supplied no mode; use MEM's own.  */
1298 if (mem_mode == VOIDmode)
1299 mem_mode = GET_MODE (mem);
1301 x_addr = get_addr (XEXP (x, 0));
1302 mem_addr = get_addr (XEXP (mem, 0));
/* First try the cheap base-register disambiguation ...  */
1304 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
/* ... then canonicalize both addresses and do the precise
   offset/size overlap test.  */
1307 x_addr = canon_rtx (x_addr);
1308 mem_addr = canon_rtx (mem_addr);
1310 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
1311 SIZE_FOR_MODE (x), x_addr, 0))
1314 if (aliases_everything_p (x))
1317 /* We cannot use aliases_everything_p to test MEM, since we must look
1318 at MEM_MODE, rather than GET_MODE (MEM). */
1319 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
1322 /* In true_dependence we also allow BLKmode to alias anything. Why
1323 don't we do this in anti_dependence and output_dependence? */
1324 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
/* Finally, a fixed-address scalar vs. varying-address struct pair is
   known not to conflict; anything else must be assumed to.  */
1327 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
1331 /* Returns non-zero if a write to X might alias a previous read from
1332 (or, if WRITEP is non-zero, a write to) MEM. */
1335 write_dependence_p (mem, x, writep)
1340 rtx x_addr, mem_addr;
/* Two volatile references always conflict.  */
1343 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
/* MEMs in provably disjoint alias sets can never conflict.  */
1346 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1349 /* If MEM is an unchanging read, then it can't possibly conflict with
1350 the store to X, because there is at most one store to MEM, and it must
1351 have occurred somewhere before MEM. */
1352 if (!writep && RTX_UNCHANGING_P (mem))
1355 x_addr = get_addr (XEXP (x, 0));
1356 mem_addr = get_addr (XEXP (mem, 0));
/* Cheap base-register disambiguation first, then the precise
   canonicalized overlap test, as in true_dependence above.  */
1358 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
1362 x_addr = canon_rtx (x_addr);
1363 mem_addr = canon_rtx (mem_addr);
1365 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
1366 SIZE_FOR_MODE (x), x_addr, 0))
1370 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
/* The fixed-scalar/varying-struct result only disambiguates when the
   other reference does not alias everything (QImode or AND address).  */
1373 return (!(fixed_scalar == mem && !aliases_everything_p (x))
1374 && !(fixed_scalar == x && !aliases_everything_p (mem)));
1377 /* Anti dependence: X is written after read in MEM takes place. */
1380 anti_dependence (mem, x)
/* Delegate to the common write-dependence test; WRITEP of zero means
   MEM is treated as a read.  */
1384 return write_dependence_p (mem, x, /*writep=*/0);
1387 /* Output dependence: X is written after store in MEM takes place. */
1390 output_dependence (mem, x)
/* Delegate to the common write-dependence test; WRITEP of one means
   MEM is treated as a write.  */
1394 return write_dependence_p (mem, x, /*writep=*/1);
1397 /* Returns non-zero if X might refer to something which is not
1398 local to the function and is not constant. */
1401 nonlocal_reference_p (x)
1405 register RTX_CODE code;
1408 code = GET_CODE (x);
/* For an insn, strip off the insn wrapper and examine its body.  */
1410 if (GET_RTX_CLASS (code) == 'i')
1412 /* Constant functions are constant. */
1413 if (code == CALL_INSN && CONST_CALL_P (x))
1416 code = GET_CODE (x);
1422 if (GET_CODE (SUBREG_REG (x)) == REG)
1424 /* Global registers are not local. */
1425 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
1426 && global_regs[REGNO (SUBREG_REG (x)) + SUBREG_WORD (x)])
1434 /* Global registers are not local. */
1435 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
1449 /* Constants in the function's constant pool are constant. */
1450 if (CONSTANT_POOL_ADDRESS_P (x))
1455 /* Recursion introduces no additional considerations. */
/* A call whose target's assembler name matches the current function's
   is a self-recursive call.  */
1456 if (GET_CODE (XEXP (x, 0)) == MEM
1457 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1458 && strcmp(XSTR (XEXP (XEXP (x, 0), 0), 0),
1459 IDENTIFIER_POINTER (
1460 DECL_ASSEMBLER_NAME (current_function_decl))) == 0)
1465 /* Be overly conservative and consider any volatile memory
1466 reference as not local. */
1467 if (MEM_VOLATILE_P (x))
1469 base = find_base_term (XEXP (x, 0));
1472 /* A Pmode ADDRESS could be a reference via the structure value
1473 address or static chain. Such memory references are nonlocal.
1475 Thus, we have to examine the contents of the ADDRESS to find
1476 out if this is a local reference or not. */
1477 if (GET_CODE (base) == ADDRESS
1478 && GET_MODE (base) == Pmode
1479 && (XEXP (base, 0) == stack_pointer_rtx
1480 || XEXP (base, 0) == arg_pointer_rtx
1481 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1482 || XEXP (base, 0) == hard_frame_pointer_rtx
1484 || XEXP (base, 0) == frame_pointer_rtx))
1486 /* Constants in the function's constant pool are constant. */
1487 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
1500 /* Recursively scan the operands of this expression. */
1503 register const char *fmt = GET_RTX_FORMAT (code);
/* Walk operands right-to-left; 'e' operands are single expressions,
   'E' operands are vectors of expressions.  */
1506 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1510 if (nonlocal_reference_p (XEXP (x, i)))
1513 else if (fmt[i] == 'E')
1516 for (j = 0; j < XVECLEN (x, i); j++)
1517 if (nonlocal_reference_p (XVECEXP (x, i, j)))
1526 /* Mark the function if it is constant. */
1529 mark_constant_function ()
/* Skip functions we may not safely mark: public functions (callers
   elsewhere may rely on other semantics), ones already marked
   readonly, volatile functions, and void-returning functions (a
   constant function's only effect is its return value).  */
1533 if (TREE_PUBLIC (current_function_decl)
1534 || TREE_READONLY (current_function_decl)
1535 || TREE_THIS_VOLATILE (current_function_decl)
1536 || TYPE_MODE (TREE_TYPE (current_function_decl)) == VOIDmode)
1539 /* Determine if this is a constant function. */
/* Any real insn that references nonlocal, nonconstant state
   disqualifies the function.  */
1541 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1542 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1543 && nonlocal_reference_p (insn))
1546 /* Mark the function. */
1548 TREE_READONLY (current_function_decl) = 1;
/* Set of hard registers that may carry incoming pointer arguments;
   computed once below and consulted each pass of init_alias_analysis.  */
1552 static HARD_REG_SET argument_registers;
/* NOTE(review): the lines below are the body of a one-time
   initialization routine whose header is elided from this view.  */
1559 #ifndef OUTGOING_REGNO
1560 #define OUTGOING_REGNO(N) N
1562 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1563 /* Check whether this register can hold an incoming pointer
1564 argument. FUNCTION_ARG_REGNO_P tests outgoing register
1565 numbers, so translate if necessary due to register windows. */
1566 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
1567 && HARD_REGNO_MODE_OK (i, Pmode))
1568 SET_HARD_REG_BIT (argument_registers, i);
/* The alias-set splay tree maps alias set numbers to their entries.  */
1570 alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
/* Per-function entry point: builds reg_known_value, reg_known_equiv_p
   and reg_base_value by iterating over the insn stream until the alias
   information reaches a fixed point (capped at MAX_ALIAS_LOOP_PASSES).  */
1574 init_alias_analysis ()
1576 int maxreg = max_reg_num ();
1579 register unsigned int ui;
1582 reg_known_value_size = maxreg;
/* Both arrays are biased by FIRST_PSEUDO_REGISTER so they can be
   indexed directly by pseudo register number; end_alias_analysis
   must undo the same bias before freeing.  */
1585 = (rtx *) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (rtx))
1586 - FIRST_PSEUDO_REGISTER;
1588 = (char*) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (char))
1589 - FIRST_PSEUDO_REGISTER;
1591 /* Overallocate reg_base_value to allow some growth during loop
1592 optimization. Loop unrolling can create a large number of
1594 reg_base_value_size = maxreg * 2;
1595 reg_base_value = (rtx *) xcalloc (reg_base_value_size, sizeof (rtx));
/* Register with the garbage collector so the rtx values stay live.  */
1597 ggc_add_rtx_root (reg_base_value, reg_base_value_size);
1599 new_reg_base_value = (rtx *) xmalloc (reg_base_value_size * sizeof (rtx));
1600 reg_seen = (char *) xmalloc (reg_base_value_size);
1601 if (! reload_completed && flag_unroll_loops)
1603 /* ??? Why are we realloc'ing if we're just going to zero it? */
1604 alias_invariant = (rtx *)xrealloc (alias_invariant,
1605 reg_base_value_size * sizeof (rtx));
1606 bzero ((char *)alias_invariant, reg_base_value_size * sizeof (rtx));
1610 /* The basic idea is that each pass through this loop will use the
1611 "constant" information from the previous pass to propagate alias
1612 information through another level of assignments.
1614 This could get expensive if the assignment chains are long. Maybe
1615 we should throttle the number of iterations, possibly based on
1616 the optimization level or flag_expensive_optimizations.
1618 We could propagate more information in the first pass by making use
1619 of REG_N_SETS to determine immediately that the alias information
1620 for a pseudo is "constant".
1622 A program with an uninitialized variable can cause an infinite loop
1623 here. Instead of doing a full dataflow analysis to detect such problems
1624 we just cap the number of iterations for the loop.
1626 The state of the arrays for the set chain in question does not matter
1627 since the program has undefined behavior. */
1632 /* Assume nothing will change this iteration of the loop. */
1635 /* We want to assign the same IDs each iteration of this loop, so
1636 start counting from zero each iteration of the loop. */
1639 /* We're at the start of the function each iteration through the
1640 loop, so we're copying arguments. */
1641 copying_arguments = 1;
1643 /* Wipe the potential alias information clean for this pass. */
1644 bzero ((char *) new_reg_base_value, reg_base_value_size * sizeof (rtx));
1646 /* Wipe the reg_seen array clean. */
1647 bzero ((char *) reg_seen, reg_base_value_size);
1649 /* Mark all hard registers which may contain an address.
1650 The stack, frame and argument pointers may contain an address.
1651 An argument register which can hold a Pmode value may contain
1652 an address even if it is not in BASE_REGS.
1654 The address expression is VOIDmode for an argument and
1655 Pmode for other registers. */
1657 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1658 if (TEST_HARD_REG_BIT (argument_registers, i))
1659 new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
1660 gen_rtx_REG (Pmode, i));
1662 new_reg_base_value[STACK_POINTER_REGNUM]
1663 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
1664 new_reg_base_value[ARG_POINTER_REGNUM]
1665 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
1666 new_reg_base_value[FRAME_POINTER_REGNUM]
1667 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
1668 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1669 new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
1670 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
/* The struct-value and static-chain registers also carry addresses
   on entry, when the target passes them in registers.  */
1672 if (struct_value_incoming_rtx
1673 && GET_CODE (struct_value_incoming_rtx) == REG)
1674 new_reg_base_value[REGNO (struct_value_incoming_rtx)]
1675 = gen_rtx_ADDRESS (Pmode, struct_value_incoming_rtx);
1677 if (static_chain_rtx
1678 && GET_CODE (static_chain_rtx) == REG)
1679 new_reg_base_value[REGNO (static_chain_rtx)]
1680 = gen_rtx_ADDRESS (Pmode, static_chain_rtx);
1682 /* Walk the insns adding values to the new_reg_base_value array. */
1683 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1685 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
1686 if (prologue_epilogue_contains (insn))
1689 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1692 /* If this insn has a noalias note, process it.  Otherwise,
1693 scan for sets. A simple set will have no side effects
1694 which could change the base value of any other register. */
1696 if (GET_CODE (PATTERN (insn)) == SET
1697 && (find_reg_note (insn, REG_NOALIAS, NULL_RTX)))
1698 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
1700 note_stores (PATTERN (insn), record_set, NULL);
/* Record a known value for a pseudo set exactly once (REG_EQUAL)
   or carrying a REG_EQUIV note, provided the note's value does
   not mention the destination itself.  */
1702 set = single_set (insn);
1705 && GET_CODE (SET_DEST (set)) == REG
1706 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1707 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1708 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
1709 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1710 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
1711 && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
1713 int regno = REGNO (SET_DEST (set));
1714 reg_known_value[regno] = XEXP (note, 0);
1715 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
/* Past the FUNCTION_BEG note we are no longer copying arguments.  */
1718 else if (GET_CODE (insn) == NOTE
1719 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1720 copying_arguments = 0;
1723 /* Now propagate values from new_reg_base_value to reg_base_value. */
1724 for (ui = 0; ui < reg_base_value_size; ui++)
1726 if (new_reg_base_value[ui]
1727 && new_reg_base_value[ui] != reg_base_value[ui]
1728 && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
1730 reg_base_value[ui] = new_reg_base_value[ui];
1735 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
1737 /* Fill in the remaining entries. */
/* A pseudo with no note-derived value is "known" to be itself.  */
1738 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
1739 if (reg_known_value[i] == 0)
1740 reg_known_value[i] = regno_reg_rtx[i];
1742 /* Simplify the reg_base_value array so that no register refers to
1743 another register, except to special registers indirectly through
1744 ADDRESS expressions.
1746 In theory this loop can take as long as O(registers^2), but unless
1747 there are very long dependency chains it will run in close to linear
1750 This loop may not be needed any longer now that the main loop does
1751 a better job at propagating alias information. */
1757 for (ui = 0; ui < reg_base_value_size; ui++)
1759 rtx base = reg_base_value[ui];
1760 if (base && GET_CODE (base) == REG)
1762 unsigned int base_regno = REGNO (base);
1763 if (base_regno == ui) /* register set from itself */
1764 reg_base_value[ui] = 0;
1766 reg_base_value[ui] = reg_base_value[base_regno];
1771 while (changed && pass < MAX_ALIAS_LOOP_PASSES);
1774 free (new_reg_base_value);
1775 new_reg_base_value = 0;
/* Release all per-function alias-analysis storage allocated by
   init_alias_analysis and reset the globals to their idle state.  */
1781 end_alias_analysis ()
/* reg_known_value and reg_known_equiv_p were allocated with a
   -FIRST_PSEUDO_REGISTER bias (see init_alias_analysis), so add the
   bias back before freeing.  */
1783 free (reg_known_value + FIRST_PSEUDO_REGISTER);
1784 reg_known_value = 0;
1785 reg_known_value_size = 0;
1786 free (reg_known_equiv_p + FIRST_PSEUDO_REGISTER);
1787 reg_known_equiv_p = 0;
/* Unregister the GC root added in init_alias_analysis, then free.  */
1791 ggc_del_root (reg_base_value);
1792 free (reg_base_value);
1795 reg_base_value_size = 0;
/* alias_invariant is only allocated when unrolling before reload.  */
1796 if (alias_invariant)
1798 free (alias_invariant);
1799 alias_invariant = 0;