1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
32 static rtx canon_rtx PROTO((rtx));
33 static int rtx_equal_for_memref_p PROTO((rtx, rtx));
34 static rtx find_symbolic_term PROTO((rtx));
35 static int memrefs_conflict_p PROTO((int, rtx, int, rtx,
37 static void record_set PROTO((rtx, rtx));
38 static rtx find_base_term PROTO((rtx));
39 static int base_alias_check PROTO((rtx, rtx, enum machine_mode,
41 static rtx find_base_value PROTO((rtx));
43 /* Set up all info needed to perform alias analysis on memory references. */
45 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
47 /* Perform a basic sanity check. Namely, that there are
48 no alias sets if we're not doing strict aliasing. This helps
49 to catch bugs whereby someone uses PUT_CODE, but doesn't clear
50 MEM_ALIAS_SET, or where a MEM is allocated in some way other
51 than by the use of gen_rtx_MEM, and the MEM_ALIAS_SET is not
53 #ifdef ENABLE_CHECKING
54 #define CHECK_ALIAS_SETS_FOR_CONSISTENCY(MEM1, MEM2) \
55 (!flag_strict_aliasing \
56 && (MEM_ALIAS_SET (MEM1) || MEM_ALIAS_SET (MEM2)) \
59 #define CHECK_ALIAS_SETS_FOR_CONSISTENCY(MEM1, MEM2) ((void)0)
62 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
63 different alias sets. We ignore alias sets in functions making use
64 of variable arguments because the va_arg macros on some systems are
66 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
67 (CHECK_ALIAS_SETS_FOR_CONSISTENCY(MEM1, MEM2), \
68 MEM_ALIAS_SET (MEM1) && MEM_ALIAS_SET (MEM2) \
69 && MEM_ALIAS_SET (MEM1) != MEM_ALIAS_SET (MEM2) \
70 && !current_function_stdarg && !current_function_varargs)
72 /* Cap the number of passes we make over the insns propagating alias
73 information through set chains.
75 10 is a completely arbitrary choice. */
76 #define MAX_ALIAS_LOOP_PASSES 10
78 /* reg_base_value[N] gives an address to which register N is related.
79 If all sets after the first add or subtract to the current value
80 or otherwise modify it so it does not point to a different top level
81 object, reg_base_value[N] is equal to the address part of the source
84 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
85 expressions represent certain special values: function arguments and
86 the stack, frame, and argument pointers. The contents of an address
87 expression are not used (but they are descriptive for debugging);
88 only the address and mode matter. Pointer equality, not rtx_equal_p,
89 determines whether two ADDRESS expressions refer to the same base
90 address. The mode determines whether it is a function argument or
91 other special value. */
94 rtx *new_reg_base_value;
95 unsigned int reg_base_value_size; /* size of reg_base_value array */
96 #define REG_BASE_VALUE(X) \
97 ((unsigned) REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
99 /* Vector of known invariant relationships between registers. Set in
100 loop unrolling. Indexed by register number, if nonzero the value
101 is an expression describing this register in terms of another.
103 The length of this array is REG_BASE_VALUE_SIZE.
105 Because this array contains only pseudo registers it has no effect
107 static rtx *alias_invariant;
109 /* Vector indexed by N giving the initial (unchanging) value known
110 for pseudo-register N. */
111 rtx *reg_known_value;
113 /* Indicates number of valid entries in reg_known_value. */
114 static int reg_known_value_size;
116 /* Vector recording for each reg_known_value whether it is due to a
117 REG_EQUIV note. Future passes (viz., reload) may replace the
118 pseudo with the equivalent expression and so we account for the
119 dependences that would be introduced if that happens. */
120 /* ??? This is a problem only on the Convex. The REG_EQUIV notes created in
121 assign_parms mention the arg pointer, and there are explicit insns in the
122 RTL that modify the arg pointer. Thus we must ensure that such insns don't
123 get scheduled across each other because that would invalidate the REG_EQUIV
124 notes. One could argue that the REG_EQUIV notes are wrong, but solving
125 the problem in the scheduler will likely give better code, so we do it
127 char *reg_known_equiv_p;
129 /* True when scanning insns from the start of the rtl to the
130 NOTE_INSN_FUNCTION_BEG note. */
132 static int copying_arguments;
134 /* Inside SRC, the source of a SET, find a base address. */
/* NOTE(review): this file is a lossy extraction -- interior lines
   (declarations, braces, case labels, fall-through returns) are missing.
   Code text is preserved verbatim below; only comments are added.  */
/* Walks SRC recursively and returns a base-address rtx (an ADDRESS,
   SYMBOL_REF or LABEL_REF) when one can be deduced; presumably returns
   src itself or 0 in the remaining (missing) cases -- TODO confirm
   against a complete copy of this file.  */
137 find_base_value (src)
140 switch (GET_CODE (src))
/* Case label for REG is among the missing lines here.  */
147 /* At the start of a function argument registers have known base
148 values which may be lost later. Returning an ADDRESS
149 expression here allows optimization based on argument values
150 even when the argument registers are used for other purposes. */
151 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
152 return new_reg_base_value[REGNO (src)];
154 /* If a pseudo has a known base value, return it. Do not do this
155 for hard regs since it can result in a circular dependency
156 chain for registers which have values at function entry.
158 The test above is not sufficient because the scheduler may move
159 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
160 if (REGNO (src) >= FIRST_PSEUDO_REGISTER
161 && (unsigned) REGNO (src) < reg_base_value_size
162 && reg_base_value[REGNO (src)])
163 return reg_base_value[REGNO (src)];
168 /* Check for an argument passed in memory. Only record in the
169 copying-arguments block; it is too hard to track changes
171 if (copying_arguments
172 && (XEXP (src, 0) == arg_pointer_rtx
173 || (GET_CODE (XEXP (src, 0)) == PLUS
174 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
175 return gen_rtx_ADDRESS (VOIDmode, src);
/* CONST case: presumably strips the CONST wrapper unless the operand
   is a PLUS/MINUS -- the intervening lines are missing.  */
180 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
187 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
189 /* If either operand is a REG, then see if we already have
190 a known value for it. */
191 if (GET_CODE (src_0) == REG)
193 temp = find_base_value (src_0);
198 if (GET_CODE (src_1) == REG)
200 temp = find_base_value (src_1);
205 /* Guess which operand is the base address.
207 If either operand is a symbol, then it is the base. If
208 either operand is a CONST_INT, then the other is the base. */
210 if (GET_CODE (src_1) == CONST_INT
211 || GET_CODE (src_0) == SYMBOL_REF
212 || GET_CODE (src_0) == LABEL_REF
213 || GET_CODE (src_0) == CONST)
214 return find_base_value (src_0);
216 if (GET_CODE (src_0) == CONST_INT
217 || GET_CODE (src_1) == SYMBOL_REF
218 || GET_CODE (src_1) == LABEL_REF
219 || GET_CODE (src_1) == CONST)
220 return find_base_value (src_1);
222 /* This might not be necessary anymore.
224 If either operand is a REG that is a known pointer, then it
226 if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
227 return find_base_value (src_0);
229 if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
230 return find_base_value (src_1);
/* LO_SUM case follows (case label missing from this extraction).  */
236 /* The standard form is (lo_sum reg sym) so look only at the
238 return find_base_value (XEXP (src, 1));
/* AND case: an AND address is used for aligned accesses (e.g. Alpha);
   a nonzero constant mask means the base comes from operand 0.  */
241 /* If the second operand is constant set the base
242 address to the first operand. */
243 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
244 return find_base_value (XEXP (src, 0));
248 case SIGN_EXTEND: /* used for NT/Alpha pointers */
250 return find_base_value (XEXP (src, 0));
259 /* Called from init_alias_analysis indirectly through note_stores. */
/* NOTE(review): lossy extraction -- interior lines (function header
   declarations, braces, case labels) are missing.  Code kept verbatim.  */
261 /* while scanning insns to find base values, reg_seen[N] is nonzero if
262 register N has been set in this function. */
263 static char *reg_seen;
265 /* Addresses which are known not to alias anything else are identified
266 by a unique integer. */
267 static int unique_id;
/* Records, in new_reg_base_value[], the base value implied by a SET or
   CLOBBER of DEST.  SET is the SET rtx (or NULL_RTX for a REG_NOALIAS
   destination -- see the caller in init_alias_analysis).  */
270 record_set (dest, set)
276 if (GET_CODE (dest) != REG)
279 regno = REGNO (dest);
/* The extraction of SRC from SET (and the NULL_RTX / unique-id path
   setup) occupies the missing lines around here.  */
283 /* A CLOBBER wipes out any old value but does not prevent a previously
284 unset register from acquiring a base address (i.e. reg_seen is not
286 if (GET_CODE (set) == CLOBBER)
288 new_reg_base_value[regno] = 0;
297 new_reg_base_value[regno] = 0;
/* A REG_NOALIAS destination gets a fresh ADDRESS with a unique id,
   marking it as aliasing nothing else.  */
301 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
302 GEN_INT (unique_id++));
306 /* This is not the first set. If the new value is not related to the
307 old value, forget the base value. Note that the following code is
309 extern int x, y; int *p = &x; p += (&y-&x);
310 ANSI C does not allow computing the difference of addresses
311 of distinct top level objects. */
312 if (new_reg_base_value[regno])
313 switch (GET_CODE (src))
/* PLUS/MINUS case: keep the base only if DEST itself is one operand.  */
318 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
319 new_reg_base_value[regno] = 0;
/* AND case: keep the base only for (and DEST const_int) alignment ops.  */
322 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
323 new_reg_base_value[regno] = 0;
/* default case: unrelated value, forget the base.  */
326 new_reg_base_value[regno] = 0;
329 /* If this is the first set of a register, record the value. */
330 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
331 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
332 new_reg_base_value[regno] = find_base_value (src);
337 /* Called from loop optimization when a new pseudo-register is created. */
/* NOTE(review): lossy extraction -- declarations and braces missing.
   Records the base value of pseudo REGNO from VAL; if INVARIANT is
   nonzero, VAL is also recorded as an invariant relationship usable to
   prove two registers distinct.  */
339 record_base_value (regno, val, invariant)
344 if ((unsigned) regno >= reg_base_value_size)
347 /* If INVARIANT is true then this value also describes an invariant
348 relationship which can be used to deduce that two registers with
349 unknown values are different. */
350 if (invariant && alias_invariant)
351 alias_invariant[regno] = val;
/* A plain REG copies the source register's base value directly.  */
353 if (GET_CODE (val) == REG)
355 if ((unsigned) REGNO (val) < reg_base_value_size
357 reg_base_value[regno] = reg_base_value[REGNO (val)];
361 reg_base_value[regno] = find_base_value (val);
/* NOTE(review): this is the interior of canon_rtx; the function header
   (around original line 36x) is missing from this extraction.
   canon_rtx canonicalizes X for memory-reference comparison: registers
   are replaced by their reg_known_value equivalents, constant offsets
   in PLUS are folded, and MEM addresses are canonicalized (the MEM is
   copied so the original rtx is left untouched).  */
368 /* Recursively look for equivalences. */
369 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
370 && REGNO (x) < reg_known_value_size)
371 return reg_known_value[REGNO (x)] == x
372 ? x : canon_rtx (reg_known_value[REGNO (x)]);
373 else if (GET_CODE (x) == PLUS)
375 rtx x0 = canon_rtx (XEXP (x, 0));
376 rtx x1 = canon_rtx (XEXP (x, 1));
378 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
380 /* We can tolerate LO_SUMs being offset here; these
381 rtl are used for nothing other than comparisons. */
382 if (GET_CODE (x0) == CONST_INT)
383 return plus_constant_for_output (x1, INTVAL (x0))
385 else if (GET_CODE (x1) == CONST_INT)
385 return plus_constant_for_output (x0, INTVAL (x1));
386 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
389 /* This gives us much better alias analysis when called from
390 the loop optimizer. Note we want to leave the original
391 MEM alone, but need to return the canonicalized MEM with
392 all the flags with their original values. */
393 else if (GET_CODE (x) == MEM)
395 rtx addr = canon_rtx (XEXP (x, 0));
396 if (addr != XEXP (x, 0))
398 rtx new = gen_rtx_MEM (GET_MODE (x), addr);
399 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
400 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
401 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
402 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
409 /* Return 1 if X and Y are identical-looking rtx's.
411 We use the data in reg_known_value above to see if two registers with
412 different numbers are, in fact, equivalent. */
/* NOTE(review): lossy extraction -- return type, parameter declarations,
   some early returns and the tail of the format loop are missing.
   Code kept verbatim; comments only added.  */
415 rtx_equal_for_memref_p (x, y)
420 register enum rtx_code code;
/* Two null pointers compare equal; one null, one non-null do not.  */
423 if (x == 0 && y == 0)
425 if (x == 0 || y == 0)
/* A fast pointer-identity check presumably sits in the missing lines
   here, along with code = GET_CODE (x) -- TODO confirm.  */
434 /* Rtx's of different codes cannot be equal. */
435 if (code != GET_CODE (y))
438 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
439 (REG:SI x) and (REG:HI x) are NOT equivalent. */
441 if (GET_MODE (x) != GET_MODE (y))
444 /* REG, LABEL_REF, and SYMBOL_REF can be compared nonrecursively. */
447 return REGNO (x) == REGNO (y);
448 if (code == LABEL_REF)
449 return XEXP (x, 0) == XEXP (y, 0);
450 if (code == SYMBOL_REF)
451 return XSTR (x, 0) == XSTR (y, 0);
452 if (code == CONST_INT)
453 return INTVAL (x) == INTVAL (y);
454 if (code == ADDRESSOF)
455 return REGNO (XEXP (x, 0)) == REGNO (XEXP (y, 0)) && XINT (x, 1) == XINT (y, 1);
457 /* For commutative operations, the RTX match if the operand match in any
458 order. Also handle the simple binary and unary cases without a loop. */
459 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
460 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
461 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
462 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
463 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
464 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
465 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
466 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
467 else if (GET_RTX_CLASS (code) == '1')
468 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
470 /* Compare the elements. If any pair of corresponding elements
471 fail to match, return 0 for the whole things.
473 Limit cases to types which actually appear in addresses. */
475 fmt = GET_RTX_FORMAT (code);
476 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* Format-letter dispatch ('i', 'E', 'e', '0') -- the switch/case lines
   themselves are among the missing lines.  */
481 if (XINT (x, i) != XINT (y, i))
486 /* Two vectors must have the same length. */
487 if (XVECLEN (x, i) != XVECLEN (y, i))
490 /* And the corresponding elements must match. */
491 for (j = 0; j < XVECLEN (x, i); j++)
492 if (rtx_equal_for_memref_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
497 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
501 /* This can happen for an asm which clobbers memory. */
505 /* It is believed that rtx's at this level will never
506 contain anything but integers and other rtx's,
507 except for within LABEL_REFs and SYMBOL_REFs. */
515 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
516 X and return it, or return 0 if none found. */
/* NOTE(review): lossy extraction -- return type, declarations, braces
   and the vector ('E') branch body are missing.  Code kept verbatim.  */
519 find_symbolic_term (x)
523 register enum rtx_code code;
/* Direct hit: X itself is symbolic; objects ('o' class) other than
   SYMBOL_REF/LABEL_REF cannot contain one, so presumably return 0.  */
527 if (code == SYMBOL_REF || code == LABEL_REF)
529 if (GET_RTX_CLASS (code) == 'o')
532 fmt = GET_RTX_FORMAT (code);
533 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
539 t = find_symbolic_term (XEXP (x, i));
543 else if (fmt[i] == 'E')
/* NOTE(review): this is the interior of find_base_term; the function
   header is missing from this extraction.  find_base_term is the
   address-side analogue of find_base_value: it reduces address X to its
   base term (REG_BASE_VALUE, or the recursion through extensions,
   HIGH/LO_SUM, CONST and PLUS/MINUS).  */
553 switch (GET_CODE (x))
556 return REG_BASE_VALUE (x);
559 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
565 return find_base_term (XEXP (x, 0));
569 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
576 rtx tmp = find_base_term (XEXP (x, 0));
/* If operand 0 yields no base, fall back to operand 1.  */
579 return find_base_term (XEXP (x, 1));
/* AND case: aligned access of the form (and REG const_int).  */
583 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
584 return REG_BASE_VALUE (XEXP (x, 0));
596 /* Return 0 if the addresses X and Y are known to point to different
597 objects, 1 if they might be pointers to the same object. */
/* NOTE(review): lossy extraction -- return type, some parameter
   declarations, braces and several return statements are missing.
   X_MODE/Y_MODE are the machine modes of the two memory accesses.  */
600 base_alias_check (x, y, x_mode, y_mode)
602 enum machine_mode x_mode, y_mode;
604 rtx x_base = find_base_term (x);
605 rtx y_base = find_base_term (y);
607 /* If the address itself has no known base see if a known equivalent
608 value has one. If either address still has no known base, nothing
609 is known about aliasing. */
613 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
615 x_base = find_base_term (x_c);
623 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
625 y_base = find_base_term (y_c);
630 /* If the base addresses are equal nothing is known about aliasing. */
631 if (rtx_equal_p (x_base, y_base))
634 /* The base addresses of the read and write are different expressions.
635 If they are both symbols and they are not accessed via AND, there is
636 no conflict. We can bring knowledge of object alignment into play
637 here. For example, on alpha, "char a, b;" can alias one another,
638 though "char a; long b;" cannot. */
639 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
641 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
/* An AND address may access past the mask-rounded region, so the other
   reference conflicts unless its mode covers the alignment span.  */
643 if (GET_CODE (x) == AND
644 && (GET_CODE (XEXP (x, 1)) != CONST_INT
645 || GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
647 if (GET_CODE (y) == AND
648 && (GET_CODE (XEXP (y, 1)) != CONST_INT
649 || GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
653 /* If one address is a stack reference there can be no alias:
654 stack references using different base registers do not alias,
655 a stack reference can not alias a parameter, and a stack reference
656 can not alias a global. */
657 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
658 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
661 if (! flag_argument_noalias)
664 if (flag_argument_noalias > 1)
667 /* Weak noalias assertion (arguments are distinct, but may match globals). */
668 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
671 /* Return nonzero if X and Y (memory addresses) could reference the
672 same location in memory. C is an offset accumulator. When
673 C is nonzero, we are testing aliases between X and Y + C.
674 XSIZE is the size in bytes of the X reference,
675 similarly YSIZE is the size in bytes for Y.
677 If XSIZE or YSIZE is zero, we do not know the amount of memory being
678 referenced (the reference was BLKmode), so make the most pessimistic
681 If XSIZE or YSIZE is negative, we may access memory outside the object
682 being referenced as a side effect. This can happen when using AND to
683 align memory references, as is done on the Alpha.
685 Nice to notice that varying addresses cannot conflict with fp if no
686 local variables had their addresses taken, but that's too hard now. */
/* NOTE(review): lossy extraction -- return type, parameter declarations,
   braces and several return/strip statements are missing.  Code kept
   verbatim; comments only added.  */
690 memrefs_conflict_p (xsize, x, ysize, y, c)
/* HIGH/LO_SUM wrappers are presumably stripped here so the comparison
   sees the underlying address -- the strip expressions are missing.  */
695 if (GET_CODE (x) == HIGH)
697 else if (GET_CODE (x) == LO_SUM)
701 if (GET_CODE (y) == HIGH)
703 else if (GET_CODE (y) == LO_SUM)
/* Identical addresses conflict iff the offset window [c, c+size)
   overlaps (unknown sizes conservatively conflict).  */
708 if (rtx_equal_for_memref_p (x, y))
710 if (xsize <= 0 || ysize <= 0)
712 if (c >= 0 && xsize > c)
714 if (c < 0 && ysize+c > 0)
719 /* This code used to check for conflicts involving stack references and
720 globals but the base address alias code now handles these cases. */
722 if (GET_CODE (x) == PLUS)
724 /* The fact that X is canonicalized means that this
725 PLUS rtx is canonicalized. */
726 rtx x0 = XEXP (x, 0);
727 rtx x1 = XEXP (x, 1);
729 if (GET_CODE (y) == PLUS)
731 /* The fact that Y is canonicalized means that this
732 PLUS rtx is canonicalized. */
733 rtx y0 = XEXP (y, 0);
734 rtx y1 = XEXP (y, 1);
736 if (rtx_equal_for_memref_p (x1, y1))
737 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
738 if (rtx_equal_for_memref_p (x0, y0))
739 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
740 if (GET_CODE (x1) == CONST_INT)
742 if (GET_CODE (y1) == CONST_INT)
743 return memrefs_conflict_p (xsize, x0, ysize, y0,
744 c - INTVAL (x1) + INTVAL (y1));
746 return memrefs_conflict_p (xsize, x0, ysize, y,
749 else if (GET_CODE (y1) == CONST_INT)
750 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
754 else if (GET_CODE (x1) == CONST_INT)
755 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
757 else if (GET_CODE (y) == PLUS)
759 /* The fact that Y is canonicalized means that this
760 PLUS rtx is canonicalized. */
761 rtx y0 = XEXP (y, 0);
762 rtx y1 = XEXP (y, 1);
764 if (GET_CODE (y1) == CONST_INT)
765 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
770 if (GET_CODE (x) == GET_CODE (y))
771 switch (GET_CODE (x))
/* MULT case: scaled (array-index) addresses.  */
775 /* Handle cases where we expect the second operands to be the
776 same, and check only whether the first operand would conflict
779 rtx x1 = canon_rtx (XEXP (x, 1));
780 rtx y1 = canon_rtx (XEXP (y, 1));
781 if (! rtx_equal_for_memref_p (x1, y1))
783 x0 = canon_rtx (XEXP (x, 0));
784 y0 = canon_rtx (XEXP (y, 0));
785 if (rtx_equal_for_memref_p (x0, y0))
786 return (xsize == 0 || ysize == 0
787 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
789 /* Can't properly adjust our sizes. */
790 if (GET_CODE (x1) != CONST_INT)
792 xsize /= INTVAL (x1);
793 ysize /= INTVAL (x1);
795 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
799 /* Are these registers known not to be equal? */
/* REG case: consult alias_invariant (loop-unrolling equivalences).  */
802 unsigned int r_x = REGNO (x), r_y = REGNO (y);
803 rtx i_x, i_y; /* invariant relationships of X and Y */
805 i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
806 i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];
808 if (i_x == 0 && i_y == 0)
811 if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
812 ysize, i_y ? i_y : y, c))
821 /* Treat an access through an AND (e.g. a subword access on an Alpha)
822 as an access with indeterminate size. Assume that references
823 besides AND are aligned, so if the size of the other reference is
824 at least as large as the alignment, assume no other overlap. */
825 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
/* The missing line here presumably negates ysize to mark it
   indeterminate when smaller than the alignment span.  */
827 if (ysize < -INTVAL (XEXP (x, 1)))
829 return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
831 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
833 /* ??? If we are indexing far enough into the array/structure, we
834 may yet be able to determine that we can not overlap. But we
835 also need to that we are far enough from the end not to overlap
836 a following reference, so we do nothing with that for now. */
837 if (xsize < -INTVAL (XEXP (y, 1)))
839 return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
/* Both addresses constant: overlap is decided purely by offsets.  */
844 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
846 c += (INTVAL (y) - INTVAL (x));
847 return (xsize <= 0 || ysize <= 0
848 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
851 if (GET_CODE (x) == CONST)
853 if (GET_CODE (y) == CONST)
854 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
855 ysize, canon_rtx (XEXP (y, 0)), c);
857 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
860 if (GET_CODE (y) == CONST)
861 return memrefs_conflict_p (xsize, x, ysize,
862 canon_rtx (XEXP (y, 0)), c);
/* Fallback: conflict unless both sizes are known non-negative and the
   addresses are provably equal with non-overlapping windows.  */
865 return (xsize < 0 || ysize < 0
866 || (rtx_equal_for_memref_p (x, y)
867 && (xsize == 0 || ysize == 0
868 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
875 /* Functions to compute memory dependencies.
877 Since we process the insns in execution order, we can build tables
878 to keep track of what registers are fixed (and not aliased), what registers
879 are varying in known ways, and what registers are varying in unknown
882 If both memory references are volatile, then there must always be a
883 dependence between the two references, since their order can not be
884 changed. A volatile and non-volatile reference can be interchanged
887 A MEM_IN_STRUCT reference at a non-QImode non-AND varying address can never
888 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We must
889 allow QImode aliasing because the ANSI C standard allows character
890 pointers to alias anything. We are assuming that characters are
891 always QImode here. We also must allow AND addresses, because they may
892 generate accesses outside the object being referenced. This is used to
893 generate aligned addresses from unaligned addresses, for instance, the
894 alpha storeqi_unaligned pattern. */
896 /* Read dependence: X is read after read in MEM takes place. There can
897 only be a dependence here if both reads are volatile. */
900 read_dependence (mem, x)
904 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
907 /* True dependence: X is read after store in MEM takes place. */
/* NOTE(review): lossy extraction -- return type, some declarations,
   braces and several return statements are missing.  Code kept
   verbatim.  MEM_MODE is the mode of MEM (VOIDmode means "use
   GET_MODE (mem)"); VARIES classifies an address as varying or fixed.  */
910 true_dependence (mem, mem_mode, x, varies)
912 enum machine_mode mem_mode;
914 int (*varies) PROTO((rtx));
916 register rtx x_addr, mem_addr;
/* Two volatile accesses always depend on each other.  */
918 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
921 if (DIFFERENT_ALIAS_SETS_P (x, mem))
924 /* If X is an unchanging read, then it can't possibly conflict with any
925 non-unchanging store. It may conflict with an unchanging write though,
926 because there may be a single store to this address to initialize it.
927 Just fall through to the code below to resolve the case where we have
928 both an unchanging read and an unchanging write. This won't handle all
929 cases optimally, but the possible performance loss should be
931 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
934 if (mem_mode == VOIDmode)
935 mem_mode = GET_MODE (mem);
937 if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0), GET_MODE (x), mem_mode))
940 x_addr = canon_rtx (XEXP (x, 0));
941 mem_addr = canon_rtx (XEXP (mem, 0));
943 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
944 SIZE_FOR_MODE (x), x_addr, 0))
947 /* If both references are struct references, or both are not, nothing
948 is known about aliasing.
950 If either reference is QImode or BLKmode, ANSI C permits aliasing.
952 If both addresses are constant, or both are not, nothing is known
954 if (MEM_IN_STRUCT_P (x) == MEM_IN_STRUCT_P (mem)
955 || mem_mode == QImode || mem_mode == BLKmode
956 || GET_MODE (x) == QImode || GET_MODE (x) == BLKmode
957 || GET_CODE (x_addr) == AND || GET_CODE (mem_addr) == AND
958 || varies (x_addr) == varies (mem_addr))
961 /* One memory reference is to a constant address, one is not.
962 One is to a structure, the other is not.
964 If either memory reference is a variable structure the other is a
965 fixed scalar and there is no aliasing. */
966 if ((MEM_IN_STRUCT_P (mem) && varies (mem_addr))
967 || (MEM_IN_STRUCT_P (x) && varies (x_addr)))
973 /* Anti dependence: X is written after read in MEM takes place. */
/* NOTE(review): lossy extraction -- return type, parameter declarations,
   braces and early returns are missing.  Code kept verbatim.  */
976 anti_dependence (mem, x)
980 rtx x_addr, mem_addr;
982 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
985 /* If MEM is an unchanging read, then it can't possibly conflict with
986 the store to X, because there is at most one store to MEM, and it must
987 have occurred somewhere before MEM. */
988 if (RTX_UNCHANGING_P (mem))
991 if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0), GET_MODE (x),
/* The GET_MODE (mem) argument and canon_rtx (x) presumably occupy the
   missing lines here -- TODO confirm.  */
996 mem = canon_rtx (mem);
998 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1001 x_addr = XEXP (x, 0);
1002 mem_addr = XEXP (mem, 0);
/* Conflict unless the struct/scalar + varying/fixed heuristic (see the
   block comment above true_dependence) proves independence.  */
1004 return (memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
1005 SIZE_FOR_MODE (x), x_addr, 0)
1006 && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem)
1007 && GET_MODE (mem) != QImode
1008 && GET_CODE (mem_addr) != AND
1009 && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x))
1010 && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x)
1011 && GET_MODE (x) != QImode
1012 && GET_CODE (x_addr) != AND
1013 && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem)));
1016 /* Output dependence: X is written after store in MEM takes place. */
/* NOTE(review): lossy extraction -- return type, parameter declarations,
   braces and early returns are missing.  Code kept verbatim.  Mirrors
   anti_dependence but without the RTX_UNCHANGING_P shortcut.  */
1019 output_dependence (mem, x)
1023 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
1026 if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0), GET_MODE (x),
1031 mem = canon_rtx (mem);
1033 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1036 return (memrefs_conflict_p (SIZE_FOR_MODE (mem), XEXP (mem, 0),
1037 SIZE_FOR_MODE (x), XEXP (x, 0), 0)
1038 && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem)
1039 && GET_MODE (mem) != QImode
1040 && GET_CODE (XEXP (mem, 0)) != AND
1041 && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x))
1042 && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x)
1043 && GET_MODE (x) != QImode
1044 && GET_CODE (XEXP (x, 0)) != AND
1045 && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem)));
/* Set of hard registers that may carry incoming pointer arguments.
   Computed once per compilation (the enclosing function header --
   presumably init_alias_once -- is missing from this extraction).  */
1049 static HARD_REG_SET argument_registers;
1056 #ifndef OUTGOING_REGNO
1057 #define OUTGOING_REGNO(N) N
/* The matching #endif is among the missing lines.  */
1059 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1060 /* Check whether this register can hold an incoming pointer
1061 argument. FUNCTION_ARG_REGNO_P tests outgoing register
1062 numbers, so translate if necessary due to register windows. */
1063 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
1064 && HARD_REGNO_MODE_OK (i, Pmode))
1065 SET_HARD_REG_BIT (argument_registers, i);
/* Set up all per-function alias analysis data: reg_known_value,
   reg_known_equiv_p, reg_base_value and (when loop unrolling)
   alias_invariant; then iterate over the insns propagating base
   values until a fixed point or MAX_ALIAS_LOOP_PASSES.
   NOTE(review): lossy extraction -- return type, declarations, braces
   and several statements are missing.  Code kept verbatim.  */
1069 init_alias_analysis ()
1071 int maxreg = max_reg_num ();
1074 register unsigned int ui;
1077 reg_known_value_size = maxreg;
/* reg_known_value / reg_known_equiv_p are biased by
   FIRST_PSEUDO_REGISTER so they can be indexed by pseudo number.  */
1080 = (rtx *) oballoc ((maxreg - FIRST_PSEUDO_REGISTER) * sizeof (rtx))
1081 - FIRST_PSEUDO_REGISTER;
1083 oballoc (maxreg - FIRST_PSEUDO_REGISTER) - FIRST_PSEUDO_REGISTER;
1084 bzero ((char *) (reg_known_value + FIRST_PSEUDO_REGISTER),
1085 (maxreg-FIRST_PSEUDO_REGISTER) * sizeof (rtx));
1086 bzero (reg_known_equiv_p + FIRST_PSEUDO_REGISTER,
1087 (maxreg - FIRST_PSEUDO_REGISTER) * sizeof (char));
1089 /* Overallocate reg_base_value to allow some growth during loop
1090 optimization. Loop unrolling can create a large number of
1092 reg_base_value_size = maxreg * 2;
1093 reg_base_value = (rtx *)oballoc (reg_base_value_size * sizeof (rtx));
1094 new_reg_base_value = (rtx *)alloca (reg_base_value_size * sizeof (rtx));
1095 reg_seen = (char *)alloca (reg_base_value_size);
1096 bzero ((char *) reg_base_value, reg_base_value_size * sizeof (rtx));
1097 if (! reload_completed && flag_unroll_loops)
1099 alias_invariant = (rtx *)xrealloc (alias_invariant,
1100 reg_base_value_size * sizeof (rtx));
1101 bzero ((char *)alias_invariant, reg_base_value_size * sizeof (rtx));
1105 /* The basic idea is that each pass through this loop will use the
1106 "constant" information from the previous pass to propagate alias
1107 information through another level of assignments.
1109 This could get expensive if the assignment chains are long. Maybe
1110 we should throttle the number of iterations, possibly based on
1111 the optimization level or flag_expensive_optimizations.
1113 We could propagate more information in the first pass by making use
1114 of REG_N_SETS to determine immediately that the alias information
1115 for a pseudo is "constant".
1117 A program with an uninitialized variable can cause an infinite loop
1118 here. Instead of doing a full dataflow analysis to detect such problems
1119 we just cap the number of iterations for the loop.
1121 The state of the arrays for the set chain in question does not matter
1122 since the program has undefined behavior. */
/* Fixed-point loop: do { ... } while (changed && ++pass < MAX).  The
   do/brace lines are among the missing lines.  */
1127 /* Assume nothing will change this iteration of the loop. */
1130 /* We want to assign the same IDs each iteration of this loop, so
1131 start counting from zero each iteration of the loop. */
1134 /* We're at the start of the funtion each iteration through the
1135 loop, so we're copying arguments. */
1136 copying_arguments = 1;
1138 /* Wipe the potential alias information clean for this pass. */
1139 bzero ((char *) new_reg_base_value, reg_base_value_size * sizeof (rtx));
1141 /* Wipe the reg_seen array clean. */
1142 bzero ((char *) reg_seen, reg_base_value_size);
1144 /* Mark all hard registers which may contain an address.
1145 The stack, frame and argument pointers may contain an address.
1146 An argument register which can hold a Pmode value may contain
1147 an address even if it is not in BASE_REGS.
1149 The address expression is VOIDmode for an argument and
1150 Pmode for other registers. */
1152 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1153 if (TEST_HARD_REG_BIT (argument_registers, i))
1154 new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
1155 gen_rtx_REG (Pmode, i));
1157 new_reg_base_value[STACK_POINTER_REGNUM]
1158 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
1159 new_reg_base_value[ARG_POINTER_REGNUM]
1160 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
1161 new_reg_base_value[FRAME_POINTER_REGNUM]
1162 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
1163 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1164 new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
1165 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
/* The matching #endif is among the missing lines.  */
1167 if (struct_value_incoming_rtx
1168 && GET_CODE (struct_value_incoming_rtx) == REG)
1169 new_reg_base_value[REGNO (struct_value_incoming_rtx)]
1170 = gen_rtx_ADDRESS (Pmode, struct_value_incoming_rtx);
1172 if (static_chain_rtx
1173 && GET_CODE (static_chain_rtx) == REG)
1174 new_reg_base_value[REGNO (static_chain_rtx)]
1175 = gen_rtx_ADDRESS (Pmode, static_chain_rtx);
1177 /* Walk the insns adding values to the new_reg_base_value array. */
1178 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1180 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1183 /* If this insn has a noalias note, process it, Otherwise,
1184 scan for sets. A simple set will have no side effects
1185 which could change the base value of any other register. */
1187 if (GET_CODE (PATTERN (insn)) == SET
1188 && (find_reg_note (insn, REG_NOALIAS, NULL_RTX)))
1189 record_set (SET_DEST (PATTERN (insn)), NULL_RTX);
1191 note_stores (PATTERN (insn), record_set);
1193 set = single_set (insn);
/* Record REG_EQUAL / REG_EQUIV knowledge for single-set pseudos.  */
1196 && GET_CODE (SET_DEST (set)) == REG
1197 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1198 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1199 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
1200 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1201 && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1203 int regno = REGNO (SET_DEST (set));
1204 reg_known_value[regno] = XEXP (note, 0);
1205 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
1208 else if (GET_CODE (insn) == NOTE
1209 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1210 copying_arguments = 0;
1213 /* Now propagate values from new_reg_base_value to reg_base_value. */
1214 for (ui = 0; ui < reg_base_value_size; ui++)
1216 if (new_reg_base_value[ui]
1217 && new_reg_base_value[ui] != reg_base_value[ui]
1218 && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
1220 reg_base_value[ui] = new_reg_base_value[ui];
/* changed = 1 presumably sits in the missing line here.  */
1225 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
1227 /* Fill in the remaining entries. */
1228 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
1229 if (reg_known_value[i] == 0)
1230 reg_known_value[i] = regno_reg_rtx[i];
1232 /* Simplify the reg_base_value array so that no register refers to
1233 another register, except to special registers indirectly through
1234 ADDRESS expressions.
1236 In theory this loop can take as long as O(registers^2), but unless
1237 there are very long dependency chains it will run in close to linear
1240 This loop may not be needed any longer now that the main loop does
1241 a better job at propagating alias information. */
1247 for (ui = 0; ui < reg_base_value_size; ui++)
1249 rtx base = reg_base_value[ui];
1250 if (base && GET_CODE (base) == REG)
1252 unsigned int base_regno = REGNO (base);
1253 if (base_regno == ui) /* register set from itself */
1254 reg_base_value[ui] = 0;
1256 reg_base_value[ui] = reg_base_value[base_regno];
1261 while (changed && pass < MAX_ALIAS_LOOP_PASSES);
/* new_reg_base_value/reg_seen point into alloca storage; clear them so
   nothing dangles after return.  */
1263 new_reg_base_value = 0;
1268 end_alias_analysis ()
1270 reg_known_value = 0;
1272 reg_base_value_size = 0;
1273 if (alias_invariant)
1275 free ((char *)alias_invariant);
1276 alias_invariant = 0;