1 /* Alias analysis for GNU C
2 Copyright (C) 1997 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
26 #include "hard-reg-set.h"
/* Forward declarations for the static helpers defined later in this file.
   NOTE(review): the memrefs_conflict_p prototype is cut off mid-declaration
   in this extract; its remaining parameter list is not visible here.  */
29 static rtx canon_rtx PROTO((rtx));
30 static int rtx_equal_for_memref_p PROTO((rtx, rtx));
31 static rtx find_symbolic_term PROTO((rtx));
32 static int memrefs_conflict_p PROTO((int, rtx, int, rtx,
35 /* Set up all info needed to perform alias analysis on memory references. */
/* SIZE_FOR_MODE: size in bytes of the access made by memory reference X,
   derived from X's machine mode. */
37 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
39 /* reg_base_value[N] gives an address to which register N is related.
40 If all sets after the first add or subtract to the current value
41 or otherwise modify it so it does not point to a different top level
42 object, reg_base_value[N] is equal to the address part of the source
45 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
46 expressions represent certain special values: function arguments and
47 the stack, frame, and argument pointers. The contents of an address
48 expression are not used (but they are descriptive for debugging);
49 only the address and mode matter. Pointer equality, not rtx_equal_p,
50 determines whether two ADDRESS expressions refer to the same base
51 address. The mode determines whether it is a function argument or
52 other special value. */
/* Scratch per-pass copy of reg_base_value, filled in while scanning insns
   in init_alias_analysis and then merged back into reg_base_value. */
55 rtx *new_reg_base_value;
56 unsigned int reg_base_value_size; /* size of reg_base_value array */
/* Safe accessor: the known base value for register X, or 0 when X's
   register number is outside the table. */
57 #define REG_BASE_VALUE(X) \
58 (REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
60 /* Vector indexed by N giving the initial (unchanging) value known
61 for pseudo-register N. */
64 /* Indicates number of valid entries in reg_known_value. */
65 static int reg_known_value_size;
67 /* Vector recording for each reg_known_value whether it is due to a
68 REG_EQUIV note. Future passes (viz., reload) may replace the
69 pseudo with the equivalent expression and so we account for the
70 dependences that would be introduced if that happens. */
71 /* ??? This is a problem only on the Convex. The REG_EQUIV notes created in
72 assign_parms mention the arg pointer, and there are explicit insns in the
73 RTL that modify the arg pointer. Thus we must ensure that such insns don't
74 get scheduled across each other because that would invalidate the REG_EQUIV
75 notes. One could argue that the REG_EQUIV notes are wrong, but solving
76 the problem in the scheduler will likely give better code, so we do it
78 char *reg_known_equiv_p;
80 /* True when scanning insns from the start of the rtl to the
81 NOTE_INSN_FUNCTION_BEG note. */
83 static int copying_arguments;
85 /* Inside SRC, the source of a SET, find a base address. */
/* NOTE(review): the find_base_value function header and several interior
   lines (braces, case labels) are not visible in this extract; the lines
   below are the surviving body.  It dispatches on the rtx code of SRC and
   returns a base-address rtx (ADDRESS/SYMBOL_REF/LABEL_REF) or falls
   through when none can be determined. */
91 switch (GET_CODE (src))
98 /* If this REG is related to a known base value, return it. */
99 if (reg_base_value[REGNO (src)])
100 return reg_base_value[REGNO (src)];
102 /* At the start of a function argument registers have known base
103 values which may be lost later. Returning an ADDRESS
104 expression here allows optimization based on argument values
105 even when the argument registers are used for other purposes. */
106 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
107 return new_reg_base_value[REGNO (src)];
111 /* Check for an argument passed in memory. Only record in the
112 copying-arguments block; it is too hard to track changes
114 if (copying_arguments
115 && (XEXP (src, 0) == arg_pointer_rtx
116 || (GET_CODE (XEXP (src, 0)) == PLUS
117 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
118 return gen_rtx (ADDRESS, VOIDmode, src);
123 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
130 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
132 /* If either operand is a REG, then see if we already have
133 a known value for it. */
134 if (GET_CODE (src_0) == REG)
136 temp = find_base_value (src_0);
141 if (GET_CODE (src_1) == REG)
143 temp = find_base_value (src_1);
148 /* Guess which operand is the base address.
150 If either operand is a symbol, then it is the base. If
151 either operand is a CONST_INT, then the other is the base. */
153 if (GET_CODE (src_1) == CONST_INT
154 || GET_CODE (src_0) == SYMBOL_REF
155 || GET_CODE (src_0) == LABEL_REF
156 || GET_CODE (src_0) == CONST)
157 return find_base_value (src_0);
159 if (GET_CODE (src_0) == CONST_INT
160 || GET_CODE (src_1) == SYMBOL_REF
161 || GET_CODE (src_1) == LABEL_REF
162 || GET_CODE (src_1) == CONST)
163 return find_base_value (src_1);
165 /* This might not be necessary anymore.
167 If either operand is a REG that is a known pointer, then it
169 if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
170 return find_base_value (src_0);
172 if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
173 return find_base_value (src_1);
179 /* The standard form is (lo_sum reg sym) so look only at the
181 return find_base_value (XEXP (src, 1));
184 /* If the second operand is constant set the base
185 address to the first operand. */
186 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
187 return find_base_value (XEXP (src, 0));
191 return find_base_value (XEXP (src, 0));
197 /* Called from init_alias_analysis indirectly through note_stores. */
199 /* while scanning insns to find base values, reg_seen[N] is nonzero if
200 register N has been set in this function. */
201 static char *reg_seen;
/* Counter used to manufacture distinct ADDRESS expressions for
   REG_NOALIAS destinations; reset at the start of each scanning pass. */
204 static int unique_id;
/* Record the base value implied by a store to DEST (SET is the SET or
   CLOBBER rtx, or 0 for a REG_NOALIAS set).  Updates new_reg_base_value.
   NOTE(review): the return type, parameter declarations, and some interior
   lines of this function are missing from this extract. */
207 record_set (dest, set)
213 if (GET_CODE (dest) != REG)
216 regno = REGNO (dest);
220 /* A CLOBBER wipes out any old value but does not prevent a previously
221 unset register from acquiring a base address (i.e. reg_seen is not
223 if (GET_CODE (set) == CLOBBER)
225 new_reg_base_value[regno] = 0;
234 new_reg_base_value[regno] = 0;
/* REG_NOALIAS case: give the register a brand-new, unique base address. */
238 new_reg_base_value[regno] = gen_rtx (ADDRESS, Pmode,
239 GEN_INT (unique_id++));
243 /* This is not the first set. If the new value is not related to the
244 old value, forget the base value. Note that the following code is
246 extern int x, y; int *p = &x; p += (&y-&x);
247 ANSI C does not allow computing the difference of addresses
248 of distinct top level objects. */
249 if (new_reg_base_value[regno])
250 switch (GET_CODE (src))
255 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
256 new_reg_base_value[regno] = 0;
259 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
260 new_reg_base_value[regno] = 0;
263 new_reg_base_value[regno] = 0;
266 /* If this is the first set of a register, record the value. */
267 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
268 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
269 new_reg_base_value[regno] = find_base_value (src);
274 /* Called from loop optimization when a new pseudo-register is created. */
/* Record a base value for pseudo REGNO, either copying the base of an
   existing register VAL or deriving one via find_base_value.  Does nothing
   when alias checking is off or REGNO is out of range. */
276 record_base_value (regno, val)
280 if (!flag_alias_check || regno >= reg_base_value_size)
282 if (GET_CODE (val) == REG)
284 if (REGNO (val) < reg_base_value_size)
285 reg_base_value[regno] = reg_base_value[REGNO (val)];
288 reg_base_value[regno] = find_base_value (val);
/* Body of canon_rtx: canonicalize an address X by substituting known
   register values and folding constant offsets, so that two references to
   the same location compare equal.
   NOTE(review): the canon_rtx function header is not visible in this
   extract; the lines below are its body. */
295 /* Recursively look for equivalences. */
296 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
297 && REGNO (x) < reg_known_value_size)
298 return reg_known_value[REGNO (x)] == x
299 ? x : canon_rtx (reg_known_value[REGNO (x)]);
300 else if (GET_CODE (x) == PLUS)
302 rtx x0 = canon_rtx (XEXP (x, 0));
303 rtx x1 = canon_rtx (XEXP (x, 1));
/* Only rebuild the PLUS if canonicalization changed an operand. */
305 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
307 /* We can tolerate LO_SUMs being offset here; these
308 rtl are used for nothing other than comparisons. */
309 if (GET_CODE (x0) == CONST_INT)
310 return plus_constant_for_output (x1, INTVAL (x0));
311 else if (GET_CODE (x1) == CONST_INT)
312 return plus_constant_for_output (x0, INTVAL (x1));
313 return gen_rtx (PLUS, GET_MODE (x), x0, x1);
316 /* This gives us much better alias analysis when called from
317 the loop optimizer. Note we want to leave the original
318 MEM alone, but need to return the canonicalized MEM with
319 all the flags with their original values. */
320 else if (GET_CODE (x) == MEM)
322 rtx addr = canon_rtx (XEXP (x, 0));
323 if (addr != XEXP (x, 0))
325 rtx new = gen_rtx (MEM, GET_MODE (x), addr);
326 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
327 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
328 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
335 /* Return 1 if X and Y are identical-looking rtx's.
337 We use the data in reg_known_value above to see if two registers with
338 different numbers are, in fact, equivalent. */
/* NOTE(review): some interior lines of this function (parameter
   declarations, early-out returns) are missing from this extract. */
341 rtx_equal_for_memref_p (x, y)
346 register enum rtx_code code;
349 if (x == 0 && y == 0)
351 if (x == 0 || y == 0)
360 /* Rtx's of different codes cannot be equal. */
361 if (code != GET_CODE (y))
364 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
365 (REG:SI x) and (REG:HI x) are NOT equivalent. */
367 if (GET_MODE (x) != GET_MODE (y))
370 /* REG, LABEL_REF, and SYMBOL_REF can be compared nonrecursively. */
373 return REGNO (x) == REGNO (y);
374 if (code == LABEL_REF)
375 return XEXP (x, 0) == XEXP (y, 0);
376 if (code == SYMBOL_REF)
377 return XSTR (x, 0) == XSTR (y, 0);
379 /* For commutative operations, the RTXs match if the operands match in any
380 order. Also handle the simple binary and unary cases without a loop. */
381 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
382 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
383 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
384 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
385 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
386 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
387 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
388 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
389 else if (GET_RTX_CLASS (code) == '1')
390 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
392 /* Compare the elements. If any pair of corresponding elements
393 fail to match, return 0 for the whole thing. */
395 fmt = GET_RTX_FORMAT (code);
396 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
401 if (XWINT (x, i) != XWINT (y, i))
407 if (XINT (x, i) != XINT (y, i))
413 /* Two vectors must have the same length. */
414 if (XVECLEN (x, i) != XVECLEN (y, i))
417 /* And the corresponding elements must match. */
418 for (j = 0; j < XVECLEN (x, i); j++)
419 if (rtx_equal_for_memref_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
424 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
430 if (strcmp (XSTR (x, i), XSTR (y, i)))
435 /* These are just backpointers, so they don't matter. */
441 /* It is believed that rtx's at this level will never
442 contain anything but integers and other rtx's,
443 except for within LABEL_REFs and SYMBOL_REFs. */
451 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
452 X and return it, or return 0 if none found. */
/* NOTE(review): the return type and some interior lines of this function
   are missing from this extract. */
455 find_symbolic_term (x)
459 register enum rtx_code code;
/* A symbol or label is itself the answer. */
463 if (code == SYMBOL_REF || code == LABEL_REF)
/* Any other object rtx ('o' class) cannot contain a symbolic term. */
465 if (GET_RTX_CLASS (code) == 'o')
/* Otherwise recurse over the operands, scanning back to front. */
468 fmt = GET_RTX_FORMAT (code);
469 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
475 t = find_symbolic_term (XEXP (x, i));
479 else if (fmt[i] == 'E')
/* Body of find_base_term: find the base term (ADDRESS, SYMBOL_REF, ...)
   underlying address X, looking through registers and PLUS/MINUS forms.
   NOTE(review): the find_base_term function header and the case labels of
   this switch are not visible in this extract. */
489 switch (GET_CODE (x))
492 return REG_BASE_VALUE (x);
495 return find_base_term (XEXP (x, 0));
501 return find_base_term (XEXP (x, 0));
505 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
512 rtx tmp = find_base_term (XEXP (x, 0));
/* No base found in operand 0; try operand 1. */
515 return find_base_term (XEXP (x, 1));
/* A (reg + const_int) address has the register's base. */
519 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
520 return REG_BASE_VALUE (XEXP (x, 0));
532 /* Return 0 if the addresses X and Y are known to point to different
533 objects, 1 if they might be pointers to the same object. */
/* NOTE(review): the parameter declarations and several returns of this
   function are missing from this extract. */
536 base_alias_check (x, y)
539 rtx x_base = find_base_term (x);
540 rtx y_base = find_base_term (y);
542 /* If either base address is unknown or the base addresses are equal,
543 nothing is known about aliasing. */
545 if (x_base == 0 || y_base == 0 || rtx_equal_p (x_base, y_base))
548 /* The base addresses of the read and write are different
549 expressions. If they are both symbols and they are not accessed
550 via AND, there is no conflict. */
551 /* XXX: We can bring knowledge of object alignment and offset into
552 play here. For example, on alpha, "char a, b;" can alias one
553 another, though "char a; long b;" cannot. Similarly, offsets
554 into structures may be brought into play. Given "char a, b[40];",
555 a and b[1] may overlap, but a and b[20] do not. */
556 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
558 return GET_CODE (x) == AND || GET_CODE (y) == AND;
561 /* If one address is a stack reference there can be no alias:
562 stack references using different base registers do not alias,
563 a stack reference can not alias a parameter, and a stack reference
564 can not alias a global. */
565 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
566 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
/* Without -fargument-noalias, assume arguments can alias anything. */
569 if (! flag_argument_noalias)
572 if (flag_argument_noalias > 1)
575 /* Weak noalias assertion (arguments are distinct, but may match globals). */
576 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode)
579 /* Return nonzero if X and Y (memory addresses) could reference the
580 same location in memory. C is an offset accumulator. When
581 C is nonzero, we are testing aliases between X and Y + C.
582 XSIZE is the size in bytes of the X reference,
583 similarly YSIZE is the size in bytes for Y.
585 If XSIZE or YSIZE is zero, we do not know the amount of memory being
586 referenced (the reference was BLKmode), so make the most pessimistic
589 If XSIZE or YSIZE is negative, we may access memory outside the object
590 being referenced as a side effect. This can happen when using AND to
591 align memory references, as is done on the Alpha.
593 We recognize the following cases of non-conflicting memory:
595 (1) addresses involving the frame pointer cannot conflict
596 with addresses involving static variables.
597 (2) static variables with different addresses cannot conflict.
599 Nice to notice that varying addresses cannot conflict with fp if no
600 local variables had their addresses taken, but that's too hard now. */
/* NOTE(review): the return type, parameter declarations, and many interior
   lines (braces, else branches) of this function are missing from this
   extract; the lines below are the surviving body. */
604 memrefs_conflict_p (xsize, x, ysize, y, c)
/* Strip HIGH/LO_SUM wrappers from both addresses before comparing. */
609 if (GET_CODE (x) == HIGH)
611 else if (GET_CODE (x) == LO_SUM)
615 if (GET_CODE (y) == HIGH)
617 else if (GET_CODE (y) == LO_SUM)
/* Identical addresses: conflict iff the accessed ranges overlap. */
622 if (rtx_equal_for_memref_p (x, y))
624 if (xsize <= 0 || ysize <= 0)
626 if (c >= 0 && xsize > c)
628 if (c < 0 && ysize+c > 0)
/* Normalize so that a stack/frame/arg pointer, if any, ends up in X. */
633 if (y == frame_pointer_rtx || y == hard_frame_pointer_rtx
634 || y == stack_pointer_rtx || y == arg_pointer_rtx)
638 y = x; ysize = xsize;
639 x = t; xsize = tsize;
642 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
643 || x == stack_pointer_rtx || x == arg_pointer_rtx)
/* Y is (X + const): fold the constant into the offset accumulator. */
650 if (GET_CODE (y) == PLUS
651 && canon_rtx (XEXP (y, 0)) == x
652 && (y1 = canon_rtx (XEXP (y, 1)))
653 && GET_CODE (y1) == CONST_INT)
656 return (xsize <= 0 || ysize <= 0
657 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
660 if (GET_CODE (y) == PLUS
661 && (y1 = canon_rtx (XEXP (y, 0)))
668 if (GET_CODE (x) == PLUS)
670 /* The fact that X is canonicalized means that this
671 PLUS rtx is canonicalized. */
672 rtx x0 = XEXP (x, 0);
673 rtx x1 = XEXP (x, 1);
675 if (GET_CODE (y) == PLUS)
677 /* The fact that Y is canonicalized means that this
678 PLUS rtx is canonicalized. */
679 rtx y0 = XEXP (y, 0);
680 rtx y1 = XEXP (y, 1);
/* Cancel equal operands on both sides, recursing on the remainder. */
682 if (rtx_equal_for_memref_p (x1, y1))
683 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
684 if (rtx_equal_for_memref_p (x0, y0))
685 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
686 if (GET_CODE (x1) == CONST_INT)
687 if (GET_CODE (y1) == CONST_INT)
688 return memrefs_conflict_p (xsize, x0, ysize, y0,
689 c - INTVAL (x1) + INTVAL (y1));
691 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
692 else if (GET_CODE (y1) == CONST_INT)
693 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
695 /* Handle case where we cannot understand iteration operators,
696 but we notice that the base addresses are distinct objects. */
697 /* ??? Is this still necessary? */
698 x = find_symbolic_term (x);
701 y = find_symbolic_term (y);
704 return rtx_equal_for_memref_p (x, y);
706 else if (GET_CODE (x1) == CONST_INT)
707 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
709 else if (GET_CODE (y) == PLUS)
711 /* The fact that Y is canonicalized means that this
712 PLUS rtx is canonicalized. */
713 rtx y0 = XEXP (y, 0);
714 rtx y1 = XEXP (y, 1);
716 if (GET_CODE (y1) == CONST_INT)
717 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
/* Same top-level code on both sides: handle a few forms specially. */
722 if (GET_CODE (x) == GET_CODE (y))
723 switch (GET_CODE (x))
727 /* Handle cases where we expect the second operands to be the
728 same, and check only whether the first operand would conflict
731 rtx x1 = canon_rtx (XEXP (x, 1));
732 rtx y1 = canon_rtx (XEXP (y, 1));
733 if (! rtx_equal_for_memref_p (x1, y1))
735 x0 = canon_rtx (XEXP (x, 0));
736 y0 = canon_rtx (XEXP (y, 0));
737 if (rtx_equal_for_memref_p (x0, y0))
738 return (xsize == 0 || ysize == 0
739 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
741 /* Can't properly adjust our sizes. */
742 if (GET_CODE (x1) != CONST_INT)
744 xsize /= INTVAL (x1);
745 ysize /= INTVAL (x1);
747 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
751 /* Treat an access through an AND (e.g. a subword access on an Alpha)
752 as an access with indeterminate size. */
753 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
754 return memrefs_conflict_p (-1, XEXP (x, 0), ysize, y, c);
755 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
757 /* XXX: If we are indexing far enough into the array/structure, we
758 may yet be able to determine that we can not overlap. But we
759 also need to know that we are far enough from the end not to overlap
760 a following reference, so we do nothing for now. */
761 return memrefs_conflict_p (xsize, x, -1, XEXP (y, 0), c);
/* Two constant addresses: overlap is a pure range check. */
766 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
768 c += (INTVAL (y) - INTVAL (x));
769 return (xsize <= 0 || ysize <= 0
770 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
/* Look through CONST wrappers on either side. */
773 if (GET_CODE (x) == CONST)
775 if (GET_CODE (y) == CONST)
776 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
777 ysize, canon_rtx (XEXP (y, 0)), c);
779 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
782 if (GET_CODE (y) == CONST)
783 return memrefs_conflict_p (xsize, x, ysize,
784 canon_rtx (XEXP (y, 0)), c);
/* Fallback: conservatively report a conflict unless equality plus the
   range check proves otherwise (negative sizes always conflict). */
787 return (xsize < 0 || ysize < 0
788 || (rtx_equal_for_memref_p (x, y)
789 && (xsize == 0 || ysize == 0
790 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
797 /* Functions to compute memory dependencies.
799 Since we process the insns in execution order, we can build tables
800 to keep track of what registers are fixed (and not aliased), what registers
801 are varying in known ways, and what registers are varying in unknown
804 If both memory references are volatile, then there must always be a
805 dependence between the two references, since their order can not be
806 changed. A volatile and non-volatile reference can be interchanged
809 A MEM_IN_STRUCT reference at a non-QImode non-AND varying address can never
810 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We must
811 allow QImode aliasing because the ANSI C standard allows character
812 pointers to alias anything. We are assuming that characters are
813 always QImode here. We also must allow AND addresses, because they may
814 generate accesses outside the object being referenced. This is used to
815 generate aligned addresses from unaligned addresses, for instance, the
816 alpha storeqi_unaligned pattern. */
818 /* Read dependence: X is read after read in MEM takes place. There can
819 only be a dependence here if both reads are volatile. */
/* NOTE(review): the return type and parameter declarations of this
   function are missing from this extract. */
822 read_dependence (mem, x)
826 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
829 /* True dependence: X is read after store in MEM takes place. */
/* NOTE(review): the return type, some parameter declarations, and the
   individual return statements of this function are missing from this
   extract; VARIES is a predicate applied to an address rtx. */
832 true_dependence (mem, mem_mode, x, varies)
834 enum machine_mode mem_mode;
838 rtx x_addr, mem_addr;
/* Two volatile references always depend on each other. */
840 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
843 x_addr = XEXP (x, 0);
844 mem_addr = XEXP (mem, 0);
/* Distinct base objects cannot alias. */
846 if (flag_alias_check && ! base_alias_check (x_addr, mem_addr))
849 /* If X is an unchanging read, then it can't possibly conflict with any
850 non-unchanging store. It may conflict with an unchanging write though,
851 because there may be a single store to this address to initialize it.
852 Just fall through to the code below to resolve the case where we have
853 both an unchanging read and an unchanging write. This won't handle all
854 cases optimally, but the possible performance loss should be
856 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
859 x_addr = canon_rtx (x_addr);
860 mem_addr = canon_rtx (mem_addr);
861 if (mem_mode == VOIDmode)
862 mem_mode = GET_MODE (mem);
864 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
865 SIZE_FOR_MODE (x), x_addr, 0))
868 /* If both references are struct references, or both are not, nothing
869 is known about aliasing.
871 If either reference is QImode or BLKmode, ANSI C permits aliasing.
873 If both addresses are constant, or both are not, nothing is known
875 if (MEM_IN_STRUCT_P (x) == MEM_IN_STRUCT_P (mem)
876 || mem_mode == QImode || mem_mode == BLKmode
877 || GET_MODE (x) == QImode || GET_MODE (x) == BLKmode
878 || GET_CODE (x_addr) == AND || GET_CODE (mem_addr) == AND
879 || varies (x_addr) == varies (mem_addr))
882 /* One memory reference is to a constant address, one is not.
883 One is to a structure, the other is not.
885 If either memory reference is a variable structure the other is a
886 fixed scalar and there is no aliasing. */
887 if ((MEM_IN_STRUCT_P (mem) && varies (mem_addr))
888 || (MEM_IN_STRUCT_P (x) && varies (x_addr)))
894 /* Anti dependence: X is written after read in MEM takes place. */
/* NOTE(review): the return type, parameter declarations, and some return
   statements of this function are missing from this extract. */
897 anti_dependence (mem, x)
/* Two volatile references always depend on each other. */
901 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
/* Distinct base objects cannot alias. */
904 if (flag_alias_check && ! base_alias_check (XEXP (x, 0), XEXP (mem, 0)))
907 /* If MEM is an unchanging read, then it can't possibly conflict with
908 the store to X, because there is at most one store to MEM, and it must
909 have occurred somewhere before MEM. */
911 mem = canon_rtx (mem);
912 if (RTX_UNCHANGING_P (mem))
/* Conflict unless one reference is a varying struct access and the other
   a fixed scalar (see the MEM_IN_STRUCT commentary above). */
915 return (memrefs_conflict_p (SIZE_FOR_MODE (mem), XEXP (mem, 0),
916 SIZE_FOR_MODE (x), XEXP (x, 0), 0)
917 && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem)
918 && GET_MODE (mem) != QImode
919 && GET_CODE (XEXP (mem, 0)) != AND
920 && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x))
921 && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x)
922 && GET_MODE (x) != QImode
923 && GET_CODE (XEXP (x, 0)) != AND
924 && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem)));
927 /* Output dependence: X is written after store in MEM takes place. */
/* NOTE(review): the return type, parameter declarations, and some return
   statements of this function are missing from this extract.  The logic
   mirrors anti_dependence above minus the RTX_UNCHANGING_P shortcut. */
930 output_dependence (mem, x)
/* Two volatile references always depend on each other. */
934 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
/* Distinct base objects cannot alias. */
937 if (flag_alias_check && !base_alias_check (XEXP (x, 0), XEXP (mem, 0)))
941 mem = canon_rtx (mem);
942 return (memrefs_conflict_p (SIZE_FOR_MODE (mem), XEXP (mem, 0),
943 SIZE_FOR_MODE (x), XEXP (x, 0), 0)
944 && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem)
945 && GET_MODE (mem) != QImode
946 && GET_CODE (XEXP (mem, 0)) != AND
947 && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x))
948 && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x)
949 && GET_MODE (x) != QImode
950 && GET_CODE (XEXP (x, 0)) != AND
951 && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem)));
/* Initialize the alias-analysis tables (reg_known_value, reg_base_value,
   reg_known_equiv_p) by scanning the function's insns, possibly over
   several passes so register base values propagate through copies.
   NOTE(review): the return type and many interior lines (declarations,
   braces, the outer changed/pass loop) are missing from this extract. */
955 init_alias_analysis ()
957 int maxreg = max_reg_num ();
964 reg_known_value_size = maxreg;
/* Offset the arrays so they are indexed directly by pseudo number,
   starting at FIRST_PSEUDO_REGISTER. */
967 = (rtx *) oballoc ((maxreg - FIRST_PSEUDO_REGISTER) * sizeof (rtx))
968 - FIRST_PSEUDO_REGISTER;
970 oballoc (maxreg - FIRST_PSEUDO_REGISTER) - FIRST_PSEUDO_REGISTER;
971 bzero ((char *) (reg_known_value + FIRST_PSEUDO_REGISTER),
972 (maxreg-FIRST_PSEUDO_REGISTER) * sizeof (rtx));
973 bzero (reg_known_equiv_p + FIRST_PSEUDO_REGISTER,
974 (maxreg - FIRST_PSEUDO_REGISTER) * sizeof (char));
976 if (flag_alias_check)
978 /* Overallocate reg_base_value to allow some growth during loop
979 optimization. Loop unrolling can create a large number of
981 reg_base_value_size = maxreg * 2;
982 reg_base_value = (rtx *)oballoc (reg_base_value_size * sizeof (rtx));
983 new_reg_base_value = (rtx *)alloca (reg_base_value_size * sizeof (rtx));
984 reg_seen = (char *)alloca (reg_base_value_size);
985 bzero ((char *) reg_base_value, reg_base_value_size * sizeof (rtx));
988 /* The basic idea is that each pass through this loop will use the
989 "constant" information from the previous pass to propagate alias
990 information through another level of assignments.
992 This could get expensive if the assignment chains are long. Maybe
993 we should throttle the number of iterations, possibly based on
994 the optimization level.
996 We could propagate more information in the first pass by making use
997 of REG_N_SETS to determine immediately that the alias information
998 for a pseudo is "constant". */
1002 /* Assume nothing will change this iteration of the loop. */
1005 /* We want to assign the same IDs each iteration of this loop, so
1006 start counting from zero each iteration of the loop. */
1009 /* We're at the start of the function each iteration through the
1010 loop, so we're copying arguments. */
1011 copying_arguments = 1;
1013 /* Only perform initialization of the arrays if we're actually
1014 performing alias analysis. */
1015 if (flag_alias_check)
1017 /* Wipe the potential alias information clean for this pass. */
1018 bzero ((char *) new_reg_base_value,
1019 reg_base_value_size * sizeof (rtx));
1021 /* Wipe the reg_seen array clean. */
1022 bzero ((char *) reg_seen, reg_base_value_size);
1024 /* Mark all hard registers which may contain an address.
1025 The stack, frame and argument pointers may contain an address.
1026 An argument register which can hold a Pmode value may contain
1027 an address even if it is not in BASE_REGS.
1029 The address expression is VOIDmode for an argument and
1030 Pmode for other registers. */
1031 #ifndef OUTGOING_REGNO
1032 #define OUTGOING_REGNO(N) N
1034 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1035 /* Check whether this register can hold an incoming pointer
1036 argument. FUNCTION_ARG_REGNO_P tests outgoing register
1037 numbers, so translate if necessary due to register windows. */
1038 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
1039 && HARD_REGNO_MODE_OK (i, Pmode))
1040 new_reg_base_value[i] = gen_rtx (ADDRESS, VOIDmode,
1041 gen_rtx (REG, Pmode, i));
1043 new_reg_base_value[STACK_POINTER_REGNUM]
1044 = gen_rtx (ADDRESS, Pmode, stack_pointer_rtx);
1045 new_reg_base_value[ARG_POINTER_REGNUM]
1046 = gen_rtx (ADDRESS, Pmode, arg_pointer_rtx);
1047 new_reg_base_value[FRAME_POINTER_REGNUM]
1048 = gen_rtx (ADDRESS, Pmode, frame_pointer_rtx);
1049 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1050 new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
1051 = gen_rtx (ADDRESS, Pmode, hard_frame_pointer_rtx);
1053 if (struct_value_incoming_rtx
1054 && GET_CODE (struct_value_incoming_rtx) == REG)
1055 new_reg_base_value[REGNO (struct_value_incoming_rtx)]
1056 = gen_rtx (ADDRESS, Pmode, struct_value_incoming_rtx);
1058 if (static_chain_rtx
1059 && GET_CODE (static_chain_rtx) == REG)
1060 new_reg_base_value[REGNO (static_chain_rtx)]
1061 = gen_rtx (ADDRESS, Pmode, static_chain_rtx);
1064 /* Walk the insns adding values to the new_reg_base_value array. */
1065 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1067 if (flag_alias_check && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1069 /* If this insn has a noalias note, process it. Otherwise,
1070 scan for sets. A simple set will have no side effects
1071 which could change the base value of any other register. */
1073 if (GET_CODE (PATTERN (insn)) == SET
1074 && (noalias_note = find_reg_note (insn,
1075 REG_NOALIAS, NULL_RTX)))
1076 record_set (SET_DEST (PATTERN (insn)), 0);
1078 note_stores (PATTERN (insn), record_set);
1080 else if (GET_CODE (insn) == NOTE
1081 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1082 copying_arguments = 0;
/* Record REG_EQUAL/REG_EQUIV note values for single-set pseudos. */
1084 if ((set = single_set (insn)) != 0
1085 && GET_CODE (SET_DEST (set)) == REG
1086 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1087 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1088 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
1089 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1090 && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1092 int regno = REGNO (SET_DEST (set));
1093 reg_known_value[regno] = XEXP (note, 0);
1094 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
1098 /* Now propagate values from new_reg_base_value to reg_base_value. */
1099 if (flag_alias_check)
1100 for (i = 0; i < reg_base_value_size; i++)
1102 if (new_reg_base_value[i]
1103 && new_reg_base_value[i] != reg_base_value[i]
1104 && !rtx_equal_p (new_reg_base_value[i], reg_base_value[i])
1106 reg_base_value[i] = new_reg_base_value[i];
1112 /* Fill in the remaining entries. */
1113 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
1114 if (reg_known_value[i] == 0)
1115 reg_known_value[i] = regno_reg_rtx[i];
1117 if (! flag_alias_check)
1120 /* Simplify the reg_base_value array so that no register refers to
1121 another register, except to special registers indirectly through
1122 ADDRESS expressions.
1124 In theory this loop can take as long as O(registers^2), but unless
1125 there are very long dependency chains it will run in close to linear
1130 for (i = 0; i < reg_base_value_size; i++)
1132 rtx base = reg_base_value[i];
1133 if (base && GET_CODE (base) == REG)
1135 int base_regno = REGNO (base);
1136 if (base_regno == i) /* register set from itself */
1137 reg_base_value[i] = 0;
1139 reg_base_value[i] = reg_base_value[base_regno];
/* new_reg_base_value was alloca'd; drop the dangling pointer. */
1146 new_reg_base_value = 0;
/* Release the alias-analysis tables; the obstack storage itself is
   reclaimed elsewhere, so only the pointers/sizes are cleared here.
   NOTE(review): the return type and some lines of this function are
   missing from this extract. */
1151 end_alias_analysis ()
1153 reg_known_value = 0;
1155 reg_base_value_size = 0;