1 /* Alias analysis for GNU C
2 Copyright (C) 1997 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
26 #include "hard-reg-set.h"
/* NOTE(review): this listing is elided -- interior original lines are
   missing throughout; all code tokens below are kept byte-identical.  */
/* Forward declarations for the static helpers defined later in this file.  */
29 static rtx canon_rtx PROTO((rtx));
30 static int rtx_equal_for_memref_p PROTO((rtx, rtx));
31 static rtx find_symbolic_term PROTO((rtx));
32 static int memrefs_conflict_p PROTO((int, rtx, int, rtx,
35 /* Set up all info needed to perform alias analysis on memory references. */
/* Size in bytes of the access implied by rtx X's machine mode.  */
37 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
39 /* reg_base_value[N] gives an address to which register N is related.
40 If all sets after the first add or subtract to the current value
41 or otherwise modify it so it does not point to a different top level
42 object, reg_base_value[N] is equal to the address part of the source
45 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
46 expressions represent certain special values: function arguments and
47 the stack, frame, and argument pointers. The contents of an address
48 expression are not used (but they are descriptive for debugging);
49 only the address and mode matter. Pointer equality, not rtx_equal_p,
50 determines whether two ADDRESS expressions refer to the same base
51 address. The mode determines whether it is a function argument or
52 other special value. */
/* Scratch copy of reg_base_value built during each pass of
   init_alias_analysis, then merged back in.  */
55 rtx *new_reg_base_value;
56 unsigned int reg_base_value_size; /* size of reg_base_value array */
/* Safe accessor: out-of-range register numbers yield 0 (no known base).  */
57 #define REG_BASE_VALUE(X) \
58 (REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
60 /* Vector indexed by N giving the initial (unchanging) value known
61 for pseudo-register N. */
64 /* Indicates number of valid entries in reg_known_value. */
65 static int reg_known_value_size;
67 /* Vector recording for each reg_known_value whether it is due to a
68 REG_EQUIV note. Future passes (viz., reload) may replace the
69 pseudo with the equivalent expression and so we account for the
70 dependences that would be introduced if that happens. */
71 /* ??? This is a problem only on the Convex. The REG_EQUIV notes created in
72 assign_parms mention the arg pointer, and there are explicit insns in the
73 RTL that modify the arg pointer. Thus we must ensure that such insns don't
74 get scheduled across each other because that would invalidate the REG_EQUIV
75 notes. One could argue that the REG_EQUIV notes are wrong, but solving
76 the problem in the scheduler will likely give better code, so we do it
78 char *reg_known_equiv_p;
80 /* True when scanning insns from the start of the rtl to the
81 NOTE_INSN_FUNCTION_BEG note. */
83 static int copying_arguments;
85 /* Inside SRC, the source of a SET, find a base address. */
/* NOTE(review): elided listing -- the function header, case labels and
   some braces are missing from this view; code kept byte-identical.
   The visible dispatch is on GET_CODE (src).  */
91 switch (GET_CODE (src))
98 /* At the start of a function argument registers have known base
99 values which may be lost later. Returning an ADDRESS
100 expression here allows optimization based on argument values
101 even when the argument registers are used for other purposes. */
102 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
103 return new_reg_base_value[REGNO (src)];
105 /* If this REG is related to a known base value, return it.
106 This must happen after the arg register check above to avoid
107 circular set chains. */
108 if (reg_base_value[REGNO (src)])
109 return reg_base_value[REGNO (src)];
114 /* Check for an argument passed in memory. Only record in the
115 copying-arguments block; it is too hard to track changes
117 if (copying_arguments
118 && (XEXP (src, 0) == arg_pointer_rtx
119 || (GET_CODE (XEXP (src, 0)) == PLUS
120 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
121 return gen_rtx (ADDRESS, VOIDmode, src);
126 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
133 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
135 /* If either operand is a REG, then see if we already have
136 a known value for it. */
137 if (GET_CODE (src_0) == REG)
139 temp = find_base_value (src_0);
144 if (GET_CODE (src_1) == REG)
146 temp = find_base_value (src_1);
151 /* Guess which operand is the base address.
153 If either operand is a symbol, then it is the base. If
154 either operand is a CONST_INT, then the other is the base. */
156 if (GET_CODE (src_1) == CONST_INT
157 || GET_CODE (src_0) == SYMBOL_REF
158 || GET_CODE (src_0) == LABEL_REF
159 || GET_CODE (src_0) == CONST)
160 return find_base_value (src_0);
162 if (GET_CODE (src_0) == CONST_INT
163 || GET_CODE (src_1) == SYMBOL_REF
164 || GET_CODE (src_1) == LABEL_REF
165 || GET_CODE (src_1) == CONST)
166 return find_base_value (src_1);
168 /* This might not be necessary anymore.
170 If either operand is a REG that is a known pointer, then it
172 if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
173 return find_base_value (src_0);
175 if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
176 return find_base_value (src_1);
182 /* The standard form is (lo_sum reg sym) so look only at the
/* LO_SUM: recurse on the second (symbolic) operand.  */
184 return find_base_value (XEXP (src, 1));
187 /* If the second operand is constant set the base
188 address to the first operand. */
189 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
190 return find_base_value (XEXP (src, 0));
194 return find_base_value (XEXP (src, 0));
200 /* Called from init_alias_analysis indirectly through note_stores. */
202 /* while scanning insns to find base values, reg_seen[N] is nonzero if
203 register N has been set in this function. */
204 static char *reg_seen;
/* Counter used to manufacture distinct ADDRESS expressions for
   REG_NOALIAS destinations; reset each pass (see init_alias_analysis).  */
207 static int unique_id;
/* Record in new_reg_base_value the base-address effect of one store.
   NOTE(review): elided listing -- declarations, braces and some case
   labels are missing from this view; code kept byte-identical.  */
210 record_set (dest, set)
/* Only REG destinations carry base-value information.  */
216 if (GET_CODE (dest) != REG)
219 regno = REGNO (dest);
223 /* A CLOBBER wipes out any old value but does not prevent a previously
224 unset register from acquiring a base address (i.e. reg_seen is not
226 if (GET_CODE (set) == CLOBBER)
228 new_reg_base_value[regno] = 0;
237 new_reg_base_value[regno] = 0;
/* REG_NOALIAS case: give the register a brand-new unique base.  */
241 new_reg_base_value[regno] = gen_rtx (ADDRESS, Pmode,
242 GEN_INT (unique_id++));
246 /* This is not the first set. If the new value is not related to the
247 old value, forget the base value. Note that the following code is
249 extern int x, y; int *p = &x; p += (&y-&x);
250 ANSI C does not allow computing the difference of addresses
251 of distinct top level objects. */
252 if (new_reg_base_value[regno])
253 switch (GET_CODE (src))
258 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
259 new_reg_base_value[regno] = 0;
262 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
263 new_reg_base_value[regno] = 0;
266 new_reg_base_value[regno] = 0;
269 /* If this is the first set of a register, record the value. */
270 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
271 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
272 new_reg_base_value[regno] = find_base_value (src);
277 /* Called from loop optimization when a new pseudo-register is created. */
/* Record a base value for pseudo REGNO, derived from VAL.
   NOTE(review): elided listing -- parameter declarations and braces are
   missing from this view; code kept byte-identical.  */
279 record_base_value (regno, val)
/* Silently ignore registers outside the (overallocated) table.  */
283 if (!flag_alias_check || regno >= reg_base_value_size)
285 if (GET_CODE (val) == REG)
287 if (REGNO (val) < reg_base_value_size)
288 reg_base_value[regno] = reg_base_value[REGNO (val)];
291 reg_base_value[regno] = find_base_value (val);
/* NOTE(review): this is the interior of canon_rtx; its header line is
   elided from this view.  Canonicalizes X by substituting known register
   values and folding constant PLUS operands.  Code kept byte-identical.  */
298 /* Recursively look for equivalences. */
299 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
300 && REGNO (x) < reg_known_value_size)
301 return reg_known_value[REGNO (x)] == x
302 ? x : canon_rtx (reg_known_value[REGNO (x)]);
303 else if (GET_CODE (x) == PLUS)
305 rtx x0 = canon_rtx (XEXP (x, 0));
306 rtx x1 = canon_rtx (XEXP (x, 1));
/* Rebuild the PLUS only if canonicalization changed an operand.  */
308 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
310 /* We can tolerate LO_SUMs being offset here; these
311 rtl are used for nothing other than comparisons. */
312 if (GET_CODE (x0) == CONST_INT)
313 return plus_constant_for_output (x1, INTVAL (x0));
314 else if (GET_CODE (x1) == CONST_INT)
315 return plus_constant_for_output (x0, INTVAL (x1));
316 return gen_rtx (PLUS, GET_MODE (x), x0, x1);
319 /* This gives us much better alias analysis when called from
320 the loop optimizer. Note we want to leave the original
321 MEM alone, but need to return the canonicalized MEM with
322 all the flags with their original values. */
323 else if (GET_CODE (x) == MEM)
325 rtx addr = canon_rtx (XEXP (x, 0));
326 if (addr != XEXP (x, 0))
328 rtx new = gen_rtx (MEM, GET_MODE (x), addr);
/* Copy the MEM's flag bits onto the fresh rtx.  */
329 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
330 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
331 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
338 /* Return 1 if X and Y are identical-looking rtx's.
340 We use the data in reg_known_value above to see if two registers with
341 different numbers are, in fact, equivalent. */
/* NOTE(review): elided listing -- parameter declarations, braces and
   some early returns are missing from this view; code kept byte-identical.  */
344 rtx_equal_for_memref_p (x, y)
349 register enum rtx_code code;
/* Two null rtx's are equal; one null and one non-null are not.  */
352 if (x == 0 && y == 0)
354 if (x == 0 || y == 0)
363 /* Rtx's of different codes cannot be equal. */
364 if (code != GET_CODE (y))
367 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
368 (REG:SI x) and (REG:HI x) are NOT equivalent. */
370 if (GET_MODE (x) != GET_MODE (y))
373 /* REG, LABEL_REF, and SYMBOL_REF can be compared nonrecursively. */
376 return REGNO (x) == REGNO (y);
377 if (code == LABEL_REF)
378 return XEXP (x, 0) == XEXP (y, 0);
379 if (code == SYMBOL_REF)
380 return XSTR (x, 0) == XSTR (y, 0);
382 /* For commutative operations, the RTXs match if the operands match in any
383 order. Also handle the simple binary and unary cases without a loop. */
384 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
385 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
386 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
387 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
388 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
389 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
390 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
391 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
392 else if (GET_RTX_CLASS (code) == '1')
393 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
395 /* Compare the elements. If any pair of corresponding elements
396 fail to match, return 0 for the whole thing. */
398 fmt = GET_RTX_FORMAT (code);
399 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
404 if (XWINT (x, i) != XWINT (y, i))
410 if (XINT (x, i) != XINT (y, i))
416 /* Two vectors must have the same length. */
417 if (XVECLEN (x, i) != XVECLEN (y, i))
420 /* And the corresponding elements must match. */
421 for (j = 0; j < XVECLEN (x, i); j++)
422 if (rtx_equal_for_memref_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
427 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
433 if (strcmp (XSTR (x, i), XSTR (y, i)))
438 /* These are just backpointers, so they don't matter. */
444 /* It is believed that rtx's at this level will never
445 contain anything but integers and other rtx's,
446 except for within LABEL_REFs and SYMBOL_REFs. */
454 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
455 X and return it, or return 0 if none found. */
/* NOTE(review): elided listing -- parameter declaration, braces and some
   returns are missing from this view; code kept byte-identical.  */
458 find_symbolic_term (x)
462 register enum rtx_code code;
466 if (code == SYMBOL_REF || code == LABEL_REF)
/* Any other "object" class rtx cannot contain a symbolic term.  */
468 if (GET_RTX_CLASS (code) == 'o')
471 fmt = GET_RTX_FORMAT (code);
472 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
478 t = find_symbolic_term (XEXP (x, i));
482 else if (fmt[i] == 'E')
/* NOTE(review): this is the interior of find_base_term; its header line
   is elided from this view.  Walks X looking for a base address term
   (see REG_BASE_VALUE above).  Code kept byte-identical.  */
492 switch (GET_CODE (x))
495 return REG_BASE_VALUE (x);
498 return find_base_term (XEXP (x, 0));
504 return find_base_term (XEXP (x, 0));
508 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
515 rtx tmp = find_base_term (XEXP (x, 0));
518 return find_base_term (XEXP (x, 1));
/* (plus (reg) (const_int)) -- the register supplies the base.  */
522 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
523 return REG_BASE_VALUE (XEXP (x, 0));
535 /* Return 0 if the addresses X and Y are known to point to different
536 objects, 1 if they might be pointers to the same object. */
/* NOTE(review): elided listing -- parameter declarations, braces and
   some returns are missing from this view; code kept byte-identical.  */
539 base_alias_check (x, y)
542 rtx x_base = find_base_term (x);
543 rtx y_base = find_base_term (y);
545 /* If either base address is unknown or the base addresses are equal,
546 nothing is known about aliasing. */
548 if (x_base == 0 || y_base == 0 || rtx_equal_p (x_base, y_base))
551 /* The base addresses of the read and write are different
552 expressions. If they are both symbols and they are not accessed
553 via AND, there is no conflict. */
554 /* XXX: We can bring knowledge of object alignment and offset into
555 play here. For example, on alpha, "char a, b;" can alias one
556 another, though "char a; long b;" cannot. Similarly, offsets
557 into structures may be brought into play. Given "char a, b[40];",
558 a and b[1] may overlap, but a and b[20] do not. */
559 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
/* AND-masked addresses can reach outside the named object.  */
561 return GET_CODE (x) == AND || GET_CODE (y) == AND;
564 /* If one address is a stack reference there can be no alias:
565 stack references using different base registers do not alias,
566 a stack reference can not alias a parameter, and a stack reference
567 can not alias a global. */
568 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
569 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
572 if (! flag_argument_noalias)
575 if (flag_argument_noalias > 1)
578 /* Weak noalias assertion (arguments are distinct, but may match globals). */
579 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
582 /* Return nonzero if X and Y (memory addresses) could reference the
583 same location in memory. C is an offset accumulator. When
584 C is nonzero, we are testing aliases between X and Y + C.
585 XSIZE is the size in bytes of the X reference,
586 similarly YSIZE is the size in bytes for Y.
588 If XSIZE or YSIZE is zero, we do not know the amount of memory being
589 referenced (the reference was BLKmode), so make the most pessimistic
592 If XSIZE or YSIZE is negative, we may access memory outside the object
593 being referenced as a side effect. This can happen when using AND to
594 align memory references, as is done on the Alpha.
596 We recognize the following cases of non-conflicting memory:
598 (1) addresses involving the frame pointer cannot conflict
599 with addresses involving static variables.
600 (2) static variables with different addresses cannot conflict.
602 Nice to notice that varying addresses cannot conflict with fp if no
603 local variables had their addresses taken, but that's too hard now. */
/* NOTE(review): elided listing -- parameter declarations, braces, case
   labels and some returns are missing from this view; code kept
   byte-identical.  */
607 memrefs_conflict_p (xsize, x, ysize, y, c)
/* Strip HIGH/LO_SUM wrappers so we compare the underlying addresses.  */
612 if (GET_CODE (x) == HIGH)
614 else if (GET_CODE (x) == LO_SUM)
618 if (GET_CODE (y) == HIGH)
620 else if (GET_CODE (y) == LO_SUM)
/* Identical addresses conflict unless the sizes/offset rule them apart.  */
625 if (rtx_equal_for_memref_p (x, y))
627 if (xsize <= 0 || ysize <= 0)
629 if (c >= 0 && xsize > c)
631 if (c < 0 && ysize+c > 0)
/* Normalize so a special pointer, if present, ends up in X.  */
636 if (y == frame_pointer_rtx || y == hard_frame_pointer_rtx
637 || y == stack_pointer_rtx || y == arg_pointer_rtx)
641 y = x; ysize = xsize;
642 x = t; xsize = tsize;
645 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
646 || x == stack_pointer_rtx || x == arg_pointer_rtx)
/* Y is X plus a constant: fold the constant into the offset C.  */
653 if (GET_CODE (y) == PLUS
654 && canon_rtx (XEXP (y, 0)) == x
655 && (y1 = canon_rtx (XEXP (y, 1)))
656 && GET_CODE (y1) == CONST_INT)
659 return (xsize <= 0 || ysize <= 0
660 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
663 if (GET_CODE (y) == PLUS
664 && (y1 = canon_rtx (XEXP (y, 0)))
671 if (GET_CODE (x) == PLUS)
673 /* The fact that X is canonicalized means that this
674 PLUS rtx is canonicalized. */
675 rtx x0 = XEXP (x, 0);
676 rtx x1 = XEXP (x, 1);
678 if (GET_CODE (y) == PLUS)
680 /* The fact that Y is canonicalized means that this
681 PLUS rtx is canonicalized. */
682 rtx y0 = XEXP (y, 0);
683 rtx y1 = XEXP (y, 1);
685 if (rtx_equal_for_memref_p (x1, y1))
686 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
687 if (rtx_equal_for_memref_p (x0, y0))
688 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
689 if (GET_CODE (x1) == CONST_INT)
690 if (GET_CODE (y1) == CONST_INT)
691 return memrefs_conflict_p (xsize, x0, ysize, y0,
692 c - INTVAL (x1) + INTVAL (y1));
694 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
695 else if (GET_CODE (y1) == CONST_INT)
696 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
698 /* Handle case where we cannot understand iteration operators,
699 but we notice that the base addresses are distinct objects. */
700 /* ??? Is this still necessary? */
701 x = find_symbolic_term (x);
704 y = find_symbolic_term (y);
707 return rtx_equal_for_memref_p (x, y);
709 else if (GET_CODE (x1) == CONST_INT)
710 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
712 else if (GET_CODE (y) == PLUS)
714 /* The fact that Y is canonicalized means that this
715 PLUS rtx is canonicalized. */
716 rtx y0 = XEXP (y, 0);
717 rtx y1 = XEXP (y, 1);
719 if (GET_CODE (y1) == CONST_INT)
720 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
725 if (GET_CODE (x) == GET_CODE (y))
726 switch (GET_CODE (x))
730 /* Handle cases where we expect the second operands to be the
731 same, and check only whether the first operand would conflict
734 rtx x1 = canon_rtx (XEXP (x, 1));
735 rtx y1 = canon_rtx (XEXP (y, 1));
736 if (! rtx_equal_for_memref_p (x1, y1))
738 x0 = canon_rtx (XEXP (x, 0));
739 y0 = canon_rtx (XEXP (y, 0));
740 if (rtx_equal_for_memref_p (x0, y0))
741 return (xsize == 0 || ysize == 0
742 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
744 /* Can't properly adjust our sizes. */
745 if (GET_CODE (x1) != CONST_INT)
/* Scale the sizes down by the common multiplier before recursing.  */
747 xsize /= INTVAL (x1);
748 ysize /= INTVAL (x1);
750 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
754 /* Treat an access through an AND (e.g. a subword access on an Alpha)
755 as an access with indeterminate size. */
756 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
757 return memrefs_conflict_p (-1, XEXP (x, 0), ysize, y, c);
758 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
760 /* XXX: If we are indexing far enough into the array/structure, we
761 may yet be able to determine that we can not overlap. But we
762 also need to know that we are far enough from the end not to overlap
763 a following reference, so we do nothing for now. */
764 return memrefs_conflict_p (xsize, x, -1, XEXP (y, 0), c);
/* Both constant addresses: conflict iff the ranges overlap.  */
769 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
771 c += (INTVAL (y) - INTVAL (x));
772 return (xsize <= 0 || ysize <= 0
773 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
/* Unwrap CONST on either side and retry.  */
776 if (GET_CODE (x) == CONST)
778 if (GET_CODE (y) == CONST)
779 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
780 ysize, canon_rtx (XEXP (y, 0)), c);
782 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
785 if (GET_CODE (y) == CONST)
786 return memrefs_conflict_p (xsize, x, ysize,
787 canon_rtx (XEXP (y, 0)), c);
790 return (xsize < 0 || ysize < 0
791 || (rtx_equal_for_memref_p (x, y)
792 && (xsize == 0 || ysize == 0
793 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
800 /* Functions to compute memory dependencies.
802 Since we process the insns in execution order, we can build tables
803 to keep track of what registers are fixed (and not aliased), what registers
804 are varying in known ways, and what registers are varying in unknown
807 If both memory references are volatile, then there must always be a
808 dependence between the two references, since their order can not be
809 changed. A volatile and non-volatile reference can be interchanged
812 A MEM_IN_STRUCT reference at a non-QImode non-AND varying address can never
813 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We must
814 allow QImode aliasing because the ANSI C standard allows character
815 pointers to alias anything. We are assuming that characters are
816 always QImode here. We also must allow AND addresses, because they may
817 generate accesses outside the object being referenced. This is used to
818 generate aligned addresses from unaligned addresses, for instance, the
819 alpha storeqi_unaligned pattern. */
821 /* Read dependence: X is read after read in MEM takes place. There can
822 only be a dependence here if both reads are volatile. */
/* NOTE(review): elided listing -- parameter declarations and braces are
   missing from this view; code kept byte-identical.  */
825 read_dependence (mem, x)
829 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
832 /* True dependence: X is read after store in MEM takes place. */
/* NOTE(review): elided listing -- some declarations, braces and returns
   are missing from this view; code kept byte-identical.  VARIES is a
   predicate applied to addresses (see uses below).  */
835 true_dependence (mem, mem_mode, x, varies)
837 enum machine_mode mem_mode;
841 rtx x_addr, mem_addr;
/* Two volatile references always depend on one another.  */
843 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
846 x_addr = XEXP (x, 0);
847 mem_addr = XEXP (mem, 0);
/* Distinct base objects cannot alias.  */
849 if (flag_alias_check && ! base_alias_check (x_addr, mem_addr))
852 /* If X is an unchanging read, then it can't possibly conflict with any
853 non-unchanging store. It may conflict with an unchanging write though,
854 because there may be a single store to this address to initialize it.
855 Just fall through to the code below to resolve the case where we have
856 both an unchanging read and an unchanging write. This won't handle all
857 cases optimally, but the possible performance loss should be
859 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
862 x_addr = canon_rtx (x_addr);
863 mem_addr = canon_rtx (mem_addr);
864 if (mem_mode == VOIDmode)
865 mem_mode = GET_MODE (mem);
867 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
868 SIZE_FOR_MODE (x), x_addr, 0))
871 /* If both references are struct references, or both are not, nothing
872 is known about aliasing.
874 If either reference is QImode or BLKmode, ANSI C permits aliasing.
876 If both addresses are constant, or both are not, nothing is known
878 if (MEM_IN_STRUCT_P (x) == MEM_IN_STRUCT_P (mem)
879 || mem_mode == QImode || mem_mode == BLKmode
880 || GET_MODE (x) == QImode || GET_MODE (x) == BLKmode
881 || GET_CODE (x_addr) == AND || GET_CODE (mem_addr) == AND
882 || varies (x_addr) == varies (mem_addr))
885 /* One memory reference is to a constant address, one is not.
886 One is to a structure, the other is not.
888 If either memory reference is a variable structure the other is a
889 fixed scalar and there is no aliasing. */
890 if ((MEM_IN_STRUCT_P (mem) && varies (mem_addr))
891 || (MEM_IN_STRUCT_P (x) && varies (x_addr)))
897 /* Anti dependence: X is written after read in MEM takes place. */
/* NOTE(review): elided listing -- parameter declarations, braces and some
   returns are missing from this view; code kept byte-identical.  */
900 anti_dependence (mem, x)
904 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
907 if (flag_alias_check && ! base_alias_check (XEXP (x, 0), XEXP (mem, 0)))
910 /* If MEM is an unchanging read, then it can't possibly conflict with
911 the store to X, because there is at most one store to MEM, and it must
912 have occurred somewhere before MEM. */
914 mem = canon_rtx (mem);
915 if (RTX_UNCHANGING_P (mem))
/* Conflict unless the MEM_IN_STRUCT/varying-address exceptions described
   at the top of this section apply (in either direction).  */
918 return (memrefs_conflict_p (SIZE_FOR_MODE (mem), XEXP (mem, 0),
919 SIZE_FOR_MODE (x), XEXP (x, 0), 0)
920 && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem)
921 && GET_MODE (mem) != QImode
922 && GET_CODE (XEXP (mem, 0)) != AND
923 && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x))
924 && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x)
925 && GET_MODE (x) != QImode
926 && GET_CODE (XEXP (x, 0)) != AND
927 && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem)));
930 /* Output dependence: X is written after store in MEM takes place. */
/* NOTE(review): elided listing -- parameter declarations, braces and some
   returns are missing from this view; code kept byte-identical.  The
   final return mirrors the one in anti_dependence.  */
933 output_dependence (mem, x)
937 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
940 if (flag_alias_check && !base_alias_check (XEXP (x, 0), XEXP (mem, 0)))
944 mem = canon_rtx (mem);
945 return (memrefs_conflict_p (SIZE_FOR_MODE (mem), XEXP (mem, 0),
946 SIZE_FOR_MODE (x), XEXP (x, 0), 0)
947 && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem)
948 && GET_MODE (mem) != QImode
949 && GET_CODE (XEXP (mem, 0)) != AND
950 && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x))
951 && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x)
952 && GET_MODE (x) != QImode
953 && GET_CODE (XEXP (x, 0)) != AND
954 && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem)));
/* Set up the tables used by the dependence-testing entry points above:
   reg_known_value/reg_known_equiv_p, and (when flag_alias_check)
   reg_base_value, built by iterating record_set over all insns.
   NOTE(review): elided listing -- declarations, braces, #endif lines and
   the outer pass loop header are missing from this view; code kept
   byte-identical.  */
958 init_alias_analysis ()
960 int maxreg = max_reg_num ();
967 reg_known_value_size = maxreg;
/* The reg_known_* arrays are biased by FIRST_PSEUDO_REGISTER so they
   are indexed directly by pseudo-register number.  */
970 = (rtx *) oballoc ((maxreg - FIRST_PSEUDO_REGISTER) * sizeof (rtx))
971 - FIRST_PSEUDO_REGISTER;
973 oballoc (maxreg - FIRST_PSEUDO_REGISTER) - FIRST_PSEUDO_REGISTER;
974 bzero ((char *) (reg_known_value + FIRST_PSEUDO_REGISTER),
975 (maxreg-FIRST_PSEUDO_REGISTER) * sizeof (rtx));
976 bzero (reg_known_equiv_p + FIRST_PSEUDO_REGISTER,
977 (maxreg - FIRST_PSEUDO_REGISTER) * sizeof (char));
979 if (flag_alias_check)
981 /* Overallocate reg_base_value to allow some growth during loop
982 optimization. Loop unrolling can create a large number of
984 reg_base_value_size = maxreg * 2;
985 reg_base_value = (rtx *)oballoc (reg_base_value_size * sizeof (rtx));
986 new_reg_base_value = (rtx *)alloca (reg_base_value_size * sizeof (rtx));
987 reg_seen = (char *)alloca (reg_base_value_size);
988 bzero ((char *) reg_base_value, reg_base_value_size * sizeof (rtx));
991 /* The basic idea is that each pass through this loop will use the
992 "constant" information from the previous pass to propagate alias
993 information through another level of assignments.
995 This could get expensive if the assignment chains are long. Maybe
996 we should throttle the number of iterations, possibly based on
997 the optimization level.
999 We could propagate more information in the first pass by making use
1000 of REG_N_SETS to determine immediately that the alias information
1001 for a pseudo is "constant". */
1005 /* Assume nothing will change this iteration of the loop. */
1008 /* We want to assign the same IDs each iteration of this loop, so
1009 start counting from zero each iteration of the loop. */
1012 /* We're at the start of the function each iteration through the
1013 loop, so we're copying arguments. */
1014 copying_arguments = 1;
1016 /* Only perform initialization of the arrays if we're actually
1017 performing alias analysis. */
1018 if (flag_alias_check)
1020 /* Wipe the potential alias information clean for this pass. */
1021 bzero ((char *) new_reg_base_value,
1022 reg_base_value_size * sizeof (rtx));
1024 /* Wipe the reg_seen array clean. */
1025 bzero ((char *) reg_seen, reg_base_value_size);
1027 /* Mark all hard registers which may contain an address.
1028 The stack, frame and argument pointers may contain an address.
1029 An argument register which can hold a Pmode value may contain
1030 an address even if it is not in BASE_REGS.
1032 The address expression is VOIDmode for an argument and
1033 Pmode for other registers. */
1034 #ifndef OUTGOING_REGNO
1035 #define OUTGOING_REGNO(N) N
1037 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1038 /* Check whether this register can hold an incoming pointer
1039 argument. FUNCTION_ARG_REGNO_P tests outgoing register
1040 numbers, so translate if necessary due to register windows. */
1041 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
1042 && HARD_REGNO_MODE_OK (i, Pmode))
1043 new_reg_base_value[i] = gen_rtx (ADDRESS, VOIDmode,
1044 gen_rtx (REG, Pmode, i));
1046 new_reg_base_value[STACK_POINTER_REGNUM]
1047 = gen_rtx (ADDRESS, Pmode, stack_pointer_rtx);
1048 new_reg_base_value[ARG_POINTER_REGNUM]
1049 = gen_rtx (ADDRESS, Pmode, arg_pointer_rtx);
1050 new_reg_base_value[FRAME_POINTER_REGNUM]
1051 = gen_rtx (ADDRESS, Pmode, frame_pointer_rtx);
1052 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1053 new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
1054 = gen_rtx (ADDRESS, Pmode, hard_frame_pointer_rtx);
1056 if (struct_value_incoming_rtx
1057 && GET_CODE (struct_value_incoming_rtx) == REG)
1058 new_reg_base_value[REGNO (struct_value_incoming_rtx)]
1059 = gen_rtx (ADDRESS, Pmode, struct_value_incoming_rtx);
1061 if (static_chain_rtx
1062 && GET_CODE (static_chain_rtx) == REG)
1063 new_reg_base_value[REGNO (static_chain_rtx)]
1064 = gen_rtx (ADDRESS, Pmode, static_chain_rtx);
1067 /* Walk the insns adding values to the new_reg_base_value array. */
1068 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1070 if (flag_alias_check && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1072 /* If this insn has a noalias note, process it.  Otherwise,
1073 scan for sets. A simple set will have no side effects
1074 which could change the base value of any other register. */
1076 if (GET_CODE (PATTERN (insn)) == SET
1077 && (noalias_note = find_reg_note (insn,
1078 REG_NOALIAS, NULL_RTX)))
1079 record_set (SET_DEST (PATTERN (insn)), 0);
1081 note_stores (PATTERN (insn), record_set);
1083 else if (GET_CODE (insn) == NOTE
1084 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1085 copying_arguments = 0;
/* Record REG_EQUAL/REG_EQUIV equivalences for single-set pseudos.  */
1087 if ((set = single_set (insn)) != 0
1088 && GET_CODE (SET_DEST (set)) == REG
1089 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1090 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1091 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
1092 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1093 && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1095 int regno = REGNO (SET_DEST (set));
1096 reg_known_value[regno] = XEXP (note, 0);
1097 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
1101 /* Now propagate values from new_reg_base_value to reg_base_value. */
1102 if (flag_alias_check)
1103 for (i = 0; i < reg_base_value_size; i++)
1105 if (new_reg_base_value[i]
1106 && new_reg_base_value[i] != reg_base_value[i]
1107 && !rtx_equal_p (new_reg_base_value[i], reg_base_value[i]))
1109 reg_base_value[i] = new_reg_base_value[i];
1115 /* Fill in the remaining entries. */
1116 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
1117 if (reg_known_value[i] == 0)
1118 reg_known_value[i] = regno_reg_rtx[i];
1120 if (! flag_alias_check)
1123 /* Simplify the reg_base_value array so that no register refers to
1124 another register, except to special registers indirectly through
1125 ADDRESS expressions.
1127 In theory this loop can take as long as O(registers^2), but unless
1128 there are very long dependency chains it will run in close to linear
1133 for (i = 0; i < reg_base_value_size; i++)
1135 rtx base = reg_base_value[i];
1136 if (base && GET_CODE (base) == REG)
1138 int base_regno = REGNO (base);
1139 if (base_regno == i) /* register set from itself */
1140 reg_base_value[i] = 0;
1142 reg_base_value[i] = reg_base_value[base_regno];
/* new_reg_base_value was alloca'd; clear the dangling pointer.  */
1149 new_reg_base_value = 0;
/* Tear down the alias-analysis tables set up by init_alias_analysis.
   NOTE(review): elided listing -- braces and any further resets are
   missing from this view; code kept byte-identical.  */
1154 end_alias_analysis ()
1156 reg_known_value = 0;
1158 reg_base_value_size = 0;