1 /* Alias analysis for GNU C
2 Copyright (C) 1997 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
26 #include "hard-reg-set.h"
29 static rtx canon_rtx PROTO((rtx));
30 static int rtx_equal_for_memref_p PROTO((rtx, rtx));
31 static rtx find_symbolic_term PROTO((rtx));
32 static int memrefs_conflict_p PROTO((int, rtx, int, rtx,
35 /* Set up all info needed to perform alias analysis on memory references. */
37 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
39 /* reg_base_value[N] gives an address to which register N is related.
40 If all sets after the first add or subtract to the current value
41 or otherwise modify it so it does not point to a different top level
42 object, reg_base_value[N] is equal to the address part of the source
45 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
46 expressions represent certain special values: function arguments and
47 the stack, frame, and argument pointers. The contents of an address
48 expression are not used (but they are descriptive for debugging);
49 only the address and mode matter. Pointer equality, not rtx_equal_p,
50 determines whether two ADDRESS expressions refer to the same base
51 address. The mode determines whether it is a function argument or
52 other special value. */
55 unsigned int reg_base_value_size; /* size of reg_base_value array */
56 #define REG_BASE_VALUE(X) \
57 (REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
59 /* Vector indexed by N giving the initial (unchanging) value known
60 for pseudo-register N. */
63 /* Indicates number of valid entries in reg_known_value. */
64 static int reg_known_value_size;
66 /* Vector recording for each reg_known_value whether it is due to a
67 REG_EQUIV note. Future passes (viz., reload) may replace the
68 pseudo with the equivalent expression and so we account for the
69 dependences that would be introduced if that happens. */
70 /* ??? This is a problem only on the Convex. The REG_EQUIV notes created in
71 assign_parms mention the arg pointer, and there are explicit insns in the
72 RTL that modify the arg pointer. Thus we must ensure that such insns don't
73 get scheduled across each other because that would invalidate the REG_EQUIV
74 notes. One could argue that the REG_EQUIV notes are wrong, but solving
75 the problem in the scheduler will likely give better code, so we do it
77 char *reg_known_equiv_p;
79 /* True when scanning insns from the start of the rtl to the
80 NOTE_INSN_FUNCTION_BEG note. */
82 static int copying_arguments;
84 /* Inside SRC, the source of a SET, find a base address. */
/* NOTE(review): this listing elides lines (the function header, the
   switch's case labels, and closing braces are missing).  Code below is
   kept verbatim; comments only are added.  The visible logic walks SRC
   and returns a base-address rtx (ADDRESS/SYMBOL_REF/LABEL_REF) or
   recurses on a sub-expression.  */
90 switch (GET_CODE (src))
97 /* At the start of a function argument registers have known base
98 values which may be lost later. Returning an ADDRESS
99 expression here allows optimization based on argument values
100 even when the argument registers are used for other purposes. */
101 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
102 return reg_base_value[REGNO (src)];
106 /* Check for an argument passed in memory. Only record in the
107 copying-arguments block; it is too hard to track changes
109 if (copying_arguments
110 && (XEXP (src, 0) == arg_pointer_rtx
111 || (GET_CODE (XEXP (src, 0)) == PLUS
112 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
113 return gen_rtx (ADDRESS, VOIDmode, src);
/* Presumably the CONST case unwraps to the inner PLUS/MINUS here —
   elided lines; TODO confirm against the full source.  */
118 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
125 rtx src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
127 /* Guess which operand is the base address.
129 If the first operand is a symbol or the second operand is
130 an integer, the first operand is the base address. Else if
131 either operand is a register marked as a pointer, it is the
134 if (GET_CODE (src_1) == CONST_INT
135 || GET_CODE (src_0) == SYMBOL_REF
136 || GET_CODE (src_0) == LABEL_REF
137 || GET_CODE (src_0) == CONST)
138 return find_base_value (src_0);
140 if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
141 return find_base_value (src_0);
143 if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
144 return find_base_value (src_1);
150 /* The standard form is (lo_sum reg sym) so look only at the
152 return find_base_value (XEXP (src, 1));
155 /* If the second operand is constant set the base
156 address to the first operand. */
157 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
158 return find_base_value (XEXP (src, 0));
162 return find_base_value (XEXP (src, 0));
168 /* Called from init_alias_analysis indirectly through note_stores. */
170 /* while scanning insns to find base values, reg_seen[N] is nonzero if
171 register N has been set in this function. */
172 static char *reg_seen;
/* Record the effect of a SET (or CLOBBER) of DEST on reg_base_value.
   NOTE(review): lines are elided in this listing (parameter decls,
   the extraction of SRC from SET, case labels of the switch, and the
   final reg_seen[regno] update are missing); code kept verbatim.  */
175 record_set (dest, set)
181 if (GET_CODE (dest) != REG)
184 regno = REGNO (dest);
188 /* A CLOBBER wipes out any old value but does not prevent a previously
189 unset register from acquiring a base address (i.e. reg_seen is not
191 if (GET_CODE (set) == CLOBBER)
193 reg_base_value[regno] = 0;
/* unique_id generates a distinct ADDRESS for each REG_NOALIAS-style
   set, so two such destinations never compare equal by pointer.  */
200 static int unique_id;
203 reg_base_value[regno] = 0;
207 reg_base_value[regno] = gen_rtx (ADDRESS, Pmode,
208 GEN_INT (unique_id++));
212 /* This is not the first set. If the new value is not related to the
213 old value, forget the base value. Note that the following code is
215 extern int x, y; int *p = &x; p += (&y-&x);
216 ANSI C does not allow computing the difference of addresses
217 of distinct top level objects. */
218 if (reg_base_value[regno])
219 switch (GET_CODE (src))
/* PLUS/MINUS cases (labels elided): keep the base only if the old
   value participates in the arithmetic.  */
224 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
225 reg_base_value[regno] = 0;
228 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
229 reg_base_value[regno] = 0;
232 reg_base_value[regno] = 0;
235 /* If this is the first set of a register, record the value. */
236 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
237 && ! reg_seen[regno] && reg_base_value[regno] == 0)
238 reg_base_value[regno] = find_base_value (src);
243 /* Called from loop optimization when a new pseudo-register is created. */
/* Record a base value for pseudo REGNO derived from VAL.  If VAL is
   itself a register, copy its base value; otherwise compute one.
   NOTE(review): function header/braces are elided in this listing.  */
245 record_base_value (regno, val)
249 if (!flag_alias_check || regno >= reg_base_value_size)
251 if (GET_CODE (val) == REG)
253 if (REGNO (val) < reg_base_value_size)
254 reg_base_value[regno] = reg_base_value[REGNO (val)];
257 reg_base_value[regno] = find_base_value (val);
/* canon_rtx: canonicalize X by substituting known register values and
   folding constant offsets in PLUS expressions.
   NOTE(review): the function header and closing lines are elided in this
   listing; code kept verbatim, comments only added.  */
264 /* Recursively look for equivalences. */
265 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
266 && REGNO (x) < reg_known_value_size)
267 return reg_known_value[REGNO (x)] == x
268 ? x : canon_rtx (reg_known_value[REGNO (x)]);
269 else if (GET_CODE (x) == PLUS)
271 rtx x0 = canon_rtx (XEXP (x, 0));
272 rtx x1 = canon_rtx (XEXP (x, 1));
/* Rebuild the PLUS only if canonicalization changed an operand.  */
274 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
276 /* We can tolerate LO_SUMs being offset here; these
277 rtl are used for nothing other than comparisons. */
278 if (GET_CODE (x0) == CONST_INT
279 return plus_constant_for_output (x1, INTVAL (x0));
281 else if (GET_CODE (x1) == CONST_INT)
281 return plus_constant_for_output (x0, INTVAL (x1));
282 return gen_rtx (PLUS, GET_MODE (x), x0, x1);
285 /* This gives us much better alias analysis when called from
286 the loop optimizer. Note we want to leave the original
287 MEM alone, but need to return the canonicalized MEM with
288 all the flags with their original values. */
289 else if (GET_CODE (x) == MEM)
291 rtx addr = canon_rtx (XEXP (x, 0));
292 if (addr != XEXP (x, 0))
294 rtx new = gen_rtx (MEM, GET_MODE (x), addr);
295 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
296 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
297 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
304 /* Return 1 if X and Y are identical-looking rtx's.
306 We use the data in reg_known_value above to see if two registers with
307 different numbers are, in fact, equivalent. */
/* NOTE(review): parameter declarations, some early-out returns, the
   format-letter case labels, and the final return are elided in this
   listing; code kept verbatim.  */
310 rtx_equal_for_memref_p (x, y)
315 register enum rtx_code code;
318 if (x == 0 && y == 0)
320 if (x == 0 || y == 0)
329 /* Rtx's of different codes cannot be equal. */
330 if (code != GET_CODE (y))
333 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
334 (REG:SI x) and (REG:HI x) are NOT equivalent. */
336 if (GET_MODE (x) != GET_MODE (y))
339 /* REG, LABEL_REF, and SYMBOL_REF can be compared nonrecursively. */
342 return REGNO (x) == REGNO (y);
343 if (code == LABEL_REF)
344 return XEXP (x, 0) == XEXP (y, 0);
345 if (code == SYMBOL_REF)
346 return XSTR (x, 0) == XSTR (y, 0);
348 /* For commutative operations, the RTX matches if the operands match in any
349 order. Also handle the simple binary and unary cases without a loop. */
350 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
351 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
352 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
353 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
354 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
355 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
356 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
357 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
358 else if (GET_RTX_CLASS (code) == '1')
359 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
361 /* Compare the elements. If any pair of corresponding elements
362 fail to match, return 0 for the whole things. */
364 fmt = GET_RTX_FORMAT (code);
365 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* Per-format-letter comparisons; the switch/case labels are elided.  */
370 if (XWINT (x, i) != XWINT (y, i))
376 if (XINT (x, i) != XINT (y, i))
382 /* Two vectors must have the same length. */
383 if (XVECLEN (x, i) != XVECLEN (y, i))
386 /* And the corresponding elements must match. */
387 for (j = 0; j < XVECLEN (x, i); j++)
388 if (rtx_equal_for_memref_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
393 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
399 if (strcmp (XSTR (x, i), XSTR (y, i)))
404 /* These are just backpointers, so they don't matter. */
410 /* It is believed that rtx's at this level will never
411 contain anything but integers and other rtx's,
412 except for within LABEL_REFs and SYMBOL_REFs. */
420 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
421 X and return it, or return 0 if none found. */
/* NOTE(review): body lines are elided in this listing (returns inside
   the loop and the vector case are missing); code kept verbatim.  */
424 find_symbolic_term (x)
428 register enum rtx_code code;
432 if (code == SYMBOL_REF || code == LABEL_REF)
434 if (GET_RTX_CLASS (code) == 'o')
437 fmt = GET_RTX_FORMAT (code);
438 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
444 t = find_symbolic_term (XEXP (x, i));
448 else if (fmt[i] == 'E')
/* find_base_term: return the base (ADDRESS/SYMBOL_REF/LABEL_REF) that
   address X is relative to, or 0 if unknown.
   NOTE(review): the function header and most case labels are elided in
   this listing; code kept verbatim.  */
458 switch (GET_CODE (x))
461 return REG_BASE_VALUE (x);
464 return find_base_term (XEXP (x, 0));
470 return find_base_term (XEXP (x, 0));
474 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
481 rtx tmp = find_base_term (XEXP (x, 0));
484 return find_base_term (XEXP (x, 1));
488 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
489 return REG_BASE_VALUE (XEXP (x, 0));
501 /* Return 0 if the addresses X and Y are known to point to different
502 objects, 1 if they might be pointers to the same object. */
/* NOTE(review): parameter decls and several return statements are
   elided in this listing; code kept verbatim.  */
505 base_alias_check (x, y)
508 rtx x_base = find_base_term (x);
509 rtx y_base = find_base_term (y);
511 /* If either base address is unknown or the base addresses are equal,
512 nothing is known about aliasing. */
514 if (x_base == 0 || y_base == 0 || rtx_equal_p (x_base, y_base))
517 /* The base addresses of the read and write are different
518 expressions. If they are both symbols and they are not accessed
519 via AND, there is no conflict. */
520 /* XXX: We can bring knowledge of object alignment and offset into
521 play here. For example, on alpha, "char a, b;" can alias one
522 another, though "char a; long b;" cannot. Similarly, offsets
523 into structures may be brought into play. Given "char a, b[40];",
524 a and b[1] may overlap, but a and b[20] do not. */
525 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
527 return GET_CODE (x) == AND || GET_CODE (y) == AND;
530 /* If one address is a stack reference there can be no alias:
531 stack references using different base registers do not alias,
532 a stack reference can not alias a parameter, and a stack reference
533 can not alias a global. */
534 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
535 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
538 if (! flag_argument_noalias)
541 if (flag_argument_noalias > 1)
544 /* Weak noalias assertion (arguments are distinct, but may match globals). */
545 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
548 /* Return nonzero if X and Y (memory addresses) could reference the
549 same location in memory. C is an offset accumulator. When
550 C is nonzero, we are testing aliases between X and Y + C.
551 XSIZE is the size in bytes of the X reference,
552 similarly YSIZE is the size in bytes for Y.
554 If XSIZE or YSIZE is zero, we do not know the amount of memory being
555 referenced (the reference was BLKmode), so make the most pessimistic
558 If XSIZE or YSIZE is negative, we may access memory outside the object
559 being referenced as a side effect. This can happen when using AND to
560 align memory references, as is done on the Alpha.
562 We recognize the following cases of non-conflicting memory:
564 (1) addresses involving the frame pointer cannot conflict
565 with addresses involving static variables.
566 (2) static variables with different addresses cannot conflict.
568 Nice to notice that varying addresses cannot conflict with fp if no
569 local variables had their addresses taken, but that's too hard now. */
/* NOTE(review): many lines are elided in this listing (parameter decls,
   several returns, brace lines, and case labels of the MULT switch are
   missing).  Code kept verbatim; comments only added.  */
573 memrefs_conflict_p (xsize, x, ysize, y, c)
/* Strip HIGH/LO_SUM wrappers from both addresses first.  */
578 if (GET_CODE (x) == HIGH)
580 else if (GET_CODE (x) == LO_SUM)
584 if (GET_CODE (y) == HIGH)
586 else if (GET_CODE (y) == LO_SUM)
591 if (rtx_equal_for_memref_p (x, y))
593 if (xsize <= 0 || ysize <= 0)
595 if (c >= 0 && xsize > c)
597 if (c < 0 && ysize+c > 0)
/* Normalize so that any special base pointer ends up in X (the swap
   body using t/tsize is elided in this listing).  */
602 if (y == frame_pointer_rtx || y == hard_frame_pointer_rtx
603 || y == stack_pointer_rtx || y == arg_pointer_rtx)
607 y = x; ysize = xsize;
608 x = t; xsize = tsize;
611 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
612 || x == stack_pointer_rtx || x == arg_pointer_rtx)
/* Y = X + const: overlap iff the offset window intersects.  */
619 if (GET_CODE (y) == PLUS
620 && canon_rtx (XEXP (y, 0)) == x
621 && (y1 = canon_rtx (XEXP (y, 1)))
622 && GET_CODE (y1) == CONST_INT)
625 return (xsize <= 0 || ysize <= 0
626 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
629 if (GET_CODE (y) == PLUS
630 && (y1 = canon_rtx (XEXP (y, 0)))
637 if (GET_CODE (x) == PLUS)
639 /* The fact that X is canonicalized means that this
640 PLUS rtx is canonicalized. */
641 rtx x0 = XEXP (x, 0);
642 rtx x1 = XEXP (x, 1);
644 if (GET_CODE (y) == PLUS)
646 /* The fact that Y is canonicalized means that this
647 PLUS rtx is canonicalized. */
648 rtx y0 = XEXP (y, 0);
649 rtx y1 = XEXP (y, 1);
651 if (rtx_equal_for_memref_p (x1, y1))
652 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
653 if (rtx_equal_for_memref_p (x0, y0))
654 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
655 if (GET_CODE (x1) == CONST_INT)
656 if (GET_CODE (y1) == CONST_INT)
657 return memrefs_conflict_p (xsize, x0, ysize, y0,
658 c - INTVAL (x1) + INTVAL (y1));
660 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
661 else if (GET_CODE (y1) == CONST_INT)
662 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
664 /* Handle case where we cannot understand iteration operators,
665 but we notice that the base addresses are distinct objects. */
666 /* ??? Is this still necessary? */
667 x = find_symbolic_term (x);
670 y = find_symbolic_term (y);
673 return rtx_equal_for_memref_p (x, y);
675 else if (GET_CODE (x1) == CONST_INT)
676 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
678 else if (GET_CODE (y) == PLUS)
680 /* The fact that Y is canonicalized means that this
681 PLUS rtx is canonicalized. */
682 rtx y0 = XEXP (y, 0);
683 rtx y1 = XEXP (y, 1);
685 if (GET_CODE (y1) == CONST_INT)
686 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
691 if (GET_CODE (x) == GET_CODE (y))
692 switch (GET_CODE (x))
/* MULT case (label elided): scale the sizes down by the common
   constant factor and recurse on the first operands.  */
696 /* Handle cases where we expect the second operands to be the
697 same, and check only whether the first operand would conflict
700 rtx x1 = canon_rtx (XEXP (x, 1));
701 rtx y1 = canon_rtx (XEXP (y, 1));
702 if (! rtx_equal_for_memref_p (x1, y1))
704 x0 = canon_rtx (XEXP (x, 0));
705 y0 = canon_rtx (XEXP (y, 0));
706 if (rtx_equal_for_memref_p (x0, y0))
707 return (xsize == 0 || ysize == 0
708 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
710 /* Can't properly adjust our sizes. */
711 if (GET_CODE (x1) != CONST_INT)
713 xsize /= INTVAL (x1);
714 ysize /= INTVAL (x1);
716 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
720 /* Treat an access through an AND (e.g. a subword access on an Alpha)
721 as an access with indeterminate size. */
722 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
723 return memrefs_conflict_p (-1, XEXP (x, 0), ysize, y, c);
724 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
726 /* XXX: If we are indexing far enough into the array/structure, we
727 may yet be able to determine that we can not overlap. But we
728 also need to know that we are far enough from the end not to overlap
729 a following reference, so we do nothing for now. */
730 return memrefs_conflict_p (xsize, x, -1, XEXP (y, 0), c);
735 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
737 c += (INTVAL (y) - INTVAL (x));
738 return (xsize <= 0 || ysize <= 0
739 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
/* Unwrap CONST on either side and recurse.  */
742 if (GET_CODE (x) == CONST)
744 if (GET_CODE (y) == CONST)
745 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
746 ysize, canon_rtx (XEXP (y, 0)), c);
748 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
751 if (GET_CODE (y) == CONST)
752 return memrefs_conflict_p (xsize, x, ysize,
753 canon_rtx (XEXP (y, 0)), c);
756 return (xsize < 0 || ysize < 0
757 || (rtx_equal_for_memref_p (x, y)
758 && (xsize == 0 || ysize == 0
759 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
766 /* Functions to compute memory dependencies.
768 Since we process the insns in execution order, we can build tables
769 to keep track of what registers are fixed (and not aliased), what registers
770 are varying in known ways, and what registers are varying in unknown
773 If both memory references are volatile, then there must always be a
774 dependence between the two references, since their order can not be
775 changed. A volatile and non-volatile reference can be interchanged
778 A MEM_IN_STRUCT reference at a non-QImode non-AND varying address can never
779 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We must
780 allow QImode aliasing because the ANSI C standard allows character
781 pointers to alias anything. We are assuming that characters are
782 always QImode here. We also must allow AND addresses, because they may
783 generate accesses outside the object being referenced. This is used to
784 generate aligned addresses from unaligned addresses, for instance, the
785 alpha storeqi_unaligned pattern. */
787 /* Read dependence: X is read after read in MEM takes place. There can
788 only be a dependence here if both reads are volatile. */
/* NOTE(review): return type, parameter decls and braces are elided in
   this listing; code kept verbatim.  */
791 read_dependence (mem, x)
795 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
798 /* True dependence: X is read after store in MEM takes place. */
/* VARIES is a predicate telling whether an address can vary.
   NOTE(review): several return statements and braces are elided in
   this listing; code kept verbatim.  */
801 true_dependence (mem, mem_mode, x, varies)
803 enum machine_mode mem_mode;
807 rtx x_addr, mem_addr;
809 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
812 x_addr = XEXP (x, 0);
813 mem_addr = XEXP (mem, 0);
/* Cheap base-address disambiguation before the expensive check.  */
815 if (flag_alias_check && ! base_alias_check (x_addr, mem_addr))
818 /* If X is an unchanging read, then it can't possibly conflict with any
819 non-unchanging store. It may conflict with an unchanging write though,
820 because there may be a single store to this address to initialize it.
821 Just fall through to the code below to resolve the case where we have
822 both an unchanging read and an unchanging write. This won't handle all
823 cases optimally, but the possible performance loss should be
825 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
828 x_addr = canon_rtx (x_addr);
829 mem_addr = canon_rtx (mem_addr);
830 if (mem_mode == VOIDmode)
831 mem_mode = GET_MODE (mem);
833 if (! memrefs_conflict_p (SIZE_FOR_MODE (mem_mode), mem_addr,
834 SIZE_FOR_MODE (x), x_addr, 0))
837 /* If both references are struct references, or both are not, nothing
838 is known about aliasing.
840 If either reference is QImode or BLKmode, ANSI C permits aliasing.
842 If both addresses are constant, or both are not, nothing is known
844 if (MEM_IN_STRUCT_P (x) == MEM_IN_STRUCT_P (mem)
845 || mem_mode == QImode || mem_mode == BLKmode
846 || GET_MODE (x) == QImode || GET_MODE (x) == BLKmode
847 || GET_CODE (x_addr) == AND || GET_CODE (mem_addr) == AND
848 || varies (x_addr) == varies (mem_addr))
851 /* One memory reference is to a constant address, one is not.
852 One is to a structure, the other is not.
854 If either memory reference is a variable structure the other is a
855 fixed scalar and there is no aliasing. */
856 if ((MEM_IN_STRUCT_P (mem) && varies (mem_addr))
857 || (MEM_IN_STRUCT_P (x) && varies (x_addr)))
863 /* Anti dependence: X is written after read in MEM takes place. */
/* NOTE(review): parameter decls and some returns are elided in this
   listing; code kept verbatim.  The tail condition mirrors
   output_dependence: a varying struct reference cannot alias a fixed
   scalar unless QImode or an AND address is involved.  */
866 anti_dependence (mem, x)
870 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
873 if (flag_alias_check && ! base_alias_check (XEXP (x, 0), XEXP (mem, 0)))
876 /* If MEM is an unchanging read, then it can't possibly conflict with
877 the store to X, because there is at most one store to MEM, and it must
878 have occurred somewhere before MEM. */
880 mem = canon_rtx (mem);
881 if (RTX_UNCHANGING_P (mem))
884 return (memrefs_conflict_p (SIZE_FOR_MODE (mem), XEXP (mem, 0),
885 SIZE_FOR_MODE (x), XEXP (x, 0), 0)
886 && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem)
887 && GET_MODE (mem) != QImode
888 && GET_CODE (XEXP (mem, 0)) != AND
889 && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x))
890 && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x)
891 && GET_MODE (x) != QImode
892 && GET_CODE (XEXP (x, 0)) != AND
893 && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem)));
896 /* Output dependence: X is written after store in MEM takes place. */
/* NOTE(review): parameter decls and some returns are elided in this
   listing; code kept verbatim.  Body is deliberately parallel to
   anti_dependence above, minus the RTX_UNCHANGING_P early out.  */
899 output_dependence (mem, x)
903 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
906 if (flag_alias_check && !base_alias_check (XEXP (x, 0), XEXP (mem, 0)))
910 mem = canon_rtx (mem);
911 return (memrefs_conflict_p (SIZE_FOR_MODE (mem), XEXP (mem, 0),
912 SIZE_FOR_MODE (x), XEXP (x, 0), 0)
913 && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem)
914 && GET_MODE (mem) != QImode
915 && GET_CODE (XEXP (mem, 0)) != AND
916 && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x))
917 && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x)
918 && GET_MODE (x) != QImode
919 && GET_CODE (XEXP (x, 0)) != AND
920 && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem)));
/* Initialize all per-function alias-analysis tables:
   reg_known_value/reg_known_equiv_p from REG_EQUAL/REG_EQUIV notes, and
   (when flag_alias_check) reg_base_value by scanning all insns.
   NOTE(review): many lines are elided in this listing (local variable
   decls, #endif lines, braces, loop bodies); code kept verbatim.  */
924 init_alias_analysis ()
926 int maxreg = max_reg_num ();
933 reg_known_value_size = maxreg;
/* Tables are offset by FIRST_PSEUDO_REGISTER so they are indexed
   directly by pseudo-register number.  */
936 = (rtx *) oballoc ((maxreg - FIRST_PSEUDO_REGISTER) * sizeof (rtx))
937 - FIRST_PSEUDO_REGISTER;
939 oballoc (maxreg - FIRST_PSEUDO_REGISTER) - FIRST_PSEUDO_REGISTER;
940 bzero ((char *) (reg_known_value + FIRST_PSEUDO_REGISTER),
941 (maxreg-FIRST_PSEUDO_REGISTER) * sizeof (rtx));
942 bzero (reg_known_equiv_p + FIRST_PSEUDO_REGISTER,
943 (maxreg - FIRST_PSEUDO_REGISTER) * sizeof (char));
945 if (flag_alias_check)
947 /* Overallocate reg_base_value to allow some growth during loop
948 optimization. Loop unrolling can create a large number of
950 reg_base_value_size = maxreg * 2;
951 reg_base_value = (rtx *)oballoc (reg_base_value_size * sizeof (rtx));
952 reg_seen = (char *)alloca (reg_base_value_size);
953 bzero ((char *) reg_base_value, reg_base_value_size * sizeof (rtx));
954 bzero (reg_seen, reg_base_value_size);
956 /* Mark all hard registers which may contain an address.
957 The stack, frame and argument pointers may contain an address.
958 An argument register which can hold a Pmode value may contain
959 an address even if it is not in BASE_REGS.
961 The address expression is VOIDmode for an argument and
962 Pmode for other registers. */
963 #ifndef OUTGOING_REGNO
964 #define OUTGOING_REGNO(N) N
966 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
967 /* Check whether this register can hold an incoming pointer
968 argument. FUNCTION_ARG_REGNO_P tests outgoing register
969 numbers, so translate if necessary due to register windows. */
970 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i)) && HARD_REGNO_MODE_OK (i, Pmode))
971 reg_base_value[i] = gen_rtx (ADDRESS, VOIDmode,
972 gen_rtx (REG, Pmode, i));
974 reg_base_value[STACK_POINTER_REGNUM]
975 = gen_rtx (ADDRESS, Pmode, stack_pointer_rtx);
976 reg_base_value[ARG_POINTER_REGNUM]
977 = gen_rtx (ADDRESS, Pmode, arg_pointer_rtx);
978 reg_base_value[FRAME_POINTER_REGNUM]
979 = gen_rtx (ADDRESS, Pmode, frame_pointer_rtx);
980 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
981 reg_base_value[HARD_FRAME_POINTER_REGNUM]
982 = gen_rtx (ADDRESS, Pmode, hard_frame_pointer_rtx);
986 copying_arguments = 1;
987 /* Fill in the entries with known constant values. */
988 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
990 if (flag_alias_check && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
992 /* If this insn has a noalias note, process it.  Otherwise,
993 scan for sets. A simple set will have no side effects
994 which could change the base value of any other register. */
996 if (GET_CODE (PATTERN (insn)) == SET
997 && (noalias_note = find_reg_note (insn, REG_NOALIAS, NULL_RTX)))
998 record_set (SET_DEST (PATTERN (insn)), 0);
1000 note_stores (PATTERN (insn), record_set);
1002 else if (GET_CODE (insn) == NOTE
1003 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1004 copying_arguments = 0;
/* Record single-set pseudos whose value a REG_EQUAL/REG_EQUIV note
   pins down, skipping EXPR_LIST notes (not a value).  */
1006 if ((set = single_set (insn)) != 0
1007 && GET_CODE (SET_DEST (set)) == REG
1008 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1009 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1010 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
1011 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1012 && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1014 int regno = REGNO (SET_DEST (set));
1015 reg_known_value[regno] = XEXP (note, 0);
1016 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
1020 /* Fill in the remaining entries. */
1021 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
1022 if (reg_known_value[i] == 0)
1023 reg_known_value[i] = regno_reg_rtx[i];
1025 if (! flag_alias_check)
1028 /* Simplify the reg_base_value array so that no register refers to
1029 another register, except to special registers indirectly through
1030 ADDRESS expressions.
1032 In theory this loop can take as long as O(registers^2), but unless
1033 there are very long dependency chains it will run in close to linear
1038 for (i = 0; i < reg_base_value_size; i++)
1040 rtx base = reg_base_value[i];
1041 if (base && GET_CODE (base) == REG)
1043 int base_regno = REGNO (base);
1044 if (base_regno == i) /* register set from itself */
1045 reg_base_value[i] = 0;
1047 reg_base_value[i] = reg_base_value[base_regno];
/* Tear down the alias-analysis tables at the end of a function.
   NOTE(review): braces and the reg_base_value reset line appear to be
   elided in this listing; code kept verbatim.  */
1058 end_alias_analysis ()
1060 reg_known_value = 0;
1062 reg_base_value_size = 0;