1 /* Alias analysis for GNU C
2 Copyright (C) 1997 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
26 #include "hard-reg-set.h"
29 static rtx canon_rtx PROTO((rtx));
30 static int rtx_equal_for_memref_p PROTO((rtx, rtx));
31 static rtx find_symbolic_term PROTO((rtx));
32 static int memrefs_conflict_p PROTO((int, rtx, int, rtx,
35 /* Set up all info needed to perform alias analysis on memory references. */
/* NOTE(review): this listing embeds the original file's own line numbers
   at the start of each line and omits many lines (the numbering jumps);
   all comments added below hedge accordingly.  */
37 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
39 /* reg_base_value[N] gives an address to which register N is related.
40 If all sets after the first add or subtract to the current value
41 or otherwise modify it so it does not point to a different top level
42 object, reg_base_value[N] is equal to the address part of the source
45 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
46 expressions represent certain special values: function arguments and
47 the stack, frame, and argument pointers. The contents of an address
48 expression are not used (but they are descriptive for debugging);
49 only the address and mode matter. Pointer equality, not rtx_equal_p,
50 determines whether two ADDRESS expressions refer to the same base
51 address. The mode determines whether it is a function argument or
52 other special value. */
/* NOTE(review): the declaration of the reg_base_value array itself
   (presumably `static rtx *reg_base_value;', original line ~54) is
   among the omitted lines; init_alias_analysis allocates it below.  */
55 unsigned int reg_base_value_size; /* size of reg_base_value array */
56 #define REG_BASE_VALUE(X) \
57 (REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
59 /* Vector indexed by N giving the initial (unchanging) value known
60 for pseudo-register N. */
/* NOTE(review): the `reg_known_value' vector declaration (original
   line ~61) is omitted here; it is assigned in init_alias_analysis
   and cleared in end_alias_analysis.  */
63 /* Indicates number of valid entries in reg_known_value. */
64 static int reg_known_value_size;
66 /* Vector recording for each reg_known_value whether it is due to a
67 REG_EQUIV note. Future passes (viz., reload) may replace the
68 pseudo with the equivalent expression and so we account for the
69 dependences that would be introduced if that happens. */
70 /* ??? This is a problem only on the Convex. The REG_EQUIV notes created in
71 assign_parms mention the arg pointer, and there are explicit insns in the
72 RTL that modify the arg pointer. Thus we must ensure that such insns don't
73 get scheduled across each other because that would invalidate the REG_EQUIV
74 notes. One could argue that the REG_EQUIV notes are wrong, but solving
75 the problem in the scheduler will likely give better code, so we do it
77 char *reg_known_equiv_p;
79 /* True when scanning insns from the start of the rtl to the
80 NOTE_INSN_FUNCTION_BEG note. */
82 static int copying_arguments;
84 /* Inside SRC, the source of a SET, find a base address. */
/* NOTE(review): the `find_base_value' definition header (original lines
   ~85-89) and the switch's case labels are missing from this listing.
   The fragments below appear to be, in order, the REG, MEM,
   CONST/PLUS/MINUS, LO_SUM, and AND cases of a
   `switch (GET_CODE (src))' — TODO confirm against the full source.  */
90 switch (GET_CODE (src))
97 /* At the start of a function argument registers have known base
98 values which may be lost later. Returning an ADDRESS
99 expression here allows optimization based on argument values
100 even when the argument registers are used for other purposes. */
101 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
102 return reg_base_value[REGNO (src)];
106 /* Check for an argument passed in memory. Only record in the
107 copying-arguments block; it is too hard to track changes
109 if (copying_arguments
110 && (XEXP (src, 0) == arg_pointer_rtx
111 || (GET_CODE (XEXP (src, 0)) == PLUS
112 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
113 return gen_rtx (ADDRESS, VOIDmode, src);
118 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
125 rtx src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
127 /* Guess which operand is the base address.
129 If the first operand is a symbol or the second operand is
130 an integer, the first operand is the base address. Else if
131 either operand is a register marked as a pointer, it is the
134 if (GET_CODE (src_1) == CONST_INT
135 || GET_CODE (src_0) == SYMBOL_REF
136 || GET_CODE (src_0) == LABEL_REF
137 || GET_CODE (src_0) == CONST)
138 return find_base_value (src_0);
140 if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
141 return find_base_value (src_0);
143 if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
144 return find_base_value (src_1);
150 /* The standard form is (lo_sum reg sym) so look only at the
152 return find_base_value (XEXP (src, 1));
155 /* If the second operand is constant set the base
156 address to the first operand. */
/* NOTE(review): nonzero constant second operand keeps operand 0 as the
   base — presumably the AND (mask) case; verify against full source.  */
157 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
158 return find_base_value (XEXP (src, 0));
162 return find_base_value (XEXP (src, 0));
168 /* Called from init_alias_analysis indirectly through note_stores. */
170 /* while scanning insns to find base values, reg_seen[N] is nonzero if
171 register N has been set in this function. */
172 static char *reg_seen;
/* NOTE(review): record_set maintains reg_base_value[] for DEST.  Its
   parameter declarations, braces, the local `src' computation, and the
   REG_NOALIAS handling around original lines 195-210 are missing from
   this listing.  */
175 record_set (dest, set)
181 if (GET_CODE (dest) != REG)
184 regno = REGNO (dest);
188 /* A CLOBBER wipes out any old value but does not prevent a previously
189 unset register from acquiring a base address (i.e. reg_seen is not
191 if (GET_CODE (set) == CLOBBER)
193 reg_base_value[regno] = 0;
/* NOTE(review): unique_id gives each no-alias pseudo its own ADDRESS
   rtx; bases are distinguished by pointer equality, so a fresh rtx per
   id means a fresh base.  */
200 static int unique_id;
203 reg_base_value[regno] = 0;
207 reg_base_value[regno] = gen_rtx (ADDRESS, Pmode,
208 GEN_INT (unique_id++));
212 /* This is not the first set. If the new value is not related to the
213 old value, forget the base value. Note that the following code is
215 extern int x, y; int *p = &x; p += (&y-&x);
216 ANSI C does not allow computing the difference of addresses
217 of distinct top level objects. */
/* NOTE(review): switch case labels (PLUS/MINUS vs. AND vs. default,
   presumably) are among the omitted lines below.  */
218 if (reg_base_value[regno])
219 switch (GET_CODE (src))
224 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
225 reg_base_value[regno] = 0;
228 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
229 reg_base_value[regno] = 0;
232 reg_base_value[regno] = 0;
235 /* If this is the first set of a register, record the value. */
236 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
237 && ! reg_seen[regno] && reg_base_value[regno] == 0)
238 reg_base_value[regno] = find_base_value (src);
243 /* Called from loop optimization when a new pseudo-register is created. */
/* NOTE(review): records a base value for pseudo REGNO derived from VAL,
   copying another register's base when VAL is a REG; the parameter
   declarations, braces, and `return'/`else' scaffolding are missing
   from this listing.  */
245 record_base_value (regno, val)
249 if (!flag_alias_check || regno >= reg_base_value_size)
251 if (GET_CODE (val) == REG)
253 if (REGNO (val) < reg_base_value_size)
254 reg_base_value[regno] = reg_base_value[REGNO (val)];
257 reg_base_value[regno] = find_base_value (val);
/* NOTE(review): the `canon_rtx' function header is missing from this
   listing (the text jumps straight from record_base_value into this
   body; the name is grounded by the recursive canon_rtx calls below).
   It canonicalizes X for memory-reference comparison: substitutes
   reg_known_value equivalences and folds constant PLUS operands.  */
264 /* Recursively look for equivalences. */
265 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
266 && REGNO (x) < reg_known_value_size)
267 return reg_known_value[REGNO (x)] == x
268 ? x : canon_rtx (reg_known_value[REGNO (x)]);
269 else if (GET_CODE (x) == PLUS)
271 rtx x0 = canon_rtx (XEXP (x, 0));
272 rtx x1 = canon_rtx (XEXP (x, 1));
274 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
276 /* We can tolerate LO_SUMs being offset here; these
277 rtl are used for nothing other than comparisons. */
278 if (GET_CODE (x0) == CONST_INT)
279 return plus_constant_for_output (x1, INTVAL (x0));
280 else if (GET_CODE (x1) == CONST_INT)
281 return plus_constant_for_output (x0, INTVAL (x1));
282 return gen_rtx (PLUS, GET_MODE (x), x0, x1);
285 /* This gives us much better alias analysis when called from
286 the loop optimizer. Note we want to leave the original
287 MEM alone, but need to return the canonicalized MEM with
288 all the flags with their original values. */
289 else if (GET_CODE (x) == MEM)
291 rtx addr = canon_rtx (XEXP (x, 0));
292 if (addr != XEXP (x, 0))
294 rtx new = gen_rtx (MEM, GET_MODE (x), addr);
295 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
296 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
297 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
/* NOTE(review): the trailing `return' statements of this function
   (returning the new MEM, and X unchanged otherwise) are among the
   lines omitted from this listing.  */
304 /* Return 1 if X and Y are identical-looking rtx's.
306 We use the data in reg_known_value above to see if two registers with
307 different numbers are, in fact, equivalent. */
/* NOTE(review): structural rtx equality for alias analysis.  The
   declaration lines for the loop indices, `fmt', the early-return
   values, `code = GET_CODE (x)', the `if (code == REG)' guard before
   the REGNO comparison, and the format-letter case labels of the
   element loop are missing from this listing.  */
310 rtx_equal_for_memref_p (x, y)
315 register enum rtx_code code;
318 if (x == 0 && y == 0)
320 if (x == 0 || y == 0)
329 /* Rtx's of different codes cannot be equal. */
330 if (code != GET_CODE (y))
333 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
334 (REG:SI x) and (REG:HI x) are NOT equivalent. */
336 if (GET_MODE (x) != GET_MODE (y))
339 /* REG, LABEL_REF, and SYMBOL_REF can be compared nonrecursively. */
342 return REGNO (x) == REGNO (y);
343 if (code == LABEL_REF)
344 return XEXP (x, 0) == XEXP (y, 0);
345 if (code == SYMBOL_REF)
346 return XSTR (x, 0) == XSTR (y, 0);
348 /* For commutative operations, the RTX match if the operand match in any
349 order. Also handle the simple binary and unary cases without a loop. */
350 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
351 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
352 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
353 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
354 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
355 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
356 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
357 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
358 else if (GET_RTX_CLASS (code) == '1')
359 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
361 /* Compare the elements. If any pair of corresponding elements
362 fail to match, return 0 for the whole things. */
364 fmt = GET_RTX_FORMAT (code);
365 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
370 if (XWINT (x, i) != XWINT (y, i))
376 if (XINT (x, i) != XINT (y, i))
382 /* Two vectors must have the same length. */
383 if (XVECLEN (x, i) != XVECLEN (y, i))
386 /* And the corresponding elements must match. */
387 for (j = 0; j < XVECLEN (x, i); j++)
388 if (rtx_equal_for_memref_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
393 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
399 if (strcmp (XSTR (x, i), XSTR (y, i)))
404 /* These are just backpointers, so they don't matter. */
410 /* It is believed that rtx's at this level will never
411 contain anything but integers and other rtx's,
412 except for within LABEL_REFs and SYMBOL_REFs. */
420 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
421 X and return it, or return 0 if none found. */
/* NOTE(review): recursive search over X's operands; the return-type
   line, the `code = GET_CODE (x)' assignment, braces, the `return'
   statements, and the vector-operand handling under the 'E' branch
   are missing from this listing.  */
424 find_symbolic_term (x)
428 register enum rtx_code code;
432 if (code == SYMBOL_REF || code == LABEL_REF)
434 if (GET_RTX_CLASS (code) == 'o')
437 fmt = GET_RTX_FORMAT (code);
438 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
444 t = find_symbolic_term (XEXP (x, i));
448 else if (fmt[i] == 'E')
/* NOTE(review): the `find_base_term' function header and leading
   comment are missing from this listing (the name is grounded by the
   recursive find_base_term calls below).  It walks address expression
   X down to a base: REG_BASE_VALUE for registers, recursion through
   unary wrappers and PLUS/MINUS, with the switch's case labels also
   among the omitted lines.  */
458 switch (GET_CODE (x))
461 return REG_BASE_VALUE (x);
464 return find_base_term (XEXP (x, 0));
468 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
475 rtx tmp = find_base_term (XEXP (x, 0));
478 return find_base_term (XEXP (x, 1));
482 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
483 return REG_BASE_VALUE (XEXP (x, 0));
495 /* Return 0 if the addresses X and Y are known to point to different
496 objects, 1 if they might be pointers to the same object. */
/* NOTE(review): parameter declarations, braces, and several `return'
   statements (after each early test) are missing from this listing;
   each `if' below is presumably followed by a dropped `return 1;' or
   `return 0;' per the adjacent comments.  */
499 base_alias_check (x, y)
502 rtx x_base = find_base_term (x);
503 rtx y_base = find_base_term (y);
505 /* If either base address is unknown or the base addresses are equal,
506 nothing is known about aliasing. */
508 if (x_base == 0 || y_base == 0 || rtx_equal_p (x_base, y_base))
511 /* The base addresses of the read and write are different
512 expressions. If they are both symbols there is no
514 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
517 /* If one address is a stack reference there can be no alias:
518 stack references using different base registers do not alias,
519 a stack reference can not alias a parameter, and a stack reference
520 can not alias a global. */
521 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
522 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
525 if (! flag_argument_noalias)
528 if (flag_argument_noalias > 1)
531 /* Weak noalias assertion (arguments are distinct, but may match globals). */
532 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode)
535 /* Return nonzero if X and Y (memory addresses) could reference the
536 same location in memory. C is an offset accumulator. When
537 C is nonzero, we are testing aliases between X and Y + C.
538 XSIZE is the size in bytes of the X reference,
539 similarly YSIZE is the size in bytes for Y.
541 If XSIZE or YSIZE is zero, we do not know the amount of memory being
542 referenced (the reference was BLKmode), so make the most pessimistic
545 We recognize the following cases of non-conflicting memory:
547 (1) addresses involving the frame pointer cannot conflict
548 with addresses involving static variables.
549 (2) static variables with different addresses cannot conflict.
551 Nice to notice that varying addresses cannot conflict with fp if no
552 local variables had their addresses taken, but that's too hard now. */
/* NOTE(review): parameter declarations, braces, several `return's, the
   HIGH/LO_SUM stripping bodies, and parts of the frame-pointer swap
   are missing from this listing; the visible fragments are annotated
   where the intent is inferable from the surviving comments.  */
556 memrefs_conflict_p (xsize, x, ysize, y, c)
561 if (GET_CODE (x) == HIGH)
563 else if (GET_CODE (x) == LO_SUM)
567 if (GET_CODE (y) == HIGH)
569 else if (GET_CODE (y) == LO_SUM)
574 if (rtx_equal_for_memref_p (x, y))
576 if (xsize == 0 || ysize == 0)
578 if (c >= 0 && xsize > c)
580 if (c < 0 && ysize+c > 0)
/* NOTE(review): below, X and Y are swapped so that a stack-related
   address, if any, ends up in X — presumably via a dropped temporary
   `t'/`tsize' declaration.  */
585 if (y == frame_pointer_rtx || y == hard_frame_pointer_rtx
586 || y == stack_pointer_rtx)
590 y = x; ysize = xsize;
591 x = t; xsize = tsize;
594 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
595 || x == stack_pointer_rtx)
602 if (GET_CODE (y) == PLUS
603 && canon_rtx (XEXP (y, 0)) == x
604 && (y1 = canon_rtx (XEXP (y, 1)))
605 && GET_CODE (y1) == CONST_INT)
608 return (xsize == 0 || ysize == 0
609 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
612 if (GET_CODE (y) == PLUS
613 && (y1 = canon_rtx (XEXP (y, 0)))
620 if (GET_CODE (x) == PLUS)
622 /* The fact that X is canonicalized means that this
623 PLUS rtx is canonicalized. */
624 rtx x0 = XEXP (x, 0);
625 rtx x1 = XEXP (x, 1);
627 if (GET_CODE (y) == PLUS)
629 /* The fact that Y is canonicalized means that this
630 PLUS rtx is canonicalized. */
631 rtx y0 = XEXP (y, 0);
632 rtx y1 = XEXP (y, 1);
634 if (rtx_equal_for_memref_p (x1, y1))
635 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
636 if (rtx_equal_for_memref_p (x0, y0))
637 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
638 if (GET_CODE (x1) == CONST_INT)
639 if (GET_CODE (y1) == CONST_INT)
640 return memrefs_conflict_p (xsize, x0, ysize, y0,
641 c - INTVAL (x1) + INTVAL (y1));
643 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
644 else if (GET_CODE (y1) == CONST_INT)
645 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
647 /* Handle case where we cannot understand iteration operators,
648 but we notice that the base addresses are distinct objects. */
649 /* ??? Is this still necessary? */
650 x = find_symbolic_term (x);
653 y = find_symbolic_term (y);
656 return rtx_equal_for_memref_p (x, y);
658 else if (GET_CODE (x1) == CONST_INT)
659 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
661 else if (GET_CODE (y) == PLUS)
663 /* The fact that Y is canonicalized means that this
664 PLUS rtx is canonicalized. */
665 rtx y0 = XEXP (y, 0);
666 rtx y1 = XEXP (y, 1);
668 if (GET_CODE (y1) == CONST_INT)
669 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
674 if (GET_CODE (x) == GET_CODE (y))
675 switch (GET_CODE (x))
/* NOTE(review): this appears to be the MULT case (case label dropped);
   sizes and the offset are scaled down by the common multiplier x1.
   A zero INTVAL (x1) would divide by zero here — presumably cannot
   occur for a canonicalized MULT; verify against full source.  */
679 /* Handle cases where we expect the second operands to be the
680 same, and check only whether the first operand would conflict
683 rtx x1 = canon_rtx (XEXP (x, 1));
684 rtx y1 = canon_rtx (XEXP (y, 1));
685 if (! rtx_equal_for_memref_p (x1, y1))
687 x0 = canon_rtx (XEXP (x, 0));
688 y0 = canon_rtx (XEXP (y, 0));
689 if (rtx_equal_for_memref_p (x0, y0))
690 return (xsize == 0 || ysize == 0
691 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
693 /* Can't properly adjust our sizes. */
694 if (GET_CODE (x1) != CONST_INT)
696 xsize /= INTVAL (x1);
697 ysize /= INTVAL (x1);
699 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
703 /* Treat an access through an AND (e.g. a subword access on an Alpha)
704 as an access with indeterminate size. */
705 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
706 return memrefs_conflict_p (0, XEXP (x, 0), ysize, y, c);
707 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
708 return memrefs_conflict_p (xsize, x, 0, XEXP (y, 0), c);
712 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
714 c += (INTVAL (y) - INTVAL (x));
715 return (xsize == 0 || ysize == 0
716 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
719 if (GET_CODE (x) == CONST)
721 if (GET_CODE (y) == CONST)
722 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
723 ysize, canon_rtx (XEXP (y, 0)), c);
725 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
728 if (GET_CODE (y) == CONST)
729 return memrefs_conflict_p (xsize, x, ysize,
730 canon_rtx (XEXP (y, 0)), c);
733 return (rtx_equal_for_memref_p (x, y)
734 && (xsize == 0 || ysize == 0
735 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0)));
742 /* Functions to compute memory dependencies.
744 Since we process the insns in execution order, we can build tables
745 to keep track of what registers are fixed (and not aliased), what registers
746 are varying in known ways, and what registers are varying in unknown
749 If both memory references are volatile, then there must always be a
750 dependence between the two references, since their order can not be
751 changed. A volatile and non-volatile reference can be interchanged
754 A MEM_IN_STRUCT reference at a non-QImode varying address can never
755 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We must
756 allow QImode aliasing because the ANSI C standard allows character
757 pointers to alias anything. We are assuming that characters are
758 always QImode here. */
760 /* Read dependence: X is read after read in MEM takes place. There can
761 only be a dependence here if both reads are volatile. */
/* NOTE(review): the return-type line, parameter declarations, and
   braces of this one-expression function are missing from this
   listing.  */
764 read_dependence (mem, x)
768 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
771 /* True dependence: X is read after store in MEM takes place. */
774 true_dependence (mem, mem_mode, x, varies)
776 enum machine_mode mem_mode;
780 rtx x_addr, mem_addr;
782 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
785 x_addr = XEXP (x, 0);
786 mem_addr = XEXP (mem, 0);
788 if (flag_alias_check && ! base_alias_check (x_addr, mem_addr))
791 /* If X is an unchanging read, then it can't possibly conflict with any
792 non-unchanging store. It may conflict with an unchanging write though,
793 because there may be a single store to this address to initialize it.
794 Just fall through to the code below to resolve the case where we have
795 both an unchanging read and an unchanging write. This won't handle all
796 cases optimally, but the possible performance loss should be
798 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
801 x_addr = canon_rtx (x_addr);
802 mem_addr = canon_rtx (mem_addr);
803 if (mem_mode == VOIDmode)
804 mem_mode = GET_MODE (mem);
806 if (! memrefs_conflict_p (mem_mode, mem_addr, SIZE_FOR_MODE (x), x_addr, 0))
809 /* If both references are struct references, or both are not, nothing
810 is known about aliasing.
812 If either reference is QImode or BLKmode, ANSI C permits aliasing.
814 If both addresses are constant, or both are not, nothing is known
816 if (MEM_IN_STRUCT_P (x) == MEM_IN_STRUCT_P (mem)
817 || mem_mode == QImode || mem_mode == BLKmode
818 || GET_MODE (x) == QImode || GET_MODE (mem) == BLKmode
819 || varies (x_addr) == varies (mem_addr))
822 /* One memory reference is to a constant address, one is not.
823 One is to a structure, the other is not.
825 If either memory reference is a variable structure the other is a
826 fixed scalar and there is no aliasing. */
827 if ((MEM_IN_STRUCT_P (mem) && varies (mem_addr))
828 || (MEM_IN_STRUCT_P (x) && varies (x)))
834 /* Anti dependence: X is written after read in MEM takes place. */
/* NOTE(review): the return-type line, parameter declarations, braces,
   the `return 1;'/`return 0;' after the two early tests, and the
   `return 0;' after the RTX_UNCHANGING_P test are missing from this
   listing.  */
837 anti_dependence (mem, x)
841 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
844 if (flag_alias_check && ! base_alias_check (XEXP (x, 0), XEXP (mem, 0)))
847 /* If MEM is an unchanging read, then it can't possibly conflict with
848 the store to X, because there is at most one store to MEM, and it must
849 have occurred somewhere before MEM. */
851 mem = canon_rtx (mem);
852 if (RTX_UNCHANGING_P (mem))
/* Conflict unless one side is a non-QImode varying struct reference
   and the other a fixed non-struct reference (see file comment on
   MEM_IN_STRUCT aliasing rules above).  */
855 return (memrefs_conflict_p (SIZE_FOR_MODE (mem), XEXP (mem, 0),
856 SIZE_FOR_MODE (x), XEXP (x, 0), 0)
857 && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem)
858 && GET_MODE (mem) != QImode
859 && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x))
860 && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x)
861 && GET_MODE (x) != QImode
862 && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem)));
865 /* Output dependence: X is written after store in MEM takes place. */
/* NOTE(review): mirrors anti_dependence minus the RTX_UNCHANGING_P
   shortcut; the return-type line, parameter declarations, braces, and
   the early `return's are missing from this listing.  */
868 output_dependence (mem, x)
872 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
875 if (flag_alias_check && !base_alias_check (XEXP (x, 0), XEXP (mem, 0)))
879 mem = canon_rtx (mem);
880 return (memrefs_conflict_p (SIZE_FOR_MODE (mem), XEXP (mem, 0),
881 SIZE_FOR_MODE (x), XEXP (x, 0), 0)
882 && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem)
883 && GET_MODE (mem) != QImode
884 && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x))
885 && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x)
886 && GET_MODE (x) != QImode
887 && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem)));
/* NOTE(review): entry point that allocates and fills reg_known_value,
   reg_known_equiv_p and (when flag_alias_check) reg_base_value by
   scanning the insn stream.  The return-type line, local declarations
   (`i', `insn', `set', `note', `noalias_note'), braces, and several
   statement lines are missing from this listing.  */
891 init_alias_analysis ()
893 int maxreg = max_reg_num ();
900 reg_known_value_size = maxreg;
/* Arrays are biased by FIRST_PSEUDO_REGISTER so they can be indexed
   directly by pseudo-register number.  */
903 = (rtx *) oballoc ((maxreg - FIRST_PSEUDO_REGISTER) * sizeof (rtx))
904 - FIRST_PSEUDO_REGISTER;
906 oballoc (maxreg - FIRST_PSEUDO_REGISTER) - FIRST_PSEUDO_REGISTER;
907 bzero ((char *) (reg_known_value + FIRST_PSEUDO_REGISTER),
908 (maxreg-FIRST_PSEUDO_REGISTER) * sizeof (rtx));
909 bzero (reg_known_equiv_p + FIRST_PSEUDO_REGISTER,
910 (maxreg - FIRST_PSEUDO_REGISTER) * sizeof (char));
912 if (flag_alias_check)
914 /* Overallocate reg_base_value to allow some growth during loop
915 optimization. Loop unrolling can create a large number of
917 reg_base_value_size = maxreg * 2;
918 reg_base_value = (rtx *)oballoc (reg_base_value_size * sizeof (rtx));
919 reg_seen = (char *)alloca (reg_base_value_size);
920 bzero (reg_base_value, reg_base_value_size * sizeof (rtx));
921 bzero (reg_seen, reg_base_value_size);
923 /* Mark all hard registers which may contain an address.
924 The stack, frame and argument pointers may contain an address.
925 An argument register which can hold a Pmode value may contain
926 an address even if it is not in BASE_REGS.
928 The address expression is VOIDmode for an argument and
929 Pmode for other registers. */
930 #ifndef OUTGOING_REGNO
931 #define OUTGOING_REGNO(N) N
933 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
934 /* Check whether this register can hold an incoming pointer
935 argument. FUNCTION_ARG_REGNO_P tests outgoing register
936 numbers, so translate if necessary due to register windows. */
937 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i)) && HARD_REGNO_MODE_OK (i, Pmode))
938 reg_base_value[i] = gen_rtx (ADDRESS, VOIDmode,
939 gen_rtx (REG, Pmode, i));
941 reg_base_value[STACK_POINTER_REGNUM]
942 = gen_rtx (ADDRESS, Pmode, stack_pointer_rtx)
943 reg_base_value[ARG_POINTER_REGNUM]
944 = gen_rtx (ADDRESS, Pmode, arg_pointer_rtx);
945 reg_base_value[FRAME_POINTER_REGNUM]
946 = gen_rtx (ADDRESS, Pmode, frame_pointer_rtx);
947 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
948 reg_base_value[HARD_FRAME_POINTER_REGNUM]
949 = gen_rtx (ADDRESS, Pmode, hard_frame_pointer_rtx);
953 copying_arguments = 1;
954 /* Fill in the entries with known constant values. */
955 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
957 if (flag_alias_check && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
959 /* If this insn has a noalias note, process it, Otherwise,
960 scan for sets. A simple set will have no side effects
961 which could change the base value of any other register. */
963 if (GET_CODE (PATTERN (insn)) == SET
964 && (noalias_note = find_reg_note (insn, REG_NOALIAS, NULL_RTX)))
965 record_set (SET_DEST (PATTERN (insn)), 0);
967 note_stores (PATTERN (insn), record_set);
969 else if (GET_CODE (insn) == NOTE
970 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
971 copying_arguments = 0;
/* Record REG_EQUAL (only for single-set pseudos) and REG_EQUIV notes
   as known values.  */
973 if ((set = single_set (insn)) != 0
974 && GET_CODE (SET_DEST (set)) == REG
975 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
976 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
977 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
978 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
979 && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
981 int regno = REGNO (SET_DEST (set));
982 reg_known_value[regno] = XEXP (note, 0);
983 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
987 /* Fill in the remaining entries. */
988 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
989 if (reg_known_value[i] == 0)
990 reg_known_value[i] = regno_reg_rtx[i];
992 if (! flag_alias_check)
995 /* Simplify the reg_base_value array so that no register refers to
996 another register, except to special registers indirectly through
999 In theory this loop can take as long as O(registers^2), but unless
1000 there are very long dependency chains it will run in close to linear
1005 for (i = 0; i < reg_base_value_size; i++)
1007 rtx base = reg_base_value[i];
1008 if (base && GET_CODE (base) == REG)
1010 int base_regno = REGNO (base);
1011 if (base_regno == i) /* register set from itself */
1012 reg_base_value[i] = 0;
1014 reg_base_value[i] = reg_base_value[base_regno];
/* Tear down alias-analysis state; reg_known_value and reg_base_value
   storage was obstack-allocated, so only the pointers/sizes are reset
   here.  NOTE(review): braces and the companion resets (presumably
   reg_base_value = 0 etc.) are missing from this listing.  */
1025 end_alias_analysis ()
1027 reg_known_value = 0;
1029 reg_base_value_size = 0;