1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
/* Forward declarations of the static helpers defined below.  PROTO is
   presumably old GCC's prototype-compatibility macro (defined elsewhere);
   the memrefs_conflict_p declaration continues on a line not shown here.  */
30 static rtx canon_rtx PROTO((rtx));
31 static int rtx_equal_for_memref_p PROTO((rtx, rtx));
32 static rtx find_symbolic_term PROTO((rtx));
33 static int memrefs_conflict_p PROTO((int, rtx, int, rtx,
35 static void record_set PROTO((rtx, rtx));
36 static rtx find_base_term PROTO((rtx));
37 static int base_alias_check PROTO((rtx, rtx));
39 /* Set up all info needed to perform alias analysis on memory references. */
/* Size in bytes of the memory reference X, derived from its machine mode. */
41 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
43 /* Cap the number of passes we make over the insns propagating alias
44 information through set chains.
46 10 is a completely arbitrary choice. */
47 #define MAX_ALIAS_LOOP_PASSES 10
49 /* reg_base_value[N] gives an address to which register N is related.
50 If all sets after the first add or subtract to the current value
51 or otherwise modify it so it does not point to a different top level
52 object, reg_base_value[N] is equal to the address part of the source
55 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
56 expressions represent certain special values: function arguments and
57 the stack, frame, and argument pointers. The contents of an address
58 expression are not used (but they are descriptive for debugging);
59 only the address and mode matter. Pointer equality, not rtx_equal_p,
60 determines whether two ADDRESS expressions refer to the same base
61 address. The mode determines whether it is a function argument or
62 other special value. */
/* Per-pass scratch copy of the base values: wiped at the start of each
   init_alias_analysis pass and merged into reg_base_value at its end.  */
65 rtx *new_reg_base_value;
66 unsigned int reg_base_value_size; /* size of reg_base_value array */
/* Base value for register X, or 0 if X is out of range of the table.  */
67 #define REG_BASE_VALUE(X) \
68 (REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
70 /* Vector of known invariant relationships between registers. Set in
71 loop unrolling. Indexed by register number, if nonzero the value
72 is an expression describing this register in terms of another.
74 The length of this array is REG_BASE_VALUE_SIZE.
76 Because this array contains only pseudo registers it has no effect
78 static rtx *alias_invariant;
80 /* Vector indexed by N giving the initial (unchanging) value known
81 for pseudo-register N. */
84 /* Indicates number of valid entries in reg_known_value. */
85 static int reg_known_value_size;
87 /* Vector recording for each reg_known_value whether it is due to a
88 REG_EQUIV note. Future passes (viz., reload) may replace the
89 pseudo with the equivalent expression and so we account for the
90 dependences that would be introduced if that happens. */
91 /* ??? This is a problem only on the Convex. The REG_EQUIV notes created in
92 assign_parms mention the arg pointer, and there are explicit insns in the
93 RTL that modify the arg pointer. Thus we must ensure that such insns don't
94 get scheduled across each other because that would invalidate the REG_EQUIV
95 notes. One could argue that the REG_EQUIV notes are wrong, but solving
96 the problem in the scheduler will likely give better code, so we do it
/* Used as an array of booleans, one char per pseudo register.  */
98 char *reg_known_equiv_p;
100 /* True when scanning insns from the start of the rtl to the
101 NOTE_INSN_FUNCTION_BEG note. */
103 static int copying_arguments;
105 /* Inside SRC, the source of a SET, find a base address. */
/* Dispatches on the rtx code of SRC; recurses through PLUS/MINUS,
   LO_SUM, AND, and pointer extensions to locate the underlying base.
   NOTE(review): several case labels and the function's return type are
   not visible in this listing.  */
108 find_base_value (src)
111 switch (GET_CODE (src))
118 /* At the start of a function argument registers have known base
119 values which may be lost later. Returning an ADDRESS
120 expression here allows optimization based on argument values
121 even when the argument registers are used for other purposes. */
122 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
123 return new_reg_base_value[REGNO (src)];
125 /* If a pseudo has a known base value, return it. Do not do this
126 for hard regs since it can result in a circular dependency
127 chain for registers which have values at function entry.
129 The test above is not sufficient because the scheduler may move
130 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
131 if (REGNO (src) >= FIRST_PSEUDO_REGISTER
132 && reg_base_value[REGNO (src)])
133 return reg_base_value[REGNO (src)];
138 /* Check for an argument passed in memory. Only record in the
139 copying-arguments block; it is too hard to track changes
141 if (copying_arguments
142 && (XEXP (src, 0) == arg_pointer_rtx
143 || (GET_CODE (XEXP (src, 0)) == PLUS
144 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
145 return gen_rtx_ADDRESS (VOIDmode, src);
150 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
157 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
159 /* If either operand is a REG, then see if we already have
160 a known value for it. */
161 if (GET_CODE (src_0) == REG)
163 temp = find_base_value (src_0);
168 if (GET_CODE (src_1) == REG)
170 temp = find_base_value (src_1);
175 /* Guess which operand is the base address.
177 If either operand is a symbol, then it is the base. If
178 either operand is a CONST_INT, then the other is the base. */
180 if (GET_CODE (src_1) == CONST_INT
181 || GET_CODE (src_0) == SYMBOL_REF
182 || GET_CODE (src_0) == LABEL_REF
183 || GET_CODE (src_0) == CONST)
184 return find_base_value (src_0);
186 if (GET_CODE (src_0) == CONST_INT
187 || GET_CODE (src_1) == SYMBOL_REF
188 || GET_CODE (src_1) == LABEL_REF
189 || GET_CODE (src_1) == CONST)
190 return find_base_value (src_1);
192 /* This might not be necessary anymore.
194 If either operand is a REG that is a known pointer, then it
196 if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
197 return find_base_value (src_0);
199 if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
200 return find_base_value (src_1);
206 /* The standard form is (lo_sum reg sym) so look only at the
208 return find_base_value (XEXP (src, 1));
/* AND is used to force alignment (e.g. on the Alpha); a nonzero
   constant mask leaves the base object unchanged.  */
211 /* If the second operand is constant set the base
212 address to the first operand. */
213 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
214 return find_base_value (XEXP (src, 0));
218 case SIGN_EXTEND: /* used for NT/Alpha pointers */
220 return find_base_value (XEXP (src, 0));
229 /* Called from init_alias_analysis indirectly through note_stores. */
231 /* while scanning insns to find base values, reg_seen[N] is nonzero if
232 register N has been set in this function. */
233 static char *reg_seen;
235 /* Addresses which are known not to alias anything else are identified
236 by a unique integer. */
237 static int unique_id;
/* Record a base value in new_reg_base_value for register DEST, which is
   being assigned by SET.  Presumably SET is NULL_RTX for a REG_NOALIAS
   set (see the call in init_alias_analysis), in which case DEST gets a
   fresh unique ADDRESS -- TODO confirm; the guarding condition for the
   unique_id branch is not visible in this listing.  */
240 record_set (dest, set)
246 if (GET_CODE (dest) != REG)
249 regno = REGNO (dest);
253 /* A CLOBBER wipes out any old value but does not prevent a previously
254 unset register from acquiring a base address (i.e. reg_seen is not
256 if (GET_CODE (set) == CLOBBER)
258 new_reg_base_value[regno] = 0;
267 new_reg_base_value[regno] = 0;
/* A fresh ADDRESS distinguished only by its unique integer: known not
   to alias any other base.  */
271 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
272 GEN_INT (unique_id++));
276 /* This is not the first set. If the new value is not related to the
277 old value, forget the base value. Note that the following code is
279 extern int x, y; int *p = &x; p += (&y-&x);
280 ANSI C does not allow computing the difference of addresses
281 of distinct top level objects. */
282 if (new_reg_base_value[regno])
283 switch (GET_CODE (src))
288 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
289 new_reg_base_value[regno] = 0;
292 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
293 new_reg_base_value[regno] = 0;
296 new_reg_base_value[regno] = 0;
299 /* If this is the first set of a register, record the value. */
300 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
301 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
302 new_reg_base_value[regno] = find_base_value (src);
307 /* Called from loop optimization when a new pseudo-register is created. */
/* Record that pseudo REGNO has the value VAL; if VAL is itself a REG,
   REGNO inherits that register's base value directly.  */
309 record_base_value (regno, val, invariant)
314 if (regno >= reg_base_value_size)
317 /* If INVARIANT is true then this value also describes an invariant
318 relationship which can be used to deduce that two registers with
319 unknown values are different. */
320 if (invariant && alias_invariant)
321 alias_invariant[regno] = val;
323 if (GET_CODE (val) == REG)
325 if (REGNO (val) < reg_base_value_size)
327 reg_base_value[regno] = reg_base_value[REGNO (val)];
331 reg_base_value[regno] = find_base_value (val);
/* (Body of canon_rtx; the signature line is not visible in this listing.)
   Canonicalize X: substitute the known value for an equivalent pseudo,
   fold constant offsets in a PLUS, and rebuild a MEM whose address
   canonicalizes differently, preserving its flags.  */
338 /* Recursively look for equivalences. */
339 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
340 && REGNO (x) < reg_known_value_size)
341 return reg_known_value[REGNO (x)] == x
342 ? x : canon_rtx (reg_known_value[REGNO (x)]);
343 else if (GET_CODE (x) == PLUS)
345 rtx x0 = canon_rtx (XEXP (x, 0));
346 rtx x1 = canon_rtx (XEXP (x, 1));
348 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
350 /* We can tolerate LO_SUMs being offset here; these
351 rtl are used for nothing other than comparisons. */
352 if (GET_CODE (x0) == CONST_INT)
353 return plus_constant_for_output (x1, INTVAL (x0));
354 else if (GET_CODE (x1) == CONST_INT)
355 return plus_constant_for_output (x0, INTVAL (x1));
356 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
359 /* This gives us much better alias analysis when called from
360 the loop optimizer. Note we want to leave the original
361 MEM alone, but need to return the canonicalized MEM with
362 all the flags with their original values. */
363 else if (GET_CODE (x) == MEM)
365 rtx addr = canon_rtx (XEXP (x, 0));
366 if (addr != XEXP (x, 0))
368 rtx new = gen_rtx_MEM (GET_MODE (x), addr);
369 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
370 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
371 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
378 /* Return 1 if X and Y are identical-looking rtx's.
380 We use the data in reg_known_value above to see if two registers with
381 different numbers are, in fact, equivalent. */
384 rtx_equal_for_memref_p (x, y)
389 register enum rtx_code code;
392 if (x == 0 && y == 0)
394 if (x == 0 || y == 0)
403 /* Rtx's of different codes cannot be equal. */
404 if (code != GET_CODE (y))
407 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
408 (REG:SI x) and (REG:HI x) are NOT equivalent. */
410 if (GET_MODE (x) != GET_MODE (y))
413 /* REG, LABEL_REF, and SYMBOL_REF can be compared nonrecursively. */
416 return REGNO (x) == REGNO (y);
417 if (code == LABEL_REF)
418 return XEXP (x, 0) == XEXP (y, 0);
/* Symbol names are shared, so pointer comparison of the strings
   suffices here.  */
419 if (code == SYMBOL_REF)
420 return XSTR (x, 0) == XSTR (y, 0);
421 if (code == CONST_INT)
422 return INTVAL (x) == INTVAL (y);
423 if (code == ADDRESSOF)
424 return REGNO (XEXP (x, 0)) == REGNO (XEXP (y, 0)) && XINT (x, 1) == XINT (y, 1);
426 /* For commutative operations, the RTXs match if the operands match in
427 either order. Also handle the simple binary and unary cases without a loop. */
428 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
429 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
430 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
431 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
432 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
433 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
434 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
435 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
436 else if (GET_RTX_CLASS (code) == '1')
437 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
439 /* Compare the elements. If any pair of corresponding elements
440 fail to match, return 0 for the whole thing.
442 Limit cases to types which actually appear in addresses. */
444 fmt = GET_RTX_FORMAT (code);
445 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
450 if (XINT (x, i) != XINT (y, i))
455 /* Two vectors must have the same length. */
456 if (XVECLEN (x, i) != XVECLEN (y, i))
459 /* And the corresponding elements must match. */
460 for (j = 0; j < XVECLEN (x, i); j++)
461 if (rtx_equal_for_memref_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
466 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
470 /* This can happen for an asm which clobbers memory. */
474 /* It is believed that rtx's at this level will never
475 contain anything but integers and other rtx's,
476 except for within LABEL_REFs and SYMBOL_REFs. */
484 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
485 X and return it, or return 0 if none found. */
488 find_symbolic_term (x)
492 register enum rtx_code code;
496 if (code == SYMBOL_REF || code == LABEL_REF)
/* Other object rtx's (class 'o') cannot contain a symbolic term.  */
498 if (GET_RTX_CLASS (code) == 'o')
501 fmt = GET_RTX_FORMAT (code);
502 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
508 t = find_symbolic_term (XEXP (x, i));
512 else if (fmt[i] == 'E')
/* (Body of find_base_term; the signature line is not visible in this
   listing.)  Walk the address X down to its base term: a REG's recorded
   base value, or the base of the non-constant operand of a sum.  */
522 switch (GET_CODE (x))
525 return REG_BASE_VALUE (x);
528 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
534 return find_base_term (XEXP (x, 0));
538 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
545 rtx tmp = find_base_term (XEXP (x, 0));
548 return find_base_term (XEXP (x, 1));
552 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
553 return REG_BASE_VALUE (XEXP (x, 0));
565 /* Return 0 if the addresses X and Y are known to point to different
566 objects, 1 if they might be pointers to the same object. */
569 base_alias_check (x, y)
572 rtx x_base = find_base_term (x);
573 rtx y_base = find_base_term (y);
575 /* If the address itself has no known base see if a known equivalent
576 value has one. If either address still has no known base, nothing
577 is known about aliasing. */
581 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
583 x_base = find_base_term (x_c);
591 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
593 y_base = find_base_term (y_c);
598 /* If the base addresses are equal nothing is known about aliasing. */
599 if (rtx_equal_p (x_base, y_base))
602 /* The base addresses of the read and write are different
603 expressions. If they are both symbols and they are not accessed
604 via AND, there is no conflict. */
605 /* XXX: We can bring knowledge of object alignment and offset into
606 play here. For example, on alpha, "char a, b;" can alias one
607 another, though "char a; long b;" cannot. Similarly, offsets
608 into structures may be brought into play. Given "char a, b[40];",
609 a and b[1] may overlap, but a and b[20] do not. */
/* Both bases symbolic and distinct: a conflict is possible only when
   one access goes through an alignment AND.  */
610 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
612 return GET_CODE (x) == AND || GET_CODE (y) == AND;
615 /* If one address is a stack reference there can be no alias:
616 stack references using different base registers do not alias,
617 a stack reference can not alias a parameter, and a stack reference
618 can not alias a global. */
619 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
620 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
623 if (! flag_argument_noalias)
626 if (flag_argument_noalias > 1)
629 /* Weak noalias assertion (arguments are distinct, but may match globals). */
630 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode)
633 /* Return nonzero if X and Y (memory addresses) could reference the
634 same location in memory. C is an offset accumulator. When
635 C is nonzero, we are testing aliases between X and Y + C.
636 XSIZE is the size in bytes of the X reference,
637 similarly YSIZE is the size in bytes for Y.
639 If XSIZE or YSIZE is zero, we do not know the amount of memory being
640 referenced (the reference was BLKmode), so make the most pessimistic
643 If XSIZE or YSIZE is negative, we may access memory outside the object
644 being referenced as a side effect. This can happen when using AND to
645 align memory references, as is done on the Alpha.
647 Nice to notice that varying addresses cannot conflict with fp if no
648 local variables had their addresses taken, but that's too hard now. */
652 memrefs_conflict_p (xsize, x, ysize, y, c)
/* Strip HIGH/LO_SUM wrappers from both addresses before comparing.  */
657 if (GET_CODE (x) == HIGH)
659 else if (GET_CODE (x) == LO_SUM)
663 if (GET_CODE (y) == HIGH)
665 else if (GET_CODE (y) == LO_SUM)
/* Identical addresses: conflict unless the sizes and offset C show the
   two accesses are disjoint.  */
670 if (rtx_equal_for_memref_p (x, y))
672 if (xsize <= 0 || ysize <= 0)
674 if (c >= 0 && xsize > c)
676 if (c < 0 && ysize+c > 0)
681 /* This code used to check for conflicts involving stack references and
682 globals but the base address alias code now handles these cases. */
684 if (GET_CODE (x) == PLUS)
686 /* The fact that X is canonicalized means that this
687 PLUS rtx is canonicalized. */
688 rtx x0 = XEXP (x, 0);
689 rtx x1 = XEXP (x, 1);
691 if (GET_CODE (y) == PLUS)
693 /* The fact that Y is canonicalized means that this
694 PLUS rtx is canonicalized. */
695 rtx y0 = XEXP (y, 0);
696 rtx y1 = XEXP (y, 1);
/* Cancel a common term from both sums, or fold constant offsets
   into the accumulator C, then recurse on what remains.  */
698 if (rtx_equal_for_memref_p (x1, y1))
699 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
700 if (rtx_equal_for_memref_p (x0, y0))
701 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
702 if (GET_CODE (x1) == CONST_INT)
703 if (GET_CODE (y1) == CONST_INT)
704 return memrefs_conflict_p (xsize, x0, ysize, y0,
705 c - INTVAL (x1) + INTVAL (y1));
707 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
708 else if (GET_CODE (y1) == CONST_INT)
709 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
713 else if (GET_CODE (x1) == CONST_INT)
714 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
716 else if (GET_CODE (y) == PLUS)
718 /* The fact that Y is canonicalized means that this
719 PLUS rtx is canonicalized. */
720 rtx y0 = XEXP (y, 0);
721 rtx y1 = XEXP (y, 1);
723 if (GET_CODE (y1) == CONST_INT)
724 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
729 if (GET_CODE (x) == GET_CODE (y))
730 switch (GET_CODE (x))
734 /* Handle cases where we expect the second operands to be the
735 same, and check only whether the first operand would conflict
738 rtx x1 = canon_rtx (XEXP (x, 1));
739 rtx y1 = canon_rtx (XEXP (y, 1));
740 if (! rtx_equal_for_memref_p (x1, y1))
742 x0 = canon_rtx (XEXP (x, 0));
743 y0 = canon_rtx (XEXP (y, 0));
744 if (rtx_equal_for_memref_p (x0, y0))
745 return (xsize == 0 || ysize == 0
746 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
748 /* Can't properly adjust our sizes. */
749 if (GET_CODE (x1) != CONST_INT)
/* Scale the sizes down by the common multiplier and compare the
   scaled index expressions.  */
751 xsize /= INTVAL (x1);
752 ysize /= INTVAL (x1);
754 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
758 /* Are these registers known not to be equal? */
761 int r_x = REGNO (x), r_y = REGNO (y);
762 rtx i_x, i_y; /* invariant relationships of X and Y */
764 i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
765 i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];
767 if (i_x == 0 && i_y == 0)
770 if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
771 ysize, i_y ? i_y : y, c))
780 /* Treat an access through an AND (e.g. a subword access on an Alpha)
781 as an access with indeterminate size.
782 ??? Could instead convert an n byte reference at (and x y) to an
783 n-y byte reference at (plus x y). */
784 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
785 return memrefs_conflict_p (-1, XEXP (x, 0), ysize, y, c);
786 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
788 /* XXX: If we are indexing far enough into the array/structure, we
789 may yet be able to determine that we can not overlap. But we
790 also need to know that we are far enough from the end not to overlap
791 a following reference, so we do nothing for now. */
792 return memrefs_conflict_p (xsize, x, -1, XEXP (y, 0), c);
797 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
799 c += (INTVAL (y) - INTVAL (x));
800 return (xsize <= 0 || ysize <= 0
801 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
804 if (GET_CODE (x) == CONST)
806 if (GET_CODE (y) == CONST)
807 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
808 ysize, canon_rtx (XEXP (y, 0)), c);
810 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
813 if (GET_CODE (y) == CONST)
814 return memrefs_conflict_p (xsize, x, ysize,
815 canon_rtx (XEXP (y, 0)), c);
/* Fallback: be conservative for unknown (negative) sizes; otherwise a
   conflict requires equal addresses within overlapping extents.  */
818 return (xsize < 0 || ysize < 0
819 || (rtx_equal_for_memref_p (x, y)
820 && (xsize == 0 || ysize == 0
821 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
828 /* Functions to compute memory dependencies.
830 Since we process the insns in execution order, we can build tables
831 to keep track of what registers are fixed (and not aliased), what registers
832 are varying in known ways, and what registers are varying in unknown
835 If both memory references are volatile, then there must always be a
836 dependence between the two references, since their order can not be
837 changed. A volatile and non-volatile reference can be interchanged
840 A MEM_IN_STRUCT reference at a non-QImode non-AND varying address can never
841 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We must
842 allow QImode aliasing because the ANSI C standard allows character
843 pointers to alias anything. We are assuming that characters are
844 always QImode here. We also must allow AND addresses, because they may
845 generate accesses outside the object being referenced. This is used to
846 generate aligned addresses from unaligned addresses, for instance, the
847 alpha storeqi_unaligned pattern. */
849 /* Read dependence: X is read after read in MEM takes place. There can
850 only be a dependence here if both reads are volatile. */
853 read_dependence (mem, x)
/* Nonzero only when both reads are volatile.  */
857 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
860 /* True dependence: X is read after store in MEM takes place. */
/* VARIES is a callback that reports whether an address varies;
   it is applied to the canonicalized addresses below.  */
863 true_dependence (mem, mem_mode, x, varies)
865 enum machine_mode mem_mode;
869 register rtx x_addr, mem_addr;
871 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
874 /* If X is an unchanging read, then it can't possibly conflict with any
875 non-unchanging store. It may conflict with an unchanging write though,
876 because there may be a single store to this address to initialize it.
877 Just fall through to the code below to resolve the case where we have
878 both an unchanging read and an unchanging write. This won't handle all
879 cases optimally, but the possible performance loss should be
881 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
/* Cheap base-address disambiguation before the full offset check.  */
884 if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0)))
887 x_addr = canon_rtx (XEXP (x, 0));
888 mem_addr = canon_rtx (XEXP (mem, 0));
890 if (mem_mode == VOIDmode)
891 mem_mode = GET_MODE (mem);
893 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
894 SIZE_FOR_MODE (x), x_addr, 0))
897 /* If both references are struct references, or both are not, nothing
898 is known about aliasing.
900 If either reference is QImode or BLKmode, ANSI C permits aliasing.
902 If both addresses are constant, or both are not, nothing is known
904 if (MEM_IN_STRUCT_P (x) == MEM_IN_STRUCT_P (mem)
905 || mem_mode == QImode || mem_mode == BLKmode
906 || GET_MODE (x) == QImode || GET_MODE (x) == BLKmode
907 || GET_CODE (x_addr) == AND || GET_CODE (mem_addr) == AND
908 || varies (x_addr) == varies (mem_addr))
911 /* One memory reference is to a constant address, one is not.
912 One is to a structure, the other is not.
914 If either memory reference is a variable structure the other is a
915 fixed scalar and there is no aliasing. */
916 if ((MEM_IN_STRUCT_P (mem) && varies (mem_addr))
917 || (MEM_IN_STRUCT_P (x) && varies (x_addr)))
923 /* Anti dependence: X is written after read in MEM takes place. */
926 anti_dependence (mem, x)
930 rtx x_addr, mem_addr;
932 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
935 /* If MEM is an unchanging read, then it can't possibly conflict with
936 the store to X, because there is at most one store to MEM, and it must
937 have occurred somewhere before MEM. */
938 if (RTX_UNCHANGING_P (mem))
941 if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0)))
945 mem = canon_rtx (mem);
947 x_addr = XEXP (x, 0);
948 mem_addr = XEXP (mem, 0);
/* Conflict iff the extents may overlap and neither the variable-struct
   vs. fixed-scalar exclusion (see the comment block above
   read_dependence) rules the pair out.  */
950 return (memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
951 SIZE_FOR_MODE (x), x_addr, 0)
952 && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem)
953 && GET_MODE (mem) != QImode
954 && GET_CODE (mem_addr) != AND
955 && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x))
956 && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x)
957 && GET_MODE (x) != QImode
958 && GET_CODE (x_addr) != AND
959 && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem)));
962 /* Output dependence: X is written after store in MEM takes place. */
965 output_dependence (mem, x)
969 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
972 if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0)))
976 mem = canon_rtx (mem);
/* Same overlap test and struct/scalar exclusions as anti_dependence.  */
978 return (memrefs_conflict_p (SIZE_FOR_MODE (mem), XEXP (mem, 0),
979 SIZE_FOR_MODE (x), XEXP (x, 0), 0)
980 && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem)
981 && GET_MODE (mem) != QImode
982 && GET_CODE (XEXP (mem, 0)) != AND
983 && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x))
984 && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x)
985 && GET_MODE (x) != QImode
986 && GET_CODE (XEXP (x, 0)) != AND
987 && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem)));
/* Set of hard registers that may carry incoming pointer arguments,
   computed once below and consulted each pass of init_alias_analysis.  */
991 static HARD_REG_SET argument_registers;
/* (The enclosing function's header is not visible in this listing;
   presumably a one-time initialization routine -- TODO confirm.)  */
998 #ifndef OUTGOING_REGNO
999 #define OUTGOING_REGNO(N) N
1001 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1002 /* Check whether this register can hold an incoming pointer
1003 argument. FUNCTION_ARG_REGNO_P tests outgoing register
1004 numbers, so translate if necessary due to register windows. */
1005 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
1006 && HARD_REGNO_MODE_OK (i, Pmode))
1007 SET_HARD_REG_BIT (argument_registers, i);
/* Build the alias-analysis tables: allocate reg_known_value,
   reg_known_equiv_p and reg_base_value, then repeatedly scan the insns
   (up to MAX_ALIAS_LOOP_PASSES times) propagating base values through
   set chains until no entry changes.  */
1011 init_alias_analysis ()
1013 int maxreg = max_reg_num ();
1018 reg_known_value_size = maxreg;
/* Bias the pointers so the arrays can be indexed directly by pseudo
   register number (pseudos start at FIRST_PSEUDO_REGISTER).  */
1021 = (rtx *) oballoc ((maxreg - FIRST_PSEUDO_REGISTER) * sizeof (rtx))
1022 - FIRST_PSEUDO_REGISTER;
1024 oballoc (maxreg - FIRST_PSEUDO_REGISTER) - FIRST_PSEUDO_REGISTER;
1025 bzero ((char *) (reg_known_value + FIRST_PSEUDO_REGISTER),
1026 (maxreg-FIRST_PSEUDO_REGISTER) * sizeof (rtx));
1027 bzero (reg_known_equiv_p + FIRST_PSEUDO_REGISTER,
1028 (maxreg - FIRST_PSEUDO_REGISTER) * sizeof (char));
1030 /* Overallocate reg_base_value to allow some growth during loop
1031 optimization. Loop unrolling can create a large number of
1033 reg_base_value_size = maxreg * 2;
1034 reg_base_value = (rtx *)oballoc (reg_base_value_size * sizeof (rtx));
1035 new_reg_base_value = (rtx *)alloca (reg_base_value_size * sizeof (rtx));
1036 reg_seen = (char *)alloca (reg_base_value_size);
1037 bzero ((char *) reg_base_value, reg_base_value_size * sizeof (rtx));
1038 if (! reload_completed && flag_unroll_loops)
1040 alias_invariant = (rtx *)xrealloc (alias_invariant,
1041 reg_base_value_size * sizeof (rtx));
1042 bzero ((char *)alias_invariant, reg_base_value_size * sizeof (rtx));
1046 /* The basic idea is that each pass through this loop will use the
1047 "constant" information from the previous pass to propagate alias
1048 information through another level of assignments.
1050 This could get expensive if the assignment chains are long. Maybe
1051 we should throttle the number of iterations, possibly based on
1052 the optimization level or flag_expensive_optimizations.
1054 We could propagate more information in the first pass by making use
1055 of REG_N_SETS to determine immediately that the alias information
1056 for a pseudo is "constant".
1058 A program with an uninitialized variable can cause an infinite loop
1059 here. Instead of doing a full dataflow analysis to detect such problems
1060 we just cap the number of iterations for the loop.
1062 The state of the arrays for the set chain in question does not matter
1063 since the program has undefined behavior. */
1068 /* Assume nothing will change this iteration of the loop. */
1071 /* We want to assign the same IDs each iteration of this loop, so
1072 start counting from zero each iteration of the loop. */
1075 /* We're at the start of the function each iteration through the
1076 loop, so we're copying arguments. */
1077 copying_arguments = 1;
1079 /* Wipe the potential alias information clean for this pass. */
1080 bzero ((char *) new_reg_base_value, reg_base_value_size * sizeof (rtx));
1082 /* Wipe the reg_seen array clean. */
1083 bzero ((char *) reg_seen, reg_base_value_size);
1085 /* Mark all hard registers which may contain an address.
1086 The stack, frame and argument pointers may contain an address.
1087 An argument register which can hold a Pmode value may contain
1088 an address even if it is not in BASE_REGS.
1090 The address expression is VOIDmode for an argument and
1091 Pmode for other registers. */
1093 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1094 if (TEST_HARD_REG_BIT (argument_registers, i))
1095 new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
1096 gen_rtx_REG (Pmode, i));
1098 new_reg_base_value[STACK_POINTER_REGNUM]
1099 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
1100 new_reg_base_value[ARG_POINTER_REGNUM]
1101 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
1102 new_reg_base_value[FRAME_POINTER_REGNUM]
1103 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
1104 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1105 new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
1106 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
1108 if (struct_value_incoming_rtx
1109 && GET_CODE (struct_value_incoming_rtx) == REG)
1110 new_reg_base_value[REGNO (struct_value_incoming_rtx)]
1111 = gen_rtx_ADDRESS (Pmode, struct_value_incoming_rtx);
1113 if (static_chain_rtx
1114 && GET_CODE (static_chain_rtx) == REG)
1115 new_reg_base_value[REGNO (static_chain_rtx)]
1116 = gen_rtx_ADDRESS (Pmode, static_chain_rtx);
1118 /* Walk the insns adding values to the new_reg_base_value array. */
1119 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1121 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1124 /* If this insn has a noalias note, process it, Otherwise,
1125 scan for sets. A simple set will have no side effects
1126 which could change the base value of any other register. */
1128 if (GET_CODE (PATTERN (insn)) == SET
1129 && (find_reg_note (insn, REG_NOALIAS, NULL_RTX)))
1130 record_set (SET_DEST (PATTERN (insn)), NULL_RTX);
1132 note_stores (PATTERN (insn), record_set);
/* Record REG_EQUAL (only for single-set pseudos) and REG_EQUIV
   notes in reg_known_value.  */
1134 set = single_set (insn);
1137 && GET_CODE (SET_DEST (set)) == REG
1138 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1139 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1140 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
1141 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1142 && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1144 int regno = REGNO (SET_DEST (set));
1145 reg_known_value[regno] = XEXP (note, 0);
1146 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
1149 else if (GET_CODE (insn) == NOTE
1150 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1151 copying_arguments = 0;
1154 /* Now propagate values from new_reg_base_value to reg_base_value. */
1155 for (i = 0; i < reg_base_value_size; i++)
1157 if (new_reg_base_value[i]
1158 && new_reg_base_value[i] != reg_base_value[i]
1159 && ! rtx_equal_p (new_reg_base_value[i], reg_base_value[i]))
1161 reg_base_value[i] = new_reg_base_value[i];
1166 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
1168 /* Fill in the remaining entries. */
1169 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
1170 if (reg_known_value[i] == 0)
1171 reg_known_value[i] = regno_reg_rtx[i];
1173 /* Simplify the reg_base_value array so that no register refers to
1174 another register, except to special registers indirectly through
1175 ADDRESS expressions.
1177 In theory this loop can take as long as O(registers^2), but unless
1178 there are very long dependency chains it will run in close to linear
1181 This loop may not be needed any longer now that the main loop does
1182 a better job at propagating alias information. */
1188 for (i = 0; i < reg_base_value_size; i++)
1190 rtx base = reg_base_value[i];
1191 if (base && GET_CODE (base) == REG)
1193 int base_regno = REGNO (base);
1194 if (base_regno == i) /* register set from itself */
1195 reg_base_value[i] = 0;
1197 reg_base_value[i] = reg_base_value[base_regno];
1202 while (changed && pass < MAX_ALIAS_LOOP_PASSES);
/* new_reg_base_value was alloca'd above; clear the dangling pointer.  */
1204 new_reg_base_value = 0;
/* Release the alias-analysis tables built by init_alias_analysis.  */
1209 end_alias_analysis ()
1211 reg_known_value = 0;
1213 reg_base_value_size = 0;
1214 if (alias_invariant)
1216 free ((char *)alias_invariant);
1217 alias_invariant = 0;