1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
27 #include "hard-reg-set.h"
30 /* Forward declarations */
/* PARAMS is the pre-C89-compat prototype macro used throughout old GCC
   sources; these are file-local helpers invoked via note_stores /
   for_each_rtx-style callbacks.  */
31 static void set_of_1 PARAMS ((rtx, rtx, void *));
32 static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
33 static int computed_jump_p_1 PARAMS ((rtx));
34 static void parms_set PARAMS ((rtx, rtx, void *));
36 /* Bit flags that specify the machine subtype we are compiling for.
37 Bits are tested using macros TARGET_... defined in the tm.h file
38 and set by `-m...' switches. Must be defined in rtlanal.c. */
42 /* Return 1 if the value of X is unstable
43 (would be different at a different point in the program).
44 The frame pointer, arg pointer, etc. are considered stable
45 (within one function) and so is anything marked `unchanging'. */
/* NOTE(review): this extract is missing interior lines (the function
   signature, switch head, several returns and braces are absent) --
   compare against the complete file before editing.  */
51 RTX_CODE code = GET_CODE (x);
/* MEM case: an unchanging MEM is stable iff its address is stable.  */
58 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
73 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
74 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
75 /* The arg pointer varies if it is not a fixed register. */
76 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
77 || RTX_UNCHANGING_P (x))
79 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
80 /* ??? When call-clobbered, the value is stable modulo the restore
81 that must happen after a call. This currently screws up local-alloc
82 into believing that the restore is not needed. */
83 if (x == pic_offset_table_rtx)
89 if (MEM_VOLATILE_P (x))
/* Default: recurse over all sub-rtxes ('e' operands and 'E' vectors);
   X is unstable if any operand is.  */
98 fmt = GET_RTX_FORMAT (code);
99 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
102 if (rtx_unstable_p (XEXP (x, i)))
105 else if (fmt[i] == 'E')
108 for (j = 0; j < XVECLEN (x, i); j++)
109 if (rtx_unstable_p (XVECEXP (x, i, j)))
116 /* Return 1 if X has a value that can vary even between two
117 executions of the program. 0 means X can be compared reliably
118 against certain constants or near-constants.
119 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
120 zero, we are slightly more conservative.
121 The frame pointer and the arg pointer are considered constant. */
/* NOTE(review): truncated extract -- the return type, parameter decls
   and switch structure are missing between the visible lines.  */
124 rtx_varies_p (x, for_alias)
128 RTX_CODE code = GET_CODE (x);
/* MEM case: unchanging MEM varies only if its address does.  */
135 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
149 /* Note that we have to test for the actual rtx used for the frame
150 and arg pointers and not just the register number in case we have
151 eliminated the frame and/or arg pointer and are using it
153 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
154 /* The arg pointer varies if it is not a fixed register. */
155 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
157 if (x == pic_offset_table_rtx
158 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
159 /* ??? When call-clobbered, the value is stable modulo the restore
160 that must happen after a call. This currently screws up
161 local-alloc into believing that the restore is not needed, so we
162 must return 0 only if we are called from alias analysis. */
170 /* The operand 0 of a LO_SUM is considered constant
171 (in fact it is related specifically to operand 1)
172 during alias analysis. */
173 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
174 || rtx_varies_p (XEXP (x, 1), for_alias);
177 if (MEM_VOLATILE_P (x))
/* Default: X varies if any sub-expression varies.  */
186 fmt = GET_RTX_FORMAT (code);
187 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
190 if (rtx_varies_p (XEXP (x, i), for_alias))
193 else if (fmt[i] == 'E')
196 for (j = 0; j < XVECLEN (x, i); j++)
197 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
204 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
/* NOTE(review): truncated extract -- the case labels for the visible
   returns (SYMBOL_REF, REG, PLUS, LO_SUM, etc.) are missing.  */
207 rtx_addr_can_trap_p (x)
210 enum rtx_code code = GET_CODE (x);
/* SYMBOL_REF: a weak symbol may be unresolved (NULL), hence may trap.  */
215 return SYMBOL_REF_WEAK (x);
221 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
222 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
223 || x == stack_pointer_rtx
224 /* The arg pointer varies if it is not a fixed register. */
225 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
227 /* All of the virtual frame registers are stack references. */
228 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
229 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
234 return rtx_addr_can_trap_p (XEXP (x, 0));
237 /* An address is assumed not to trap if it is an address that can't
238 trap plus a constant integer or it is the pic register plus a
240 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
241 && GET_CODE (XEXP (x, 1)) == CONST_INT)
242 || (XEXP (x, 0) == pic_offset_table_rtx
243 && CONSTANT_P (XEXP (x, 1))));
/* LO_SUM-style case: only operand 1 determines trappability here.  */
247 return rtx_addr_can_trap_p (XEXP (x, 1));
254 return rtx_addr_can_trap_p (XEXP (x, 0));
260 /* If it isn't one of the case above, it can cause a trap. */
264 /* Return 1 if X refers to a memory location whose address
265 cannot be compared reliably with constant addresses,
266 or if X refers to a BLKmode memory object.
267 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
268 zero, we are slightly more conservative. */
/* NOTE(review): truncated extract -- signature body lines and the MEM
   test preceding line 284 are missing.  */
271 rtx_addr_varies_p (x, for_alias)
284 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
/* Recurse into all operands looking for a varying MEM address.  */
286 fmt = GET_RTX_FORMAT (code);
287 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
290 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
293 else if (fmt[i] == 'E')
296 for (j = 0; j < XVECLEN (x, i); j++)
297 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
303 /* Return the value of the integer term in X, if one is apparent;
305 Only obvious integer terms are detected.
306 This is used in cse.c with the `related_value' field. */
/* NOTE(review): function signature (get_integer_term, per the cse.c
   reference above) is missing from this extract -- TODO confirm.  */
312 if (GET_CODE (x) == CONST)
/* Strip an outer CONST wrapper, then look for PLUS/MINUS of a
   CONST_INT as the second operand.  */
315 if (GET_CODE (x) == MINUS
316 && GET_CODE (XEXP (x, 1)) == CONST_INT)
317 return - INTVAL (XEXP (x, 1));
318 if (GET_CODE (x) == PLUS
319 && GET_CODE (XEXP (x, 1)) == CONST_INT)
320 return INTVAL (XEXP (x, 1));
324 /* If X is a constant, return the value sans apparent integer term;
326 Only obvious integer terms are detected. */
/* NOTE(review): the return statements of both arms (presumably
   XEXP (x, 0) of the inner PLUS/MINUS) are missing from this extract.  */
329 get_related_value (x)
332 if (GET_CODE (x) != CONST)
335 if (GET_CODE (x) == PLUS
336 && GET_CODE (XEXP (x, 1)) == CONST_INT)
338 else if (GET_CODE (x) == MINUS
339 && GET_CODE (XEXP (x, 1)) == CONST_INT)
344 /* Given a tablejump insn INSN, return the RTL expression for the offset
345 into the jump table. If the offset cannot be determined, then return
348 If EARLIEST is non-zero, it is a pointer to a place where the earliest
349 insn used in locating the offset was found. */
/* NOTE(review): truncated extract -- local declarations, several
   returns and closing braces are missing between the visible lines.  */
352 get_jump_table_offset (insn, earliest)
/* Sanity checks: INSN must be a JUMP_INSN whose label is immediately
   followed by the dispatch table (ADDR_VEC / ADDR_DIFF_VEC) and whose
   pattern is a single SET.  */
366 if (GET_CODE (insn) != JUMP_INSN
367 || ! (label = JUMP_LABEL (insn))
368 || ! (table = NEXT_INSN (label))
369 || GET_CODE (table) != JUMP_INSN
370 || (GET_CODE (PATTERN (table)) != ADDR_VEC
371 && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
372 || ! (set = single_set (insn)))
377 /* Some targets (eg, ARM) emit a tablejump that also
378 contains the out-of-range target. */
379 if (GET_CODE (x) == IF_THEN_ELSE
380 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
383 /* Search backwards and locate the expression stored in X. */
384 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
385 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
388 /* If X is an expression using a relative address then strip
389 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
390 or the jump table label. */
391 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
392 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
394 for (i = 0; i < 2; i++)
399 if (y == pc_rtx || y == pic_offset_table_rtx)
/* Trace Y (one operand of the PLUS/MINUS) back through register
   copies to its defining expression.  */
402 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
403 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
406 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
415 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
416 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
420 /* Strip off any sign or zero extension. */
421 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
425 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
426 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
430 /* If X isn't a MEM then this isn't a tablejump we understand. */
431 if (GET_CODE (x) != MEM)
434 /* Strip off the MEM. */
437 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
438 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
441 /* If X isn't a PLUS than this isn't a tablejump we understand. */
442 if (GET_CODE (x) != PLUS)
445 /* At this point we should have an expression representing the jump table
446 plus an offset. Examine each operand in order to determine which one
447 represents the jump table. Knowing that tells us that the other operand
448 must represent the offset. */
449 for (i = 0; i < 2; i++)
454 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
455 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
458 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
459 && reg_mentioned_p (label, y))
468 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
469 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
470 for (i = 0; i < 2; i++)
471 if (XEXP (x, i) == pic_offset_table_rtx)
480 /* Return the RTL expression representing the offset. */
484 /* Return the number of places FIND appears within X. If COUNT_DEST is
485 zero, we do not count occurrences inside the destination of a SET. */
/* NOTE(review): truncated extract -- the leading-case switch (REG,
   constants, equality with FIND) and the final return are missing.  */
488 count_occurrences (x, find, count_dest)
494 const char *format_ptr;
/* MEM is matched structurally (rtx_equal_p), not by pointer identity.  */
515 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
/* SET: skip the destination when COUNT_DEST is zero.  */
520 if (SET_DEST (x) == find && ! count_dest)
521 return count_occurrences (SET_SRC (x), find, count_dest);
528 format_ptr = GET_RTX_FORMAT (code);
531 for (i = 0; i < GET_RTX_LENGTH (code); i++)
533 switch (*format_ptr++)
536 count += count_occurrences (XEXP (x, i), find, count_dest);
540 for (j = 0; j < XVECLEN (x, i); j++)
541 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
548 /* Nonzero if register REG appears somewhere within IN.
549 Also works if REG is not a register; in this case it checks
550 for a subexpression of IN that is Lisp "equal" to REG. */
/* NOTE(review): truncated extract -- the signature body, the reg == in
   fast path and the case labels are missing.  */
553 reg_mentioned_p (reg, in)
/* LABEL_REFs are compared by the label they reference.  */
566 if (GET_CODE (in) == LABEL_REF)
567 return reg == XEXP (in, 0);
569 code = GET_CODE (in);
573 /* Compare registers by number. */
575 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
577 /* These codes have no constituent expressions
/* CONST_INT: compare by value, not identity.  */
585 return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);
589 /* These are kept unique for a given value. */
596 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
599 fmt = GET_RTX_FORMAT (code);
601 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
606 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
607 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
610 else if (fmt[i] == 'e'
611 && reg_mentioned_p (reg, XEXP (in, i)))
617 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
618 no CODE_LABEL insn. */
/* NOTE(review): the `return 0;' / `return 1;' lines of the loop are
   missing from this extract.  */
621 no_labels_between_p (beg, end)
627 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
628 if (GET_CODE (p) == CODE_LABEL)
633 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
634 no JUMP_INSN insn. */
/* NOTE(review): return statements are missing from this extract;
   mirrors no_labels_between_p but scans for JUMP_INSN.  */
637 no_jumps_between_p (beg, end)
641 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
642 if (GET_CODE (p) == JUMP_INSN)
647 /* Nonzero if register REG is used in an insn between
648 FROM_INSN and TO_INSN (exclusive of those two). */
/* NOTE(review): the INSN_P guard on line 660/661 and the returns are
   partially missing from this extract.  */
651 reg_used_between_p (reg, from_insn, to_insn)
652 rtx reg, from_insn, to_insn;
656 if (from_insn == to_insn)
659 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
/* CALL_INSNs can also use REG implicitly through their function-usage
   list (USE / CLOBBER fusage entries).  */
661 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
662 || (GET_CODE (insn) == CALL_INSN
663 && (find_reg_fusage (insn, USE, reg)
664 || find_reg_fusage (insn, CLOBBER, reg)))))
669 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
670 is entirely replaced by a new value and the only use is as a SET_DEST,
671 we do not consider it a reference. */
/* NOTE(review): truncated extract -- several case labels (ASM_OPERANDS,
   IF_THEN_ELSE, TRAP_IF, USE, PARALLEL, CALL, COND_EXEC per the visible
   bodies) and return statements are missing.  */
674 reg_referenced_p (x, body)
680 switch (GET_CODE (body))
683 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
686 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
687 of a REG that occupies all of the REG, the insn references X if
688 it is mentioned in the destination. */
689 if (GET_CODE (SET_DEST (body)) != CC0
690 && GET_CODE (SET_DEST (body)) != PC
691 && GET_CODE (SET_DEST (body)) != REG
692 && ! (GET_CODE (SET_DEST (body)) == SUBREG
693 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
694 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
695 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
696 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
697 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
698 && reg_overlap_mentioned_p (x, SET_DEST (body)))
/* ASM_OPERANDS: every input operand is a reference.  */
703 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
704 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
711 return reg_overlap_mentioned_p (x, body);
714 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
717 return reg_overlap_mentioned_p (x, XEXP (body, 0));
720 case UNSPEC_VOLATILE:
721 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
722 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
/* PARALLEL: any element referencing X counts.  */
727 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
728 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
733 if (GET_CODE (XEXP (body, 0)) == MEM)
734 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
739 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
741 return reg_referenced_p (x, COND_EXEC_CODE (body));
748 /* Nonzero if register REG is referenced in an insn between
749 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
/* NOTE(review): comment tail and return statements are missing from
   this extract.  */
753 reg_referenced_between_p (reg, from_insn, to_insn)
754 rtx reg, from_insn, to_insn;
758 if (from_insn == to_insn)
761 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
763 && (reg_referenced_p (reg, PATTERN (insn))
764 || (GET_CODE (insn) == CALL_INSN
765 && find_reg_fusage (insn, USE, reg))))
770 /* Nonzero if register REG is set or clobbered in an insn between
771 FROM_INSN and TO_INSN (exclusive of those two). */
/* NOTE(review): return statements are missing from this extract.  */
774 reg_set_between_p (reg, from_insn, to_insn)
775 rtx reg, from_insn, to_insn;
779 if (from_insn == to_insn)
782 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
783 if (INSN_P (insn) && reg_set_p (reg, insn))
788 /* Internals of reg_set_between_p. */
/* NOTE(review): truncated extract -- the INSN_P guard, a `return 1'
   for the side-effect case and the `body' local's use are partly
   missing.  */
790 reg_set_p (reg, insn)
795 /* We can be passed an insn or part of one. If we are passed an insn,
796 check if a side-effect of the insn clobbers REG. */
799 if (FIND_REG_INC_NOTE (insn, reg)
800 || (GET_CODE (insn) == CALL_INSN
801 /* We'd like to test call_used_regs here, but rtlanal.c can't
802 reference that variable due to its use in genattrtab. So
803 we'll just be more conservative.
805 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
806 information holds all clobbered registers. */
807 && ((GET_CODE (reg) == REG
808 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
809 || GET_CODE (reg) == MEM
810 || find_reg_fusage (insn, CLOBBER, reg))))
813 body = PATTERN (insn);
/* Otherwise defer to set_of, which walks the pattern's stores.  */
816 return set_of (reg, insn) != NULL_RTX;
819 /* Similar to reg_set_between_p, but check all registers in X. Return 0
820 only if none of them are modified between START and END. Do not
821 consider non-registers one way or the other. */
/* NOTE(review): truncated extract -- the switch over `code' (constant
   cases, REG case) is mostly missing.  */
824 regs_set_between_p (x, start, end)
828 enum rtx_code code = GET_CODE (x);
/* REG case: delegate to the single-register walk.  */
845 return reg_set_between_p (x, start, end);
851 fmt = GET_RTX_FORMAT (code);
852 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
854 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
857 else if (fmt[i] == 'E')
858 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
859 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
866 /* Similar to reg_set_between_p, but check all registers in X. Return 0
867 only if none of them are modified between START and END. Return 1 if
868 X contains a MEM; this routine does not perform any memory aliasing. */
/* NOTE(review): truncated extract -- constant cases and several
   returns are missing.  */
871 modified_between_p (x, start, end)
875 enum rtx_code code = GET_CODE (x);
894 /* If the memory is not constant, assume it is modified. If it is
895 constant, we still have to check the address. */
896 if (! RTX_UNCHANGING_P (x))
901 return reg_set_between_p (x, start, end);
907 fmt = GET_RTX_FORMAT (code);
908 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
910 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
913 else if (fmt[i] == 'E')
914 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
915 if (modified_between_p (XVECEXP (x, i, j), start, end))
922 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
923 of them are modified in INSN. Return 1 if X contains a MEM; this routine
924 does not perform any memory aliasing. */
/* NOTE(review): truncated extract -- mirrors modified_between_p but
   tests a single INSN via reg_set_p.  */
927 modified_in_p (x, insn)
931 enum rtx_code code = GET_CODE (x);
950 /* If the memory is not constant, assume it is modified. If it is
951 constant, we still have to check the address. */
952 if (! RTX_UNCHANGING_P (x))
957 return reg_set_p (x, insn);
963 fmt = GET_RTX_FORMAT (code);
964 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
966 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
969 else if (fmt[i] == 'E')
970 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
971 if (modified_in_p (XVECEXP (x, i, j), insn))
978 /* Return true if anything in insn X is (anti,output,true) dependent on
979 anything in insn Y. */
/* NOTE(review): truncated extract -- the assignments to `tmp' and the
   returns between the two note_stores calls are missing.  The visible
   shape checks each insn's stores against the other's pattern via
   insn_dependent_p_1.  */
982 insn_dependent_p (x, y)
987 if (! INSN_P (x) || ! INSN_P (y))
991 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
996 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1003 /* A helper routine for insn_dependent_p called through note_stores. */
/* Clears *PINSN (a dependency flag passed via DATA) when the stored
   location X is mentioned in the tracked insn pattern -- NOTE(review):
   the assignment line itself is missing from this extract.  */
1006 insn_dependent_p_1 (x, pat, data)
1008 rtx pat ATTRIBUTE_UNUSED;
1011 rtx * pinsn = (rtx *) data;
1013 if (*pinsn && reg_mentioned_p (x, *pinsn))
1017 /* Helper function for set_of. */
/* note_stores callback: records in data->found the SET/CLOBBER whose
   destination X matches (or overlaps) data->pat -- NOTE(review): the
   struct set_of_data definition and the assignment to data->found are
   missing from this extract.  */
1025 set_of_1 (x, pat, data1)
1030 struct set_of_data *data = (struct set_of_data *) (data1);
1031 if (rtx_equal_p (x, data->pat)
1032 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1036 /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
1037 (either directly or via STRICT_LOW_PART and similar modifiers). */
/* NOTE(review): the signature line (set_of (pat, insn)) and the final
   return of data.found are missing from this extract -- TODO confirm.  */
1042 struct set_of_data data;
1043 data.found = NULL_RTX;
1045 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1049 /* Given an INSN, return a SET expression if this insn has only a single SET.
1050 It may also have CLOBBERs, USEs, or SET whose output
1051 will not be used, which we ignore. */
/* NOTE(review): truncated extract -- the non-PARALLEL path, the switch
   case labels (SET/USE/CLOBBER) and the final return are missing.  */
1054 single_set_2 (insn, pat)
1058 int set_verified = 1;
1061 if (GET_CODE (pat) == PARALLEL)
1063 for (i = 0; i < XVECLEN (pat, 0); i++)
1065 rtx sub = XVECEXP (pat, 0, i);
1066 switch (GET_CODE (sub))
1073 /* We can consider insns having multiple sets, where all
1074 but one are dead as single set insns. In common case
1075 only single set is present in the pattern so we want
1076 to avoid checking for REG_UNUSED notes unless necessary.
1078 When we reach set first time, we just expect this is
1079 the single set we are looking for and only when more
1080 sets are found in the insn, we check them. */
/* Lazily verify the previously-found SET is dead (REG_UNUSED and no
   side effects) only once a second SET shows up.  */
1083 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1084 && !side_effects_p (set))
1090 set = sub, set_verified = 0;
1091 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1092 || side_effects_p (sub))
1104 /* Given an INSN, return nonzero if it has more than one SET, else return
/* NOTE(review): comment tail, `return 1' inside the loop and the final
   `return 0' are missing from this extract.  */
1108 multiple_sets (insn)
1114 /* INSN must be an insn. */
1115 if (! INSN_P (insn))
1118 /* Only a PARALLEL can have multiple SETs. */
1119 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1121 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1122 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1124 /* If we have already found a SET, then return now. */
1132 /* Either zero or one SET. */
1136 /* Return nonzero if the destination of SET equals the source
1137 and there are no side effects. */
/* NOTE(review): the signature line (set_noop_p (set)) and a couple of
   returns are missing from this extract -- TODO confirm name against
   the full file.  */
1143 rtx src = SET_SRC (set);
1144 rtx dst = SET_DEST (set);
1146 if (side_effects_p (src) || side_effects_p (dst))
1149 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1150 return rtx_equal_p (dst, src);
1152 if (dst == pc_rtx && src == pc_rtx)
/* Bit-field store into the same value at offset 0 is a no-op only on
   little-endian bit numbering.  */
1155 if (GET_CODE (dst) == SIGN_EXTRACT
1156 || GET_CODE (dst) == ZERO_EXTRACT)
1157 return rtx_equal_p (XEXP (dst, 0), src)
1158 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;
1160 if (GET_CODE (dst) == STRICT_LOW_PART)
1161 dst = XEXP (dst, 0);
1163 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1165 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1167 src = SUBREG_REG (src);
1168 dst = SUBREG_REG (dst);
1171 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1172 && REGNO (src) == REGNO (dst));
1175 /* Return nonzero if an insn consists only of SETs, each of which only sets a
/* NOTE(review): comment tail, the signature line (presumably
   noop_move_p (insn)) and several returns are missing from this
   extract -- TODO confirm.  */
1182 rtx pat = PATTERN (insn);
1184 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1187 /* Insns carrying these notes are useful later on. */
1188 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1191 /* For now treat an insn with a REG_RETVAL note as a
1192 a special insn which should not be considered a no-op. */
1193 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1196 if (GET_CODE (pat) == SET && set_noop_p (pat))
1199 if (GET_CODE (pat) == PARALLEL)
1202 /* If nothing but SETs of registers to themselves,
1203 this insn can also be deleted. */
1204 for (i = 0; i < XVECLEN (pat, 0); i++)
1206 rtx tem = XVECEXP (pat, 0, i);
1208 if (GET_CODE (tem) == USE
1209 || GET_CODE (tem) == CLOBBER)
1212 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1222 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1223 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1224 If the object was modified, if we hit a partial assignment to X, or hit a
1225 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1226 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
/* NOTE(review): truncated extract -- the loop's success path (updating
   *PINSN and returning SRC) and the fallback `return x' are missing.  */
1230 find_last_value (x, pinsn, valid_to, allow_hwreg)
/* Scan backwards from *PINSN, stopping at the first CODE_LABEL.  */
1238 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1242 rtx set = single_set (p);
1243 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1245 if (set && rtx_equal_p (x, SET_DEST (set)))
1247 rtx src = SET_SRC (set);
/* Prefer a REG_EQUAL note's value over the raw SET_SRC.  */
1249 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1250 src = XEXP (note, 0);
1252 if ((valid_to == NULL_RTX
1253 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1254 /* Reject hard registers because we don't usually want
1255 to use them; we'd rather use a pseudo. */
1256 && (! (GET_CODE (src) == REG
1257 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1264 /* If set in non-simple way, we don't have a value. */
1265 if (reg_set_p (x, p))
1272 /* Return nonzero if register in range [REGNO, ENDREGNO)
1273 appears either explicitly or implicitly in X
1274 other than being stored into.
1276 References contained within the substructure at LOC do not count.
1277 LOC may be zero, meaning don't ignore anything. */
/* NOTE(review): truncated extract -- the `repeat' label, several case
   labels and the tail-call rewrite of X are missing between the
   visible lines.  */
1280 refers_to_regno_p (regno, endregno, x, loc)
1281 unsigned int regno, endregno;
1286 unsigned int x_regno;
1291 /* The contents of a REG_NONNEG note is always zero, so we must come here
1292 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1296 code = GET_CODE (x);
1301 x_regno = REGNO (x);
1303 /* If we modifying the stack, frame, or argument pointer, it will
1304 clobber a virtual register. In fact, we could be more precise,
1305 but it isn't worth it. */
1306 if ((x_regno == STACK_POINTER_REGNUM
1307 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1308 || x_regno == ARG_POINTER_REGNUM
1310 || x_regno == FRAME_POINTER_REGNUM)
1311 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
/* Hard regs may span multiple machine registers; pseudos span one.  */
1314 return (endregno > x_regno
1315 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1316 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1320 /* If this is a SUBREG of a hard reg, we can see exactly which
1321 registers are being modified. Otherwise, handle normally. */
1322 if (GET_CODE (SUBREG_REG (x)) == REG
1323 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1325 unsigned int inner_regno = subreg_regno (x);
/* NOTE(review): HARD_REGNO_NREGS is given `regno' here rather than
   `inner_regno' -- looks suspicious but cannot be fixed safely from
   this truncated view; verify against the full file.  */
1326 unsigned int inner_endregno
1327 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1328 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1330 return endregno > inner_regno && regno < inner_endregno;
1336 if (&SET_DEST (x) != loc
1337 /* Note setting a SUBREG counts as referring to the REG it is in for
1338 a pseudo but not for hard registers since we can
1339 treat each word individually. */
1340 && ((GET_CODE (SET_DEST (x)) == SUBREG
1341 && loc != &SUBREG_REG (SET_DEST (x))
1342 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1343 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1344 && refers_to_regno_p (regno, endregno,
1345 SUBREG_REG (SET_DEST (x)), loc))
1346 || (GET_CODE (SET_DEST (x)) != REG
1347 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1350 if (code == CLOBBER || loc == &SET_SRC (x))
1359 /* X does not match, so try its subexpressions. */
1361 fmt = GET_RTX_FORMAT (code);
1362 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1364 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1372 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1375 else if (fmt[i] == 'E')
1378 for (j = XVECLEN (x, i) - 1; j >=0; j--)
1379 if (loc != &XVECEXP (x, i, j)
1380 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1387 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1388 we check if any register number in X conflicts with the relevant register
1389 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1390 contains a MEM (we don't bother checking for memory addresses that can't
1391 conflict because we expect this to be a rare case. */
/* NOTE(review): truncated extract -- case labels (SUBREG/REG/MEM/
   SCRATCH/PC/CC0/CLOBBER per the visible bodies) and several returns
   are missing.  */
1394 reg_overlap_mentioned_p (x, in)
1397 unsigned int regno, endregno;
1399 /* Overly conservative. */
1400 if (GET_CODE (x) == STRICT_LOW_PART)
1403 /* If either argument is a constant, then modifying X can not affect IN. */
1404 if (CONSTANT_P (x) || CONSTANT_P (in))
1407 switch (GET_CODE (x))
1410 regno = REGNO (SUBREG_REG (x));
1411 if (regno < FIRST_PSEUDO_REGISTER)
1412 regno = subreg_regno (x);
1418 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1419 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1420 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
/* MEM case: conservatively conflict with any MEM inside IN.  */
1427 if (GET_CODE (in) == MEM)
1430 fmt = GET_RTX_FORMAT (GET_CODE (in));
1431 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1432 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1441 return reg_mentioned_p (x, in);
1447 /* If any register in here refers to it we return true. */
1448 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1449 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1450 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1462 /* Return the last value to which REG was set prior to INSN. If we can't
1463 find it easily, return 0.
1465 We only return a REG, SUBREG, or constant because it is too hard to
1466 check if a MEM remains unchanged. */
/* NOTE(review): heavily truncated extract; line 1497 reads
   `SET_SRC (x)' where the surrounding logic manipulates `set' -- looks
   wrong but cannot be fixed safely from this view; verify against the
   full file.  */
1469 reg_set_last (x, insn)
1473 rtx orig_insn = insn;
1475 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1476 Stop when we reach a label or X is a hard reg and we reach a
1477 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1479 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1481 /* We compare with <= here, because reg_set_last_last_regno
1482 is actually the number of the first reg *not* in X. */
1484 insn && GET_CODE (insn) != CODE_LABEL
1485 && ! (GET_CODE (insn) == CALL_INSN
1486 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1487 insn = PREV_INSN (insn))
1490 rtx set = set_of (x, insn);
1491 /* OK, this function modify our register. See if we understand it. */
1495 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1497 last_value = SET_SRC (x);
1498 if (CONSTANT_P (last_value)
1499 || ((GET_CODE (last_value) == REG
1500 || GET_CODE (last_value) == SUBREG)
1501 && ! reg_set_between_p (last_value,
1512 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1513 (X would be the pattern of an insn).
1514 FUN receives two arguments:
1515 the REG, MEM, CC0 or PC being stored in or clobbered,
1516 the SET or CLOBBER rtx that does the store.
1518 If the item being stored in or clobbered is a SUBREG of a hard register,
1519 the SUBREG will be passed. */
/* NOTE(review): truncated extract -- local decls, an `else' and some
   braces are missing between the visible lines.  */
1522 note_stores (x, fun, data)
1524 void (*fun) PARAMS ((rtx, rtx, void *));
/* Look through conditional execution to the guarded store.  */
1529 if (GET_CODE (x) == COND_EXEC)
1530 x = COND_EXEC_CODE (x);
1532 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1534 rtx dest = SET_DEST (x);
/* Strip wrappers, but keep a SUBREG of a hard reg (see comment above).  */
1536 while ((GET_CODE (dest) == SUBREG
1537 && (GET_CODE (SUBREG_REG (dest)) != REG
1538 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1539 || GET_CODE (dest) == ZERO_EXTRACT
1540 || GET_CODE (dest) == SIGN_EXTRACT
1541 || GET_CODE (dest) == STRICT_LOW_PART)
1542 dest = XEXP (dest, 0);
1544 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1545 each of whose first operand is a register. We can't know what
1546 precisely is being set in these cases, so make up a CLOBBER to pass
1548 if (GET_CODE (dest) == PARALLEL)
1550 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1551 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1552 (*fun) (XEXP (XVECEXP (dest, 0, i), 0),
1553 gen_rtx_CLOBBER (VOIDmode,
1554 XEXP (XVECEXP (dest, 0, i), 0)),
1558 (*fun) (dest, x, data);
/* PARALLEL at the top level: recurse on each element.  */
1561 else if (GET_CODE (x) == PARALLEL)
1562 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1563 note_stores (XVECEXP (x, 0, i), fun, data);
1566 /* Like notes_stores, but call FUN for each expression that is being
1567 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1568 FUN for each expression, not any interior subexpressions. FUN receives a
1569 pointer to the expression and the DATA passed to this function.
1571 Note that this is not quite the same test as that done in reg_referenced_p
1572 since that considers something as being referenced if it is being
1573 partially set, while we do not. */
/* NOTE(review): truncated extract -- the switch case labels
   (COND_EXEC, PARALLEL, USE, ASM_OPERANDS, TRAP_IF, PREFETCH, UNSPEC,
   CLOBBER, SET, default per the visible bodies) and returns are
   missing.  */
1576 note_uses (pbody, fun, data)
1578 void (*fun) PARAMS ((rtx *, void *));
1584 switch (GET_CODE (body))
1587 (*fun) (&COND_EXEC_TEST (body), data);
1588 note_uses (&COND_EXEC_CODE (body), fun, data);
1592 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1593 note_uses (&XVECEXP (body, 0, i), fun, data);
1597 (*fun) (&XEXP (body, 0), data);
1601 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1602 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1606 (*fun) (&TRAP_CONDITION (body), data);
1610 (*fun) (&XEXP (body, 0), data);
1614 case UNSPEC_VOLATILE:
1615 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1616 (*fun) (&XVECEXP (body, 0, i), data);
/* CLOBBER of a MEM: the address is a use.  */
1620 if (GET_CODE (XEXP (body, 0)) == MEM)
1621 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1626 rtx dest = SET_DEST (body);
1628 /* For sets we replace everything in source plus registers in memory
1629 expression in store and operands of a ZERO_EXTRACT. */
1630 (*fun) (&SET_SRC (body), data);
1632 if (GET_CODE (dest) == ZERO_EXTRACT)
1634 (*fun) (&XEXP (dest, 1), data);
1635 (*fun) (&XEXP (dest, 2), data);
1638 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1639 dest = XEXP (dest, 0);
1641 if (GET_CODE (dest) == MEM)
1642 (*fun) (&XEXP (dest, 0), data);
1647 /* All the other possibilities never store. */
1648 (*fun) (pbody, data);
1653 /* Return nonzero if X's old contents don't survive after INSN.
1654 This will be true if X is (cc0) or if X is a register and
1655 X dies in INSN or because INSN entirely sets X.
1657 "Entirely set" means set directly and not through a SUBREG,
1658 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1659 Likewise, REG_INC does not count.
1661 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1662 but for this use that makes no difference, since regs don't overlap
1663 during their lifetimes. Therefore, this function may be used
1664 at any time after deaths have been computed (in flow.c).
1666 If REG is a hard reg that occupies multiple machine registers, this
1667 function will only return 1 if each of those registers will be replaced
/* NOTE(review): truncated extract -- the returns for the CC0 and
   non-REG cases, REGNO assignment and final return are missing.  */
1671 dead_or_set_p (insn, x)
1675 unsigned int regno, last_regno;
1678 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1679 if (GET_CODE (x) == CC0)
1682 if (GET_CODE (x) != REG)
/* A multi-word hard reg is dead only if every constituent machine
   register is dead or set.  */
1686 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1687 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1689 for (i = regno; i <= last_regno; i++)
1690 if (! dead_or_set_regno_p (insn, i))
1696 /* Utility function for dead_or_set_p to check an individual register. Also
1697 called from flow.c. */
1700 dead_or_set_regno_p (insn, test_regno)
1702 unsigned int test_regno;
1704 unsigned int regno, endregno;
1707 /* See if there is a death note for something that includes TEST_REGNO. */
1708 if (find_regno_note (insn, REG_DEAD, test_regno))
1711 if (GET_CODE (insn) == CALL_INSN
1712 && find_regno_fusage (insn, CLOBBER, test_regno))
1715 pattern = PATTERN (insn);
1717 if (GET_CODE (pattern) == COND_EXEC)
1718 pattern = COND_EXEC_CODE (pattern);
1720 if (GET_CODE (pattern) == SET)
1722 rtx dest = SET_DEST (PATTERN (insn));
1724 /* A value is totally replaced if it is the destination or the
1725 destination is a SUBREG of REGNO that does not change the number of
1727 if (GET_CODE (dest) == SUBREG
1728 && (((GET_MODE_SIZE (GET_MODE (dest))
1729 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1730 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1731 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1732 dest = SUBREG_REG (dest);
1734 if (GET_CODE (dest) != REG)
1737 regno = REGNO (dest);
1738 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1739 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1741 return (test_regno >= regno && test_regno < endregno);
1743 else if (GET_CODE (pattern) == PARALLEL)
1747 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1749 rtx body = XVECEXP (pattern, 0, i);
1751 if (GET_CODE (body) == COND_EXEC)
1752 body = COND_EXEC_CODE (body);
1754 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1756 rtx dest = SET_DEST (body);
1758 if (GET_CODE (dest) == SUBREG
1759 && (((GET_MODE_SIZE (GET_MODE (dest))
1760 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1761 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1762 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1763 dest = SUBREG_REG (dest);
1765 if (GET_CODE (dest) != REG)
1768 regno = REGNO (dest);
1769 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1770 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1772 if (test_regno >= regno && test_regno < endregno)
1781 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1782 If DATUM is nonzero, look for one whose datum is DATUM. */
1785 find_reg_note (insn, kind, datum)
1792 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1793 if (! INSN_P (insn))
1796 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1797 if (REG_NOTE_KIND (link) == kind
1798 && (datum == 0 || datum == XEXP (link, 0)))
1803 /* Return the reg-note of kind KIND in insn INSN which applies to register
1804 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1805 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1806 it might be the case that the note overlaps REGNO. */
1809 find_regno_note (insn, kind, regno)
1816 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1817 if (! INSN_P (insn))
1820 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1821 if (REG_NOTE_KIND (link) == kind
1822 /* Verify that it is a register, so that scratch and MEM won't cause a
1824 && GET_CODE (XEXP (link, 0)) == REG
1825 && REGNO (XEXP (link, 0)) <= regno
1826 && ((REGNO (XEXP (link, 0))
1827 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1828 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1829 GET_MODE (XEXP (link, 0)))))
1835 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1839 find_reg_equal_equiv_note (insn)
1844 if (single_set (insn) == 0)
1846 else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1849 return find_reg_note (insn, REG_EQUAL, NULL_RTX);
1852 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1853 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1856 find_reg_fusage (insn, code, datum)
1861 /* If it's not a CALL_INSN, it can't possibly have a
1862 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1863 if (GET_CODE (insn) != CALL_INSN)
1869 if (GET_CODE (datum) != REG)
1873 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1875 link = XEXP (link, 1))
1876 if (GET_CODE (XEXP (link, 0)) == code
1877 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1882 unsigned int regno = REGNO (datum);
1884 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1885 to pseudo registers, so don't bother checking. */
1887 if (regno < FIRST_PSEUDO_REGISTER)
1889 unsigned int end_regno
1890 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1893 for (i = regno; i < end_regno; i++)
1894 if (find_regno_fusage (insn, code, i))
1902 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1903 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1906 find_regno_fusage (insn, code, regno)
1913 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1914 to pseudo registers, so don't bother checking. */
1916 if (regno >= FIRST_PSEUDO_REGISTER
1917 || GET_CODE (insn) != CALL_INSN )
1920 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1922 unsigned int regnote;
1925 if (GET_CODE (op = XEXP (link, 0)) == code
1926 && GET_CODE (reg = XEXP (op, 0)) == REG
1927 && (regnote = REGNO (reg)) <= regno
1928 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
1935 /* Remove register note NOTE from the REG_NOTES of INSN. */
1938 remove_note (insn, note)
1944 if (note == NULL_RTX)
1947 if (REG_NOTES (insn) == note)
1949 REG_NOTES (insn) = XEXP (note, 1);
1953 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1954 if (XEXP (link, 1) == note)
1956 XEXP (link, 1) = XEXP (note, 1);
1963 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1964 return 1 if it is found. A simple equality test is used to determine if
1968 in_expr_list_p (listp, node)
1974 for (x = listp; x; x = XEXP (x, 1))
1975 if (node == XEXP (x, 0))
1981 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1982 remove that entry from the list if it is found.
1984 A simple equality test is used to determine if NODE matches. */
1987 remove_node_from_expr_list (node, listp)
1992 rtx prev = NULL_RTX;
1996 if (node == XEXP (temp, 0))
1998 /* Splice the node out of the list. */
2000 XEXP (prev, 1) = XEXP (temp, 1);
2002 *listp = XEXP (temp, 1);
2008 temp = XEXP (temp, 1);
2012 /* Nonzero if X contains any volatile instructions. These are instructions
2013 which may cause unpredictable machine state instructions, and thus no
2014 instructions should be moved or combined across them. This includes
2015 only volatile asms and UNSPEC_VOLATILE instructions. */
/* NOTE(review): the function header, the leaf-code case labels, and the
   return statements are missing from this copy (gaps in the embedded
   numbering).  Compare against upstream rtlanal.c before editing.  */
2023 code = GET_CODE (x);
2044 case UNSPEC_VOLATILE:
2045 /* case TRAP_IF: This isn't clear yet. */
/* asm cases (presumably): volatile only if the MEM_VOLATILE_P flag is set.  */
2049 if (MEM_VOLATILE_P (x))
2056 /* Recursively scan the operands of this expression. */
2059 const char *fmt = GET_RTX_FORMAT (code);
2062 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2066 if (volatile_insn_p (XEXP (x, i)))
2069 else if (fmt[i] == 'E')
2072 for (j = 0; j < XVECLEN (x, i); j++)
2073 if (volatile_insn_p (XVECEXP (x, i, j)))
2081 /* Nonzero if X contains any volatile memory references
2082 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
/* NOTE(review): same structure as volatile_insn_p above; the function
   header, case labels, and returns are missing from this copy (gaps in
   the embedded numbering).  Compare against upstream rtlanal.c.  */
2090 code = GET_CODE (x);
2110 case UNSPEC_VOLATILE:
2111 /* case TRAP_IF: This isn't clear yet. */
2116 if (MEM_VOLATILE_P (x))
2123 /* Recursively scan the operands of this expression. */
2126 const char *fmt = GET_RTX_FORMAT (code);
2129 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2133 if (volatile_refs_p (XEXP (x, i)))
2136 else if (fmt[i] == 'E')
2139 for (j = 0; j < XVECLEN (x, i); j++)
2140 if (volatile_refs_p (XVECEXP (x, i, j)))
2148 /* Similar to above, except that it also rejects register pre- and post-
incrementing.  */
/* NOTE(review): the function header and most case labels/returns are
   missing from this copy (gaps in the embedded numbering).  Compare
   against upstream rtlanal.c before editing.  */
2157 code = GET_CODE (x);
/* CLOBBER case (presumably), per the comment below.  */
2176 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2177 when some combination can't be done. If we see one, don't think
2178 that we can simplify the expression. */
2179 return (GET_MODE (x) != VOIDmode);
2188 case UNSPEC_VOLATILE:
2189 /* case TRAP_IF: This isn't clear yet. */
2194 if (MEM_VOLATILE_P (x))
2201 /* Recursively scan the operands of this expression. */
2204 const char *fmt = GET_RTX_FORMAT (code);
2207 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2211 if (side_effects_p (XEXP (x, i)))
2214 else if (fmt[i] == 'E')
2217 for (j = 0; j < XVECLEN (x, i); j++)
2218 if (side_effects_p (XVECEXP (x, i, j)))
2226 /* Return nonzero if evaluating rtx X might cause a trap. */
/* NOTE(review): the function header, the "quick" constant case labels,
   the division/modulo case labels, and several returns are missing from
   this copy (gaps in the embedded numbering).  Compare against upstream
   rtlanal.c before editing.  */
2238 code = GET_CODE (x);
2241 /* Handle these cases quickly. */
2255 case UNSPEC_VOLATILE:
2260 return MEM_VOLATILE_P (x);
2262 /* Memory ref can trap unless it's a static var or a stack slot. */
2264 return rtx_addr_can_trap_p (XEXP (x, 0));
2266 /* Division by a non-constant might trap. */
2271 if (! CONSTANT_P (XEXP (x, 1))
2272 || GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2274 /* This was const0_rtx, but by not using that,
2275 we can link this file into other programs. */
/* Division by a literal zero always traps.  */
2276 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2281 /* An EXPR_LIST is used to represent a function call. This
2282 certainly may trap. */
2290 /* Some floating point comparisons may trap. */
2291 /* ??? There is no machine independent way to check for tests that trap
2292 when COMPARE is used, though many targets do make this distinction.
2293 For instance, sparc uses CCFPE for compares which generate exceptions
2294 and CCFP for compares which do not generate exceptions. */
2295 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2297 /* But often the compare has some CC mode, so check operand
2299 if (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_FLOAT
2300 || GET_MODE_CLASS (GET_MODE (XEXP (x, 1))) == MODE_FLOAT)
2306 /* These operations don't trap even with floating point. */
2310 /* Any floating arithmetic may trap. */
2311 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
/* Default: recursively scan the operands.  */
2315 fmt = GET_RTX_FORMAT (code);
2316 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2320 if (may_trap_p (XEXP (x, i)))
2323 else if (fmt[i] == 'E')
2326 for (j = 0; j < XVECLEN (x, i); j++)
2327 if (may_trap_p (XVECEXP (x, i, j)))
2334 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2335 i.e., an inequality. */
/* NOTE(review): the leaf case labels (the inequality comparison codes and
   the early-return constants) are missing from this copy (gaps in the
   embedded numbering).  Compare against upstream rtlanal.c.  */
2338 inequality_comparisons_p (x)
2343 enum rtx_code code = GET_CODE (x);
/* Default: recursively scan the operands.  */
2373 len = GET_RTX_LENGTH (code);
2374 fmt = GET_RTX_FORMAT (code);
2376 for (i = 0; i < len; i++)
2380 if (inequality_comparisons_p (XEXP (x, i)))
2383 else if (fmt[i] == 'E')
2386 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2387 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2395 /* Replace any occurrence of FROM in X with TO. The function does
2396 not enter into CONST_DOUBLE for the replace.
2398 Note that copying is not done so X must not be shared unless all copies
2399 are to be modified. */
/* NOTE(review): the signature lines, the `x == from' match case, and the
   final return are missing from this copy (gaps in the embedded
   numbering).  Compare against upstream rtlanal.c before editing.  */
2402 replace_rtx (x, from, to)
2408 /* The following prevents loops occurrence when we change MEM in
2409 CONST_DOUBLE onto the same CONST_DOUBLE. */
2410 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2416 /* Allow this function to make replacements in EXPR_LISTs. */
/* Recurse into every operand, rewriting in place.  */
2420 fmt = GET_RTX_FORMAT (GET_CODE (x));
2421 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2424 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2425 else if (fmt[i] == 'E')
2426 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2427 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2433 /* Throughout the rtx X, replace many registers according to REG_MAP.
2434 Return the replacement for X (which may be X with altered contents).
2435 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2436 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2438 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2439 should not be mapped to pseudos or vice versa since validate_change
2442 If REPLACE_DEST is 1, replacements are also done in destinations;
2443 otherwise, only sources are replaced. */
/* NOTE(review): the signature lines, the constant early-return case
   labels, and several braces are missing from this copy (gaps in the
   embedded numbering).  Compare against upstream rtlanal.c.  */
2446 replace_regs (x, reg_map, nregs, replace_dest)
2459 code = GET_CODE (x);
/* REG case (presumably), per the comments below.  */
2474 /* Verify that the register has an entry before trying to access it. */
2475 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2477 /* SUBREGs can't be shared. Always return a copy to ensure that if
2478 this replacement occurs more than once then each instance will
2479 get distinct rtx. */
2480 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2481 return copy_rtx (reg_map[REGNO (x)]);
2482 return reg_map[REGNO (x)];
/* SUBREG case (presumably), per the comment below.  */
2487 /* Prevent making nested SUBREGs. */
2488 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2489 && reg_map[REGNO (SUBREG_REG (x))] != 0
2490 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2492 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2493 return simplify_gen_subreg (GET_MODE (x), map_val,
2494 GET_MODE (SUBREG_REG (x)),
/* SET case: destinations only replaced when REPLACE_DEST is set, or when
   the register is contained inside the destination (see below).  */
2501 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2503 else if (GET_CODE (SET_DEST (x)) == MEM
2504 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2505 /* Even if we are not to replace destinations, replace register if it
2506 is CONTAINED in destination (destination is memory or
2507 STRICT_LOW_PART). */
2508 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2510 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2511 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2514 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
/* Default: recursively rewrite every operand in place.  */
2521 fmt = GET_RTX_FORMAT (code);
2522 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2525 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2526 else if (fmt[i] == 'E')
2529 for (j = 0; j < XVECLEN (x, i); j++)
2530 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2531 nregs, replace_dest);
2537 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2538 constant that is not in the constant pool and not in the condition
2539 of an IF_THEN_ELSE. */
/* NOTE(review): the leaf case labels (LABEL_REF/PC returning 0 and the
   constant/REG codes returning 1, presumably) are missing from this copy
   (gaps in the embedded numbering).  Compare against upstream rtlanal.c.  */
2542 computed_jump_p_1 (x)
2545 enum rtx_code code = GET_CODE (x);
/* MEM: a constant-pool load is not a "computed" target.  */
2564 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2565 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
/* IF_THEN_ELSE: only the two arms matter, not the condition.  */
2568 return (computed_jump_p_1 (XEXP (x, 1))
2569 || computed_jump_p_1 (XEXP (x, 2)));
/* Default: recursively scan the operands.  */
2575 fmt = GET_RTX_FORMAT (code);
2576 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2579 && computed_jump_p_1 (XEXP (x, i)))
2582 else if (fmt[i] == 'E')
2583 for (j = 0; j < XVECLEN (x, i); j++)
2584 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2591 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2593 Tablejumps and casesi insns are not considered indirect jumps;
2594 we can recognize them by a (use (label_ref)). */
2597 computed_jump_p (insn)
2601 if (GET_CODE (insn) == JUMP_INSN)
2603 rtx pat = PATTERN (insn);
2605 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2607 else if (GET_CODE (pat) == PARALLEL)
2609 int len = XVECLEN (pat, 0);
2610 int has_use_labelref = 0;
2612 for (i = len - 1; i >= 0; i--)
2613 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2614 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2616 has_use_labelref = 1;
2618 if (! has_use_labelref)
2619 for (i = len - 1; i >= 0; i--)
2620 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2621 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2622 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2625 else if (GET_CODE (pat) == SET
2626 && SET_DEST (pat) == pc_rtx
2627 && computed_jump_p_1 (SET_SRC (pat)))
2633 /* Traverse X via depth-first search, calling F for each
2634 sub-expression (including X itself). F is also passed the DATA.
2635 If F returns -1, do not traverse sub-expressions, but continue
2636 traversing the rest of the tree. If F ever returns any other
2637 non-zero value, stop the traversal, and return the value returned
2638 by F. Otherwise, return 0. This function does not traverse inside
2639 tree structure that contains RTX_EXPRs, or into sub-expressions
2640 whose format code is `0' since it is not known whether or not those
2641 codes are actually RTL.
2643 This routine is very general, and could (should?) be used to
2644 implement many of the other routines in this file. */
/* NOTE(review): the signature lines, the null-pointer guard, the format
   character dispatch (`e'/`E' tests), and the early returns are missing
   from this copy (gaps in the embedded numbering).  Compare against
   upstream rtlanal.c before editing.  */
2647 for_each_rtx (x, f, data)
2658 result = (*f) (x, data);
2660 /* Do not traverse sub-expressions. */
2662 else if (result != 0)
2663 /* Stop the traversal. */
2667 /* There are no sub-expressions. */
2670 length = GET_RTX_LENGTH (GET_CODE (*x));
2671 format = GET_RTX_FORMAT (GET_CODE (*x));
2673 for (i = 0; i < length; ++i)
2678 result = for_each_rtx (&XEXP (*x, i), f, data);
2685 if (XVEC (*x, i) != 0)
2688 for (j = 0; j < XVECLEN (*x, i); ++j)
2690 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2698 /* Nothing to do. */
2707 /* Searches X for any reference to REGNO, returning the rtx of the
2708 reference found if any. Otherwise, returns NULL_RTX. */
2711 regno_use_in (regno, x)
2719 if (GET_CODE (x) == REG && REGNO (x) == regno)
2722 fmt = GET_RTX_FORMAT (GET_CODE (x));
2723 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2727 if ((tem = regno_use_in (regno, XEXP (x, i))))
2730 else if (fmt[i] == 'E')
2731 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2732 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2739 /* Return a value indicating whether OP, an operand of a commutative
2740 operation, is preferred as the first or second operand. The higher
2741 the value, the stronger the preference for being the first operand.
2742 We use negative values to indicate a preference for the first operand
2743 and positive values for the second operand. */
/* NOTE(review): the signature lines and the concrete return values of
   every branch are missing from this copy (gaps in the embedded
   numbering) -- only the classification tests remain.  Compare against
   upstream rtlanal.c for the actual precedence constants.  */
2746 commutative_operand_precedence (op)
2749 /* Constants always come the second operand. Prefer "nice" constants. */
2750 if (GET_CODE (op) == CONST_INT)
2752 if (GET_CODE (op) == CONST_DOUBLE)
2754 if (CONSTANT_P (op))
2757 /* SUBREGs of objects should come second. */
2758 if (GET_CODE (op) == SUBREG
2759 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
2762 /* If only one operand is a `neg', `not',
2763 `mult', `plus', or `minus' expression, it will be the first
2765 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
2766 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
2767 || GET_CODE (op) == MINUS)
2770 /* Complex expressions should be the first, so decrease priority
2772 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
2777 /* Return 1 iff it is necessary to swap operands of commutative operation
2778 in order to canonicalize expression. */
2781 swap_commutative_operands_p (x, y)
2784 return (commutative_operand_precedence (x)
2785 < commutative_operand_precedence (y));
2788 /* Return 1 if X is an autoincrement side effect and the register is
2789 not the stack pointer. */
2794 switch (GET_CODE (x))
2802 /* There are no REG_INC notes for SP. */
2803 if (XEXP (x, 0) != stack_pointer_rtx)
2811 /* Return 1 if the sequence of instructions beginning with FROM and up
2812 to and including TO is safe to move. If NEW_TO is non-NULL, and
2813 the sequence is not already safe to move, but can be easily
2814 extended to a sequence which is safe, then NEW_TO will point to the
2815 end of the extended sequence.
2817 For now, this function only checks that the region contains whole
2818 exception regions, but it could be extended to check additional
2819 conditions as well. */
/* NOTE(review): the signature lines, loop skeleton, several braces, and
   the failure returns are missing from this copy (gaps in the embedded
   numbering).  Compare against upstream rtlanal.c before editing.  */
2822 insns_safe_to_move_p (from, to, new_to)
2827 int eh_region_count = 0;
2831 /* By default, assume the end of the region will be what was
/* Walk the insns, tracking EH region nesting depth in eh_region_count.  */
2838 if (GET_CODE (r) == NOTE)
2840 switch (NOTE_LINE_NUMBER (r))
2842 case NOTE_INSN_EH_REGION_BEG:
2846 case NOTE_INSN_EH_REGION_END:
2847 if (eh_region_count == 0)
2848 /* This sequence of instructions contains the end of
2849 an exception region, but not he beginning. Moving
2850 it will cause chaos. */
2861 /* If we've passed TO, and we see a non-note instruction, we
2862 can't extend the sequence to a movable sequence. */
2868 /* It's OK to move the sequence if there were matched sets of
2869 exception region notes. */
2870 return eh_region_count == 0;
2875 /* It's OK to move the sequence if there were matched sets of
2876 exception region notes. */
2877 if (past_to_p && eh_region_count == 0)
2883 /* Go to the next instruction. */
2890 /* Return non-zero if IN contains a piece of rtl that has the address LOC */
/* NOTE(review): the signature lines, the format-character test guarding
   the recursion, and the returns are missing from this copy (gaps in the
   embedded numbering).  Compare against upstream rtlanal.c.  */
2892 loc_mentioned_in_p (loc, in)
2895 enum rtx_code code = GET_CODE (in);
2896 const char *fmt = GET_RTX_FORMAT (code);
2899 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* Direct address match: LOC is the address of this operand slot.  */
2901 if (loc == &in->fld[i].rtx)
2905 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2908 else if (fmt[i] == 'E')
2909 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2910 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2916 /* Given a subreg X, return the bit offset where the subreg begins
2917 (counting from the least significant bit of the reg). */
/* NOTE(review): the signature lines, the local `word'/`byte'
   declarations, the abort for the mixed-endian word-boundary check, and
   the final return are missing from this copy (gaps in the embedded
   numbering).  Compare against upstream rtlanal.c before editing.  */
2923 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
2924 enum machine_mode mode = GET_MODE (x);
2925 unsigned int bitpos;
2929 /* A paradoxical subreg begins at bit position 0. */
2930 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
2933 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2934 /* If the subreg crosses a word boundary ensure that
2935 it also begins and ends on a word boundary. */
2936 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
2937 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
2938 && (SUBREG_BYTE (x) % UNITS_PER_WORD
2939 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
/* Word index of the subreg, corrected for word endianness.  */
2942 if (WORDS_BIG_ENDIAN)
2943 word = (GET_MODE_SIZE (inner_mode)
2944 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
2946 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
2947 bitpos = word * BITS_PER_WORD;
/* Byte index within the word, corrected for byte endianness.  */
2949 if (BYTES_BIG_ENDIAN)
2950 byte = (GET_MODE_SIZE (inner_mode)
2951 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
2953 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
2954 bitpos += byte * BITS_PER_UNIT;
2959 /* This function returns the regno offset of a subreg expression.
2960 xregno - A regno of an inner hard subreg_reg (or what will become one).
2961 xmode - The mode of xregno.
2962 offset - The byte offset.
2963 ymode - The mode of a top level SUBREG (or what may become one).
2964 RETURN - The regno offset which would be used. */
2966 subreg_regno_offset (xregno, xmode, offset, ymode)
2967 unsigned int xregno;
2968 enum machine_mode xmode;
2969 unsigned int offset;
2970 enum machine_mode ymode;
2972 int nregs_xmode, nregs_ymode;
2973 int mode_multiple, nregs_multiple;
2976 if (xregno >= FIRST_PSEUDO_REGISTER)
2979 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
2980 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
2981 if (offset == 0 || nregs_xmode == nregs_ymode)
2984 /* size of ymode must not be greater than the size of xmode. */
2985 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
2986 if (mode_multiple == 0)
2989 y_offset = offset / GET_MODE_SIZE (ymode);
2990 nregs_multiple = nregs_xmode / nregs_ymode;
2991 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
2994 /* Return the final regno that a subreg expression refers to. */
3000 rtx subreg = SUBREG_REG (x);
3001 int regno = REGNO (subreg);
3003 ret = regno + subreg_regno_offset (regno,
3010 struct parms_set_data
3016 /* Helper function for noticing stores to parameter registers. */
3018 parms_set (x, pat, data)
3019 rtx x, pat ATTRIBUTE_UNUSED;
3022 struct parms_set_data *d = data;
3023 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3024 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3026 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3031 /* Look backward for first parameter to be loaded.
3032 Do not skip BOUNDARY. */
3034 find_first_parameter_load (call_insn, boundary)
3035 rtx call_insn, boundary;
3037 struct parms_set_data parm;
3040 /* Since different machines initialize their parameter registers
3041 in different orders, assume nothing. Collect the set of all
3042 parameter registers. */
3043 CLEAR_HARD_REG_SET (parm.regs);
3045 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3046 if (GET_CODE (XEXP (p, 0)) == USE
3047 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3049 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3052 /* We only care about registers which can hold function
3054 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3057 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3062 /* Search backward for the first set of a register in this set. */
3063 while (parm.nregs && before != boundary)
3065 before = PREV_INSN (before);
3067 /* It is possible that some loads got CSEed from one call to
3068 another. Stop in that case. */
3069 if (GET_CODE (before) == CALL_INSN)
3072 /* Our caller needs either ensure that we will find all sets
3073 (in case code has not been optimized yet), or take care
3074 for possible labels in a way by setting boundary to preceding
3076 if (GET_CODE (before) == CODE_LABEL)
3078 if (before != boundary)
3083 if (INSN_P (before))
3084 note_stores (PATTERN (before), parms_set, &parm);