1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
34 #include "basic-block.h"
38 /* Forward declarations */
39 static int global_reg_mentioned_p_1 (rtx *, void *);
40 static void set_of_1 (rtx, rtx, void *);
41 static void insn_dependent_p_1 (rtx, rtx, void *);
42 static int rtx_referenced_p_1 (rtx *, void *);
43 static int computed_jump_p_1 (rtx);
44 static void parms_set (rtx, rtx, void *);
45 static bool hoist_test_store (rtx, rtx, regset);
46 static void hoist_update_store (rtx, rtx *, rtx, rtx);
48 /* Bit flags that specify the machine subtype we are compiling for.
49 Bits are tested using macros TARGET_... defined in the tm.h file
50 and set by `-m...' switches. Must be defined in rtlanal.c. */
54 /* Return 1 if the value of X is unstable
55 (would be different at a different point in the program).
56 The frame pointer, arg pointer, etc. are considered stable
57 (within one function) and so is anything marked `unchanging'. */
60 rtx_unstable_p (rtx x)
62 RTX_CODE code = GET_CODE (x);
/* A MEM is stable only when marked unchanging AND its address is stable.  */
69 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
84 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
85 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
86 /* The arg pointer varies if it is not a fixed register. */
87 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
88 || RTX_UNCHANGING_P (x))
90 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
91 /* ??? When call-clobbered, the value is stable modulo the restore
92 that must happen after a call. This currently screws up local-alloc
93 into believing that the restore is not needed. */
94 if (x == pic_offset_table_rtx
/* Volatile accesses are unstable by definition.  */
100 if (MEM_VOLATILE_P (x))
/* Default: X is unstable if any of its operands is.  Walk every
   'e' (expression) and 'E' (vector) operand recursively.  */
109 fmt = GET_RTX_FORMAT (code);
110 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
113 if (rtx_unstable_p (XEXP (x, i)))
116 else if (fmt[i] == 'E')
119 for (j = 0; j < XVECLEN (x, i); j++)
120 if (rtx_unstable_p (XVECEXP (x, i, j)))
127 /* Return 1 if X has a value that can vary even between two
128 executions of the program. 0 means X can be compared reliably
129 against certain constants or near-constants.
130 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
131 zero, we are slightly more conservative.
132 The frame pointer and the arg pointer are considered constant. */
135 rtx_varies_p (rtx x, int for_alias)
/* An unchanging MEM varies only if its address does.  */
148 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
162 /* This will resolve to some offset from the frame pointer. */
166 /* Note that we have to test for the actual rtx used for the frame
167 and arg pointers and not just the register number in case we have
168 eliminated the frame and/or arg pointer and are using it
170 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
171 /* The arg pointer varies if it is not a fixed register. */
172 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
174 if (x == pic_offset_table_rtx
175 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
176 /* ??? When call-clobbered, the value is stable modulo the restore
177 that must happen after a call. This currently screws up
178 local-alloc into believing that the restore is not needed, so we
179 must return 0 only if we are called from alias analysis. */
187 /* The operand 0 of a LO_SUM is considered constant
188 (in fact it is related specifically to operand 1)
189 during alias analysis. */
190 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
191 || rtx_varies_p (XEXP (x, 1), for_alias);
/* Volatile memory always varies.  */
194 if (MEM_VOLATILE_P (x))
/* Default: recurse over all operands of X.  */
203 fmt = GET_RTX_FORMAT (code);
204 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
207 if (rtx_varies_p (XEXP (x, i), for_alias))
210 else if (fmt[i] == 'E')
213 for (j = 0; j < XVECLEN (x, i); j++)
214 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
221 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
224 rtx_addr_can_trap_p (rtx x)
226 enum rtx_code code = GET_CODE (x);
/* A weak symbol may resolve to address zero, which would trap.  */
231 return SYMBOL_REF_WEAK (x);
237 /* This will resolve to some offset from the frame pointer. */
241 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
242 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
243 || x == stack_pointer_rtx
244 /* The arg pointer varies if it is not a fixed register. */
245 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
247 /* All of the virtual frame registers are stack references. */
248 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
249 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
254 return rtx_addr_can_trap_p (XEXP (x, 0));
257 /* An address is assumed not to trap if it is an address that can't
258 trap plus a constant integer or it is the pic register plus a
260 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
261 && GET_CODE (XEXP (x, 1)) == CONST_INT)
262 || (XEXP (x, 0) == pic_offset_table_rtx
263 && CONSTANT_P (XEXP (x, 1))));
/* For LO_SUM, only the low-part operand determines trapping.  */
267 return rtx_addr_can_trap_p (XEXP (x, 1));
274 return rtx_addr_can_trap_p (XEXP (x, 0));
280 /* If it isn't one of the case above, it can cause a trap. */
284 /* Return true if X is an address that is known to not be zero. */
287 nonzero_address_p (rtx x)
289 enum rtx_code code = GET_CODE (x);
/* A non-weak symbol always has a nonzero address; weak ones may be null.  */
294 return !SYMBOL_REF_WEAK (x);
300 /* This will resolve to some offset from the frame pointer. */
304 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
305 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
306 || x == stack_pointer_rtx
307 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
309 /* All of the virtual frame registers are stack references. */
310 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
311 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
316 return nonzero_address_p (XEXP (x, 0));
319 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
321 /* Pointers aren't allowed to wrap. If we've got a register
322 that is known to be a pointer, and a positive offset, then
323 the composite can't be zero. */
324 if (INTVAL (XEXP (x, 1)) > 0
325 && REG_P (XEXP (x, 0))
326 && REG_POINTER (XEXP (x, 0)))
329 return nonzero_address_p (XEXP (x, 0));
331 /* Handle PIC references. */
332 else if (XEXP (x, 0) == pic_offset_table_rtx
333 && CONSTANT_P (XEXP (x, 1)))
338 /* Similar to the above; allow positive offsets. Further, since
339 auto-inc is only allowed in memories, the register must be a
341 if (GET_CODE (XEXP (x, 1)) == CONST_INT
342 && INTVAL (XEXP (x, 1)) > 0)
344 return nonzero_address_p (XEXP (x, 0));
347 /* Similarly. Further, the offset is always positive. */
354 return nonzero_address_p (XEXP (x, 0));
/* For LO_SUM the low part decides.  */
357 return nonzero_address_p (XEXP (x, 1));
363 /* If it isn't one of the case above, might be zero. */
367 /* Return 1 if X refers to a memory location whose address
368 cannot be compared reliably with constant addresses,
369 or if X refers to a BLKmode memory object.
370 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
371 zero, we are slightly more conservative. */
374 rtx_addr_varies_p (rtx x, int for_alias)
/* For a MEM: BLKmode objects always count; otherwise defer to
   rtx_varies_p on the address.  */
385 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
/* Otherwise scan all sub-expressions of X for such a MEM.  */
387 fmt = GET_RTX_FORMAT (code);
388 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
391 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
394 else if (fmt[i] == 'E')
397 for (j = 0; j < XVECLEN (x, i); j++)
398 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
404 /* Return the value of the integer term in X, if one is apparent;
406 Only obvious integer terms are detected.
407 This is used in cse.c with the `related_value' field. */
410 get_integer_term (rtx x)
/* Look through a CONST wrapper first.  */
412 if (GET_CODE (x) == CONST)
415 if (GET_CODE (x) == MINUS
416 && GET_CODE (XEXP (x, 1)) == CONST_INT)
417 return - INTVAL (XEXP (x, 1));
418 if (GET_CODE (x) == PLUS
419 && GET_CODE (XEXP (x, 1)) == CONST_INT)
420 return INTVAL (XEXP (x, 1));
424 /* If X is a constant, return the value sans apparent integer term;
426 Only obvious integer terms are detected. */
429 get_related_value (rtx x)
/* Only (const ...) expressions have a related value.  */
431 if (GET_CODE (x) != CONST)
434 if (GET_CODE (x) == PLUS
435 && GET_CODE (XEXP (x, 1)) == CONST_INT)
437 else if (GET_CODE (x) == MINUS
438 && GET_CODE (XEXP (x, 1)) == CONST_INT)
443 /* Given a tablejump insn INSN, return the RTL expression for the offset
444 into the jump table. If the offset cannot be determined, then return
447 If EARLIEST is nonzero, it is a pointer to a place where the earliest
448 insn used in locating the offset was found. */
451 get_jump_table_offset (rtx insn, rtx *earliest)
463 if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn)))
468 /* Some targets (eg, ARM) emit a tablejump that also
469 contains the out-of-range target. */
470 if (GET_CODE (x) == IF_THEN_ELSE
471 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
474 /* Search backwards and locate the expression stored in X. */
475 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
476 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
479 /* If X is an expression using a relative address then strip
480 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
481 or the jump table label. */
482 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
483 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
485 for (i = 0; i < 2; i++)
490 if (y == pc_rtx || y == pic_offset_table_rtx)
/* Trace Y back through register copies as well.  */
493 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
494 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
497 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
506 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
507 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
511 /* Strip off any sign or zero extension. */
512 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
516 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
517 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
521 /* If X isn't a MEM then this isn't a tablejump we understand. */
522 if (GET_CODE (x) != MEM)
525 /* Strip off the MEM. */
528 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
529 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
532 /* If X isn't a PLUS then this isn't a tablejump we understand. */
533 if (GET_CODE (x) != PLUS)
536 /* At this point we should have an expression representing the jump table
537 plus an offset. Examine each operand in order to determine which one
538 represents the jump table. Knowing that tells us that the other operand
539 must represent the offset. */
540 for (i = 0; i < 2; i++)
545 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
546 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
549 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
550 && reg_mentioned_p (label, y))
559 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
560 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
561 for (i = 0; i < 2; i++)
562 if (XEXP (x, i) == pic_offset_table_rtx)
571 /* Return the RTL expression representing the offset. */
575 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
576 a global register. */
579 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
587 switch (GET_CODE (x))
/* A SUBREG of a hard reg mentions the underlying hard registers.  */
590 if (GET_CODE (SUBREG_REG (x)) == REG)
592 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
593 && global_regs[subreg_regno (x)])
601 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
615 /* A non-constant call might use a global register. */
625 /* Returns nonzero if X mentions a global register. */
628 global_reg_mentioned_p (rtx x)
/* For a call, also examine its FUNCTION_USAGE list; const/pure calls
   cannot touch global registers.  */
632 if (GET_CODE (x) == CALL_INSN)
634 if (! CONST_OR_PURE_CALL_P (x))
636 x = CALL_INSN_FUNCTION_USAGE (x);
644 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
647 /* Return the number of places FIND appears within X. If COUNT_DEST is
648 zero, we do not count occurrences inside the destination of a SET. */
651 count_occurrences (rtx x, rtx find, int count_dest)
655 const char *format_ptr;
/* MEMs are compared structurally, not by pointer identity.  */
676 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
/* Skip the SET_DEST when the caller asked us not to count stores.  */
681 if (SET_DEST (x) == find && ! count_dest)
682 return count_occurrences (SET_SRC (x), find, count_dest);
689 format_ptr = GET_RTX_FORMAT (code);
692 for (i = 0; i < GET_RTX_LENGTH (code); i++)
694 switch (*format_ptr++)
697 count += count_occurrences (XEXP (x, i), find, count_dest);
701 for (j = 0; j < XVECLEN (x, i); j++)
702 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
709 /* Nonzero if register REG appears somewhere within IN.
710 Also works if REG is not a register; in this case it checks
711 for a subexpression of IN that is Lisp "equal" to REG. */
714 reg_mentioned_p (rtx reg, rtx in)
/* LABEL_REFs are compared by the label they point to.  */
726 if (GET_CODE (in) == LABEL_REF)
727 return reg == XEXP (in, 0);
729 code = GET_CODE (in);
733 /* Compare registers by number. */
735 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
737 /* These codes have no constituent expressions
747 /* These are kept unique for a given value. */
/* Non-unique rtx codes require a structural comparison.  */
754 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
757 fmt = GET_RTX_FORMAT (code);
759 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
764 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
765 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
768 else if (fmt[i] == 'e'
769 && reg_mentioned_p (reg, XEXP (in, i)))
775 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
776 no CODE_LABEL insn. */
779 no_labels_between_p (rtx beg, rtx end)
/* Walk the insn chain forward from BEG, stopping at END.  */
784 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
785 if (GET_CODE (p) == CODE_LABEL)
790 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
791 no JUMP_INSN insn. */
794 no_jumps_between_p (rtx beg, rtx end)
/* Same scan as no_labels_between_p, but looking for jumps.  */
797 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
798 if (GET_CODE (p) == JUMP_INSN)
803 /* Nonzero if register REG is used in an insn between
804 FROM_INSN and TO_INSN (exclusive of those two). */
807 reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
/* An empty range trivially contains no uses.  */
811 if (from_insn == to_insn)
814 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
816 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
/* Calls can use REG implicitly via their USE/CLOBBER fusage list.  */
817 || (GET_CODE (insn) == CALL_INSN
818 && (find_reg_fusage (insn, USE, reg)
819 || find_reg_fusage (insn, CLOBBER, reg)))))
824 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
825 is entirely replaced by a new value and the only use is as a SET_DEST,
826 we do not consider it a reference. */
829 reg_referenced_p (rtx x, rtx body)
833 switch (GET_CODE (body))
836 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
839 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
840 of a REG that occupies all of the REG, the insn references X if
841 it is mentioned in the destination. */
842 if (GET_CODE (SET_DEST (body)) != CC0
843 && GET_CODE (SET_DEST (body)) != PC
844 && GET_CODE (SET_DEST (body)) != REG
845 && ! (GET_CODE (SET_DEST (body)) == SUBREG
846 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
847 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
848 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
849 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
850 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
851 && reg_overlap_mentioned_p (x, SET_DEST (body)))
/* ASM_OPERANDS: every input operand is a reference.  */
856 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
857 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
864 return reg_overlap_mentioned_p (x, body);
867 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
870 return reg_overlap_mentioned_p (x, XEXP (body, 0));
873 case UNSPEC_VOLATILE:
874 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
875 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
/* PARALLEL: check each element of the vector recursively.  */
880 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
881 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
/* A CLOBBER of a MEM references whatever the address uses.  */
886 if (GET_CODE (XEXP (body, 0)) == MEM)
887 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
/* COND_EXEC references X in the test or in the guarded body.  */
892 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
894 return reg_referenced_p (x, COND_EXEC_CODE (body));
901 /* Nonzero if register REG is referenced in an insn between
902 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
906 reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn)
/* An empty range trivially contains no references.  */
910 if (from_insn == to_insn)
913 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
915 && (reg_referenced_p (reg, PATTERN (insn))
/* Calls may reference REG via their USE fusage list.  */
916 || (GET_CODE (insn) == CALL_INSN
917 && find_reg_fusage (insn, USE, reg))))
922 /* Nonzero if register REG is set or clobbered in an insn between
923 FROM_INSN and TO_INSN (exclusive of those two). */
926 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
/* An empty range trivially contains no sets.  */
930 if (from_insn == to_insn)
933 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
934 if (INSN_P (insn) && reg_set_p (reg, insn))
939 /* Internals of reg_set_between_p.  Nonzero if REG is set or clobbered
   by INSN (or by the rtx INSN itself, when passed part of an insn).  */
941 reg_set_p (rtx reg, rtx insn)
943 /* We can be passed an insn or part of one. If we are passed an insn,
944 check if a side-effect of the insn clobbers REG. */
946 && (FIND_REG_INC_NOTE (insn, reg)
947 || (GET_CODE (insn) == CALL_INSN
948 /* We'd like to test call_used_regs here, but rtlanal.c can't
949 reference that variable due to its use in genattrtab. So
950 we'll just be more conservative.
952 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
953 information holds all clobbered registers. */
954 && ((GET_CODE (reg) == REG
955 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
956 || GET_CODE (reg) == MEM
957 || find_reg_fusage (insn, CLOBBER, reg)))))
/* Otherwise, look for an explicit SET/CLOBBER of REG in the pattern.  */
960 return set_of (reg, insn) != NULL_RTX;
963 /* Similar to reg_set_between_p, but check all registers in X. Return 0
964 only if none of them are modified between START and END. Do not
965 consider non-registers one way or the other. */
968 regs_set_between_p (rtx x, rtx start, rtx end)
970 enum rtx_code code = GET_CODE (x)  ;
987 return reg_set_between_p (x, start, end);
/* Recurse into all operands looking for registers.  */
993 fmt = GET_RTX_FORMAT (code);
994 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
996 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
999 else if (fmt[i] == 'E')
1000 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1001 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
1008 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1009 only if none of them are modified between START and END. Return 1 if
1010 X contains a MEM; this routine does use memory aliasing. */
1013 modified_between_p (rtx x, rtx start, rtx end)
1015 enum rtx_code code = GET_CODE (x);
/* An unchanging MEM is never modified, but its address might be.  */
1038 if (RTX_UNCHANGING_P (x))
1040 if (modified_between_p (XEXP (x, 0), start, end))
/* Ask the alias machinery about each insn in the range.  */
1042 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1043 if (memory_modified_in_insn_p (x, insn))
1049 return reg_set_between_p (x, start, end);
1055 fmt = GET_RTX_FORMAT (code);
1056 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1058 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1061 else if (fmt[i] == 'E')
1062 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1063 if (modified_between_p (XVECEXP (x, i, j), start, end))
1070 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1071 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1072 does use memory aliasing. */
1075 modified_in_p (rtx x, rtx insn)
1077 enum rtx_code code = GET_CODE (x);
/* An unchanging MEM is never modified, but its address might be.  */
1096 if (RTX_UNCHANGING_P (x))
1098 if (modified_in_p (XEXP (x, 0), insn))
1100 if (memory_modified_in_insn_p (x, insn))
1106 return reg_set_p (x, insn);
/* Recurse over the operands of X.  */
1112 fmt = GET_RTX_FORMAT (code);
1113 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1115 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1118 else if (fmt[i] == 'E')
1119 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1120 if (modified_in_p (XVECEXP (x, i, j), insn))
1127 /* Return true if anything in insn X is (anti,output,true) dependent on
1128 anything in insn Y. */
1131 insn_dependent_p (rtx x, rtx y)
1135 if (! INSN_P (x) || ! INSN_P (y))
/* insn_dependent_p_1 clears TMP when a store in X mentions Y
   (and vice versa); a NULL result therefore means "dependent".  */
1139 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1140 if (tmp == NULL_RTX)
1144 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1145 if (tmp == NULL_RTX)
1151 /* A helper routine for insn_dependent_p called through note_stores. */
1154 insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
1156 rtx * pinsn = (rtx *) data;
/* Signal a dependency by clearing *PINSN.  */
1158 if (*pinsn && reg_mentioned_p (x, *pinsn))
1162 /* Helper function for set_of, called via note_stores.  Records in
   DATA1 the store expression when it modifies the sought pattern.  */
1170 set_of_1 (rtx x, rtx pat, void *data1)
1172 struct set_of_data *data = (struct set_of_data *) (data1);
1173 if (rtx_equal_p (x, data->pat)
1174 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1178 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1179 (either directly or via STRICT_LOW_PART and similar modifiers). */
1181 set_of (rtx pat, rtx insn)
1183 struct set_of_data data;
1184 data.found = NULL_RTX;
/* INSN may be a full insn or a bare pattern; handle both.  */
1186 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1190 /* Given an INSN, return a SET expression if this insn has only a single SET.
1191 It may also have CLOBBERs, USEs, or SETs whose output
1192 will not be used, which we ignore. */
1195 single_set_2 (rtx insn, rtx pat)
1198 int set_verified = 1;
1201 if (GET_CODE (pat) == PARALLEL)
1203 for (i = 0; i < XVECLEN (pat, 0); i++)
1205 rtx sub = XVECEXP (pat, 0, i);
1206 switch (GET_CODE (sub))
1213 /* We can consider insns having multiple sets, where all
1214 but one are dead as single set insns. In common case
1215 only single set is present in the pattern so we want
1216 to avoid checking for REG_UNUSED notes unless necessary.
1218 When we reach set first time, we just expect this is
1219 the single set we are looking for and only when more
1220 sets are found in the insn, we check them. */
1223 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1224 && !side_effects_p (set))
/* First SET seen: provisionally accept it, unverified.  */
1230 set = sub, set_verified = 0;
1231 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1232 || side_effects_p (sub))
1244 /* Given an INSN, return nonzero if it has more than one SET, else return
1248 multiple_sets (rtx insn)
1253 /* INSN must be an insn. */
1254 if (! INSN_P (insn))
1257 /* Only a PARALLEL can have multiple SETs. */
1258 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1260 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1261 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1263 /* If we have already found a SET, then return now. */
1271 /* Either zero or one SET. */
1275 /* Return nonzero if the destination of SET equals the source
1276 and there are no side effects. */
1279 set_noop_p (rtx set)
1281 rtx src = SET_SRC (set);
1282 rtx dst = SET_DEST (set);
/* (set (pc) (pc)) is the canonical no-op jump.  */
1284 if (dst == pc_rtx && src == pc_rtx)
1287 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1288 return rtx_equal_p (dst, src) && !side_effects_p (dst);
/* Extracting into the low bits of the same object is a no-op only on
   little-endian targets with a zero bit offset.  */
1290 if (GET_CODE (dst) == SIGN_EXTRACT
1291 || GET_CODE (dst) == ZERO_EXTRACT)
1292 return rtx_equal_p (XEXP (dst, 0), src)
1293 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1294 && !side_effects_p (src);
1296 if (GET_CODE (dst) == STRICT_LOW_PART)
1297 dst = XEXP (dst, 0);
/* Matching SUBREGs of the same byte offset reduce to their inner regs.  */
1299 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1301 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1303 src = SUBREG_REG (src);
1304 dst = SUBREG_REG (dst);
1307 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1308 && REGNO (src) == REGNO (dst));
1311 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1315 noop_move_p (rtx insn)
1317 rtx pat = PATTERN (insn);
/* A previously-identified no-op is cached in INSN_CODE.  */
1319 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1322 /* Insns carrying these notes are useful later on. */
1323 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1326 /* For now treat an insn with a REG_RETVAL note as
1327 a special insn which should not be considered a no-op. */
1328 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1331 if (GET_CODE (pat) == SET && set_noop_p (pat))
1334 if (GET_CODE (pat) == PARALLEL)
1337 /* If nothing but SETs of registers to themselves,
1338 this insn can also be deleted. */
1339 for (i = 0; i < XVECLEN (pat, 0); i++)
1341 rtx tem = XVECEXP (pat, 0, i);
1343 if (GET_CODE (tem) == USE
1344 || GET_CODE (tem) == CLOBBER)
1347 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1357 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1358 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1359 If the object was modified, if we hit a partial assignment to X, or hit a
1360 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1361 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1365 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
/* Scan backwards from *PINSN, stopping at a label.  */
1369 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1373 rtx set = single_set (p);
1374 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1376 if (set && rtx_equal_p (x, SET_DEST (set)))
1378 rtx src = SET_SRC (set);
/* Prefer the REG_EQUAL value when it is a plain expression.  */
1380 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1381 src = XEXP (note, 0);
1383 if ((valid_to == NULL_RTX
1384 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1385 /* Reject hard registers because we don't usually want
1386 to use them; we'd rather use a pseudo. */
1387 && (! (GET_CODE (src) == REG
1388 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1395 /* If set in non-simple way, we don't have a value. */
1396 if (reg_set_p (x, p))
1403 /* Return nonzero if register in range [REGNO, ENDREGNO)
1404 appears either explicitly or implicitly in X
1405 other than being stored into.
1407 References contained within the substructure at LOC do not count.
1408 LOC may be zero, meaning don't ignore anything. */
1411 refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1415 unsigned int x_regno;
1420 /* The contents of a REG_NONNEG note is always zero, so we must come here
1421 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1425 code = GET_CODE (x);
1430 x_regno = REGNO (x);
1432 /* If we modifying the stack, frame, or argument pointer, it will
1433 clobber a virtual register. In fact, we could be more precise,
1434 but it isn't worth it. */
1435 if ((x_regno == STACK_POINTER_REGNUM
1436 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1437 || x_regno == ARG_POINTER_REGNUM
1439 || x_regno == FRAME_POINTER_REGNUM)
1440 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
/* Overlap test: hard regs may span several register numbers.  */
1443 return (endregno > x_regno
1444 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1445 ? hard_regno_nregs[x_regno][GET_MODE (x)]
1449 /* If this is a SUBREG of a hard reg, we can see exactly which
1450 registers are being modified. Otherwise, handle normally. */
1451 if (GET_CODE (SUBREG_REG (x)) == REG
1452 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
1454 unsigned int inner_regno = subreg_regno (x);
1455 unsigned int inner_endregno
1456 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1457 ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);
1459 return endregno > inner_regno && regno < inner_endregno;
/* SET/CLOBBER: the destination is not a "reference" unless partial.  */
1465 if (&SET_DEST (x) != loc
1466 /* Note setting a SUBREG counts as referring to the REG it is in for
1467 a pseudo but not for hard registers since we can
1468 treat each word individually. */
1469 && ((GET_CODE (SET_DEST (x)) == SUBREG
1470 && loc != &SUBREG_REG (SET_DEST (x))
1471 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1472 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1473 && refers_to_regno_p (regno, endregno,
1474 SUBREG_REG (SET_DEST (x)), loc))
1475 || (GET_CODE (SET_DEST (x)) != REG
1476 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1479 if (code == CLOBBER || loc == &SET_SRC (x))
1488 /* X does not match, so try its subexpressions. */
1490 fmt = GET_RTX_FORMAT (code);
1491 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1493 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1501 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1504 else if (fmt[i] == 'E')
1507 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1508 if (loc != &XVECEXP (x, i, j)
1509 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1516 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1517 we check if any register number in X conflicts with the relevant register
1518 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1519 contains a MEM (we don't bother checking for memory addresses that can't
1520 conflict because we expect this to be a rare case. */
1523 reg_overlap_mentioned_p (rtx x, rtx in)
1525 unsigned int regno, endregno;
1527 /* If either argument is a constant, then modifying X can not
1528 affect IN. Here we look at IN, we can profitably combine
1529 CONSTANT_P (x) with the switch statement below. */
1530 if (CONSTANT_P (in))
1534 switch (GET_CODE (x))
1536 case STRICT_LOW_PART:
1539 /* Overly conservative. */
/* SUBREG of a hard reg: translate to the precise hard regno.  */
1544 regno = REGNO (SUBREG_REG (x));
1545 if (regno < FIRST_PSEUDO_REGISTER)
1546 regno = subreg_regno (x);
1552 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1553 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
1554 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
/* X is a MEM: any MEM anywhere inside IN may conflict.  */
1561 if (GET_CODE (in) == MEM
1564 fmt = GET_RTX_FORMAT (GET_CODE (in));
1565 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1566 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1575 return reg_mentioned_p (x, in);
1581 /* If any register in here refers to it we return true. */
1582 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1583 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1584 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1590 #ifdef ENABLE_CHECKING
1591 if (!CONSTANT_P (x))
1599 /* Return the last value to which REG was set prior to INSN. If we can't
1600 find it easily, return 0.
1602 We only return a REG, SUBREG, or constant because it is too hard to
1603 check if a MEM remains unchanged. */
1606 reg_set_last (rtx x, rtx insn)
1608 rtx orig_insn = insn;
1610 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1611 Stop when we reach a label or X is a hard reg and we reach a
1612 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1614 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1616 /* We compare with <= here, because reg_set_last_last_regno
1617 is actually the number of the first reg *not* in X. */
1619 insn && GET_CODE (insn) != CODE_LABEL
1620 && ! (GET_CODE (insn) == CALL_INSN
1621 && REGNO (x) <= FIRST_PSEUDO_REGISTER)
1622 insn = PREV_INSN (insn))
1625 rtx set = set_of (x, insn);
1626 /* OK, this function modify our register. See if we understand it. */
1630 if (GET_CODE (set) != SET || SET_DEST (set) != x)
/* NOTE(review): SET_SRC (x) looks suspicious here -- the SET found
   above is `set', so SET_SRC (set) was probably intended.  Confirm
   against the upstream history before changing.  */
1632 last_value = SET_SRC (x);
1633 if (CONSTANT_P (last_value)
1634 || ((GET_CODE (last_value) == REG
1635 || GET_CODE (last_value) == SUBREG)
1636 && ! reg_set_between_p (last_value,
1647 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1648 (X would be the pattern of an insn).
1649 FUN receives two arguments:
1650 the REG, MEM, CC0 or PC being stored in or clobbered,
1651 the SET or CLOBBER rtx that does the store.
1653 If the item being stored in or clobbered is a SUBREG of a hard register,
1654 the SUBREG will be passed. */
1657 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
/* Look through a COND_EXEC wrapper to the guarded store.  */
1661 if (GET_CODE (x) == COND_EXEC)
1662 x = COND_EXEC_CODE (x);
1664 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1666 rtx dest = SET_DEST (x);
/* Strip wrappers down to the real destination, except keep a SUBREG
   of a hard register (callers are told which hard regs are hit).  */
1668 while ((GET_CODE (dest) == SUBREG
1669 && (GET_CODE (SUBREG_REG (dest)) != REG
1670 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1671 || GET_CODE (dest) == ZERO_EXTRACT
1672 || GET_CODE (dest) == SIGN_EXTRACT
1673 || GET_CODE (dest) == STRICT_LOW_PART)
1674 dest = XEXP (dest, 0);
1676 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1677 each of whose first operand is a register. */
1678 if (GET_CODE (dest) == PARALLEL)
1680 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1681 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1682 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1685 (*fun) (dest, x, data);
/* A PARALLEL pattern: process each element in turn.  */
1688 else if (GET_CODE (x) == PARALLEL)
1689 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1690 note_stores (XVECEXP (x, 0, i), fun, data);
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
  switch (GET_CODE (body))
      /* Conditional execution: the test is a use, and the body is
	 traversed recursively.  */
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	note_uses (&XVECEXP (body, 0, i), fun, data);
      (*fun) (&XEXP (body, 0), data);
      /* Inline asm: every input operand is a use.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      (*fun) (&TRAP_CONDITION (body), data);
      (*fun) (&XEXP (body, 0), data);
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	(*fun) (&XVECEXP (body, 0, i), data);
      /* Clobbering a MEM uses its address.  */
      if (GET_CODE (XEXP (body, 0)) == MEM)
	(*fun) (&XEXP (XEXP (body, 0), 0), data);
	rtx dest = SET_DEST (body);

	/* For sets we replace everything in source plus registers in memory
	   expression in store and operands of a ZERO_EXTRACT.  */
	(*fun) (&SET_SRC (body), data);

	if (GET_CODE (dest) == ZERO_EXTRACT)
	    (*fun) (&XEXP (dest, 1), data);
	    (*fun) (&XEXP (dest, 2), data);

	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
	  dest = XEXP (dest, 0);

	if (GET_CODE (dest) == MEM)
	  (*fun) (&XEXP (dest, 0), data);

      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG,
   ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

dead_or_set_p (rtx insn, rtx x)
  unsigned int regno, last_regno;
  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
  if (GET_CODE (x) != REG)
  /* For a multi-register hard reg, every covered regno must be dead/set.  */
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
		: regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

dead_or_set_regno_p (rtx insn, unsigned int test_regno)
  unsigned int regno, endregno;
  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
  /* A call clobbering TEST_REGNO in its function-usage list also kills it.  */
  if (GET_CODE (insn) == CALL_INSN
      && find_regno_fusage (insn, CLOBBER, test_regno))
  pattern = PATTERN (insn);
  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);
  if (GET_CODE (pattern) == SET)
      rtx dest = SET_DEST (pattern);
      /* A value is totally replaced if it is the destination or the
	 destination is a SUBREG of REGNO that does not change the number of
	 words.  */
      if (GET_CODE (dest) == SUBREG
	  && (((GET_MODE_SIZE (GET_MODE (dest))
		+ UNITS_PER_WORD - 1) / UNITS_PER_WORD)
	      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
	dest = SUBREG_REG (dest);
      if (GET_CODE (dest) != REG)
      regno = REGNO (dest);
      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
		  : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
      return (test_regno >= regno && test_regno < endregno);
  else if (GET_CODE (pattern) == PARALLEL)
      /* Check every SET/CLOBBER arm of the PARALLEL the same way.  */
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	  rtx body = XVECEXP (pattern, 0, i);
	  if (GET_CODE (body) == COND_EXEC)
	    body = COND_EXEC_CODE (body);
	  if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
	      rtx dest = SET_DEST (body);
	      if (GET_CODE (dest) == SUBREG
		  && (((GET_MODE_SIZE (GET_MODE (dest))
			+ UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
			   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
		dest = SUBREG_REG (dest);
	      if (GET_CODE (dest) != REG)
	      regno = REGNO (dest);
	      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
			  : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
	      if (test_regno >= regno && test_regno < endregno)
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

find_reg_note (rtx insn, enum reg_note kind, rtx datum)
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
  /* Walk the REG_NOTES list; DATUM is matched by pointer identity.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
	&& (datum == 0 || datum == XEXP (link, 0)))
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
	/* Verify that it is a register, so that scratch and MEM won't cause a
	   problem here.  */
	&& GET_CODE (XEXP (link, 0)) == REG
	/* The note's register range [REGNO(note), REGNO(note)+nregs) must
	   cover REGNO; a pseudo always occupies exactly one slot.  */
	&& REGNO (XEXP (link, 0)) <= regno
	&& ((REGNO (XEXP (link, 0))
	     + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
		: hard_regno_nregs[REGNO (XEXP (link, 0))]
				  [GET_MODE (XEXP (link, 0))]))
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

find_reg_equal_equiv_note (rtx insn)
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
	|| REG_NOTE_KIND (link) == REG_EQUIV)
	/* The note is only meaningful when the insn is a single set.  */
	if (single_set (insn) == 0)
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (GET_CODE (insn) != CALL_INSN)
  if (GET_CODE (datum) != REG)
      /* Non-REG datum: look for an exact rtx match in the usage list.  */
      for (link = CALL_INSN_FUNCTION_USAGE (insn);
	   link = XEXP (link, 1))
	if (GET_CODE (XEXP (link, 0)) == code
	    && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
      unsigned int regno = REGNO (datum);
      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
	 to pseudo registers, so don't bother checking.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	  /* Check every hard register DATUM occupies.  */
	  unsigned int end_regno
	    = regno + hard_regno_nregs[regno][GET_MODE (datum)];
	  for (i = regno; i < end_regno; i++)
	    if (find_regno_fusage (insn, code, i))
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */
  if (regno >= FIRST_PSEUDO_REGISTER
      || GET_CODE (insn) != CALL_INSN )
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
      unsigned int regnote;
      /* Match when the usage entry's hard-reg range covers REGNO.  */
      if (GET_CODE (op = XEXP (link, 0)) == code
	  && GET_CODE (reg = XEXP (op, 0)) == REG
	  && (regnote = REGNO (reg)) <= regno
	  && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
/* Return true if INSN is a call to a pure function.  */

pure_call_p (rtx insn)
  if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
      /* A (use (mem:BLK (scratch))) marks a pure (not const) call.  */
      if (GET_CODE (u = XEXP (link, 0)) == USE
	  && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
	  && GET_CODE (XEXP (m, 0)) == SCRATCH)
/* Remove register note NOTE from the REG_NOTES of INSN.  */

remove_note (rtx insn, rtx note)
  if (note == NULL_RTX)
  /* Head of the list: unlink directly.  */
  if (REG_NOTES (insn) == note)
      REG_NOTES (insn) = XEXP (note, 1);
  /* Otherwise find the predecessor and splice NOTE out.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
	XEXP (link, 1) = XEXP (note, 1);
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

in_expr_list_p (rtx listp, rtx node)
  /* Pointer-identity search over the EXPR_LIST chain.  */
  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

remove_node_from_expr_list (rtx node, rtx *listp)
  rtx prev = NULL_RTX;
      if (node == XEXP (temp, 0))
	  /* Splice the node out of the list.  */
	    XEXP (prev, 1) = XEXP (temp, 1);
	    /* NODE was the head; update the caller's list pointer.  */
	    *listp = XEXP (temp, 1);
      temp = XEXP (temp, 1);
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state instructions, and thus no
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

volatile_insn_p (rtx x)
  code = GET_CODE (x);
    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      /* An asm is volatile only if explicitly marked so.  */
      if (MEM_VOLATILE_P (x))
  /* Recursively scan the operands of this expression.  */
    const char *fmt = GET_RTX_FORMAT (code);
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (volatile_insn_p (XEXP (x, i)))
	else if (fmt[i] == 'E')
	    for (j = 0; j < XVECLEN (x, i); j++)
	      if (volatile_insn_p (XVECEXP (x, i, j)))
/* Nonzero if X contains any volatile memory references
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

volatile_refs_p (rtx x)
  code = GET_CODE (x);
    case UNSPEC_VOLATILE:
      /* Unlike volatile_insn_p, a volatile MEM also counts here.  */
      if (MEM_VOLATILE_P (x))
  /* Recursively scan the operands of this expression.  */
    const char *fmt = GET_RTX_FORMAT (code);
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (volatile_refs_p (XEXP (x, i)))
	else if (fmt[i] == 'E')
	    for (j = 0; j < XVECLEN (x, i); j++)
	      if (volatile_refs_p (XVECEXP (x, i, j)))
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

side_effects_p (rtx x)
  code = GET_CODE (x);
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
	 when some combination can't be done.  If we see one, don't think
	 that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);
    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      if (MEM_VOLATILE_P (x))
  /* Recursively scan the operands of this expression.  */
    const char *fmt = GET_RTX_FORMAT (code);
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (side_effects_p (XEXP (x, i)))
	else if (fmt[i] == 'E')
	    for (j = 0; j < XVECLEN (x, i); j++)
	      if (side_effects_p (XVECEXP (x, i, j)))
/* Return nonzero if evaluating rtx X might cause a trap.  */

  code = GET_CODE (x);
  /* Handle these cases quickly.  */
    case UNSPEC_VOLATILE:
      return MEM_VOLATILE_P (x);
    /* Memory ref can trap unless it's a static var or a stack slot.  */
      if (MEM_NOTRAP_P (x))
      return rtx_addr_can_trap_p (XEXP (x, 0));
    /* Division by a non-constant might trap.  */
      if (HONOR_SNANS (GET_MODE (x)))
      if (! CONSTANT_P (XEXP (x, 1))
	  || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	      && flag_trapping_math))
      /* Division by a literal zero constant certainly traps.  */
      if (XEXP (x, 1) == const0_rtx)
      /* An EXPR_LIST is used to represent a function call.  This
	 certainly may trap.  */
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
      /* ??? There is no machine independent way to check for tests that trap
	 when COMPARE is used, though many targets do make this distinction.
	 For instance, sparc uses CCFPE for compares which generate exceptions
	 and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
      /* But often the compare has some CC mode, so check operand
	 modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
	  || HONOR_NANS (GET_MODE (XEXP (x, 1))))
      if (HONOR_SNANS (GET_MODE (x)))
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
	  || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
      /* These operations don't trap even with floating point.  */
      /* Any floating arithmetic may trap.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	  && flag_trapping_math)
  /* Recursively check the operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (may_trap_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (may_trap_p (XVECEXP (x, i, j)))
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

inequality_comparisons_p (rtx x)
  enum rtx_code code = GET_CODE (x);
  /* Recursively scan the operands of this expression.  */
  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < len; i++)
      if (inequality_comparisons_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (inequality_comparisons_p (XVECEXP (x, i, j)))
/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

replace_rtx (rtx x, rtx from, rtx to)
  /* The following prevents loops occurrence when we change MEM in
     CONST_DOUBLE onto the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
  /* Allow this function to make replacements in EXPR_LISTs.  */
  if (GET_CODE (x) == SUBREG)
      rtx new = replace_rtx (SUBREG_REG (x), from, to);
      /* If the inner expression folded to a constant, fold the SUBREG
	 away too rather than creating (subreg (const_int ...)).  */
      if (GET_CODE (new) == CONST_INT)
	  x = simplify_subreg (GET_MODE (x), new,
			       GET_MODE (SUBREG_REG (x)),
	SUBREG_REG (x) = new;
  else if (GET_CODE (x) == ZERO_EXTEND)
      rtx new = replace_rtx (XEXP (x, 0), from, to);
      /* Likewise fold ZERO_EXTEND of a constant.  */
      if (GET_CODE (new) == CONST_INT)
	  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
					new, GET_MODE (XEXP (x, 0)));
  /* Recursively replace in all operands, in place.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
    else if (fmt[i] == 'E')
      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
/* Throughout the rtx X, replace many registers according to REG_MAP.
   Return the replacement for X (which may be X with altered contents).
   REG_MAP[R] is the replacement for register R, or 0 for don't replace.
   NREGS is the length of REG_MAP; regs >= NREGS are not mapped.

   We only support REG_MAP entries of REG or SUBREG.  Also, hard registers
   should not be mapped to pseudos or vice versa since validate_change
   is not called.

   If REPLACE_DEST is 1, replacements are also done in destinations;
   otherwise, only sources are replaced.  */

replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
  code = GET_CODE (x);
      /* Verify that the register has an entry before trying to access it.  */
      if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
	  /* SUBREGs can't be shared.  Always return a copy to ensure that if
	     this replacement occurs more than once then each instance will
	     get distinct rtx.  */
	  if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
	    return copy_rtx (reg_map[REGNO (x)]);
	  return reg_map[REGNO (x)];
      /* Prevent making nested SUBREGs.  */
      if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
	  && reg_map[REGNO (SUBREG_REG (x))] != 0
	  && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
	  rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
	  return simplify_gen_subreg (GET_MODE (x), map_val,
				      GET_MODE (SUBREG_REG (x)),
	SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
      else if (GET_CODE (SET_DEST (x)) == MEM
	       || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	/* Even if we are not to replace destinations, replace register if it
	   is CONTAINED in destination (destination is memory or
	   STRICT_LOW_PART).  */
	XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	/* Similarly, for ZERO_EXTRACT we replace all operands.  */
      /* The source side is always replaced.  */
      SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
  /* Recursively apply the map to all operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
    else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
					    nregs, replace_dest);
/* Replace occurrences of the old label in *X with the new one.
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */

replace_label (rtx *x, void *data)
  rtx old_label = ((replace_label_data *) data)->r1;
  rtx new_label = ((replace_label_data *) data)->r2;
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
  /* A MEM whose address is a constant-pool symbol may hide a reference
     to OLD_LABEL inside the pool constant.  */
  if (GET_CODE (l) == MEM
      && (tmp = XEXP (l, 0)) != NULL_RTX
      && GET_CODE (tmp) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (tmp))
      rtx c = get_pool_constant (tmp);
      if (rtx_referenced_p (old_label, c))
	  replace_label_data *d = (replace_label_data *) data;
	  /* Create a copy of constant C; replace the label inside
	     but do not update LABEL_NUSES because uses in constant pool
	     are not counted.  */
	  new_c = copy_rtx (c);
	  d->update_label_nuses = false;
	  for_each_rtx (&new_c, replace_label, data);
	  d->update_label_nuses = update_label_nuses;
	  /* Add the new constant NEW_C to constant pool and replace
	     the old reference to constant by new reference.  */
	  new_l = force_const_mem (get_pool_mode (tmp), new_c);
	  *x = replace_rtx (l, l, new_l);
  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by for_each_rtx because it doesn't
     handle unprinted ('0') fields.  */
  if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
    JUMP_LABEL (l) = new_label;
  if ((GET_CODE (l) == LABEL_REF
       || GET_CODE (l) == INSN_LIST)
      && XEXP (l, 0) == old_label)
      XEXP (l, 0) = new_label;
      /* Keep the use counts of both labels accurate.  */
      if (update_label_nuses)
	  ++LABEL_NUSES (new_label);
	  --LABEL_NUSES (old_label);
/* When *BODY is equal to X or X is directly referenced by *BODY
   return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
   too, otherwise FOR_EACH_RTX continues traversing *BODY.  */

rtx_referenced_p_1 (rtx *body, void *x)
  if (*body == NULL_RTX)
    return y == NULL_RTX;
  /* Return true if a label_ref *BODY refers to label Y.  */
  if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
    return XEXP (*body, 0) == y;
  /* If *BODY is a reference to pool constant traverse the constant.  */
  if (GET_CODE (*body) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (*body))
    return rtx_referenced_p (y, get_pool_constant (*body));
  /* By default, compare the RTL expressions.  */
  return rtx_equal_p (*body, y);
/* Return true if X is referenced in BODY.  */

rtx_referenced_p (rtx x, rtx body)
  /* Nonzero result from the callback stops the walk and propagates up.  */
  return for_each_rtx (&body, rtx_referenced_p_1, x);
/* If INSN is a tablejump return true and store the label (before jump table) to
   *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */

tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
  /* A tablejump is a JUMP_INSN whose JUMP_LABEL is followed by another
     JUMP_INSN holding an ADDR_VEC or ADDR_DIFF_VEC dispatch table.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (label = JUMP_LABEL (insn)) != NULL_RTX
      && (table = next_active_insn (label)) != NULL_RTX
      && GET_CODE (table) == JUMP_INSN
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
	  || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

computed_jump_p_1 (rtx x)
  enum rtx_code code = GET_CODE (x);
      /* A MEM is "computed" unless it loads from the constant pool.  */
      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
		&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
      /* For IF_THEN_ELSE only the two arms matter, not the condition.  */
      return (computed_jump_p_1 (XEXP (x, 1))
	      || computed_jump_p_1 (XEXP (x, 2)));
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	  && computed_jump_p_1 (XEXP (x, i)))
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (computed_jump_p_1 (XVECEXP (x, i, j)))
/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

computed_jump_p (rtx insn)
  if (GET_CODE (insn) == JUMP_INSN)
      rtx pat = PATTERN (insn);
      /* A REG_LABEL note marks a jump whose target label is known.  */
      if (find_reg_note (insn, REG_LABEL, NULL_RTX))
      else if (GET_CODE (pat) == PARALLEL)
	  int len = XVECLEN (pat, 0);
	  int has_use_labelref = 0;
	  /* A (use (label_ref ...)) arm identifies a tablejump/casesi.  */
	  for (i = len - 1; i >= 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
	      has_use_labelref = 1;
	  if (! has_use_labelref)
	    for (i = len - 1; i >= 0; i--)
	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
      /* Simple case: a single set of the PC from a computed source.  */
      else if (GET_CODE (pat) == SET
	       && SET_DEST (pat) == pc_rtx
	       && computed_jump_p_1 (SET_SRC (pat)))
/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

for_each_rtx (rtx *x, rtx_function f, void *data)
  /* Visit X itself first.  */
  result = (*f) (x, data);
    /* Do not traverse sub-expressions.  */
  else if (result != 0)
    /* Stop the traversal.  */
    /* There are no sub-expressions.  */
  length = GET_RTX_LENGTH (GET_CODE (*x));
  format = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = 0; i < length; ++i)
	  /* 'e' operands: recurse into a single sub-rtx.  */
	  result = for_each_rtx (&XEXP (*x, i), f, data);
	  /* 'E'/'V' operands: recurse into each vector element.  */
	  if (XVEC (*x, i) != 0)
	      for (j = 0; j < XVECLEN (*x, i); ++j)
		  result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
	  /* Nothing to do.  */
/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

regno_use_in (unsigned int regno, rtx x)
  if (GET_CODE (x) == REG && REGNO (x) == regno)
  /* Recursively search every operand for the first matching REG.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      if ((tem = regno_use_in (regno, XEXP (x, i))))
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   We use negative values to indicate a preference for the first operand
   and positive values for the second operand.  */

commutative_operand_precedence (rtx op)
  enum rtx_code code = GET_CODE (op);

  /* Constants always come the second operand.  Prefer "nice" constants.  */
  if (code == CONST_INT)
  if (code == CONST_DOUBLE)
  /* Look through constant-pool references; they may hide a plain constant.  */
  op = avoid_constant_pool_reference (op);
  if (code == CONST_INT)
  if (code == CONST_DOUBLE)
  if (CONSTANT_P (op))

  /* SUBREGs of objects should come second.  */
      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')

  class = GET_RTX_CLASS (code);

  /* Prefer operands that are themselves commutative to be first.
     This helps to make things linear.  In particular,
     (and (and (reg) (reg)) (not (reg))) is canonical.  */

  /* If only one operand is a binary expression, it will be the first
     operand.  In particular, (plus (minus (reg) (reg)) (neg (reg)))
     is canonical, although it will usually be further simplified.  */

  /* Then prefer NEG and NOT.  */
  if (code == NEG || code == NOT)

  /* Complex expressions should be the first, so decrease priority
     of objects.  */
  if (GET_RTX_CLASS (code) == 'o')
/* Return 1 iff it is necessary to swap operands of commutative operation
   in order to canonicalize expression.  */

swap_commutative_operands_p (rtx x, rtx y)
  /* Swap when Y has the stronger preference for the first position.  */
  return (commutative_operand_precedence (x)
	  < commutative_operand_precedence (y));
/* Return 1 if X is an autoincrement side effect and the register is
   not the stack pointer.  */

  switch (GET_CODE (x))
      /* There are no REG_INC notes for SP.  */
      if (XEXP (x, 0) != stack_pointer_rtx)
/* Return 1 if the sequence of instructions beginning with FROM and up
   to and including TO is safe to move.  If NEW_TO is non-NULL, and
   the sequence is not already safe to move, but can be easily
   extended to a sequence which is safe, then NEW_TO will point to the
   end of the extended sequence.

   For now, this function only checks that the region contains whole
   exception regions, but it could be extended to check additional
   conditions as well.  */

insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
  int eh_region_count = 0;
  /* By default, assume the end of the region will be what was
     requested.  */
      if (GET_CODE (r) == NOTE)
	  switch (NOTE_LINE_NUMBER (r))
	    case NOTE_INSN_EH_REGION_BEG:
	    case NOTE_INSN_EH_REGION_END:
	      if (eh_region_count == 0)
		/* This sequence of instructions contains the end of
		   an exception region, but not the beginning.  Moving
		   it will cause chaos.  */
      /* If we've passed TO, and we see a non-note instruction, we
	 can't extend the sequence to a movable sequence.  */
	  /* It's OK to move the sequence if there were matched sets of
	     exception region notes.  */
	  return eh_region_count == 0;
	  /* It's OK to move the sequence if there were matched sets of
	     exception region notes.  */
	  if (past_to_p && eh_region_count == 0)
      /* Go to the next instruction.  */
/* Return nonzero if IN contains a piece of rtl that has the address LOC.  */

loc_mentioned_in_p (rtx *loc, rtx in)
  enum rtx_code code = GET_CODE (in);
  const char *fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      /* Compare operand addresses, not rtx values: LOC is a location.  */
      if (loc == &in->u.fld[i].rtx)
      if (loc_mentioned_in_p (loc, XEXP (in, i)))
    else if (fmt[i] == 'E')
      for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
/* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
   and SUBREG_BYTE, return the bit offset where the subreg begins
   (counting from the least significant bit of the operand).  */

subreg_lsb_1 (enum machine_mode outer_mode,
	      enum machine_mode inner_mode,
	      unsigned int subreg_byte)
  unsigned int bitpos;
  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    if ((subreg_byte % UNITS_PER_WORD
	 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
	&& (subreg_byte % UNITS_PER_WORD
	    || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))
  /* Word-within-inner-value index, adjusted for word endianness.  */
  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
    word = subreg_byte / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;
  /* Byte-within-word offset, adjusted for byte endianness.  */
  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
    byte = subreg_byte % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;
/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

  /* Delegate to the helper with the modes taken from the SUBREG itself.  */
  return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */

subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
		     unsigned int offset, enum machine_mode ymode)
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  /* Only hard registers have a well-defined register count per mode.  */
  if (xregno >= FIRST_PSEUDO_REGISTER)
  nregs_xmode = hard_regno_nregs[xregno][xmode];
  nregs_ymode = hard_regno_nregs[xregno][ymode];
  /* If this is a big endian paradoxical subreg, which uses more actual
     hard registers than the original register, we must return a negative
     offset so that we find the proper highpart of the register.  */
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return nregs_xmode - nregs_ymode;
  if (offset == 0 || nregs_xmode == nregs_ymode)
  /* size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  if (mode_multiple == 0)
  /* Convert the byte offset to a count of YMODE-sized chunks, then scale
     into hard-register units.  */
  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;
  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
/* This function returns true when the offset is representable via
   subreg_offset in the given regno.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */

subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
			       unsigned int offset, enum machine_mode ymode)
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  /* Only hard registers are meaningful here.  */
  if (xregno >= FIRST_PSEUDO_REGISTER)
  nregs_xmode = hard_regno_nregs[xregno][xmode];
  nregs_ymode = hard_regno_nregs[xregno][ymode];
  /* paradoxical subregs are always valid.  */
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
  /* Lowpart subregs are always valid.  */
  if (offset == subreg_lowpart_offset (ymode, xmode))
#ifdef ENABLE_CHECKING
  /* This should always pass, otherwise we don't know how to verify the
     constraint.  These conditions may be relaxed but subreg_offset would
     need to be redesigned.  */
  if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
      || GET_MODE_SIZE (ymode) % nregs_ymode
      || nregs_xmode % nregs_ymode)
  /* The XMODE value can be seen as a vector of NREGS_XMODE
     values.  The subreg must represent a lowpart of given field.
     Compute what field it is.  */
  offset -= subreg_lowpart_offset (ymode,
				   mode_for_size (GET_MODE_BITSIZE (xmode)
  /* size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  if (mode_multiple == 0)
  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple =  nregs_xmode / nregs_ymode;
#ifdef ENABLE_CHECKING
  if (offset % GET_MODE_SIZE (ymode)
      || mode_multiple % nregs_multiple)
  /* Representable iff the field index is aligned to a whole register group.  */
  return (!(y_offset % (mode_multiple / nregs_multiple)));
/* NOTE(review): elided listing -- return type, remaining arguments of
   the subreg_regno_offset call, and the final `return ret;` are missing
   between the numbered lines.  */
3375 /* Return the final regno that a subreg expression refers to. */
3377 subreg_regno (rtx x)
3380 rtx subreg = SUBREG_REG (x);
3381 int regno = REGNO (subreg);
/* Combine the inner hard regno with the offset implied by the subreg's
   byte/mode (remaining call arguments elided from this listing).  */
3383 ret = regno + subreg_regno_offset (regno,
/* NOTE(review): elided listing -- the body of struct parms_set_data
   (it is used below with members `regs` and, in the caller, `nregs`)
   and parts of parms_set (return type, braces, the nregs decrement)
   are missing between the numbered lines.  */
3390 struct parms_set_data
3396 /* Helper function for noticing stores to parameter registers. */
/* note_stores callback: when a tracked hard parameter register is
   stored to, drop it from the pending set.  */
3398 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3400 struct parms_set_data *d = data;
3401 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3402 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3404 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
/* NOTE(review): elided listing -- return type, the initialization of
   `before`/`parm.nregs`, `continue`s inside the collection loop, and
   the final return are missing between the numbered lines.  */
3409 /* Look backward for first parameter to be loaded.
3410 Do not skip BOUNDARY. */
3412 find_first_parameter_load (rtx call_insn, rtx boundary)
3414 struct parms_set_data parm;
3417 /* Since different machines initialize their parameter registers
3418 in different orders, assume nothing. Collect the set of all
3419 parameter registers. */
3420 CLEAR_HARD_REG_SET (parm.regs);
/* Walk the (use (reg)) expressions recorded on the CALL_INSN to find
   which hard registers carry arguments for this particular call.  */
3422 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3423 if (GET_CODE (XEXP (p, 0)) == USE
3424 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3426 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3429 /* We only care about registers which can hold function
3431 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3434 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3439 /* Search backward for the first set of a register in this set. */
3440 while (parm.nregs && before != boundary)
3442 before = PREV_INSN (before);
3444 /* It is possible that some loads got CSEed from one call to
3445 another. Stop in that case. */
3446 if (GET_CODE (before) == CALL_INSN)
3449 /* Our caller needs either ensure that we will find all sets
3450 (in case code has not been optimized yet), or take care
3451 for possible labels in a way by setting boundary to preceding
3453 if (GET_CODE (before) == CODE_LABEL)
3455 if (before != boundary)
/* Each store into a pending parameter register clears it from
   parm.regs via the parms_set callback.  */
3460 if (INSN_P (before))
3461 note_stores (PATTERN (before), parms_set, &parm);
/* NOTE(review): elided listing -- return type, `rtx set;` declaration,
   and the `return true/false` statements of each branch are missing
   between the numbered lines.  */
3466 /* Return true if we should avoid inserting code between INSN and preceding
3467 call instruction. */
3470 keep_with_call_p (rtx insn)
3474 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
/* Case 1: a store of a general operand into a fixed hard register
   (e.g. stack adjustments) stays glued to the call.  */
3476 if (GET_CODE (SET_DEST (set)) == REG
3477 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3478 && fixed_regs[REGNO (SET_DEST (set))]
3479 && general_operand (SET_SRC (set), VOIDmode))
/* Case 2: copying the function's return-value register into a pseudo
   must not be separated from the call that produced the value.  */
3481 if (GET_CODE (SET_SRC (set)) == REG
3482 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3483 && GET_CODE (SET_DEST (set)) == REG
3484 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3486 /* There may be a stack pop just after the call and before the store
3487 of the return register. Search for the actual store when deciding
3488 if we can break or not. */
3489 if (SET_DEST (set) == stack_pointer_rtx)
/* Recursive check: the stack pop inherits "keep" status from the
   next real insn after it.  */
3491 rtx i2 = next_nonnote_insn (insn);
3492 if (i2 && keep_with_call_p (i2))
/* NOTE(review): elided listing -- return type (bool), braces, the
   true/false returns of each branch, and the loop header around the
   REGNO_REG_SET_P liveness checks are missing between the numbered
   lines.  */
3499 /* Return true when store to register X can be hoisted to the place
3500 with LIVE registers (can be NULL). Value VAL contains destination
3501 whose value will be used. */
3504 hoist_test_store (rtx x, rtx val, regset live)
/* SCRATCH destinations carry no value across insns; presumably always
   hoistable (branch body elided).  */
3506 if (GET_CODE (x) == SCRATCH)
3509 if (rtx_equal_p (x, val))
3512 /* Allow subreg of X in case it is not writing just part of multireg pseudo.
3513 Then we would need to update all users to care hoisting the store too.
3514 Caller may represent that by specifying whole subreg as val. */
3516 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
/* Reject a subreg that writes only part of a multi-word register:
   hoisting it would clobber the untouched words for other users.  */
3518 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3519 && GET_MODE_BITSIZE (GET_MODE (x)) <
3520 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3524 if (GET_CODE (x) == SUBREG)
3527 /* Anything except register store is not hoistable. This includes the
3528 partial stores to registers. */
3533 /* Pseudo registers can be always replaced by another pseudo to avoid
3534 the side effect, for hard register we must ensure that they are dead.
3535 Eventually we may want to add code to try turn pseudos to hards, but it
3536 is unlikely useful. */
3538 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3540 int regno = REGNO (x);
3541 int n = hard_regno_nregs[regno][GET_MODE (x)];
/* Every hard register covered by the store must be dead at the target
   location (iteration over n elided from this listing).  */
3545 if (REGNO_REG_SET_P (live, regno))
3548 if (REGNO_REG_SET_P (live, regno + n))
/* NOTE(review): elided listing -- return type (bool), the switch case
   labels (SET, USE, CLOBBER, PARALLEL, default), `break`s, and return
   statements are missing between the numbered lines.  */
3555 /* Return true if INSN can be hoisted to place with LIVE hard registers
3556 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3557 and used by the hoisting pass. */
3560 can_hoist_insn_p (rtx insn, rtx val, regset live)
3562 rtx pat = PATTERN (insn);
3565 /* It probably does not worth the complexity to handle multiple
3567 if (!single_set (insn))
3569 /* We can move CALL_INSN, but we need to check that all caller clobbered
3571 if (GET_CODE (insn) == CALL_INSN)
3573 /* In future we will handle hoisting of libcall sequences, but
3575 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
/* Validate every store the pattern performs via hoist_test_store;
   case labels are elided from this listing.  */
3577 switch (GET_CODE (pat))
3580 if (!hoist_test_store (SET_DEST (pat), val, live))
3584 /* USES do have sick semantics, so do not move them. */
3588 if (!hoist_test_store (XEXP (pat, 0), val, live))
3592 for (i = 0; i < XVECLEN (pat, 0); i++)
3594 rtx x = XVECEXP (pat, 0, i);
3595 switch (GET_CODE (x))
3598 if (!hoist_test_store (SET_DEST (x), val, live))
3602 /* We need to fix callers to really ensure availability
3603 of all values insn uses, but for now it is safe to prohibit
3604 hoisting of any insn having such a hidden uses. */
3608 if (!hoist_test_store (SET_DEST (x), val, live))
/* NOTE(review): elided listing -- the `rtx x = *xp;` initialization,
   early returns, the else-branch attaching the REG_UNUSED note, and
   braces are missing between the numbered lines.  Note `new` is a
   valid identifier in C (this is pre-C++-compat GCC style).  */
3622 /* Update store after hoisting - replace all stores to pseudo registers
3623 by new ones to avoid clobbering of values except for store to VAL that will
3624 be updated to NEW. */
3627 hoist_update_store (rtx insn, rtx *xp, rtx val, rtx new)
3631 if (GET_CODE (x) == SCRATCH)
/* A subreg of VAL is rewritten as the equivalent subreg of NEW,
   queued (group flag 1) for apply_change_group in the caller.  */
3634 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3635 validate_change (insn, xp,
3636 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3637 SUBREG_BYTE (x)), 1);
3638 if (rtx_equal_p (x, val))
3640 validate_change (insn, xp, new, 1);
/* For other subregs, descend to the inner register before deciding.  */
3643 if (GET_CODE (x) == SUBREG)
3645 xp = &SUBREG_REG (x);
3652 /* We've verified that hard registers are dead, so we may keep the side
3653 effect. Otherwise replace it by new pseudo. */
3654 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3655 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
/* Mark the kept hard-register store unused (lvalue being assigned,
   presumably REG_NOTES (insn), is elided from this listing).  */
3657 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
/* NOTE(review): elided listing -- return type, local declarations
   (pat, note, i), switch case labels, `break`s, the abort on
   apply_change_group failure, and the final return are missing
   between the numbered lines.  */
3660 /* Create a copy of INSN after AFTER replacing store of VAL to NEW
3661 and each other side effect to pseudo register by new pseudo register. */
3664 hoist_insn_after (rtx insn, rtx after, rtx val, rtx new)
3670 insn = emit_copy_of_insn_after (insn, after);
3671 pat = PATTERN (insn);
3673 /* Remove REG_UNUSED notes as we will re-emit them. */
3674 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3675 remove_note (insn, note);
3677 /* To get this working callers must ensure to move everything referenced
3678 by REG_EQUAL/REG_EQUIV notes too. Lets remove them, it is probably
3680 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3681 remove_note (insn, note);
3682 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3683 remove_note (insn, note);
3685 /* Remove REG_DEAD notes as they might not be valid anymore in case
3686 we create redundancy. */
3687 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3688 remove_note (insn, note);
/* Rewrite each store in the copied pattern (case labels elided);
   mirrors the dispatch structure of can_hoist_insn_p above.  */
3689 switch (GET_CODE (pat))
3692 hoist_update_store (insn, &SET_DEST (pat), val, new);
3697 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3700 for (i = 0; i < XVECLEN (pat, 0); i++)
3702 rtx x = XVECEXP (pat, 0, i);
3703 switch (GET_CODE (x))
3706 hoist_update_store (insn, &SET_DEST (x), val, new);
3711 hoist_update_store (insn, &SET_DEST (x), val, new);
/* Commit all queued validate_change replacements at once.  */
3721 if (!apply_change_group ())
/* NOTE(review): elided listing -- function comment, return type, the
   abort on abnormal critical edges, start_sequence/end_sequence calls,
   and the final return of new_insn are missing between the numbered
   lines.  Presumably hoists INSN onto edge E's pending insn stream.  */
3728 hoist_insn_to_edge (rtx insn, edge e, rtx val, rtx new)
3732 /* We cannot insert instructions on an abnormal critical edge.
3733 It will be easier to find the culprit if we die now. */
3734 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3737 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3738 stuff. We also emit CALL_INSNS and friends. */
/* Start a fresh sequence for an empty edge (with a placeholder note),
   otherwise append to the edge's existing pending insns.  */
3739 if (e->insns == NULL_RTX)
3742 emit_note (NOTE_INSN_DELETED);
3745 push_to_sequence (e->insns);
3747 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3749 e->insns = get_insns ();
3754 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3755 to non-complex jumps. That is, direct unconditional, conditional,
3756 and tablejumps, but not computed jumps or returns. It also does
3757 not apply to the fallthru case of a conditional jump. */
3760 label_is_jump_target_p (rtx label, rtx jump_insn)
3762 rtx tmp = JUMP_LABEL (jump_insn);
3767 if (tablejump_p (jump_insn, NULL, &tmp))
3769 rtvec vec = XVEC (PATTERN (tmp),
3770 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3771 int i, veclen = GET_NUM_ELEM (vec);
3773 for (i = 0; i < veclen; ++i)
3774 if (XEXP (RTVEC_ELT (vec, i), 0) == label)