1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 /* This is the pathetic reminder of old fame of the jump-optimization pass
24 of the compiler. Now it contains basically a set of utility functions to
27 Each CODE_LABEL has a count of the times it is used
28 stored in the LABEL_NUSES internal field, and each JUMP_INSN
29 has one label that it refers to stored in the
30 JUMP_LABEL internal field. With this we can detect labels that
31 become unused because of the deletion of all the jumps that
32 formerly used them. The JUMP_LABEL info is sometimes looked
35 The subroutines redirect_jump and invert_jump are used
36 from other passes as well. */
40 #include "coretypes.h"
45 #include "hard-reg-set.h"
47 #include "insn-config.h"
48 #include "insn-attr.h"
54 #include "diagnostic.h"
59 #include "tree-pass.h"
62 /* Optimize jump y; x: ... y: jumpif... x?
63 Don't know if it is worth bothering with. */
64 /* Optimize two cases of conditional jump to conditional jump?
65 This can never delete any instruction or make anything dead,
66 or even change what is live at any point.
67 So perhaps let combiner do it. */
69 static void init_label_info (rtx);
70 static void mark_all_labels (rtx);
71 static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
72 static int invert_exp_1 (rtx, rtx);
73 static int returnjump_p_1 (rtx *, void *);
75 /* Alternate entry into the jump optimizer. This entry point only rebuilds
76 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
79 rebuild_jump_labels (rtx f)
83 timevar_push (TV_REBUILD_JUMP);
87 /* Keep track of labels used from static data; we don't track them
88 closely enough to delete them here, so make sure their reference
89 count doesn't drop to zero. */
/* forced_labels is a chain linked through XEXP (x, 1); bump the use
   count of each label it records.  */
91 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
92 if (LABEL_P (XEXP (insn, 0)))
93 LABEL_NUSES (XEXP (insn, 0))++;
94 timevar_pop (TV_REBUILD_JUMP);
97 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
98 non-fallthru insn. This is not generally true, as multiple barriers
99 may have crept in, or the BARRIER may be separated from the last
100 real insn by one or more NOTEs.
102 This simple pass moves barriers and removes duplicates so that the
106 cleanup_barriers (void)
108 rtx insn, next, prev;
109 for (insn = get_insns (); insn; insn = next)
111 next = NEXT_INSN (insn);
112 if (BARRIER_P (insn))
/* Find the last real insn before this barrier, skipping notes.  */
114 prev = prev_nonnote_insn (insn);
115 if (BARRIER_P (prev))
/* Otherwise, move the barrier so it directly follows that insn.  */
117 else if (prev != PREV_INSN (insn))
118 reorder_insns (insn, insn, prev);
/* Pass descriptor wiring cleanup_barriers into the pass manager.  */
124 struct tree_opt_pass pass_cleanup_barriers =
126 "barriers", /* name */
128 cleanup_barriers, /* execute */
131 0, /* static_pass_number */
133 0, /* properties_required */
134 0, /* properties_provided */
135 0, /* properties_destroyed */
136 0, /* todo_flags_start */
137 TODO_dump_func, /* todo_flags_finish */
142 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
143 notes whose labels don't occur in the insn any more. Returns the
144 largest INSN_UID found. */
146 init_label_info (rtx f)
150 for (insn = f; insn; insn = NEXT_INSN (insn))
/* A preserved label starts with one implicit use so it is never
   deleted as unused.  */
152 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
153 else if (JUMP_P (insn))
154 JUMP_LABEL (insn) = 0;
155 else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
/* Drop REG_LABEL notes whose label no longer appears in the pattern.  */
159 for (note = REG_NOTES (insn); note; note = next)
161 next = XEXP (note, 1);
162 if (REG_NOTE_KIND (note) == REG_LABEL
163 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
164 remove_note (insn, note);
169 /* Mark the label each jump jumps to.
170 Combine consecutive labels, and count uses of labels. */
173 mark_all_labels (rtx f)
177 for (insn = f; insn; insn = NEXT_INSN (insn))
180 mark_jump_label (PATTERN (insn), insn, 0);
181 if (! INSN_DELETED_P (insn) && JUMP_P (insn))
183 /* When we know the LABEL_REF contained in a REG used in
184 an indirect jump, we'll have a REG_LABEL note so that
185 flow can tell where it's going. */
186 if (JUMP_LABEL (insn) == 0)
188 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
191 /* But a LABEL_REF around the REG_LABEL note, so
192 that we can canonicalize it. */
193 rtx label_ref = gen_rtx_LABEL_REF (Pmode,
194 XEXP (label_note, 0));
/* Marking through the wrapper both counts the use and may
   canonicalize the label; copy the result back.  */
196 mark_jump_label (label_ref, insn, 0);
197 XEXP (label_note, 0) = XEXP (label_ref, 0);
198 JUMP_LABEL (insn) = XEXP (label_note, 0);
204 /* If we are in cfglayout mode, there may be non-insns between the
205 basic blocks. If those non-insns represent tablejump data, they
206 contain label references that we must record. */
207 if (current_ir_type () == IR_RTL_CFGLAYOUT)
/* Walk each block's out-of-line header and footer insn lists; only
   jump-table data is expected there (asserted below).  */
213 for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
216 gcc_assert (JUMP_TABLE_DATA_P (insn));
217 mark_jump_label (PATTERN (insn), insn, 0);
220 for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
223 gcc_assert (JUMP_TABLE_DATA_P (insn));
224 mark_jump_label (PATTERN (insn), insn, 0);
230 /* Move all block-beg, block-end and loop-beg notes between START and END out
231 before START. START and END may be such notes. Returns the values of the
232 new starting and ending insns, which may be different if the original ones
233 were such notes. Return true if there were only such notes and no real
237 squeeze_notes (rtx* startp, rtx* endp)
245 rtx past_end = NEXT_INSN (end);
247 for (insn = start; insn != past_end; insn = next)
249 next = NEXT_INSN (insn);
251 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
252 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
254 /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass. */
255 gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
256 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
/* Unlink INSN from its current position and splice it in just
   before START.  */
262 rtx prev = PREV_INSN (insn);
263 PREV_INSN (insn) = PREV_INSN (start);
264 NEXT_INSN (insn) = start;
265 NEXT_INSN (PREV_INSN (insn)) = insn;
266 PREV_INSN (NEXT_INSN (insn)) = insn;
267 NEXT_INSN (prev) = next;
268 PREV_INSN (next) = prev;
275 /* There were no real instructions. */
276 if (start == past_end)
286 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
287 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
288 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
289 know whether it's source is floating point or integer comparison. Machine
290 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
291 to help this function avoid overhead in these cases. */
293 reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
295 enum machine_mode mode;
297 /* If this is not actually a comparison, we can't reverse it. */
298 if (GET_RTX_CLASS (code) != RTX_COMPARE
299 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
302 mode = GET_MODE (arg0);
303 if (mode == VOIDmode)
304 mode = GET_MODE (arg1);
306 /* First see if machine description supplies us way to reverse the
307 comparison. Give it priority over everything else to allow
308 machine description to do tricks. */
309 if (GET_MODE_CLASS (mode) == MODE_CC
310 && REVERSIBLE_CC_MODE (mode))
312 #ifdef REVERSE_CONDITION
313 return REVERSE_CONDITION (code, mode);
315 return reverse_condition (code);
318 /* Try a few special cases based on the comparison code. */
327 /* It is always safe to reverse EQ and NE, even for the floating
328 point. Similarly the unsigned comparisons are never used for
329 floating point so we can reverse them in the default way. */
330 return reverse_condition (code);
335 /* In case we already see unordered comparison, we can be sure to
336 be dealing with floating point so we don't need any more tests. */
337 return reverse_condition_maybe_unordered (code);
342 /* We don't have safe way to reverse these yet. */
348 if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
351 /* Try to search for the comparison to determine the real mode.
352 This code is expensive, but with sane machine description it
353 will be never used, since REVERSIBLE_CC_MODE will return true
/* Walk backwards, stopping at a label (basic-block boundary),
   looking for the insn that last set ARG0.  */
358 for (prev = prev_nonnote_insn (insn);
359 prev != 0 && !LABEL_P (prev);
360 prev = prev_nonnote_insn (prev))
362 rtx set = set_of (arg0, prev);
363 if (set && GET_CODE (set) == SET
364 && rtx_equal_p (SET_DEST (set), arg0))
366 rtx src = SET_SRC (set);
368 if (GET_CODE (src) == COMPARE)
370 rtx comparison = src;
371 arg0 = XEXP (src, 0);
372 mode = GET_MODE (arg0);
373 if (mode == VOIDmode)
374 mode = GET_MODE (XEXP (comparison, 1));
377 /* We can get past reg-reg moves. This may be useful for model
378 of i387 comparisons that first move flag registers around. */
385 /* If register is clobbered in some ununderstandable way,
392 /* Test for an integer condition, or a floating-point comparison
393 in which NaNs can be ignored. */
394 if (GET_CODE (arg0) == CONST_INT
395 || (GET_MODE (arg0) != VOIDmode
396 && GET_MODE_CLASS (mode) != MODE_CC
397 && !HONOR_NANS (mode)))
398 return reverse_condition (code);
403 /* A wrapper around the previous function to take COMPARISON as rtx
404 expression. This simplifies many callers. */
406 reversed_comparison_code (rtx comparison, rtx insn)
/* Only comparison rtxen can be reversed.  */
408 if (!COMPARISON_P (comparison))
410 return reversed_comparison_code_parts (GET_CODE (comparison),
411 XEXP (comparison, 0),
412 XEXP (comparison, 1), insn);
415 /* Return comparison with reversed code of EXP.
416 Return NULL_RTX in case we fail to do the reversal. */
418 reversed_comparison (rtx exp, enum machine_mode mode)
420 enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
421 if (reversed_code == UNKNOWN)
/* Build the reversed comparison through the simplifier so it can fold.  */
424 return simplify_gen_relational (reversed_code, mode, VOIDmode,
425 XEXP (exp, 0), XEXP (exp, 1));
429 /* Given an rtx-code for a comparison, return the code for the negated
430 comparison. If no such code exists, return UNKNOWN.
432 WATCH OUT! reverse_condition is not safe to use on a jump that might
433 be acting on the results of an IEEE floating point comparison, because
434 of the special treatment of non-signaling nans in comparisons.
435 Use reversed_comparison_code instead. */
/* Pure mapping on rtx comparison codes; the rtl operands themselves
   are never examined.  */
438 reverse_condition (enum rtx_code code)
480 /* Similar, but we're allowed to generate unordered comparisons, which
481 makes it safe for IEEE floating-point. Of course, we have to recognize
482 that the target will support them too... */
485 reverse_condition_maybe_unordered (enum rtx_code code)
523 /* Similar, but return the code when two operands of a comparison are swapped.
524 This IS safe for IEEE floating-point. */
527 swap_condition (enum rtx_code code)
569 /* Given a comparison CODE, return the corresponding unsigned comparison.
570 If CODE is an equality comparison or already an unsigned comparison,
574 unsigned_condition (enum rtx_code code)
600 /* Similarly, return the signed version of a comparison. */
603 signed_condition (enum rtx_code code)
629 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
630 truth of CODE1 implies the truth of CODE2. */
633 comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
635 /* UNKNOWN comparison codes can happen as a result of trying to revert
637 They can't match anything, so we have to reject them here. */
/* Reject UNKNOWN outright; it matches nothing.  */
638 if (code1 == UNKNOWN || code2 == UNKNOWN)
647 if (code2 == UNLE || code2 == UNGE)
652 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
658 if (code2 == UNLE || code2 == NE)
663 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
668 if (code2 == UNGE || code2 == NE)
673 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
679 if (code2 == ORDERED)
684 if (code2 == NE || code2 == ORDERED)
689 if (code2 == LEU || code2 == NE)
694 if (code2 == GEU || code2 == NE)
699 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
700 || code2 == UNGE || code2 == UNGT)
711 /* Return 1 if INSN is an unconditional jump and nothing else. */
714 simplejump_p (rtx insn)
/* Pattern must be exactly (set (pc) (label_ref ...)).  */
716 return (JUMP_P (insn)
717 && GET_CODE (PATTERN (insn)) == SET
718 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
719 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
722 /* Return nonzero if INSN is a (possibly) conditional jump
725 Use of this function is deprecated, since we need to support combined
726 branch and compare insns. Use any_condjump_p instead whenever possible. */
729 condjump_p (rtx insn)
731 rtx x = PATTERN (insn);
733 if (GET_CODE (x) != SET
734 || GET_CODE (SET_DEST (x)) != PC)
738 if (GET_CODE (x) == LABEL_REF)
/* Conditional form: one arm of the IF_THEN_ELSE must be the
   fall-through PC, the other a label or return.  */
741 return (GET_CODE (x) == IF_THEN_ELSE
742 && ((GET_CODE (XEXP (x, 2)) == PC
743 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
744 || GET_CODE (XEXP (x, 1)) == RETURN))
745 || (GET_CODE (XEXP (x, 1)) == PC
746 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
747 || GET_CODE (XEXP (x, 2)) == RETURN))));
750 /* Return nonzero if INSN is a (possibly) conditional jump inside a
753 Use this function is deprecated, since we need to support combined
754 branch and compare insns. Use any_condjump_p instead whenever possible. */
757 condjump_in_parallel_p (rtx insn)
759 rtx x = PATTERN (insn);
761 if (GET_CODE (x) != PARALLEL)
/* Only the first element of the PARALLEL is examined.  */
764 x = XVECEXP (x, 0, 0);
766 if (GET_CODE (x) != SET)
768 if (GET_CODE (SET_DEST (x)) != PC)
770 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
772 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
774 if (XEXP (SET_SRC (x), 2) == pc_rtx
775 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
776 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
778 if (XEXP (SET_SRC (x), 1) == pc_rtx
779 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
780 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
785 /* Return set of PC, otherwise NULL. */
793 pat = PATTERN (insn);
795 /* The set is allowed to appear either as the insn pattern or
796 the first set in a PARALLEL. */
797 if (GET_CODE (pat) == PARALLEL)
798 pat = XVECEXP (pat, 0, 0);
/* A SET whose destination is the PC is what we are after.  */
799 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
805 /* Return true when insn is an unconditional direct jump,
806 possibly bundled inside a PARALLEL. */
809 any_uncondjump_p (rtx insn)
811 rtx x = pc_set (insn);
814 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
/* A non-local goto is not a plain unconditional jump.  */
816 if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
821 /* Return true when insn is a conditional jump. This function works for
822 instructions containing PC sets in PARALLELs. The instruction may have
823 various other effects so before removing the jump you must verify
826 Note that unlike condjump_p it returns false for unconditional jumps. */
829 any_condjump_p (rtx insn)
831 rtx x = pc_set (insn);
836 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
839 a = GET_CODE (XEXP (SET_SRC (x), 1));
840 b = GET_CODE (XEXP (SET_SRC (x), 2));
/* Exactly one arm is the fall-through PC; the other must be a label
   or a return.  */
842 return ((b == PC && (a == LABEL_REF || a == RETURN))
843 || (a == PC && (b == LABEL_REF || b == RETURN)));
846 /* Return the label of a conditional jump. */
849 condjump_label (rtx insn)
851 rtx x = pc_set (insn);
856 if (GET_CODE (x) == LABEL_REF)
858 if (GET_CODE (x) != IF_THEN_ELSE)
/* Return whichever IF_THEN_ELSE arm is the LABEL_REF.  */
860 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
862 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
867 /* Return true if INSN is a (possibly conditional) return insn. */
/* for_each_rtx callback: matches a bare RETURN or a SET flagged
   SET_IS_RETURN_P.  */
870 returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
874 return x && (GET_CODE (x) == RETURN
875 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
879 returnjump_p (rtx insn)
883 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
886 /* Return true if INSN is a jump that only transfers control and
890 onlyjump_p (rtx insn)
/* The jump must be a single SET of the PC with a side-effect-free
   source.  */
897 set = single_set (insn);
900 if (GET_CODE (SET_DEST (set)) != PC)
902 if (side_effects_p (SET_SRC (set)))
910 /* Return nonzero if X is an RTX that only sets the condition codes
911 and has no side effects. */
914 only_sets_cc0_p (rtx x)
922 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
925 /* Return 1 if X is an RTX that does nothing but set the condition codes
926 and CLOBBER or USE registers.
927 Return -1 if X does explicitly set the condition codes,
928 but also does other things. */
939 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
/* In a PARALLEL, look at each element: a cc0 SET counts as the cc0
   store, any other SET counts as "other things".  */
941 if (GET_CODE (x) == PARALLEL)
945 int other_things = 0;
946 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
948 if (GET_CODE (XVECEXP (x, 0, i)) == SET
949 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
951 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
954 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
960 /* Find all CODE_LABELs referred to in X, and increment their use counts.
961 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
962 in INSN, then store one of them in JUMP_LABEL (INSN).
963 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
964 referenced in INSN, add a REG_LABEL note containing that label to INSN.
965 Also, when there are consecutive labels, canonicalize on the last of them.
967 Note that two labels separated by a loop-beginning note
968 must be kept distinct if we have not yet done loop-optimization,
969 because the gap between them is where loop-optimize
970 will want to move invariant code to. CROSS_JUMP tells us
971 that loop-optimization is done with. */
974 mark_jump_label (rtx x, rtx insn, int in_mem)
976 RTX_CODE code = GET_CODE (x);
996 for (i = 0; i < XVECLEN (x, 0); i++)
997 mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
998 XVECEXP (x, 0, i), 0);
1005 /* If this is a constant-pool reference, see if it is a label. */
1006 if (CONSTANT_POOL_ADDRESS_P (x))
1007 mark_jump_label (get_pool_constant (x), insn, in_mem);
1012 rtx label = XEXP (x, 0);
1014 /* Ignore remaining references to unreachable labels that
1015 have been deleted. */
1017 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1020 gcc_assert (LABEL_P (label));
1022 /* Ignore references to labels of containing functions. */
1023 if (LABEL_REF_NONLOCAL_P (x))
1026 XEXP (x, 0) = label;
/* Count the use only when the referencing insn is live.  */
1027 if (! insn || ! INSN_DELETED_P (insn))
1028 ++LABEL_NUSES (label);
1033 JUMP_LABEL (insn) = label;
1036 /* Add a REG_LABEL note for LABEL unless there already
1037 is one. All uses of a label, except for labels
1038 that are the targets of jumps, must have a
1040 if (! find_reg_note (insn, REG_LABEL, label))
1041 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1048 /* Do walk the labels in a vector, but not the first operand of an
1049 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1052 if (! INSN_DELETED_P (insn))
1054 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1056 for (i = 0; i < XVECLEN (x, eltnum); i++)
1057 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
/* Default: recurse over X's operands according to its rtx format.  */
1065 fmt = GET_RTX_FORMAT (code);
1066 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1069 mark_jump_label (XEXP (x, i), insn, in_mem);
1070 else if (fmt[i] == 'E')
1073 for (j = 0; j < XVECLEN (x, i); j++)
1074 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1080 /* Delete insn INSN from the chain of insns and update label ref counts
1081 and delete insns now unreachable.
1083 Returns the first insn after INSN that was not deleted.
1085 Usage of this instruction is deprecated. Use delete_insn instead and
1086 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1089 delete_related_insns (rtx insn)
1091 int was_code_label = (LABEL_P (insn));
1093 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
1095 while (next && INSN_DELETED_P (next))
1096 next = NEXT_INSN (next);
1098 /* This insn is already deleted => return first following nondeleted. */
1099 if (INSN_DELETED_P (insn))
1104 /* If instruction is followed by a barrier,
1105 delete the barrier too. */
1107 if (next != 0 && BARRIER_P (next))
1110 /* If deleting a jump, decrement the count of the label,
1111 and delete the label if it is now unused. */
1113 if (JUMP_P (insn) && JUMP_LABEL (insn))
1115 rtx lab = JUMP_LABEL (insn), lab_next;
1117 if (LABEL_NUSES (lab) == 0)
1119 /* This can delete NEXT or PREV,
1120 either directly if NEXT is JUMP_LABEL (INSN),
1121 or indirectly through more levels of jumps. */
1122 delete_related_insns (lab);
1124 /* I feel a little doubtful about this loop,
1125 but I see no clean and sure alternative way
1126 to find the first insn after INSN that is not now deleted.
1127 I hope this works. */
1128 while (next && INSN_DELETED_P (next))
1129 next = NEXT_INSN (next);
1132 else if (tablejump_p (insn, NULL, &lab_next))
1134 /* If we're deleting the tablejump, delete the dispatch table.
1135 We may not be able to kill the label immediately preceding
1136 just yet, as it might be referenced in code leading up to
1138 delete_related_insns (lab_next);
1142 /* Likewise if we're deleting a dispatch table. */
1145 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1146 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1148 rtx pat = PATTERN (insn);
1149 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1150 int len = XVECLEN (pat, diff_vec_p);
/* Delete every table entry label whose use count has hit zero.  */
1152 for (i = 0; i < len; i++)
1153 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1154 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1155 while (next && INSN_DELETED_P (next))
1156 next = NEXT_INSN (next);
1160 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1161 if (NONJUMP_INSN_P (insn) || CALL_P (insn))
1162 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1163 if (REG_NOTE_KIND (note) == REG_LABEL
1164 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1165 && LABEL_P (XEXP (note, 0)))
1166 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1167 delete_related_insns (XEXP (note, 0));
/* Back up over deleted insns and notes to the live predecessor.  */
1169 while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
1170 prev = PREV_INSN (prev);
1172 /* If INSN was a label and a dispatch table follows it,
1173 delete the dispatch table. The tablejump must have gone already.
1174 It isn't useful to fall through into a table. */
1177 && NEXT_INSN (insn) != 0
1178 && JUMP_P (NEXT_INSN (insn))
1179 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1180 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1181 next = delete_related_insns (NEXT_INSN (insn));
1183 /* If INSN was a label, delete insns following it if now unreachable. */
1185 if (was_code_label && prev && BARRIER_P (prev))
1190 code = GET_CODE (next);
1192 next = NEXT_INSN (next);
1193 /* Keep going past other deleted labels to delete what follows. */
1194 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1195 next = NEXT_INSN (next);
1196 else if (code == BARRIER || INSN_P (next))
1197 /* Note: if this deletes a jump, it can cause more
1198 deletion of unreachable code, after a different label.
1199 As long as the value from this recursive call is correct,
1200 this invocation functions correctly. */
1201 next = delete_related_insns (next);
1210 /* Delete a range of insns from FROM to TO, inclusive.
1211 This is for the sake of peephole optimization, so assume
1212 that whatever these insns do will still be done by a new
1213 peephole insn that will replace them. */
1216 delete_for_peephole (rtx from, rtx to)
1222 rtx next = NEXT_INSN (insn);
1223 rtx prev = PREV_INSN (insn);
/* Mark deleted; the insn is only unlinked below, not freed.  */
1227 INSN_DELETED_P (insn) = 1;
1229 /* Patch this insn out of the chain. */
1230 /* We don't do this all at once, because we
1231 must preserve all NOTEs. */
1233 NEXT_INSN (prev) = next;
1236 PREV_INSN (next) = prev;
1244 /* Note that if TO is an unconditional jump
1245 we *do not* delete the BARRIER that follows,
1246 since the peephole that replaces this sequence
1247 is also an unconditional jump in that case. */
1250 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1251 NLABEL as a return. Accrue modifications into the change group. */
1254 redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
1257 RTX_CODE code = GET_CODE (x);
1261 if (code == LABEL_REF)
1263 if (XEXP (x, 0) == olabel)
1267 n = gen_rtx_LABEL_REF (Pmode, nlabel);
1269 n = gen_rtx_RETURN (VOIDmode);
1271 validate_change (insn, loc, n, 1);
1275 else if (code == RETURN && olabel == 0)
1278 x = gen_rtx_LABEL_REF (Pmode, nlabel);
1280 x = gen_rtx_RETURN (VOIDmode);
/* At top level of the pattern, wrap the replacement in a PC set.  */
1281 if (loc == &PATTERN (insn))
1282 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1283 validate_change (insn, loc, x, 1);
/* (set (pc) (label_ref OLABEL)) with no new label becomes a RETURN.  */
1287 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1288 && GET_CODE (SET_SRC (x)) == LABEL_REF
1289 && XEXP (SET_SRC (x), 0) == olabel)
1291 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
1295 fmt = GET_RTX_FORMAT (code);
1296 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1299 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1300 else if (fmt[i] == 'E')
1303 for (j = 0; j < XVECLEN (x, i); j++)
1304 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1309 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
1310 the modifications into the change group. Return false if we did
1311 not see how to do that. */
1314 redirect_jump_1 (rtx jump, rtx nlabel)
1316 int ochanges = num_validated_changes ();
1319 if (GET_CODE (PATTERN (jump)) == PARALLEL)
1320 loc = &XVECEXP (PATTERN (jump), 0, 0);
1322 loc = &PATTERN (jump);
1324 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
/* Success iff redirect_exp_1 queued at least one change.  */
1325 return num_validated_changes () > ochanges;
1328 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
1329 jump target label is unused as a result, it and the code following
1332 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1335 The return value will be 1 if the change was made, 0 if it wasn't
1336 (this can only occur for NLABEL == 0). */
1339 redirect_jump (rtx jump, rtx nlabel, int delete_unused)
1341 rtx olabel = JUMP_LABEL (jump);
/* Nothing to do when the target is unchanged.  */
1343 if (nlabel == olabel)
1346 if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
1349 redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
1353 /* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
1355 If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
1356 count has dropped to zero. */
1358 redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
1363 /* Negative DELETE_UNUSED used to be used to signalize behavior on
1364 moving FUNCTION_END note. Just sanity check that no user still worry
1366 gcc_assert (delete_unused >= 0);
1367 JUMP_LABEL (jump) = nlabel;
1369 ++LABEL_NUSES (nlabel);
1371 /* Update labels in any REG_EQUAL note. */
1372 if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
1374 if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
1375 remove_note (jump, note);
1378 redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
1379 confirm_change_group ();
/* Drop the old label's use count and delete it if now unused.  */
1383 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
1384 /* Undefined labels will remain outside the insn stream. */
1385 && INSN_UID (olabel))
1386 delete_related_insns (olabel);
1388 invert_br_probabilities (jump);
1391 /* Invert the jump condition X contained in jump insn INSN. Accrue the
1392 modifications into the change group. Return nonzero for success. */
1394 invert_exp_1 (rtx x, rtx insn)
1396 RTX_CODE code = GET_CODE (x);
1398 if (code == IF_THEN_ELSE)
1400 rtx comp = XEXP (x, 0);
1402 enum rtx_code reversed_code;
1404 /* We can do this in two ways: The preferable way, which can only
1405 be done if this is not an integer comparison, is to reverse
1406 the comparison code. Otherwise, swap the THEN-part and ELSE-part
1407 of the IF_THEN_ELSE. If we can't do either, fail. */
1409 reversed_code = reversed_comparison_code (comp, insn);
1411 if (reversed_code != UNKNOWN)
1413 validate_change (insn, &XEXP (x, 0),
1414 gen_rtx_fmt_ee (reversed_code,
1415 GET_MODE (comp), XEXP (comp, 0),
/* Fallback: swap the two arms of the IF_THEN_ELSE.  */
1422 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
1423 validate_change (insn, &XEXP (x, 2), tem, 1);
1430 /* Invert the condition of the jump JUMP, and make it jump to label
1431 NLABEL instead of where it jumps now. Accrue changes into the
1432 change group. Return false if we didn't see how to perform the
1433 inversion and redirection. */
1436 invert_jump_1 (rtx jump, rtx nlabel)
1438 rtx x = pc_set (jump);
1442 ochanges = num_validated_changes ();
1444 ok = invert_exp_1 (SET_SRC (x), jump);
/* The inversion must have queued at least one change.  */
1447 if (num_validated_changes () == ochanges)
1450 /* redirect_jump_1 will fail of nlabel == olabel, and the current use is
1451 in Pmode, so checking this is not merely an optimization. */
1452 return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
1455 /* Invert the condition of the jump JUMP, and make it jump to label
1456 NLABEL instead of where it jumps now. Return true if successful. */
1459 invert_jump (rtx jump, rtx nlabel, int delete_unused)
1461 rtx olabel = JUMP_LABEL (jump);
/* Queue the inversion, then commit it atomically.  */
1463 if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1465 redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1473 /* Like rtx_equal_p except that it considers two REGs as equal
1474 if they renumber to the same value and considers two commutative
1475 operations to be the same if the order of the operands has been
1479 rtx_renumbered_equal_p (rtx x, rtx y)
1482 enum rtx_code code = GET_CODE (x);
/* Registers (possibly wrapped in SUBREGs) compare equal when they
   renumber to the same register number and subword offset.  */
1488 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
1489 && (REG_P (y) || (GET_CODE (y) == SUBREG
1490 && REG_P (SUBREG_REG (y)))))
1492 int reg_x = -1, reg_y = -1;
1493 int byte_x = 0, byte_y = 0;
1495 if (GET_MODE (x) != GET_MODE (y))
1498 /* If we haven't done any renumbering, don't
1499 make any assumptions. */
1500 if (reg_renumber == 0)
1501 return rtx_equal_p (x, y);
1505 reg_x = REGNO (SUBREG_REG (x));
1506 byte_x = SUBREG_BYTE (x);
1508 if (reg_renumber[reg_x] >= 0)
1510 reg_x = subreg_regno_offset (reg_renumber[reg_x],
1511 GET_MODE (SUBREG_REG (x)),
1520 if (reg_renumber[reg_x] >= 0)
1521 reg_x = reg_renumber[reg_x];
1524 if (GET_CODE (y) == SUBREG)
1526 reg_y = REGNO (SUBREG_REG (y));
1527 byte_y = SUBREG_BYTE (y);
1529 if (reg_renumber[reg_y] >= 0)
1531 reg_y = subreg_regno_offset (reg_renumber[reg_y],
1532 GET_MODE (SUBREG_REG (y)),
1541 if (reg_renumber[reg_y] >= 0)
1542 reg_y = reg_renumber[reg_y];
1545 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
1548 /* Now we have disposed of all the cases
1549 in which different rtx codes can match. */
1550 if (code != GET_CODE (y))
1564 /* We can't assume nonlocal labels have their following insns yet. */
1565 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
1566 return XEXP (x, 0) == XEXP (y, 0);
1568 /* Two label-refs are equivalent if they point at labels
1569 in the same position in the instruction stream. */
1570 return (next_real_insn (XEXP (x, 0))
1571 == next_real_insn (XEXP (y, 0)));
1574 return XSTR (x, 0) == XSTR (y, 0);
1577 /* If we didn't match EQ equality above, they aren't the same. */
1584 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1586 if (GET_MODE (x) != GET_MODE (y))
1589 /* For commutative operations, the RTX match if the operand match in any
1590 order. Also handle the simple binary and unary cases without a loop. */
1591 if (targetm.commutative_p (x, UNKNOWN))
1592 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1593 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
1594 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
1595 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
1596 else if (NON_COMMUTATIVE_P (x))
1597 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1598 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
1599 else if (UNARY_P (x))
1600 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
1602 /* Compare the elements. If any pair of corresponding elements
1603 fail to match, return 0 for the whole things. */
1605 fmt = GET_RTX_FORMAT (code);
1606 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1612 if (XWINT (x, i) != XWINT (y, i))
1617 if (XINT (x, i) != XINT (y, i))
1622 if (XTREE (x, i) != XTREE (y, i))
1627 if (strcmp (XSTR (x, i), XSTR (y, i)))
1632 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
1637 if (XEXP (x, i) != XEXP (y, i))
1644 if (XVECLEN (x, i) != XVECLEN (y, i))
1646 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1647 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1658 /* If X is a hard register or equivalent to one or a subregister of one,
1659 return the hard register number. If X is a pseudo register that was not
1660 assigned a hard register, return the pseudo register number. Otherwise,
1661 return -1. Any rtx is valid for X. */
/* A renumbered pseudo reports its assigned hard register.  */
1668 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
1669 return reg_renumber[REGNO (x)];
1672 if (GET_CODE (x) == SUBREG)
1674 int base = true_regnum (SUBREG_REG (x));
1676 && base < FIRST_PSEUDO_REGISTER
1677 && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
1678 GET_MODE (SUBREG_REG (x)),
1679 SUBREG_BYTE (x), GET_MODE (x)))
1680 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
1681 GET_MODE (SUBREG_REG (x)),
1682 SUBREG_BYTE (x), GET_MODE (x));
1687 /* Return regno of the register REG and handle subregs too. */
1689 reg_or_subregno (rtx reg)
1691 if (GET_CODE (reg) == SUBREG)
1692 reg = SUBREG_REG (reg);
1693 gcc_assert (REG_P (reg));