/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This is the pathetic reminder of old fame of the jump-optimization pass
   of the compiler.  Now it contains basically a set of utility functions to
   operate with jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "basic-block.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "diagnostic.h"
#include "toplev.h"
#include "reload.h"
#include "predict.h"
#include "timevar.h"
#include "tree-pass.h"
#include "target.h"
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */
static void init_label_info (rtx);
static void mark_all_labels (rtx);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);
static int returnjump_p_1 (rtx *, void *);
/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */

void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
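
/* Editorial usage sketch (hypothetical helper, not upstream code): a
   pass that has rewritten jumps without maintaining JUMP_LABEL can
   repair the bookkeeping for the whole function in one call.  */

static void ATTRIBUTE_UNUSED
example_repair_jump_labels (void)
{
  /* get_insns () is the head of the current function's insn list;
     rebuild_jump_labels recomputes LABEL_NUSES and JUMP_LABEL from it.  */
  rebuild_jump_labels (get_insns ());
}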
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */

static unsigned int
cleanup_barriers (void)
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (BARRIER_P (insn))
	{
	  prev = prev_nonnote_insn (insn);
	  if (BARRIER_P (prev))
	    delete_insn (insn);
	  else if (prev != PREV_INSN (insn))
	    reorder_insns (insn, insn, prev);
	}
    }
  return 0;
}
struct tree_opt_pass pass_cleanup_barriers =
{
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
/* Initialize LABEL_NUSES and JUMP_LABEL fields, and delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  */

static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    else if (JUMP_P (insn))
      JUMP_LABEL (insn) = 0;
    else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
      {
	rtx note, next;

	for (note = REG_NOTES (insn); note; note = next)
	  {
	    next = XEXP (note, 1);
	    if (REG_NOTE_KIND (note) == REG_LABEL
		&& ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
	      remove_note (insn, note);
	  }
      }
}
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	mark_jump_label (PATTERN (insn), insn, 0);
	if (! INSN_DELETED_P (insn) && JUMP_P (insn))
	  {
	    /* When we know the LABEL_REF contained in a REG used in
	       an indirect jump, we'll have a REG_LABEL note so that
	       flow can tell where it's going.  */
	    if (JUMP_LABEL (insn) == 0)
	      {
		rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
		if (label_note)
		  {
		    /* Put a LABEL_REF around the REG_LABEL note, so
		       that we can canonicalize it.  */
		    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
						       XEXP (label_note, 0));

		    mark_jump_label (label_ref, insn, 0);
		    XEXP (label_note, 0) = XEXP (label_ref, 0);
		    JUMP_LABEL (insn) = XEXP (label_note, 0);
		  }
	      }
	  }
      }

  /* If we are in cfglayout mode, there may be non-insns between the
     basic blocks.  If those non-insns represent tablejump data, they
     contain label references that we must record.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      rtx insn;
      FOR_EACH_BB (bb)
	{
	  for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }

	  for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }
	}
    }
}
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of the reversed comparison if it is possible to do so.  Otherwise return
   UNKNOWN.  UNKNOWN may be returned in case we are having a CC_MODE compare
   and we don't know whether its source is a floating point or an integer
   comparison.  The machine description should define the REVERSIBLE_CC_MODE
   and REVERSE_CONDITION macros to help this function avoid overhead in
   these cases.  */

enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us a way to reverse the
     comparison.  Give it priority over everything else to allow the
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);

    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see an unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;

    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
	 This code is expensive, but with a sane machine description it
	 will never be used, since REVERSIBLE_CC_MODE will return true
	 in all cases.  */
      if (! insn)
	return UNKNOWN;

      for (prev = prev_nonnote_insn (insn);
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (prev))
	{
	  const_rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for the
		 i387 model of comparisons, which first moves flag registers
		 around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	      /* If the register is clobbered in some unrecognized way,
		 give up.  */
	      return UNKNOWN;
	    }
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
/* A wrapper around the previous function to take COMPARISON as rtx
   expression.  This simplifies many callers.  */

enum rtx_code
reversed_comparison_code (rtx comparison, rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
					 XEXP (comparison, 0),
					 XEXP (comparison, 1), insn);
}
/* Return a comparison with the reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */

rtx
reversed_comparison (rtx exp, enum machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
				    XEXP (exp, 0), XEXP (exp, 1));
}
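
/* Editorial sketch (hypothetical helper, not upstream code): the usual
   calling pattern for the reversal utilities.  For an integer (gt x y)
   this yields LE; UNKNOWN means no safe reversal exists (e.g. an IEEE
   comparison where NaNs must be honored), and the caller must cope.  */

static bool ATTRIBUTE_UNUSED
example_try_reverse_cond (rtx cond, rtx jump)
{
  enum rtx_code rev = reversed_comparison_code (cond, jump);

  if (rev == UNKNOWN)
    return false;		/* Keep the original condition.  */

  /* A caller would now rebuild the condition with code REV, e.g. via
     gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
     XEXP (cond, 1)).  */
  return true;
}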
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling NaNs in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:  return NE;
    case NE:  return EQ;
    case GT:  return LE;
    case GE:  return LT;
    case LT:  return GE;
    case LE:  return GT;
    case GTU: return LEU;
    case GEU: return LTU;
    case LTU: return GEU;
    case LEU: return GTU;
    case UNORDERED: return ORDERED;
    case ORDERED:   return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:  return NE;
    case NE:  return EQ;
    case GT:  return UNLE;
    case GE:  return UNLT;
    case LT:  return UNGE;
    case LE:  return UNGT;
    case LTGT: return UNEQ;
    case UNORDERED: return ORDERED;
    case ORDERED:   return UNORDERED;
    case UNLT: return GE;
    case UNLE: return GT;
    case UNGT: return LE;
    case UNGE: return LT;
    case UNEQ: return LTGT;

    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the code when the two operands of a comparison are
   swapped.  This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:  return LT;
    case GE:  return LE;
    case LT:  return GT;
    case LE:  return GE;
    case GTU: return LTU;
    case GEU: return LEU;
    case LTU: return GTU;
    case LEU: return GEU;
    case UNLT: return UNGT;
    case UNLE: return UNGE;
    case UNGT: return UNLT;
    case UNGE: return UNLE;

    default:
      gcc_unreachable ();
    }
}
/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   return CODE unchanged.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT: return GTU;
    case GE: return GEU;
    case LT: return LTU;
    case LE: return LEU;

    default:
      gcc_unreachable ();
    }
}
/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU: return GT;
    case GEU: return GE;
    case LTU: return LT;
    case LEU: return LE;

    default:
      gcc_unreachable ();
    }
}
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to revert
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
	return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
	return 1;
      break;

    default:
      break;
    }

  return 0;
}
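
/* Editorial sketch (illustrative only): dominance facts implied by the
   switch above.  LT implies both LE and NE; EQ implies the non-strict
   orders; the implication is one-directional.  */

static void ATTRIBUTE_UNUSED
example_dominance_facts (void)
{
  gcc_assert (comparison_dominates_p (LT, LE));
  gcc_assert (comparison_dominates_p (LT, NE));
  gcc_assert (comparison_dominates_p (EQ, GEU));
  gcc_assert (! comparison_dominates_p (LE, LT));
}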
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (const_rtx insn)
{
  return (JUMP_P (insn)
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}
/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (const_rtx insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
	    && ((GET_CODE (XEXP (x, 2)) == PC
		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
		     || GET_CODE (XEXP (x, 1)) == RETURN))
		|| (GET_CODE (XEXP (x, 1)) == PC
		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
			|| GET_CODE (XEXP (x, 2)) == RETURN))));
}
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (const_rtx insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}
/* Return the SET of the PC if INSN has one, otherwise NULL.  */

rtx
pc_set (const_rtx insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}
/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (const_rtx insn)
{
  const_rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}
/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects, so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (const_rtx insn)
{
  const_rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN))
	  || (a == PC && (b == LABEL_REF || b == RETURN)));
}
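
/* Editorial sketch (hypothetical helper): combining the predicates.
   pc_set looks through a PARALLEL to find the (set (pc) ...), so the
   any_* predicates also accept bundled jump patterns that the plain
   condjump_p / simplejump_p checks reject.  */

static void ATTRIBUTE_UNUSED
example_classify_jump (rtx insn)
{
  if (!JUMP_P (insn))
    return;

  if (any_uncondjump_p (insn))
    {
      /* Unconditional direct jump; its target is JUMP_LABEL (insn).  */
    }
  else if (any_condjump_p (insn))
    {
      /* Conditional jump; condjump_label (insn) may name the taken
	 target, and pc_set (insn) yields the controlling SET.  */
    }
}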
/* Return the label of a conditional jump.  */

rtx
condjump_label (rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}
/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x && (GET_CODE (x) == RETURN
	       || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
}

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}
/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (const_rtx insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}
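
/* Editorial sketch: any_condjump_p alone does not license removing a
   branch, because a PARALLEL may carry extra effects; onlyjump_p is the
   gate prescribed above before deletion.  */

static bool ATTRIBUTE_UNUSED
example_jump_removable_p (rtx insn)
{
  return JUMP_P (insn) && any_condjump_p (insn) && onlyjump_p (insn);
}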
#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif /* HAVE_cc0 */
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
	mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
			 XVECEXP (x, 0, i), 0);
      return;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	XEXP (x, 0) = label;
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (JUMP_P (insn))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		/* Add a REG_LABEL note for LABEL unless there already
		   is one.  All uses of a label, except for labels
		   that are the targets of jumps, must have a
		   REG_LABEL note.  */
		if (! find_reg_note (insn, REG_LABEL, label))
		  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
							REG_NOTES (insn));
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
	}
    }
}
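
/* Editorial sketch: mark_jump_label is also useful on a single
   freshly-emitted jump, to set its JUMP_LABEL and bump the target
   label's LABEL_NUSES without rescanning the whole function.  */

static void ATTRIBUTE_UNUSED
example_mark_new_jump (rtx jump)
{
  gcc_assert (JUMP_P (jump));
  mark_jump_label (PATTERN (jump), jump, 0);
}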
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this function is deprecated.  Use delete_insn instead, and
   a subsequent cleanup_cfg pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
	{
	  /* This can delete NEXT or PREV,
	     either directly if NEXT is JUMP_LABEL (INSN),
	     or indirectly through more levels of jumps.  */
	  delete_related_insns (lab);

	  /* I feel a little doubtful about this loop,
	     but I see no clean and sure alternative way
	     to find the first insn after INSN that is not now deleted.
	     I hope this works.  */
	  while (next && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  return next;
	}
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (NONJUMP_INSN_P (insn) || CALL_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  return next;
}
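
/* Editorial usage sketch: deleting a jump via delete_related_insns also
   reclaims its target label once unused, plus any code that thereby
   becomes unreachable; the returned insn lets a caller's walk resume
   safely past everything that was removed.  */

static rtx ATTRIBUTE_UNUSED
example_zap_jump (rtx jump)
{
  gcc_assert (JUMP_P (jump));
  return delete_related_insns (jump);
}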
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
	{
	  INSN_DELETED_P (insn) = 1;

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    NEXT_INSN (prev) = next;

	  if (next)
	    PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  rtx n;
	  if (nlabel)
	    n = gen_rtx_LABEL_REF (Pmode, nlabel);
	  else
	    n = gen_rtx_RETURN (VOIDmode);

	  validate_change (insn, loc, n, 1);
	  return;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      if (nlabel)
	x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
	x = gen_rtx_RETURN (VOIDmode);
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc;

  if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}
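
/* Editorial sketch of the change-group protocol used throughout this
   file: redirect_jump_1 only queues replacements via validate_change;
   nothing is committed until apply_change_group, and a failed group
   must be rolled back with cancel_changes.  */

static bool ATTRIBUTE_UNUSED
example_redirect_in_group (rtx jump, rtx nlabel)
{
  if (redirect_jump_1 (jump, nlabel) && apply_change_group ())
    return true;

  cancel_changes (0);
  return false;
}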
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}
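
/* Editorial sketch: the common way to retarget a branch.  Passing 1 for
   DELETE_UNUSED lets the old target label, and the dead code after it,
   be deleted once its reference count reaches zero.  */

static bool ATTRIBUTE_UNUSED
example_retarget_branch (rtx jump, rtx new_label)
{
  return redirect_jump (jump, new_label, 1) != 0;
}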
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete related insns to OLABEL if its ref
   count has dropped to zero.  */

void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  /* Negative DELETE_UNUSED used to be used to signal behavior on
     moving FUNCTION_END notes.  Just sanity check that no caller
     still relies on this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);

  if (invert)
    invert_br_probabilities (jump);
}
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */

static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return 1;
	}

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  gcc_assert (x);
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}
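
/* Editorial sketch: flipping a conditional branch so it transfers to
   NEW_LABEL exactly when it previously fell through.  On failure the
   insn is left untouched, since invert_jump cancels the queued
   changes.  */

static bool ATTRIBUTE_UNUSED
example_flip_branch (rtx cond_jump, rtx new_label)
{
  gcc_assert (any_condjump_p (cond_jump));
  return invert_jump (cond_jump, new_label, 1) != 0;
}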
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (rtx x, rtx y)
{
  int i, j;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
			&& REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      reg_x = subreg_regno_offset (reg_renumber[reg_x],
					   GET_MODE (SUBREG_REG (x)),
					   byte_x,
					   GET_MODE (x)) + reg_renumber[reg_x];
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      reg_y = subreg_regno_offset (reg_renumber[reg_y],
					   GET_MODE (SUBREG_REG (y)),
					   byte_y,
					   GET_MODE (y)) + reg_renumber[reg_y];
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (! rtx_renumbered_equal_p (XVECEXP (x, i, j),
					  XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
	  && base < FIRST_PSEUDO_REGISTER
	  && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
					    GET_MODE (SUBREG_REG (x)),
					    SUBREG_BYTE (x), GET_MODE (x)))
	return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
					   GET_MODE (SUBREG_REG (x)),
					   SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}
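
/* Editorial sketch: true_regnum looks through both renumbering and
   SUBREGs, so it is the right query for "do these two rtxes name the
   same hard register?", e.g. when spotting no-op moves.  */

static bool ATTRIBUTE_UNUSED
example_same_hard_reg_p (rtx a, rtx b)
{
  int ra = true_regnum (a);
  int rb = true_regnum (b);

  return ra >= 0 && ra < FIRST_PSEUDO_REGISTER && ra == rb;
}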
/* Return the regno of the register REG, looking through subregs.  */

unsigned int
reg_or_subregno (const_rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}