1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically set of utility function to
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
42 #include "hard-reg-set.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
55 /* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57 /* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
62 static int init_label_info PARAMS ((rtx));
63 static void mark_all_labels PARAMS ((rtx));
64 static int duplicate_loop_exit_test PARAMS ((rtx));
65 static void delete_computation PARAMS ((rtx));
66 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
67 static int redirect_exp PARAMS ((rtx, rtx, rtx));
68 static void invert_exp_1 PARAMS ((rtx));
69 static int invert_exp PARAMS ((rtx));
70 static int returnjump_p_1 PARAMS ((rtx *, void *));
71 static void delete_prior_computation PARAMS ((rtx, rtx));
73 /* Alternate entry into the jump optimizer. This entry point only rebuilds
74 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
/* NOTE(review): gaps in the embedded line numbers show interior source
   lines were elided from this listing; the fragment below is incomplete
   and not compilable as shown.  */
77 rebuild_jump_labels (f)
/* Presumably F is the first insn of the function's insn chain -- confirm
   against the elided parameter declarations.  */
83 max_uid = init_label_info (f) + 1;
87 /* Keep track of labels used from static data; we don't track them
88 closely enough to delete them here, so make sure their reference
89 count doesn't drop to zero. */
91 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
92 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
93 LABEL_NUSES (XEXP (insn, 0))++;
96 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
97 non-fallthru insn. This is not generally true, as multiple barriers
98 may have crept in, or the BARRIER may be separated from the last
99 real insn by one or more NOTEs.
101 This simple pass moves barriers and removes duplicates so that the
/* NOTE(review): the function header for this barrier-cleanup pass is
   elided from this listing (numbering jumps from 101 to 107); only the
   body fragment is visible.  */
107 rtx insn, next, prev;
108 for (insn = get_insns (); insn; insn = next)
110 next = NEXT_INSN (insn);
111 if (GET_CODE (insn) == BARRIER)
/* Delete a BARRIER that directly follows another BARRIER; otherwise
   move it up next to the preceding non-note insn.  */
113 prev = prev_nonnote_insn (insn);
114 if (GET_CODE (prev) == BARRIER)
115 delete_barrier (insn);
116 else if (prev != PREV_INSN (insn))
117 reorder_insns (insn, insn, prev);
/* Walk the insn chain and duplicate loop exit tests in front of loops.
   NOTE(review): interior lines are elided from this listing; the
   fragment is incomplete as shown.  */
123 copy_loop_headers (f)
127 /* Now iterate optimizing jumps until nothing changes over one pass. */
128 for (insn = f; insn; insn = next)
132 next = NEXT_INSN (insn);
134 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
135 jump. Try to optimize by duplicating the loop exit test if so.
136 This is only safe immediately after regscan, because it uses
137 the values of regno_first_uid and regno_last_uid. */
138 if (GET_CODE (insn) == NOTE
139 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
140 && (temp1 = next_nonnote_insn (insn)) != 0
141 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
/* Remember the insn before the note so we can resume scanning from
   the freshly inserted copy if the duplication succeeded.  */
143 temp = PREV_INSN (insn);
144 if (duplicate_loop_exit_test (insn))
146 next = NEXT_INSN (temp);
/* Remove redundant line-number notes from the insn chain starting at F.
   NOTE(review): interior lines elided from this listing.  */
153 purge_line_number_notes (f)
158 /* Delete extraneous line number notes.
159 Note that two consecutive notes for different lines are not really
160 extraneous. There should be some indication where that line belonged,
161 even if it became empty. */
163 for (insn = f; insn; insn = NEXT_INSN (insn))
164 if (GET_CODE (insn) == NOTE)
166 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
167 /* Any previous line note was for the prologue; gdb wants a new
168 note after the prologue even if it is for the same line. */
169 last_note = NULL_RTX;
170 else if (NOTE_LINE_NUMBER (insn) >= 0)
172 /* Delete this note if it is identical to previous note. */
/* The first operand of this condition (presumably a last_note != 0
   test) is elided.  */
174 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
175 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
177 delete_related_insns (insn);
186 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
187 notes whose labels don't occur in the insn any more. Returns the
188 largest INSN_UID found. */
/* NOTE(review): the signature and declarations for this function are
   elided from the listing (numbering jumps 188 -> 196).  */
196 for (insn = f; insn; insn = NEXT_INSN (insn))
198 if (GET_CODE (insn) == CODE_LABEL)
199 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
200 else if (GET_CODE (insn) == JUMP_INSN)
201 JUMP_LABEL (insn) = 0;
202 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
/* Drop REG_LABEL notes whose label is no longer mentioned in the
   insn's pattern.  */
206 for (note = REG_NOTES (insn); note; note = next)
208 next = XEXP (note, 1);
209 if (REG_NOTE_KIND (note) == REG_LABEL
210 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
211 remove_note (insn, note);
214 if (INSN_UID (insn) > largest_uid)
215 largest_uid = INSN_UID (insn);
221 /* Mark the label each jump jumps to.
222 Combine consecutive labels, and count uses of labels. */
/* NOTE(review): signature and declarations elided (numbering jumps
   222 -> 230); fragment is incomplete as shown.  */
230 for (insn = f; insn; insn = NEXT_INSN (insn))
233 if (GET_CODE (insn) == CALL_INSN
234 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
/* A CALL_PLACEHOLDER holds three alternative insn sequences in its
   first three operands; mark labels in each of them.  */
236 mark_all_labels (XEXP (PATTERN (insn), 0));
237 mark_all_labels (XEXP (PATTERN (insn), 1));
238 mark_all_labels (XEXP (PATTERN (insn), 2));
240 /* Canonicalize the tail recursion label attached to the
241 CALL_PLACEHOLDER insn. */
242 if (XEXP (PATTERN (insn), 3))
244 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
245 XEXP (PATTERN (insn), 3));
246 mark_jump_label (label_ref, insn, 0);
247 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
253 mark_jump_label (PATTERN (insn), insn, 0);
254 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
256 /* When we know the LABEL_REF contained in a REG used in
257 an indirect jump, we'll have a REG_LABEL note so that
258 flow can tell where it's going. */
259 if (JUMP_LABEL (insn) == 0)
261 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
264 /* But a LABEL_REF around the REG_LABEL note, so
265 that we can canonicalize it. */
266 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
267 XEXP (label_note, 0));
269 mark_jump_label (label_ref, insn, 0);
270 XEXP (label_note, 0) = XEXP (label_ref, 0);
271 JUMP_LABEL (insn) = XEXP (label_note, 0);
278 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
279 jump. Assume that this unconditional jump is to the exit test code. If
280 the code is sufficiently simple, make a copy of it before INSN,
281 followed by a jump to the exit of the loop. Then delete the unconditional
284 Return 1 if we made the change, else 0.
286 This is only safe immediately after a regscan pass because it uses the
287 values of regno_first_uid and regno_last_uid. */
/* NOTE(review): many interior lines are elided from this listing
   (declarations, switch cases, braces); the fragment is incomplete
   and not compilable as shown.  */
290 duplicate_loop_exit_test (loop_start)
293 rtx insn, set, reg, p, link;
294 rtx copy = 0, first_copy = 0;
296 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
298 int max_reg = max_reg_num ();
300 rtx loop_pre_header_label;
302 /* Scan the exit code. We do not perform this optimization if any insn:
306 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
307 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
309 We also do not do this if we find an insn with ASM_OPERANDS. While
310 this restriction should not be necessary, copying an insn with
311 ASM_OPERANDS can confuse asm_noperands in some cases.
313 Also, don't do this if the exit code is more than 20 insns. */
315 for (insn = exitcode;
317 && ! (GET_CODE (insn) == NOTE
318 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
319 insn = NEXT_INSN (insn))
321 switch (GET_CODE (insn))
329 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
330 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
331 /* If we were to duplicate this code, we would not move
332 the BLOCK notes, and so debugging the moved code would
333 be difficult. Thus, we only move the code with -O2 or
340 /* The code below would grossly mishandle REG_WAS_0 notes,
341 so get rid of them here. */
342 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
343 remove_note (insn, p);
345 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
346 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
354 /* Unless INSN is zero, we can do the optimization. */
360 /* See if any insn sets a register only used in the loop exit code and
361 not a user variable. If so, replace it with a new register. */
362 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
363 if (GET_CODE (insn) == INSN
364 && (set = single_set (insn)) != 0
365 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
366 || (GET_CODE (reg) == SUBREG
367 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
368 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
369 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
371 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
372 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
377 /* We can do the replacement. Allocate reg_map if this is the
378 first replacement we found. */
380 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
382 REG_LOOP_TEST_P (reg) = 1;
384 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
387 loop_pre_header_label = gen_label_rtx ();
389 /* Now copy each insn. */
390 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
392 switch (GET_CODE (insn))
395 copy = emit_barrier_before (loop_start);
398 /* Only copy line-number notes. */
399 if (NOTE_LINE_NUMBER (insn) >= 0)
401 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
402 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
407 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
409 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
411 mark_jump_label (PATTERN (copy), copy, 0);
412 INSN_SCOPE (copy) = INSN_SCOPE (insn);
414 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
416 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
417 if (REG_NOTE_KIND (link) != REG_LABEL)
419 if (GET_CODE (link) == EXPR_LIST)
421 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
426 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
431 if (reg_map && REG_NOTES (copy))
432 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
436 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
438 INSN_SCOPE (copy) = INSN_SCOPE (insn);
440 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
441 mark_jump_label (PATTERN (copy), copy, 0);
442 if (REG_NOTES (insn))
444 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
446 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
449 /* Predict conditional jump that do make loop looping as taken.
450 Other jumps are probably exit conditions, so predict
452 if (any_condjump_p (copy))
454 rtx label = JUMP_LABEL (copy);
457 /* The jump_insn after loop_start should be followed
458 by barrier and loopback label. */
459 if (prev_nonnote_insn (label)
460 && (prev_nonnote_insn (prev_nonnote_insn (label))
461 == next_nonnote_insn (loop_start)))
463 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
464 /* To keep pre-header, we need to redirect all loop
465 entrances before the LOOP_BEG note. */
466 redirect_jump (copy, loop_pre_header_label, 0);
469 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
478 /* Record the first insn we copied. We need it so that we can
479 scan the copied insns for new pseudo registers. */
484 /* Now clean up by emitting a jump to the end label and deleting the jump
485 at the start of the loop. */
486 if (! copy || GET_CODE (copy) != BARRIER)
488 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
491 /* Record the first insn we copied. We need it so that we can
492 scan the copied insns for new pseudo registers. This may not
493 be strictly necessary since we should have copied at least one
494 insn above. But I am going to be safe. */
498 mark_jump_label (PATTERN (copy), copy, 0);
499 emit_barrier_before (loop_start);
502 emit_label_before (loop_pre_header_label, loop_start);
504 /* Now scan from the first insn we copied to the last insn we copied
505 (copy) for new pseudo registers. Do this after the code to jump to
506 the end label since that might create a new pseudo too. */
507 reg_scan_update (first_copy, copy, max_reg);
509 /* Mark the exit code as the virtual top of the converted loop. */
510 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
512 delete_related_insns (next_nonnote_insn (loop_start));
521 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
522 notes between START and END out before START. START and END may be such
523 notes. Returns the values of the new starting and ending insns, which
524 may be different if the original ones were such notes.
525 Return true if there were only such notes and no real instructions. */
/* NOTE(review): interior lines elided (declarations, brace structure);
   fragment is incomplete as shown.  */
528 squeeze_notes (startp, endp)
538 rtx past_end = NEXT_INSN (end);
540 for (insn = start; insn != past_end; insn = next)
542 next = NEXT_INSN (insn);
543 if (GET_CODE (insn) == NOTE
544 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
545 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
546 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
547 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
548 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
549 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
/* Unlink the note from its current position and relink it just
   before START by rewriting the doubly-linked insn chain.  */
555 rtx prev = PREV_INSN (insn);
556 PREV_INSN (insn) = PREV_INSN (start);
557 NEXT_INSN (insn) = start;
558 NEXT_INSN (PREV_INSN (insn)) = insn;
559 PREV_INSN (NEXT_INSN (insn)) = insn;
560 NEXT_INSN (prev) = next;
561 PREV_INSN (next) = prev;
568 /* There were no real instructions. */
569 if (start == past_end)
579 /* Return the label before INSN, or put a new label there. */
582 get_label_before (insn)
587 /* Find an existing label at this point
588 or make a new one if there is none. */
589 label = prev_nonnote_insn (insn);
591 if (label == 0 || GET_CODE (label) != CODE_LABEL)
/* No usable label: create one and emit it immediately before INSN.  */
593 rtx prev = PREV_INSN (insn);
595 label = gen_label_rtx ();
596 emit_label_after (label, prev);
597 LABEL_NUSES (label) = 0;
602 /* Return the label after INSN, or put a new label there. */
605 get_label_after (insn)
610 /* Find an existing label at this point
611 or make a new one if there is none. */
612 label = next_nonnote_insn (insn);
614 if (label == 0 || GET_CODE (label) != CODE_LABEL)
/* No usable label: create one and emit it immediately after INSN.  */
616 label = gen_label_rtx ();
617 emit_label_after (label, insn);
618 LABEL_NUSES (label) = 0;
623 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
624 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
625 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
626 know whether it's source is floating point or integer comparison. Machine
627 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
628 to help this function avoid overhead in these cases. */
/* NOTE(review): the switch labels and several statements are elided
   from this listing; fragment is incomplete as shown.  */
630 reversed_comparison_code_parts (code, arg0, arg1, insn)
631 rtx insn, arg0, arg1;
634 enum machine_mode mode;
636 /* If this is not actually a comparison, we can't reverse it. */
637 if (GET_RTX_CLASS (code) != '<')
640 mode = GET_MODE (arg0);
641 if (mode == VOIDmode)
642 mode = GET_MODE (arg1);
644 /* First see if machine description supply us way to reverse the comparison.
645 Give it priority over everything else to allow machine description to do
647 #ifdef REVERSIBLE_CC_MODE
648 if (GET_MODE_CLASS (mode) == MODE_CC
649 && REVERSIBLE_CC_MODE (mode))
651 #ifdef REVERSE_CONDITION
652 return REVERSE_CONDITION (code, mode);
654 return reverse_condition (code);
658 /* Try a few special cases based on the comparison code. */
667 /* It is always safe to reverse EQ and NE, even for the floating
668 point. Similary the unsigned comparisons are never used for
669 floating point so we can reverse them in the default way. */
670 return reverse_condition (code);
675 /* In case we already see unordered comparison, we can be sure to
676 be dealing with floating point so we don't need any more tests. */
677 return reverse_condition_maybe_unordered (code);
682 /* We don't have safe way to reverse these yet. */
688 if (GET_MODE_CLASS (mode) == MODE_CC
695 /* Try to search for the comparison to determine the real mode.
696 This code is expensive, but with sane machine description it
697 will be never used, since REVERSIBLE_CC_MODE will return true
/* Walk backwards within the basic block (stop at a CODE_LABEL)
   looking for the insn that set ARG0.  */
702 for (prev = prev_nonnote_insn (insn);
703 prev != 0 && GET_CODE (prev) != CODE_LABEL;
704 prev = prev_nonnote_insn (prev))
706 rtx set = set_of (arg0, prev);
707 if (set && GET_CODE (set) == SET
708 && rtx_equal_p (SET_DEST (set), arg0))
710 rtx src = SET_SRC (set);
712 if (GET_CODE (src) == COMPARE)
714 rtx comparison = src;
715 arg0 = XEXP (src, 0);
716 mode = GET_MODE (arg0);
717 if (mode == VOIDmode)
718 mode = GET_MODE (XEXP (comparison, 1));
721 /* We can get past reg-reg moves. This may be useful for model
722 of i387 comparisons that first move flag registers around. */
729 /* If register is clobbered in some ununderstandable way,
736 /* Test for an integer condition, or a floating-point comparison
737 in which NaNs can be ignored. */
738 if (GET_CODE (arg0) == CONST_INT
739 || (GET_MODE (arg0) != VOIDmode
740 && GET_MODE_CLASS (mode) != MODE_CC
741 && !HONOR_NANS (mode)))
742 return reverse_condition (code);
747 /* An wrapper around the previous function to take COMPARISON as rtx
748 expression. This simplifies many callers. */
750 reversed_comparison_code (comparison, insn)
751 rtx comparison, insn;
753 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
/* Delegate to the parts-based helper; the elided line above this call
   presumably returns UNKNOWN for non-comparisons.  */
755 return reversed_comparison_code_parts (GET_CODE (comparison),
756 XEXP (comparison, 0),
757 XEXP (comparison, 1), insn);
760 /* Given an rtx-code for a comparison, return the code for the negated
761 comparison. If no such code exists, return UNKNOWN.
763 WATCH OUT! reverse_condition is not safe to use on a jump that might
764 be acting on the results of an IEEE floating point comparison, because
765 of the special treatment of non-signaling nans in comparisons.
766 Use reversed_comparison_code instead. */
/* NOTE(review): the bodies of the five condition-code helpers below
   (reverse_condition, reverse_condition_maybe_unordered, swap_condition,
   unsigned_condition, signed_condition) are almost entirely elided from
   this listing; only headers and signatures are visible.  */
769 reverse_condition (code)
812 /* Similar, but we're allowed to generate unordered comparisons, which
813 makes it safe for IEEE floating-point. Of course, we have to recognize
814 that the target will support them too... */
817 reverse_condition_maybe_unordered (code)
856 /* Similar, but return the code when two operands of a comparison are swapped.
857 This IS safe for IEEE floating-point. */
860 swap_condition (code)
903 /* Given a comparison CODE, return the corresponding unsigned comparison.
904 If CODE is an equality comparison or already an unsigned comparison,
908 unsigned_condition (code)
935 /* Similarly, return the signed version of a comparison. */
938 signed_condition (code)
965 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
966 truth of CODE1 implies the truth of CODE2. */
/* NOTE(review): the switch/case framework around these tests is elided
   from this listing; each `if' below presumably sits under a case for
   a particular CODE1 value.  */
969 comparison_dominates_p (code1, code2)
970 enum rtx_code code1, code2;
972 /* UNKNOWN comparison codes can happen as a result of trying to revert
974 They can't match anything, so we have to reject them here. */
975 if (code1 == UNKNOWN || code2 == UNKNOWN)
984 if (code2 == UNLE || code2 == UNGE)
989 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
995 if (code2 == UNLE || code2 == NE)
1000 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1005 if (code2 == UNGE || code2 == NE)
1010 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1016 if (code2 == ORDERED)
1021 if (code2 == NE || code2 == ORDERED)
1026 if (code2 == LEU || code2 == NE)
1031 if (code2 == GEU || code2 == NE)
1036 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1037 || code2 == UNGE || code2 == UNGT)
1048 /* Return 1 if INSN is an unconditional jump and nothing else. */
/* NOTE(review): the function signature is elided from this listing
   (numbering jumps 1048 -> 1054); presumably this is simplejump_p --
   confirm against the full source.  */
1054 return (GET_CODE (insn) == JUMP_INSN
1055 && GET_CODE (PATTERN (insn)) == SET
1056 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1057 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1060 /* Return nonzero if INSN is a (possibly) conditional jump
1063 Use this function is deprecated, since we need to support combined
1064 branch and compare insns. Use any_condjump_p instead whenever possible. */
/* NOTE(review): the function signature is elided; presumably this is
   condjump_p -- confirm against the full source.  */
1070 rtx x = PATTERN (insn);
1072 if (GET_CODE (x) != SET
1073 || GET_CODE (SET_DEST (x)) != PC)
/* The elided line 1076 presumably rebinds X to SET_SRC (x) before
   this LABEL_REF test.  */
1077 if (GET_CODE (x) == LABEL_REF)
1080 return (GET_CODE (x) == IF_THEN_ELSE
1081 && ((GET_CODE (XEXP (x, 2)) == PC
1082 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1083 || GET_CODE (XEXP (x, 1)) == RETURN))
1084 || (GET_CODE (XEXP (x, 1)) == PC
1085 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1086 || GET_CODE (XEXP (x, 2)) == RETURN))));
1091 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1094 Use this function is deprecated, since we need to support combined
1095 branch and compare insns. Use any_condjump_p instead whenever possible. */
1098 condjump_in_parallel_p (insn)
1101 rtx x = PATTERN (insn);
1103 if (GET_CODE (x) != PARALLEL)
/* Examine only the first element of the PARALLEL; it must be a SET
   of the PC for this to be a jump.  */
1106 x = XVECEXP (x, 0, 0);
1108 if (GET_CODE (x) != SET)
1110 if (GET_CODE (SET_DEST (x)) != PC)
1112 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1114 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
/* Accept an IF_THEN_ELSE whose fallthrough arm is pc_rtx and whose
   taken arm is a label or RETURN, in either operand order.  (The
   `return 1;' lines between these tests are elided.)  */
1116 if (XEXP (SET_SRC (x), 2) == pc_rtx
1117 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1118 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1120 if (XEXP (SET_SRC (x), 1) == pc_rtx
1121 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1122 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1127 /* Return set of PC, otherwise NULL. */
/* NOTE(review): the function signature is elided; presumably this is
   pc_set -- confirm against the full source.  */
1134 if (GET_CODE (insn) != JUMP_INSN)
1136 pat = PATTERN (insn);
1138 /* The set is allowed to appear either as the insn pattern or
1139 the first set in a PARALLEL. */
1140 if (GET_CODE (pat) == PARALLEL)
1141 pat = XVECEXP (pat, 0, 0);
1142 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1148 /* Return true when insn is an unconditional direct jump,
1149 possibly bundled inside a PARALLEL. */
1152 any_uncondjump_p (insn)
1155 rtx x = pc_set (insn);
/* An elided line presumably returns 0 when there is no PC set; the
   remaining test requires the source to be a plain LABEL_REF.  */
1158 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1163 /* Return true when insn is a conditional jump. This function works for
1164 instructions containing PC sets in PARALLELs. The instruction may have
1165 various other effects so before removing the jump you must verify
1168 Note that unlike condjump_p it returns false for unconditional jumps. */
1171 any_condjump_p (insn)
1174 rtx x = pc_set (insn);
1179 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
/* A conditional jump has one arm equal to PC (fallthrough) and the
   other a LABEL_REF or RETURN, in either order.  */
1182 a = GET_CODE (XEXP (SET_SRC (x), 1));
1183 b = GET_CODE (XEXP (SET_SRC (x), 2));
1185 return ((b == PC && (a == LABEL_REF || a == RETURN))
1186 || (a == PC && (b == LABEL_REF || b == RETURN)));
1189 /* Return the label of a conditional jump. */
1192 condjump_label (insn)
1195 rtx x = pc_set (insn);
/* An elided line presumably rebinds X to SET_SRC (x) before these
   tests; the LABEL_REF arm of the IF_THEN_ELSE is returned.  */
1200 if (GET_CODE (x) == LABEL_REF)
1202 if (GET_CODE (x) != IF_THEN_ELSE)
1204 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1206 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1211 /* Return true if INSN is a (possibly conditional) return insn. */
/* for_each_rtx callback: nonzero when *LOC is a RETURN rtx or a SET
   marked as a return.  */
1214 returnjump_p_1 (loc, data)
1216 void *data ATTRIBUTE_UNUSED;
1220 return x && (GET_CODE (x) == RETURN
1221 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
/* NOTE(review): the signature of the outer predicate (presumably
   returnjump_p) is elided from this listing.  */
1228 if (GET_CODE (insn) != JUMP_INSN)
1230 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1233 /* Return true if INSN is a jump that only transfers control and
/* NOTE(review): the function signature is elided; presumably this is
   onlyjump_p -- confirm against the full source.  */
1242 if (GET_CODE (insn) != JUMP_INSN)
1245 set = single_set (insn);
/* Elided lines presumably return 0 when there is no single set;
   the jump must target PC and its source must be side-effect free.  */
1248 if (GET_CODE (SET_DEST (set)) != PC)
1250 if (side_effects_p (SET_SRC (set)))
1258 /* Return nonzero if X is an RTX that only sets the condition codes
1259 and has no side effects. */
/* NOTE(review): the signatures of both cc0 predicates below are elided
   from this listing.  */
1272 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1275 /* Return 1 if X is an RTX that does nothing but set the condition codes
1276 and CLOBBER or USE registers.
1277 Return -1 if X does explicitly set the condition codes,
1278 but also does other things. */
1291 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1293 if (GET_CODE (x) == PARALLEL)
1297 int other_things = 0;
/* Scan the PARALLEL: count SETs of cc0 versus SETs of anything else
   (the sets_cc0 accumulation line is elided).  */
1298 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1300 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1301 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1303 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1306 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1312 /* Follow any unconditional jump at LABEL;
1313 return the ultimate label reached by any such chain of jumps.
1314 If LABEL is not followed by a jump, return LABEL.
1315 If the chain loops or we can't find end, return LABEL,
1316 since that tells caller to avoid changing the insn.
1318 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1319 a USE or CLOBBER. */
/* NOTE(review): the loop framework and several statements are elided
   from this listing; the condition below is presumably part of the
   chain-following `for' loop's controlling expression.  */
1322 follow_jumps (label)
1332 && (insn = next_active_insn (value)) != 0
1333 && GET_CODE (insn) == JUMP_INSN
1334 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1335 && onlyjump_p (insn))
1336 || GET_CODE (PATTERN (insn)) == RETURN)
1337 && (next = NEXT_INSN (insn))
1338 && GET_CODE (next) == BARRIER)
1341 /* Don't chain through the insn that jumps into a loop
1342 from outside the loop,
1343 since that would create multiple loop entry jumps
1344 and prevent loop optimization. */
1346 if (!reload_completed)
1347 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1348 if (GET_CODE (tem) == NOTE
1349 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1350 /* ??? Optional. Disables some optimizations, but makes
1351 gcov output more accurate with -O. */
1352 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1355 /* If we have found a cycle, make the insn jump to itself. */
1356 if (JUMP_LABEL (insn) == label)
/* Don't follow into a dispatch table (ADDR_VEC / ADDR_DIFF_VEC).  */
1359 tem = next_active_insn (JUMP_LABEL (insn));
1360 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1361 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1364 value = JUMP_LABEL (insn);
1372 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1373 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1374 in INSN, then store one of them in JUMP_LABEL (INSN).
1375 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1376 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1377 Also, when there are consecutive labels, canonicalize on the last of them.
1379 Note that two labels separated by a loop-beginning note
1380 must be kept distinct if we have not yet done loop-optimization,
1381 because the gap between them is where loop-optimize
1382 will want to move invariant code to. CROSS_JUMP tells us
1383 that loop-optimization is done with. */
/* NOTE(review): the switch skeleton (case labels, braces) is elided
   from this listing; the clauses below belong to a switch on CODE.  */
1386 mark_jump_label (x, insn, in_mem)
1391 RTX_CODE code = GET_CODE (x);
1414 /* If this is a constant-pool reference, see if it is a label. */
1415 if (CONSTANT_POOL_ADDRESS_P (x))
1416 mark_jump_label (get_pool_constant (x), insn, in_mem);
1421 rtx label = XEXP (x, 0);
1423 /* Ignore remaining references to unreachable labels that
1424 have been deleted. */
1425 if (GET_CODE (label) == NOTE
1426 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1429 if (GET_CODE (label) != CODE_LABEL)
1432 /* Ignore references to labels of containing functions. */
1433 if (LABEL_REF_NONLOCAL_P (x))
1436 XEXP (x, 0) = label;
1437 if (! insn || ! INSN_DELETED_P (insn))
1438 ++LABEL_NUSES (label);
1442 if (GET_CODE (insn) == JUMP_INSN)
1443 JUMP_LABEL (insn) = label;
1446 /* Add a REG_LABEL note for LABEL unless there already
1447 is one. All uses of a label, except for labels
1448 that are the targets of jumps, must have a
1450 if (! find_reg_note (insn, REG_LABEL, label))
1451 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1458 /* Do walk the labels in a vector, but not the first operand of an
1459 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1462 if (! INSN_DELETED_P (insn))
1464 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1466 for (i = 0; i < XVECLEN (x, eltnum); i++)
1467 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
/* Default case: recurse into sub-expressions using the rtx format
   string ('e' = expression operand, 'E' = vector of expressions).  */
1475 fmt = GET_RTX_FORMAT (code);
1476 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1479 mark_jump_label (XEXP (x, i), insn, in_mem);
1480 else if (fmt[i] == 'E')
1483 for (j = 0; j < XVECLEN (x, i); j++)
1484 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1489 /* If all INSN does is set the pc, delete it,
1490 and delete the insn that set the condition codes for it
1491 if that's what the previous thing was. */
/* NOTE(review): the signature of this function (presumably delete_jump)
   is elided from the listing.  */
1497 rtx set = single_set (insn);
1499 if (set && GET_CODE (SET_DEST (set)) == PC)
1500 delete_computation (insn);
1503 /* Verify INSN is a BARRIER and delete it. */
1506 delete_barrier (insn)
/* The deletion call after this sanity check is elided.  */
1509 if (GET_CODE (insn) != BARRIER)
1515 /* Recursively delete prior insns that compute the value (used only by INSN
1516 which the caller is deleting) stored in the register mentioned by NOTE
1517 which is a REG_DEAD note associated with INSN. */
/* NOTE(review): braces and some statements are elided from this
   listing; the fragment is incomplete as shown.  */
1520 delete_prior_computation (note, insn)
1525 rtx reg = XEXP (note, 0);
/* Walk backwards over ordinary and call insns looking for the setter
   of REG.  */
1527 for (our_prev = prev_nonnote_insn (insn);
1528 our_prev && (GET_CODE (our_prev) == INSN
1529 || GET_CODE (our_prev) == CALL_INSN);
1530 our_prev = prev_nonnote_insn (our_prev))
1532 rtx pat = PATTERN (our_prev);
1534 /* If we reach a CALL which is not calling a const function
1535 or the callee pops the arguments, then give up. */
1536 if (GET_CODE (our_prev) == CALL_INSN
1537 && (! CONST_OR_PURE_CALL_P (our_prev)
1538 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1541 /* If we reach a SEQUENCE, it is too complex to try to
1542 do anything with it, so give up. We can be run during
1543 and after reorg, so SEQUENCE rtl can legitimately show
1545 if (GET_CODE (pat) == SEQUENCE)
1548 if (GET_CODE (pat) == USE
1549 && GET_CODE (XEXP (pat, 0)) == INSN)
1550 /* reorg creates USEs that look like this. We leave them
1551 alone because reorg needs them for its own purposes. */
1554 if (reg_set_p (reg, pat))
1556 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1559 if (GET_CODE (pat) == PARALLEL)
1561 /* If we find a SET of something else, we can't
1566 for (i = 0; i < XVECLEN (pat, 0); i++)
1568 rtx part = XVECEXP (pat, 0, i);
1570 if (GET_CODE (part) == SET
1571 && SET_DEST (part) != reg)
1575 if (i == XVECLEN (pat, 0))
1576 delete_computation (our_prev);
1578 else if (GET_CODE (pat) == SET
1579 && GET_CODE (SET_DEST (pat)) == REG)
1581 int dest_regno = REGNO (SET_DEST (pat));
1584 + (dest_regno < FIRST_PSEUDO_REGISTER
1585 ? HARD_REGNO_NREGS (dest_regno,
1586 GET_MODE (SET_DEST (pat))) : 1));
1587 int regno = REGNO (reg);
1590 + (regno < FIRST_PSEUDO_REGISTER
1591 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
/* The set is dead only if the whole destination lies inside the
   dying register's range.  */
1593 if (dest_regno >= regno
1594 && dest_endregno <= endregno)
1595 delete_computation (our_prev);
1597 /* We may have a multi-word hard register and some, but not
1598 all, of the words of the register are needed in subsequent
1599 insns. Write REG_UNUSED notes for those parts that were not
1601 else if (dest_regno <= regno
1602 && dest_endregno >= endregno)
1606 REG_NOTES (our_prev)
1607 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1608 REG_NOTES (our_prev))
1610 for (i = dest_regno; i < dest_endregno; i++)
1611 if (! find_regno_note (our_prev, REG_UNUSED, i))
1614 if (i == dest_endregno)
1615 delete_computation (our_prev);
1622 /* If PAT references the register that dies here, it is an
1623 additional use. Hence any prior SET isn't dead. However, this
1624 insn becomes the new place for the REG_DEAD note. */
1625 if (reg_overlap_mentioned_p (reg, pat))
1627 XEXP (note, 1) = REG_NOTES (our_prev);
1628 REG_NOTES (our_prev) = note;
1634 /* Delete INSN and recursively delete insns that compute values used only
1635 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1636 If we are running before flow.c, we need do nothing since flow.c will
1637 delete dead code. We also can't know if the registers being used are
1638 dead or not at this point.
1640 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1641 nothing other than set a register that dies in this insn, we can delete
1644 On machines with CC0, if CC0 is used in this insn, we may be able to
1645 delete the insn that set it. */
/* NOTE(review): this excerpt is missing interior lines (declarations,
   braces, #ifdef HAVE_cc0 guards are elided) -- do not compile as-is.  */
1648 delete_computation (insn)
/* cc0 special case: if INSN consumes cc0, the setter just before it may
   itself become dead (presumably guarded by #ifdef HAVE_cc0 in the
   elided lines -- TODO confirm against the full source).  */
1654 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1656 rtx prev = prev_nonnote_insn (insn);
1657 /* We assume that at this stage
1658 CC's are always set explicitly
1659 and always immediately before the jump that
1660 will use them. So if the previous insn
1661 exists to set the CC's, delete it
1662 (unless it performs auto-increments, etc.). */
1663 if (prev && GET_CODE (prev) == INSN
1664 && sets_cc0_p (PATTERN (prev)))
/* sets_cc0_p > 0 means PREV sets cc0 and nothing else, so it can go
   entirely; otherwise only mark cc0 unused via a REG_UNUSED note.  */
1666 if (sets_cc0_p (PATTERN (prev)) > 0
1667 && ! side_effects_p (PATTERN (prev)))
1668 delete_computation (prev);
1670 /* Otherwise, show that cc0 won't be used. */
1671 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1672 cc0_rtx, REG_NOTES (prev));
/* Walk the REG_DEAD notes of INSN; each names a register whose prior
   computation may now be deletable.  NEXT is captured before the call
   because delete_prior_computation may rewrite the note list.  */
1677 for (note = REG_NOTES (insn); note; note = next)
1679 next = XEXP (note, 1);
1681 if (REG_NOTE_KIND (note) != REG_DEAD
1682 /* Verify that the REG_NOTE is legitimate. */
1683 || GET_CODE (XEXP (note, 0)) != REG)
1686 delete_prior_computation (note, insn);
/* Finally delete INSN itself (and anything made unreachable by it).  */
1689 delete_related_insns (insn);
1692 /* Delete insn INSN from the chain of insns and update label ref counts
1693 and delete insns now unreachable.
1695 Returns the first insn after INSN that was not deleted.
1697 Usage of this instruction is deprecated. Use delete_insn instead and
1698 subsequent cfg_cleanup pass to delete unreachable code if needed. */
/* NOTE(review): interior lines (parameter decls, braces, several
   statements) are elided in this excerpt.  */
1701 delete_related_insns (insn)
1704 int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1706 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
/* Skip over insns already marked deleted so NEXT is a live insn.  */
1708 while (next && INSN_DELETED_P (next))
1709 next = NEXT_INSN (next);
1711 /* This insn is already deleted => return first following nondeleted. */
1712 if (INSN_DELETED_P (insn))
1717 /* If instruction is followed by a barrier,
1718 delete the barrier too. */
1720 if (next != 0 && GET_CODE (next) == BARRIER)
1723 /* If deleting a jump, decrement the count of the label,
1724 and delete the label if it is now unused. */
1726 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1728 rtx lab = JUMP_LABEL (insn), lab_next;
/* Presumably LABEL_NUSES was decremented in the elided lines above;
   zero here means the label just became unreferenced -- TODO confirm.  */
1730 if (LABEL_NUSES (lab) == 0)
1732 /* This can delete NEXT or PREV,
1733 either directly if NEXT is JUMP_LABEL (INSN),
1734 or indirectly through more levels of jumps. */
1735 delete_related_insns (lab);
1737 /* I feel a little doubtful about this loop,
1738 but I see no clean and sure alternative way
1739 to find the first insn after INSN that is not now deleted.
1740 I hope this works. */
1741 while (next && INSN_DELETED_P (next))
1742 next = NEXT_INSN (next);
/* Label still used, but if it heads a tablejump dispatch table we can
   remove the (now one-less-referenced) table itself.  */
1745 else if ((lab_next = next_nonnote_insn (lab)) != NULL
1746 && GET_CODE (lab_next) == JUMP_INSN
1747 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
1748 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
1750 /* If we're deleting the tablejump, delete the dispatch table.
1751 We may not be able to kill the label immediately preceding
1752 just yet, as it might be referenced in code leading up to
1754 delete_related_insns (lab_next);
1758 /* Likewise if we're deleting a dispatch table. */
1760 if (GET_CODE (insn) == JUMP_INSN
1761 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1762 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1764 rtx pat = PATTERN (insn);
1765 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1766 int len = XVECLEN (pat, diff_vec_p);
/* Each table entry is a LABEL_REF; delete any label the table was the
   last user of.  */
1768 for (i = 0; i < len; i++)
1769 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1770 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1771 while (next && INSN_DELETED_P (next))
1772 next = NEXT_INSN (next);
1776 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1777 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1778 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1779 if (REG_NOTE_KIND (note) == REG_LABEL
1780 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1781 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1782 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1783 delete_related_insns (XEXP (note, 0));
/* Back up over deleted insns and notes to find the live predecessor.  */
1785 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1786 prev = PREV_INSN (prev);
1788 /* If INSN was a label and a dispatch table follows it,
1789 delete the dispatch table. The tablejump must have gone already.
1790 It isn't useful to fall through into a table. */
1793 && NEXT_INSN (insn) != 0
1794 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1795 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1796 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1797 next = delete_related_insns (NEXT_INSN (insn));
1799 /* If INSN was a label, delete insns following it if now unreachable. */
/* A BARRIER before a deleted label means nothing falls through to the
   code after it, so that code is dead until the next reachable label.  */
1801 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1805 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1806 || code == NOTE || code == BARRIER
1807 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1810 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1811 next = NEXT_INSN (next);
1812 /* Keep going past other deleted labels to delete what follows. */
1813 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1814 next = NEXT_INSN (next);
1816 /* Note: if this deletes a jump, it can cause more
1817 deletion of unreachable code, after a different label.
1818 As long as the value from this recursive call is correct,
1819 this invocation functions correctly. */
1820 next = delete_related_insns (next);
1827 /* Advance from INSN till reaching something not deleted
1828 then return that. May return INSN itself. */
/* NOTE(review): return statement and braces elided in this excerpt.
   Presumably walks forward unconditionally -- caller must guarantee a
   non-deleted insn exists, since a NULL INSN would crash INSN_DELETED_P.  */
1831 next_nondeleted_insn (insn)
1834 while (INSN_DELETED_P (insn))
1835 insn = NEXT_INSN (insn);
1839 /* Delete a range of insns from FROM to TO, inclusive.
1840 This is for the sake of peephole optimization, so assume
1841 that whatever these insns do will still be done by a new
1842 peephole insn that will replace them. */
/* NOTE(review): loop header, braces and the loop-exit test on TO are
   elided in this excerpt.  */
1845 delete_for_peephole (from, to)
1852 rtx next = NEXT_INSN (insn);
1853 rtx prev = PREV_INSN (insn);
/* NOTEs are never removed here: only real insns are marked deleted and
   spliced out, one at a time, so every NOTE keeps its place.  */
1855 if (GET_CODE (insn) != NOTE)
1857 INSN_DELETED_P (insn) = 1;
1859 /* Patch this insn out of the chain. */
1860 /* We don't do this all at once, because we
1861 must preserve all NOTEs. */
1863 NEXT_INSN (prev) = next;
1866 PREV_INSN (next) = prev;
1874 /* Note that if TO is an unconditional jump
1875 we *do not* delete the BARRIER that follows,
1876 since the peephole that replaces this sequence
1877 is also an unconditional jump in that case. */
1880 /* We have determined that INSN is never reached, and are about to
1881 delete it. Print a warning if the user asked for one.
1883 To try to make this warning more useful, this should only be called
1884 once per basic block not reached, and it only warns when the basic
1885 block contains more than one line from the current function, and
1886 contains at least one operation. CSE and inlining can duplicate insns,
1887 so it's possible to get spurious warnings from this. */
/* NOTE(review): several interior lines (loop exits, FINISH handling)
   are elided in this excerpt.  */
1890 never_reached_warning (avoided_insn, finish)
1891 rtx avoided_insn, finish;
1894 rtx a_line_note = NULL;
1895 int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;
/* Fast exit when -Wunreachable-code was not requested.  */
1897 if (! warn_notreached)
1900 /* Scan forwards, looking at LINE_NUMBER notes, until
1901 we hit a LABEL or we run out of insns. */
1903 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
1905 if (finish == NULL && GET_CODE (insn) == CODE_LABEL)
1908 if (GET_CODE (insn) == NOTE /* A line number note? */
1909 && NOTE_LINE_NUMBER (insn) >= 0)
/* Remember the first line note; flag when a second, different line
   number appears (the "more than one line" criterion above).  */
1911 if (a_line_note == NULL)
1914 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1915 != NOTE_LINE_NUMBER (insn));
1917 else if (INSN_P (insn))
1919 if (reached_end || a_line_note == NULL)
/* Warn only if the dead block spans multiple lines AND contains at
   least one real operation -- cuts down on CSE/inline noise.  */
1927 if (two_avoided_lines && contains_insn)
1928 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
1929 NOTE_LINE_NUMBER (a_line_note),
1930 "will never be executed");
1933 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1934 NLABEL as a return. Accrue modifications into the change group. */
/* NOTE(review): parameter declarations, braces and some early-exit
   lines are elided in this excerpt.  */
1937 redirect_exp_1 (loc, olabel, nlabel, insn)
1943 RTX_CODE code = GET_CODE (x);
/* Case 1: a LABEL_REF to OLABEL -- replace with a ref to NLABEL, or
   with (return) when NLABEL is null.  */
1947 if (code == LABEL_REF)
1949 if (XEXP (x, 0) == olabel)
1953 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1955 n = gen_rtx_RETURN (VOIDmode);
/* Queued, not applied: group flag 1 defers to apply_change_group.  */
1957 validate_change (insn, loc, n, 1);
/* Case 2: the inverse -- an existing (return) is turned into a jump to
   NLABEL when OLABEL is null.  */
1961 else if (code == RETURN && olabel == 0)
1963 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1964 if (loc == &PATTERN (insn))
1965 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1966 validate_change (insn, loc, x, 1);
/* Case 3: an unconditional (set (pc) (label_ref OLABEL)) collapses to a
   bare (return) when NLABEL is null.  */
1970 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1971 && GET_CODE (SET_SRC (x)) == LABEL_REF
1972 && XEXP (SET_SRC (x), 0) == olabel)
1974 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
/* Otherwise recurse over every sub-rtx and vector element.  */
1978 fmt = GET_RTX_FORMAT (code);
1979 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1982 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1983 else if (fmt[i] == 'E')
1986 for (j = 0; j < XVECLEN (x, i); j++)
1987 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1992 /* Similar, but apply the change group and report success or failure. */
/* NOTE(review): parameter declarations and braces are elided here.  */
1995 redirect_exp (olabel, nlabel, insn)
/* For a PARALLEL, only the first element (the jump SET) is rewritten.  */
2001 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2002 loc = &XVECEXP (PATTERN (insn), 0, 0);
2004 loc = &PATTERN (insn);
2006 redirect_exp_1 (loc, olabel, nlabel, insn);
/* No queued changes means nothing matched -- the elided line presumably
   returns failure here (TODO confirm).  */
2007 if (num_validated_changes () == 0)
2010 return apply_change_group ();
2013 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2014 the modifications into the change group. Return false if we did
2015 not see how to do that. */
/* NOTE(review): parameter declarations and braces are elided here.  */
2018 redirect_jump_1 (jump, nlabel)
/* Snapshot the change-group size so we can tell whether redirect_exp_1
   queued anything for this jump.  */
2021 int ochanges = num_validated_changes ();
2024 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2025 loc = &XVECEXP (PATTERN (jump), 0, 0);
2027 loc = &PATTERN (jump);
2029 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
/* Changes stay queued; the caller applies or cancels the group.  */
2030 return num_validated_changes () > ochanges;
2033 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2034 jump target label is unused as a result, it and the code following
2037 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2040 The return value will be 1 if the change was made, 0 if it wasn't
2041 (this can only occur for NLABEL == 0). */
/* NOTE(review): parameter declarations and braces are elided here.  */
2044 redirect_jump (jump, nlabel, delete_unused)
2048 rtx olabel = JUMP_LABEL (jump);
/* Redirecting to the same label is a trivial success.  */
2050 if (nlabel == olabel)
2053 if (! redirect_exp (olabel, nlabel, jump))
/* Keep JUMP_LABEL and the label use counts in sync with the rewrite.  */
2056 JUMP_LABEL (jump) = nlabel;
2058 ++LABEL_NUSES (nlabel);
2060 /* If we're eliding the jump over exception cleanups at the end of a
2061 function, move the function end note so that -Wreturn-type works. */
2062 if (olabel && nlabel
2063 && NEXT_INSN (olabel)
2064 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2065 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2066 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
2068 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
2069 /* Undefined labels will remain outside the insn stream. */
2070 && INSN_UID (olabel))
2071 delete_related_insns (olabel);
2076 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2077 Accrue the modifications into the change group. */
/* NOTE(review): the function header line and several interior lines
   (including the TEM declaration used below) are elided here.  */
2084 rtx x = pc_set (insn);
2090 code = GET_CODE (x);
2092 if (code == IF_THEN_ELSE)
2094 rtx comp = XEXP (x, 0);
2096 enum rtx_code reversed_code;
2098 /* We can do this in two ways: The preferable way, which can only
2099 be done if this is not an integer comparison, is to reverse
2100 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2101 of the IF_THEN_ELSE. If we can't do either, fail. */
2103 reversed_code = reversed_comparison_code (comp, insn);
2105 if (reversed_code != UNKNOWN)
/* Preferred path: rebuild the comparison with the reversed code,
   queued into the change group.  */
2107 validate_change (insn, &XEXP (x, 0),
2108 gen_rtx_fmt_ee (reversed_code,
2109 GET_MODE (comp), XEXP (comp, 0),
/* Fallback path: swap the THEN (arm 1) and ELSE (arm 2) branches.  */
2116 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2117 validate_change (insn, &XEXP (x, 2), tem, 1);
2123 /* Invert the jump condition of conditional jump insn, INSN.
2125 Return 1 if we can do so, 0 if we cannot find a way to do so that
2126 matches a pattern. */
/* NOTE(review): the invert_exp function header is entirely elided in
   this excerpt; only its body survives.  Mirrors redirect_exp: queue
   changes, fail if none were queued, else apply the group.  */
2132 invert_exp_1 (insn);
2133 if (num_validated_changes () == 0)
2136 return apply_change_group ();
2139 /* Invert the condition of the jump JUMP, and make it jump to label
2140 NLABEL instead of where it jumps now. Accrue changes into the
2141 change group. Return false if we didn't see how to perform the
2142 inversion and redirection. */
/* NOTE(review): parameter declarations and braces are elided here.  */
2145 invert_jump_1 (jump, nlabel)
/* The inversion must queue at least one change; only then attempt the
   redirection (also queued, not yet applied).  */
2150 ochanges = num_validated_changes ();
2151 invert_exp_1 (jump);
2152 if (num_validated_changes () == ochanges)
2155 return redirect_jump_1 (jump, nlabel);
2158 /* Invert the condition of the jump JUMP, and make it jump to label
2159 NLABEL instead of where it jumps now. Return true if successful. */
/* NOTE(review): parameter declarations, braces and the return
   statements are elided here.  */
2162 invert_jump (jump, nlabel, delete_unused)
2166 /* We have to either invert the condition and change the label or
2167 do neither. Either operation could fail. We first try to invert
2168 the jump. If that succeeds, we try changing the label. If that fails,
2169 we invert the jump back to what it was. */
2171 if (! invert_exp (jump))
2174 if (redirect_jump (jump, nlabel, delete_unused))
/* Branch-probability notes must be flipped along with the condition.  */
2176 invert_br_probabilities (jump);
/* Roll back: re-invert to restore the original condition.  */
2181 if (! invert_exp (jump))
2182 /* This should just be putting it back the way it was. */
2189 /* Like rtx_equal_p except that it considers two REGs as equal
2190 if they renumber to the same value and considers two commutative
2191 operations to be the same if the order of the operands has been
2194 ??? Addition is not commutative on the PA due to the weird implicit
2195 space register selection rules for memory addresses. Therefore, we
2196 don't consider a + b == b + a.
2198 We could/should make this test a little tighter. Possibly only
2199 disabling it on the PA via some backend macro or only disabling this
2200 case when the PLUS is inside a MEM. */
/* NOTE(review): interior lines (declarations, braces, some switch
   cases) are elided in this excerpt.  */
2203 rtx_renumbered_equal_p (x, y)
2207 RTX_CODE code = GET_CODE (x);
/* REG/SUBREG-of-REG vs REG/SUBREG-of-REG: compare the post-reload
   (renumbered) hard register numbers and subreg byte offsets.  */
2213 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2214 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2215 && GET_CODE (SUBREG_REG (y)) == REG)))
2217 int reg_x = -1, reg_y = -1;
2218 int byte_x = 0, byte_y = 0;
2220 if (GET_MODE (x) != GET_MODE (y))
2223 /* If we haven't done any renumbering, don't
2224 make any assumptions. */
2225 if (reg_renumber == 0)
2226 return rtx_equal_p (x, y);
/* X side: fold a SUBREG of a renumbered pseudo into the concrete hard
   regno + zero byte offset via subreg_regno_offset.  */
2230 reg_x = REGNO (SUBREG_REG (x));
2231 byte_x = SUBREG_BYTE (x);
2233 if (reg_renumber[reg_x] >= 0)
2235 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2236 GET_MODE (SUBREG_REG (x)),
2245 if (reg_renumber[reg_x] >= 0)
2246 reg_x = reg_renumber[reg_x];
2249 if (GET_CODE (y) == SUBREG)
/* Y side: identical treatment.  */
2251 reg_y = REGNO (SUBREG_REG (y));
2252 byte_y = SUBREG_BYTE (y);
2254 if (reg_renumber[reg_y] >= 0)
2256 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2257 GET_MODE (SUBREG_REG (y)),
2266 if (reg_renumber[reg_y] >= 0)
2267 reg_y = reg_renumber[reg_y];
2270 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2273 /* Now we have disposed of all the cases
2274 in which different rtx codes can match. */
2275 if (code != GET_CODE (y))
/* Per-code equality cases (part of an elided switch, presumably).  */
2287 return INTVAL (x) == INTVAL (y);
2290 /* We can't assume nonlocal labels have their following insns yet. */
2291 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2292 return XEXP (x, 0) == XEXP (y, 0);
2294 /* Two label-refs are equivalent if they point at labels
2295 in the same position in the instruction stream. */
2296 return (next_real_insn (XEXP (x, 0))
2297 == next_real_insn (XEXP (y, 0)));
2300 return XSTR (x, 0) == XSTR (y, 0);
2303 /* If we didn't match EQ equality above, they aren't the same. */
2310 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2312 if (GET_MODE (x) != GET_MODE (y))
2315 /* For commutative operations, the RTX match if the operand match in any
2316 order. Also handle the simple binary and unary cases without a loop.
2318 ??? Don't consider PLUS a commutative operator; see comments above. */
2319 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2321 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2322 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2323 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2324 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2325 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2326 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2327 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2328 else if (GET_RTX_CLASS (code) == '1')
2329 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2331 /* Compare the elements. If any pair of corresponding elements
2332 fail to match, return 0 for the whole things. */
2334 fmt = GET_RTX_FORMAT (code);
2335 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* Format-directed comparison (switch cases elided): 'w'/'i'/'t'/'s'
   compare by value, 'e' recurses, '0'/'u' compare pointers, 'E' walks
   the vector.  */
2341 if (XWINT (x, i) != XWINT (y, i))
2346 if (XINT (x, i) != XINT (y, i))
2351 if (XTREE (x, i) != XTREE (y, i))
2356 if (strcmp (XSTR (x, i), XSTR (y, i)))
2361 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2366 if (XEXP (x, i) != XEXP (y, i))
2373 if (XVECLEN (x, i) != XVECLEN (y, i))
2375 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2376 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2387 /* If X is a hard register or equivalent to one or a subregister of one,
2388 return the hard register number. If X is a pseudo register that was not
2389 assigned a hard register, return the pseudo register number. Otherwise,
2390 return -1. Any rtx is valid for X. */
/* NOTE(review): the function header line and the final return -1 path
   are elided in this excerpt.  */
2396 if (GET_CODE (x) == REG)
/* Pseudos that got a hard reg map through reg_renumber; hard regs and
   unallocated pseudos presumably return REGNO (x) in an elided line.  */
2398 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2399 return reg_renumber[REGNO (x)];
2402 if (GET_CODE (x) == SUBREG)
2404 int base = true_regnum (SUBREG_REG (x));
/* A subreg of a hard register resolves to a concrete hard regno via
   the subreg byte offset.  */
2405 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2406 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2407 GET_MODE (SUBREG_REG (x)),
2408 SUBREG_BYTE (x), GET_MODE (x));
2413 /* Return regno of the register REG and handle subregs too. */
2415 reg_or_subregno (reg)
2420 if (GET_CODE (reg) == SUBREG)
2421 return REGNO (SUBREG_REG (reg));