1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically set of utility function to
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
39 #include "coretypes.h"
44 #include "hard-reg-set.h"
46 #include "insn-config.h"
47 #include "insn-attr.h"
57 #include "diagnostic.h"
59 /* Optimize jump y; x: ... y: jumpif... x?
60 Don't know if it is worth bothering with. */
61 /* Optimize two cases of conditional jump to conditional jump?
62 This can never delete any instruction or make anything dead,
63 or even change what is live at any point.
64 So perhaps let combiner do it. */
/* Forward declarations for the file-local helpers of this jump pass.
   PARAMS is the pre-ISO-C prototype macro used throughout old GCC.
   NOTE(review): this excerpt is missing interior lines (see the gaps in
   the embedded original line numbers); text kept verbatim.  */
66 static rtx next_nonnote_insn_in_loop PARAMS ((rtx));
67 static void init_label_info PARAMS ((rtx));
68 static void mark_all_labels PARAMS ((rtx));
69 static int duplicate_loop_exit_test PARAMS ((rtx));
70 static void delete_computation PARAMS ((rtx));
71 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
72 static int redirect_exp PARAMS ((rtx, rtx, rtx));
73 static void invert_exp_1 PARAMS ((rtx));
74 static int invert_exp PARAMS ((rtx));
75 static int returnjump_p_1 PARAMS ((rtx *, void *));
76 static void delete_prior_computation PARAMS ((rtx, rtx));
/* NOTE(review): fragment of rebuild_jump_labels — the return type, braces,
   local declarations and (presumably) the mark_all_labels call are missing
   from this excerpt; visible lines preserved verbatim.  */
78 /* Alternate entry into the jump optimizer. This entry point only rebuilds
79 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
82 rebuild_jump_labels (f)
87 timevar_push (TV_REBUILD_JUMP);
91 /* Keep track of labels used from static data; we don't track them
92 closely enough to delete them here, so make sure their reference
93 count doesn't drop to zero. */
/* Walk the forced_labels EXPR_LIST and bump each label's use count so the
   labels referenced only from static data are never reclaimed.  */
95 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
96 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
97 LABEL_NUSES (XEXP (insn, 0))++;
98 timevar_pop (TV_REBUILD_JUMP);
/* NOTE(review): the function header is missing from this excerpt — from the
   leading comment this is presumably the barrier-cleanup pass
   (cleanup_barriers); confirm against the full file.  */
101 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
102 non-fallthru insn. This is not generally true, as multiple barriers
103 may have crept in, or the BARRIER may be separated from the last
104 real insn by one or more NOTEs.
106 This simple pass moves barriers and removes duplicates so that the
112 rtx insn, next, prev;
113 for (insn = get_insns (); insn; insn = next)
115 next = NEXT_INSN (insn);
116 if (GET_CODE (insn) == BARRIER)
/* Duplicate barrier (previous real insn is also a BARRIER): delete.
   Otherwise, if NOTEs intervene, move the barrier up next to PREV.  */
118 prev = prev_nonnote_insn (insn);
119 if (GET_CODE (prev) == BARRIER)
120 delete_barrier (insn);
121 else if (prev != PREV_INSN (insn))
122 reorder_insns (insn, insn, prev);
/* NOTE(review): fragment — the loop construct and return statements that
   surround these tests are missing from this excerpt.  */
127 /* Return the next insn after INSN that is not a NOTE and is in the loop,
128 i.e. when there is no such INSN before NOTE_INSN_LOOP_END return NULL_RTX.
129 This routine does not look inside SEQUENCEs. */
132 next_nonnote_insn_in_loop (insn)
137 insn = NEXT_INSN (insn);
/* Stop on a non-NOTE insn or at end of stream; a LOOP_END note means we
   ran out of the loop, so (presumably) NULL_RTX is returned.  */
138 if (insn == 0 || GET_CODE (insn) != NOTE)
140 if (GET_CODE (insn) == NOTE
141 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
/* Walk the insn stream looking for LOOP_BEG notes followed by an
   unconditional jump and try to duplicate the loop exit test there.
   NOTE(review): return type, braces and locals missing from excerpt.  */
149 copy_loop_headers (f)
153 /* Now iterate optimizing jumps until nothing changes over one pass. */
154 for (insn = f; insn; insn = next)
158 next = NEXT_INSN (insn);
160 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
161 jump. Try to optimize by duplicating the loop exit test if so.
162 This is only safe immediately after regscan, because it uses
163 the values of regno_first_uid and regno_last_uid. */
164 if (GET_CODE (insn) == NOTE
165 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
166 && (temp1 = next_nonnote_insn_in_loop (insn)) != 0
167 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
/* Remember the insn before the note: if duplication succeeds, re-scan
   from just after it, since the stream was rewritten.  */
169 temp = PREV_INSN (insn);
170 if (duplicate_loop_exit_test (insn))
172 next = NEXT_INSN (temp);
/* Delete line-number notes that duplicate the immediately preceding one.
   NOTE(review): fragment — the `last_note` declaration, the update of
   last_note after a kept note, and closing braces are missing here.  */
179 purge_line_number_notes (f)
184 /* Delete extraneous line number notes.
185 Note that two consecutive notes for different lines are not really
186 extraneous. There should be some indication where that line belonged,
187 even if it became empty. */
189 for (insn = f; insn; insn = NEXT_INSN (insn))
190 if (GET_CODE (insn) == NOTE)
192 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
193 /* Any previous line note was for the prologue; gdb wants a new
194 note after the prologue even if it is for the same line. */
195 last_note = NULL_RTX;
196 else if (NOTE_LINE_NUMBER (insn) >= 0)
198 /* Delete this note if it is identical to previous note. */
200 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
201 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
203 delete_related_insns (insn);
/* NOTE(review): the init_label_info header line is missing from this
   excerpt; the comment's "Returns the largest INSN_UID" claim cannot be
   checked against the (missing) return statement.  */
212 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
213 notes whose labels don't occur in the insn any more. Returns the
214 largest INSN_UID found. */
221 for (insn = f; insn; insn = NEXT_INSN (insn))
222 if (GET_CODE (insn) == CODE_LABEL)
223 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
224 else if (GET_CODE (insn) == JUMP_INSN)
225 JUMP_LABEL (insn) = 0;
226 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
/* Strip REG_LABEL notes whose label is no longer mentioned in the
   insn pattern; `next` is saved first because remove_note unlinks.  */
230 for (note = REG_NOTES (insn); note; note = next)
232 next = XEXP (note, 1);
233 if (REG_NOTE_KIND (note) == REG_LABEL
234 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
235 remove_note (insn, note);
/* NOTE(review): mark_all_labels fragment — the function header, INSN_P
   guard and several braces are missing from this excerpt.  */
240 /* Mark the label each jump jumps to.
241 Combine consecutive labels, and count uses of labels. */
249 for (insn = f; insn; insn = NEXT_INSN (insn))
/* CALL_PLACEHOLDERs carry three alternative insn sequences plus an
   optional tail-recursion label in operand 3; recurse into each.  */
252 if (GET_CODE (insn) == CALL_INSN
253 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
255 mark_all_labels (XEXP (PATTERN (insn), 0));
256 mark_all_labels (XEXP (PATTERN (insn), 1));
257 mark_all_labels (XEXP (PATTERN (insn), 2));
259 /* Canonicalize the tail recursion label attached to the
260 CALL_PLACEHOLDER insn. */
261 if (XEXP (PATTERN (insn), 3))
/* Wrap the label in a temporary LABEL_REF so mark_jump_label can
   canonicalize it, then store the canonical label back.  */
263 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
264 XEXP (PATTERN (insn), 3));
265 mark_jump_label (label_ref, insn, 0);
266 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
272 mark_jump_label (PATTERN (insn), insn, 0);
273 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
275 /* When we know the LABEL_REF contained in a REG used in
276 an indirect jump, we'll have a REG_LABEL note so that
277 flow can tell where it's going. */
278 if (JUMP_LABEL (insn) == 0)
280 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
283 /* But a LABEL_REF around the REG_LABEL note, so
284 that we can canonicalize it. */
285 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
286 XEXP (label_note, 0));
288 mark_jump_label (label_ref, insn, 0);
289 XEXP (label_note, 0) = XEXP (label_ref, 0);
290 JUMP_LABEL (insn) = XEXP (label_note, 0);
/* NOTE(review): large fragment of duplicate_loop_exit_test — the insn
   counter, `exitcode`/`lastexit` computations, switch-case labels, early
   `return 0` paths and many braces are missing from this excerpt.  */
297 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
298 jump. Assume that this unconditional jump is to the exit test code. If
299 the code is sufficiently simple, make a copy of it before INSN,
300 followed by a jump to the exit of the loop. Then delete the unconditional
303 Return 1 if we made the change, else 0.
305 This is only safe immediately after a regscan pass because it uses the
306 values of regno_first_uid and regno_last_uid. */
309 duplicate_loop_exit_test (loop_start)
312 rtx insn, set, reg, p, link;
313 rtx copy = 0, first_copy = 0;
316 = NEXT_INSN (JUMP_LABEL (next_nonnote_insn_in_loop (loop_start)));
318 int max_reg = max_reg_num ();
320 rtx loop_pre_header_label;
322 /* Scan the exit code. We do not perform this optimization if any insn:
326 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
327 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
329 We also do not do this if we find an insn with ASM_OPERANDS. While
330 this restriction should not be necessary, copying an insn with
331 ASM_OPERANDS can confuse asm_noperands in some cases.
333 Also, don't do this if the exit code is more than 20 insns. */
335 for (insn = exitcode;
337 && ! (GET_CODE (insn) == NOTE
338 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
339 insn = NEXT_INSN (insn))
341 switch (GET_CODE (insn))
349 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
350 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
351 /* If we were to duplicate this code, we would not move
352 the BLOCK notes, and so debugging the moved code would
353 be difficult. Thus, we only move the code with -O2 or
360 /* The code below would grossly mishandle REG_WAS_0 notes,
361 so get rid of them here. */
362 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
363 remove_note (insn, p);
365 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
366 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
374 /* Unless INSN is zero, we can do the optimization. */
380 /* See if any insn sets a register only used in the loop exit code and
381 not a user variable. If so, replace it with a new register. */
382 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
383 if (GET_CODE (insn) == INSN
384 && (set = single_set (insn)) != 0
385 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
386 || (GET_CODE (reg) == SUBREG
387 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
388 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
389 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
/* Confirm the register's last use is inside the exit code too, so a
   fresh pseudo can safely replace it in the copy.  */
391 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
392 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
397 /* We can do the replacement. Allocate reg_map if this is the
398 first replacement we found. */
400 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
402 REG_LOOP_TEST_P (reg) = 1;
404 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
407 loop_pre_header_label = gen_label_rtx ();
409 /* Now copy each insn. */
410 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
412 switch (GET_CODE (insn))
415 copy = emit_barrier_before (loop_start);
418 /* Only copy line-number notes. */
419 if (NOTE_LINE_NUMBER (insn) >= 0)
421 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
422 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
427 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
429 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
431 mark_jump_label (PATTERN (copy), copy, 0);
432 INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
434 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
436 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
437 if (REG_NOTE_KIND (link) != REG_LABEL)
439 if (GET_CODE (link) == EXPR_LIST)
441 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
446 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
451 if (reg_map && REG_NOTES (copy))
452 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
456 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
458 INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
460 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
461 mark_jump_label (PATTERN (copy), copy, 0);
462 if (REG_NOTES (insn))
464 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
466 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
469 /* Predict conditional jump that do make loop looping as taken.
470 Other jumps are probably exit conditions, so predict
472 if (any_condjump_p (copy))
474 rtx label = JUMP_LABEL (copy);
477 /* The jump_insn after loop_start should be followed
478 by barrier and loopback label. */
479 if (prev_nonnote_insn (label)
480 && (prev_nonnote_insn (prev_nonnote_insn (label))
481 == next_nonnote_insn (loop_start)))
483 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
484 /* To keep pre-header, we need to redirect all loop
485 entrances before the LOOP_BEG note. */
486 redirect_jump (copy, loop_pre_header_label, 0);
489 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
498 /* Record the first insn we copied. We need it so that we can
499 scan the copied insns for new pseudo registers. */
504 /* Now clean up by emitting a jump to the end label and deleting the jump
505 at the start of the loop. */
506 if (! copy || GET_CODE (copy) != BARRIER)
508 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
511 /* Record the first insn we copied. We need it so that we can
512 scan the copied insns for new pseudo registers. This may not
513 be strictly necessary since we should have copied at least one
514 insn above. But I am going to be safe. */
518 mark_jump_label (PATTERN (copy), copy, 0);
519 emit_barrier_before (loop_start);
522 emit_label_before (loop_pre_header_label, loop_start);
524 /* Now scan from the first insn we copied to the last insn we copied
525 (copy) for new pseudo registers. Do this after the code to jump to
526 the end label since that might create a new pseudo too. */
527 reg_scan_update (first_copy, copy, max_reg);
529 /* Mark the exit code as the virtual top of the converted loop. */
530 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
/* Finally remove the original unconditional jump after LOOP_START.  */
532 delete_related_insns (next_nonnote_insn (loop_start));
/* NOTE(review): squeeze_notes fragment — parameter declarations, the
   start/end dereference of startp/endp, and the return statements are
   missing from this excerpt.  */
541 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
542 notes between START and END out before START. START and END may be such
543 notes. Returns the values of the new starting and ending insns, which
544 may be different if the original ones were such notes.
545 Return true if there were only such notes and no real instructions. */
548 squeeze_notes (startp, endp)
558 rtx past_end = NEXT_INSN (end);
560 for (insn = start; insn != past_end; insn = next)
562 next = NEXT_INSN (insn);
563 if (GET_CODE (insn) == NOTE
564 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
565 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
566 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
567 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
568 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
569 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
/* Unlink the note from its current position and splice it into the
   doubly-linked insn chain immediately before START.  */
575 rtx prev = PREV_INSN (insn);
576 PREV_INSN (insn) = PREV_INSN (start);
577 NEXT_INSN (insn) = start;
578 NEXT_INSN (PREV_INSN (insn)) = insn;
579 PREV_INSN (NEXT_INSN (insn)) = insn;
580 NEXT_INSN (prev) = next;
581 PREV_INSN (next) = prev;
588 /* There were no real instructions. */
589 if (start == past_end)
/* NOTE(review): fragment — the return statement and braces are missing.  */
599 /* Return the label before INSN, or put a new label there. */
602 get_label_before (insn)
607 /* Find an existing label at this point
608 or make a new one if there is none. */
609 label = prev_nonnote_insn (insn);
611 if (label == 0 || GET_CODE (label) != CODE_LABEL)
/* No label immediately precedes INSN: synthesize one after PREV.  */
613 rtx prev = PREV_INSN (insn);
615 label = gen_label_rtx ();
616 emit_label_after (label, prev);
617 LABEL_NUSES (label) = 0;
/* NOTE(review): fragment — the return statement and braces are missing.  */
622 /* Return the label after INSN, or put a new label there. */
625 get_label_after (insn)
630 /* Find an existing label at this point
631 or make a new one if there is none. */
632 label = next_nonnote_insn (insn);
634 if (label == 0 || GET_CODE (label) != CODE_LABEL)
636 label = gen_label_rtx ();
637 emit_label_after (label, insn);
638 LABEL_NUSES (label) = 0;
/* NOTE(review): reversed_comparison_code_parts fragment — the return type,
   many `return UNKNOWN` paths, switch-case labels and braces are missing
   from this excerpt.  */
643 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
644 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
645 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
646 know whether it's source is floating point or integer comparison. Machine
647 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
648 to help this function avoid overhead in these cases. */
650 reversed_comparison_code_parts (code, arg0, arg1, insn)
651 rtx insn, arg0, arg1;
654 enum machine_mode mode;
656 /* If this is not actually a comparison, we can't reverse it. */
657 if (GET_RTX_CLASS (code) != '<')
/* The mode usually lives on the first operand; fall back to the second
   when arg0 is VOIDmode (e.g. a constant).  */
660 mode = GET_MODE (arg0);
661 if (mode == VOIDmode)
662 mode = GET_MODE (arg1);
664 /* First see if machine description supply us way to reverse the comparison.
665 Give it priority over everything else to allow machine description to do
667 #ifdef REVERSIBLE_CC_MODE
668 if (GET_MODE_CLASS (mode) == MODE_CC
669 && REVERSIBLE_CC_MODE (mode))
671 #ifdef REVERSE_CONDITION
672 return REVERSE_CONDITION (code, mode);
674 return reverse_condition (code);
678 /* Try a few special cases based on the comparison code. */
687 /* It is always safe to reverse EQ and NE, even for the floating
688 point. Similary the unsigned comparisons are never used for
689 floating point so we can reverse them in the default way. */
690 return reverse_condition (code);
695 /* In case we already see unordered comparison, we can be sure to
696 be dealing with floating point so we don't need any more tests. */
697 return reverse_condition_maybe_unordered (code);
702 /* We don't have safe way to reverse these yet. */
708 if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
711 /* Try to search for the comparison to determine the real mode.
712 This code is expensive, but with sane machine description it
713 will be never used, since REVERSIBLE_CC_MODE will return true
718 for (prev = prev_nonnote_insn (insn);
719 prev != 0 && GET_CODE (prev) != CODE_LABEL;
720 prev = prev_nonnote_insn (prev))
722 rtx set = set_of (arg0, prev);
723 if (set && GET_CODE (set) == SET
724 && rtx_equal_p (SET_DEST (set), arg0))
726 rtx src = SET_SRC (set);
728 if (GET_CODE (src) == COMPARE)
/* Found the COMPARE that feeds the flags: recover the real operand
   mode from its operands.  */
730 rtx comparison = src;
731 arg0 = XEXP (src, 0);
732 mode = GET_MODE (arg0);
733 if (mode == VOIDmode)
734 mode = GET_MODE (XEXP (comparison, 1));
737 /* We can get past reg-reg moves. This may be useful for model
738 of i387 comparisons that first move flag registers around. */
745 /* If register is clobbered in some ununderstandable way,
752 /* Test for an integer condition, or a floating-point comparison
753 in which NaNs can be ignored. */
754 if (GET_CODE (arg0) == CONST_INT
755 || (GET_MODE (arg0) != VOIDmode
756 && GET_MODE_CLASS (mode) != MODE_CC
757 && !HONOR_NANS (mode)))
758 return reverse_condition (code);
/* NOTE(review): only the early-return line for non-comparison rtx is
   missing its `return UNKNOWN;` body in this excerpt.  */
763 /* A wrapper around the previous function to take COMPARISON as rtx
764 expression. This simplifies many callers. */
766 reversed_comparison_code (comparison, insn)
767 rtx comparison, insn;
769 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
771 return reversed_comparison_code_parts (GET_CODE (comparison),
772 XEXP (comparison, 0),
773 XEXP (comparison, 1), insn);
/* NOTE(review): the bodies of the five condition-code helpers below
   (reverse_condition, reverse_condition_maybe_unordered, swap_condition,
   unsigned_condition, signed_condition) — presumably large switch
   statements over enum rtx_code — are entirely missing from this excerpt;
   only their header comments and names survive.  */
776 /* Given an rtx-code for a comparison, return the code for the negated
777 comparison. If no such code exists, return UNKNOWN.
779 WATCH OUT! reverse_condition is not safe to use on a jump that might
780 be acting on the results of an IEEE floating point comparison, because
781 of the special treatment of non-signaling nans in comparisons.
782 Use reversed_comparison_code instead. */
785 reverse_condition (code)
828 /* Similar, but we're allowed to generate unordered comparisons, which
829 makes it safe for IEEE floating-point. Of course, we have to recognize
830 that the target will support them too... */
833 reverse_condition_maybe_unordered (code)
872 /* Similar, but return the code when two operands of a comparison are swapped.
873 This IS safe for IEEE floating-point. */
876 swap_condition (code)
919 /* Given a comparison CODE, return the corresponding unsigned comparison.
920 If CODE is an equality comparison or already an unsigned comparison,
924 unsigned_condition (code)
951 /* Similarly, return the signed version of a comparison. */
954 signed_condition (code)
/* NOTE(review): comparison_dominates_p fragment — the switch statement,
   its case labels (each `if` below belongs to one CODE1 case) and the
   return statements are missing from this excerpt.  */
981 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
982 truth of CODE1 implies the truth of CODE2. */
985 comparison_dominates_p (code1, code2)
986 enum rtx_code code1, code2;
988 /* UNKNOWN comparison codes can happen as a result of trying to revert
990 They can't match anything, so we have to reject them here. */
991 if (code1 == UNKNOWN || code2 == UNKNOWN)
1000 if (code2 == UNLE || code2 == UNGE)
1005 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1006 || code2 == ORDERED)
1011 if (code2 == UNLE || code2 == NE)
1016 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1021 if (code2 == UNGE || code2 == NE)
1026 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1032 if (code2 == ORDERED)
1037 if (code2 == NE || code2 == ORDERED)
1042 if (code2 == LEU || code2 == NE)
1047 if (code2 == GEU || code2 == NE)
1052 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1053 || code2 == UNGE || code2 == UNGT)
/* NOTE(review): the function header is missing — from the comment and
   body this is presumably simplejump_p.  A simple jump is a bare
   (set (pc) (label_ref ...)).  */
1064 /* Return 1 if INSN is an unconditional jump and nothing else. */
1070 return (GET_CODE (insn) == JUMP_INSN
1071 && GET_CODE (PATTERN (insn)) == SET
1072 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1073 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
/* NOTE(review): condjump_p fragment — the header line and the step that
   narrows `x` to SET_SRC before the LABEL_REF test are missing here.  */
1076 /* Return nonzero if INSN is a (possibly) conditional jump
1079 Use this function is deprecated, since we need to support combined
1080 branch and compare insns. Use any_condjump_p instead whenever possible. */
1086 rtx x = PATTERN (insn);
1088 if (GET_CODE (x) != SET
1089 || GET_CODE (SET_DEST (x)) != PC)
1093 if (GET_CODE (x) == LABEL_REF)
/* Otherwise require an IF_THEN_ELSE with one arm falling through (PC)
   and the other a label or RETURN.  */
1096 return (GET_CODE (x) == IF_THEN_ELSE
1097 && ((GET_CODE (XEXP (x, 2)) == PC
1098 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1099 || GET_CODE (XEXP (x, 1)) == RETURN))
1100 || (GET_CODE (XEXP (x, 1)) == PC
1101 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1102 || GET_CODE (XEXP (x, 2)) == RETURN))));
/* NOTE(review): fragment — the `return 0;`/`return 1;` bodies following
   each guard are missing from this excerpt.  Same test as condjump_p,
   applied to the first element of a PARALLEL.  */
1107 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1110 Use this function is deprecated, since we need to support combined
1111 branch and compare insns. Use any_condjump_p instead whenever possible. */
1114 condjump_in_parallel_p (insn)
1117 rtx x = PATTERN (insn);
1119 if (GET_CODE (x) != PARALLEL)
1122 x = XVECEXP (x, 0, 0);
1124 if (GET_CODE (x) != SET)
1126 if (GET_CODE (SET_DEST (x)) != PC)
1128 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1130 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1132 if (XEXP (SET_SRC (x), 2) == pc_rtx
1133 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1134 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1136 if (XEXP (SET_SRC (x), 1) == pc_rtx
1137 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1138 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
/* NOTE(review): the function header is missing — presumably pc_set.
   The `return pat;` / `return NULL_RTX;` lines are also absent.  */
1143 /* Return set of PC, otherwise NULL. */
1150 if (GET_CODE (insn) != JUMP_INSN)
1152 pat = PATTERN (insn);
1154 /* The set is allowed to appear either as the insn pattern or
1155 the first set in a PARALLEL. */
1156 if (GET_CODE (pat) == PARALLEL)
1157 pat = XVECEXP (pat, 0, 0);
1158 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
/* NOTE(review): fragment — the null-check of `x` and the final `return`
   lines are missing from this excerpt.  */
1164 /* Return true when insn is an unconditional direct jump,
1165 possibly bundled inside a PARALLEL. */
1168 any_uncondjump_p (insn)
1171 rtx x = pc_set (insn);
1174 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
/* NOTE(review): fragment — declarations of `a`/`b` (enum rtx_code) and
   the null-check of `x` are missing from this excerpt.  */
1179 /* Return true when insn is a conditional jump. This function works for
1180 instructions containing PC sets in PARALLELs. The instruction may have
1181 various other effects so before removing the jump you must verify
1184 Note that unlike condjump_p it returns false for unconditional jumps. */
1187 any_condjump_p (insn)
1190 rtx x = pc_set (insn);
1195 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
/* One arm must fall through (PC), the other transfer control.  */
1198 a = GET_CODE (XEXP (SET_SRC (x), 1));
1199 b = GET_CODE (XEXP (SET_SRC (x), 2));
1201 return ((b == PC && (a == LABEL_REF || a == RETURN))
1202 || (a == PC && (b == LABEL_REF || b == RETURN)));
/* NOTE(review): fragment — the narrowing of `x` to SET_SRC and the
   `return XEXP (...)` / `return NULL_RTX` bodies are missing here.  */
1205 /* Return the label of a conditional jump. */
1208 condjump_label (insn)
1211 rtx x = pc_set (insn);
1216 if (GET_CODE (x) == LABEL_REF)
1218 if (GET_CODE (x) != IF_THEN_ELSE)
1220 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1222 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
/* NOTE(review): returnjump_p_1 is the for_each_rtx callback; the
   dereference `rtx x = *loc;` and the returnjump_p header are missing
   from this excerpt.  */
1227 /* Return true if INSN is a (possibly conditional) return insn. */
1230 returnjump_p_1 (loc, data)
1232 void *data ATTRIBUTE_UNUSED;
1236 return x && (GET_CODE (x) == RETURN
1237 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
1244 if (GET_CODE (insn) != JUMP_INSN)
1246 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
/* NOTE(review): the onlyjump_p header line and return statements are
   missing — the visible guards reject non-jumps, multi-set insns,
   non-PC destinations and side-effecting sources.  */
1249 /* Return true if INSN is a jump that only transfers control and
1258 if (GET_CODE (insn) != JUMP_INSN)
1261 set = single_set (insn);
1264 if (GET_CODE (SET_DEST (set)) != PC)
1266 if (side_effects_p (SET_SRC (set)))
/* NOTE(review): header missing — presumably only_sets_cc0_p; delegates
   to sets_cc0_p and rejects side effects.  */
1274 /* Return nonzero if X is an RTX that only sets the condition codes
1275 and has no side effects. */
1287 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
/* NOTE(review): sets_cc0_p fragment — the header, the `sets_cc0`
   accumulator declaration/updates, and the trailing `return 0;` are
   missing from this excerpt.  */
1290 /* Return 1 if X is an RTX that does nothing but set the condition codes
1291 and CLOBBER or USE registers.
1292 Return -1 if X does explicitly set the condition codes,
1293 but also does other things. */
1305 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1307 if (GET_CODE (x) == PARALLEL)
1311 int other_things = 0;
1312 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1314 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1315 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1317 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1320 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
/* NOTE(review): follow_jumps fragment — the depth-limited loop header,
   local declarations and several `return`/`break` paths are missing
   from this excerpt.  */
1326 /* Follow any unconditional jump at LABEL;
1327 return the ultimate label reached by any such chain of jumps.
1328 If LABEL is not followed by a jump, return LABEL.
1329 If the chain loops or we can't find end, return LABEL,
1330 since that tells caller to avoid changing the insn.
1332 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1333 a USE or CLOBBER. */
1336 follow_jumps (label)
1346 && (insn = next_active_insn (value)) != 0
1347 && GET_CODE (insn) == JUMP_INSN
1348 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1349 && onlyjump_p (insn))
1350 || GET_CODE (PATTERN (insn)) == RETURN)
1351 && (next = NEXT_INSN (insn))
1352 && GET_CODE (next) == BARRIER);
1355 /* Don't chain through the insn that jumps into a loop
1356 from outside the loop,
1357 since that would create multiple loop entry jumps
1358 and prevent loop optimization. */
1360 if (!reload_completed)
1361 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1362 if (GET_CODE (tem) == NOTE
1363 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1364 /* ??? Optional. Disables some optimizations, but makes
1365 gcov output more accurate with -O. */
1366 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1369 /* If we have found a cycle, make the insn jump to itself. */
1370 if (JUMP_LABEL (insn) == label)
/* Don't chain into a dispatch table (ADDR_VEC / ADDR_DIFF_VEC).  */
1373 tem = next_active_insn (JUMP_LABEL (insn));
1374 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1375 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1378 value = JUMP_LABEL (insn);
/* NOTE(review): mark_jump_label fragment — the parameter declarations,
   the switch over `code` (these cases are: SYMBOL_REF constant pool,
   LABEL_REF, ADDR_VEC/ADDR_DIFF_VEC) and several returns/braces are
   missing from this excerpt.  */
1386 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1387 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1388 in INSN, then store one of them in JUMP_LABEL (INSN).
1389 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1390 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1391 Also, when there are consecutive labels, canonicalize on the last of them.
1393 Note that two labels separated by a loop-beginning note
1394 must be kept distinct if we have not yet done loop-optimization,
1395 because the gap between them is where loop-optimize
1396 will want to move invariant code to. CROSS_JUMP tells us
1397 that loop-optimization is done with. */
1400 mark_jump_label (x, insn, in_mem)
1405 RTX_CODE code = GET_CODE (x);
1428 /* If this is a constant-pool reference, see if it is a label. */
1429 if (CONSTANT_POOL_ADDRESS_P (x))
1430 mark_jump_label (get_pool_constant (x), insn, in_mem);
1435 rtx label = XEXP (x, 0);
1437 /* Ignore remaining references to unreachable labels that
1438 have been deleted. */
1439 if (GET_CODE (label) == NOTE
1440 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1443 if (GET_CODE (label) != CODE_LABEL)
1446 /* Ignore references to labels of containing functions. */
1447 if (LABEL_REF_NONLOCAL_P (x))
1450 XEXP (x, 0) = label;
1451 if (! insn || ! INSN_DELETED_P (insn))
1452 ++LABEL_NUSES (label);
1456 if (GET_CODE (insn) == JUMP_INSN)
1457 JUMP_LABEL (insn) = label;
1460 /* Add a REG_LABEL note for LABEL unless there already
1461 is one. All uses of a label, except for labels
1462 that are the targets of jumps, must have a
1464 if (! find_reg_note (insn, REG_LABEL, label))
1465 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1472 /* Do walk the labels in a vector, but not the first operand of an
1473 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1476 if (! INSN_DELETED_P (insn))
1478 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1480 for (i = 0; i < XVECLEN (x, eltnum); i++)
1481 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
/* Generic recursion over the remaining operands of X.  */
1489 fmt = GET_RTX_FORMAT (code);
1490 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1493 mark_jump_label (XEXP (x, i), insn, in_mem);
1494 else if (fmt[i] == 'E')
1497 for (j = 0; j < XVECLEN (x, i); j++)
1498 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
/* NOTE(review): header line missing — presumably delete_jump.  */
1503 /* If all INSN does is set the pc, delete it,
1504 and delete the insn that set the condition codes for it
1505 if that's what the previous thing was. */
1511 rtx set = single_set (insn);
1513 if (set && GET_CODE (SET_DEST (set)) == PC)
1514 delete_computation (insn);
/* NOTE(review): fragment — the abort () on non-BARRIER and the
   delete_insn call are missing from this excerpt.  */
1517 /* Verify INSN is a BARRIER and delete it. */
1520 delete_barrier (insn)
1523 if (GET_CODE (insn) != BARRIER)
/* NOTE(review): delete_prior_computation fragment — `break` statements
   ending the scan, several brace levels and loop bodies are missing
   from this excerpt.  */
1529 /* Recursively delete prior insns that compute the value (used only by INSN
1530 which the caller is deleting) stored in the register mentioned by NOTE
1531 which is a REG_DEAD note associated with INSN. */
1534 delete_prior_computation (note, insn)
1539 rtx reg = XEXP (note, 0);
1541 for (our_prev = prev_nonnote_insn (insn);
1542 our_prev && (GET_CODE (our_prev) == INSN
1543 || GET_CODE (our_prev) == CALL_INSN);
1544 our_prev = prev_nonnote_insn (our_prev))
1546 rtx pat = PATTERN (our_prev);
1548 /* If we reach a CALL which is not calling a const function
1549 or the callee pops the arguments, then give up. */
1550 if (GET_CODE (our_prev) == CALL_INSN
1551 && (! CONST_OR_PURE_CALL_P (our_prev)
1552 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1555 /* If we reach a SEQUENCE, it is too complex to try to
1556 do anything with it, so give up. We can be run during
1557 and after reorg, so SEQUENCE rtl can legitimately show
1559 if (GET_CODE (pat) == SEQUENCE)
1562 if (GET_CODE (pat) == USE
1563 && GET_CODE (XEXP (pat, 0)) == INSN)
1564 /* reorg creates USEs that look like this. We leave them
1565 alone because reorg needs them for its own purposes. */
1568 if (reg_set_p (reg, pat))
1570 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1573 if (GET_CODE (pat) == PARALLEL)
1575 /* If we find a SET of something else, we can't
1580 for (i = 0; i < XVECLEN (pat, 0); i++)
1582 rtx part = XVECEXP (pat, 0, i);
1584 if (GET_CODE (part) == SET
1585 && SET_DEST (part) != reg)
/* All SETs in the PARALLEL target REG: the whole insn is dead.  */
1589 if (i == XVECLEN (pat, 0))
1590 delete_computation (our_prev);
1592 else if (GET_CODE (pat) == SET
1593 && GET_CODE (SET_DEST (pat)) == REG)
/* Compute the hard-register ranges [dest_regno, dest_endregno) and
   [regno, endregno) to compare coverage of multi-word registers.  */
1595 int dest_regno = REGNO (SET_DEST (pat));
1598 + (dest_regno < FIRST_PSEUDO_REGISTER
1599 ? HARD_REGNO_NREGS (dest_regno,
1600 GET_MODE (SET_DEST (pat))) : 1));
1601 int regno = REGNO (reg);
1604 + (regno < FIRST_PSEUDO_REGISTER
1605 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
1607 if (dest_regno >= regno
1608 && dest_endregno <= endregno)
1609 delete_computation (our_prev);
1611 /* We may have a multi-word hard register and some, but not
1612 all, of the words of the register are needed in subsequent
1613 insns. Write REG_UNUSED notes for those parts that were not
1615 else if (dest_regno <= regno
1616 && dest_endregno >= endregno)
1620 REG_NOTES (our_prev)
1621 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1622 REG_NOTES (our_prev));
1624 for (i = dest_regno; i < dest_endregno; i++)
1625 if (! find_regno_note (our_prev, REG_UNUSED, i))
1628 if (i == dest_endregno)
1629 delete_computation (our_prev);
1636 /* If PAT references the register that dies here, it is an
1637 additional use. Hence any prior SET isn't dead. However, this
1638 insn becomes the new place for the REG_DEAD note. */
1639 if (reg_overlap_mentioned_p (reg, pat))
1641 XEXP (note, 1) = REG_NOTES (our_prev);
1642 REG_NOTES (our_prev) = note;
1648 /* Delete INSN and recursively delete insns that compute values used only
1649 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1650 If we are running before flow.c, we need do nothing since flow.c will
1651 delete dead code. We also can't know if the registers being used are
1652 dead or not at this point.
1654 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1655 nothing other than set a register that dies in this insn, we can delete
1658 On machines with CC0, if CC0 is used in this insn, we may be able to
1659 delete the insn that set it. */
1662 delete_computation (insn)
/* Delete INSN and, recursively, any prior insns whose only purpose was
   to compute values used by INSN (driven by its REG_DEAD notes).
   NOTE(review): elided listing -- the #ifdef HAVE_cc0 guard and local
   declarations between the numbered lines are not visible here.  */
1668 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1670 rtx prev = prev_nonnote_insn (insn);
1671 /* We assume that at this stage
1672 CC's are always set explicitly
1673 and always immediately before the jump that
1674 will use them. So if the previous insn
1675 exists to set the CC's, delete it
1676 (unless it performs auto-increments, etc.). */
1677 if (prev && GET_CODE (prev) == INSN
1678 && sets_cc0_p (PATTERN (prev)))
/* sets_cc0_p > 0 means PREV sets cc0 and nothing else.  */
1680 if (sets_cc0_p (PATTERN (prev)) > 0
1681 && ! side_effects_p (PATTERN (prev)))
1682 delete_computation (prev);
1684 /* Otherwise, show that cc0 won't be used. */
1685 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1686 cc0_rtx, REG_NOTES (prev));
/* Grab NEXT before delete_prior_computation may re-thread the note
   onto another insn's REG_NOTES list.  */
1691 for (note = REG_NOTES (insn); note; note = next)
1693 next = XEXP (note, 1);
1695 if (REG_NOTE_KIND (note) != REG_DEAD
1696 /* Verify that the REG_NOTE is legitimate. */
1697 || GET_CODE (XEXP (note, 0)) != REG)
1700 delete_prior_computation (note, insn);
1703 delete_related_insns (insn);
1706 /* Delete insn INSN from the chain of insns and update label ref counts
1707 and delete insns now unreachable.
1709 Returns the first insn after INSN that was not deleted.
1711 Usage of this function is deprecated. Use delete_insn instead and
1712 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1715 delete_related_insns (insn)
/* Delete INSN, dropping label reference counts and deleting insns that
   thereby become unreachable; returns the first non-deleted insn after
   INSN.  NOTE(review): elided listing -- declarations, early returns and
   several statements between the numbered lines are not visible.  */
1718 int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1720 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
/* Skip over any insns already marked deleted to find the return value.  */
1722 while (next && INSN_DELETED_P (next))
1723 next = NEXT_INSN (next);
1725 /* This insn is already deleted => return first following nondeleted. */
1726 if (INSN_DELETED_P (insn))
1731 /* If instruction is followed by a barrier,
1732 delete the barrier too. */
1734 if (next != 0 && GET_CODE (next) == BARRIER)
1737 /* If deleting a jump, decrement the count of the label,
1738 and delete the label if it is now unused. */
1740 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1742 rtx lab = JUMP_LABEL (insn), lab_next;
1744 if (LABEL_NUSES (lab) == 0)
1746 /* This can delete NEXT or PREV,
1747 either directly if NEXT is JUMP_LABEL (INSN),
1748 or indirectly through more levels of jumps. */
1749 delete_related_insns (lab);
1751 /* I feel a little doubtful about this loop,
1752 but I see no clean and sure alternative way
1753 to find the first insn after INSN that is not now deleted.
1754 I hope this works. */
1755 while (next && INSN_DELETED_P (next))
1756 next = NEXT_INSN (next);
1759 else if (tablejump_p (insn, NULL, &lab_next))
1761 /* If we're deleting the tablejump, delete the dispatch table.
1762 We may not be able to kill the label immediately preceding
1763 just yet, as it might be referenced in code leading up to
1765 delete_related_insns (lab_next);
1769 /* Likewise if we're deleting a dispatch table. */
1771 if (GET_CODE (insn) == JUMP_INSN
1772 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1773 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1775 rtx pat = PATTERN (insn);
/* ADDR_DIFF_VEC keeps its labels in operand 1; ADDR_VEC in operand 0.  */
1776 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1777 int len = XVECLEN (pat, diff_vec_p);
/* Delete every label in the table whose use count has dropped to 0.  */
1779 for (i = 0; i < len; i++)
1780 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1781 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1782 while (next && INSN_DELETED_P (next))
1783 next = NEXT_INSN (next);
1787 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1788 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1789 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1790 if (REG_NOTE_KIND (note) == REG_LABEL
1791 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1792 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1793 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1794 delete_related_insns (XEXP (note, 0));
/* Back up over deleted insns and NOTEs to find the real predecessor.  */
1796 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1797 prev = PREV_INSN (prev);
1799 /* If INSN was a label and a dispatch table follows it,
1800 delete the dispatch table. The tablejump must have gone already.
1801 It isn't useful to fall through into a table. */
1804 && NEXT_INSN (insn) != 0
1805 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1806 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1807 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1808 next = delete_related_insns (NEXT_INSN (insn));
1810 /* If INSN was a label, delete insns following it if now unreachable. */
/* A deleted label preceded by a BARRIER means control cannot reach the
   code that follows; sweep it away until something reachable appears.  */
1812 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1816 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1817 || code == NOTE || code == BARRIER
1818 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1821 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1822 next = NEXT_INSN (next);
1823 /* Keep going past other deleted labels to delete what follows. */
1824 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1825 next = NEXT_INSN (next);
1827 /* Note: if this deletes a jump, it can cause more
1828 deletion of unreachable code, after a different label.
1829 As long as the value from this recursive call is correct,
1830 this invocation functions correctly. */
1831 next = delete_related_insns (next);
1838 /* Delete a range of insns from FROM to TO, inclusive.
1839 This is for the sake of peephole optimization, so assume
1840 that whatever these insns do will still be done by a new
1841 peephole insn that will replace them. */
1844 delete_for_peephole (from, to)
/* Mark each non-NOTE insn in [FROM, TO] deleted and splice it out of the
   doubly linked insn chain, one insn at a time so that NOTEs survive.
   NOTE(review): elided listing -- the enclosing loop header and null
   checks between the numbered lines are not visible here.  */
1851 rtx next = NEXT_INSN (insn);
1852 rtx prev = PREV_INSN (insn);
1854 if (GET_CODE (insn) != NOTE)
1856 INSN_DELETED_P (insn) = 1;
1858 /* Patch this insn out of the chain. */
1859 /* We don't do this all at once, because we
1860 must preserve all NOTEs. */
1862 NEXT_INSN (prev) = next;
1865 PREV_INSN (next) = prev;
1873 /* Note that if TO is an unconditional jump
1874 we *do not* delete the BARRIER that follows,
1875 since the peephole that replaces this sequence
1876 is also an unconditional jump in that case. */
1879 /* We have determined that AVOIDED_INSN is never reached, and are
1880 about to delete it. If the insn chain between AVOIDED_INSN and
1881 FINISH contains more than one line from the current function, and
1882 contains at least one operation, print a warning if the user asked
1883 for it. If FINISH is NULL, look between AVOIDED_INSN and a LABEL.
1885 CSE and inlining can duplicate insns, so it's possible to get
1886 spurious warnings from this. */
1889 never_reached_warning (avoided_insn, finish)
1890 rtx avoided_insn, finish;
1893 rtx a_line_note = NULL;
1894 int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;
/* -Wunreachable-code off: nothing to report.  */
1896 if (!warn_notreached)
1899 /* Back up to the first of any NOTEs preceding avoided_insn; flow passes
1900 us the head of a block, a NOTE_INSN_BASIC_BLOCK, which often follows
1902 insn = avoided_insn;
1905 rtx prev = PREV_INSN (insn);
1906 if (prev == NULL_RTX
1907 || GET_CODE (prev) != NOTE)
1912 /* Scan forwards, looking at LINE_NUMBER notes, until we hit a LABEL
1913 in case FINISH is NULL, otherwise until we run out of insns. */
1915 for (; insn != NULL; insn = NEXT_INSN (insn))
1917 if ((finish == NULL && GET_CODE (insn) == CODE_LABEL)
1918 || GET_CODE (insn) == BARRIER)
1921 if (GET_CODE (insn) == NOTE /* A line number note? */
1922 && NOTE_LINE_NUMBER (insn) >= 0)
/* Remember the first line note; warn only if a second, different
   line is also skipped (avoids noise for single-line cleanups).  */
1924 if (a_line_note == NULL)
1927 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1928 != NOTE_LINE_NUMBER (insn));
1930 else if (INSN_P (insn))
/* Warn only when the dead range spans two source lines and contains
   at least one real operation.  */
1940 if (two_avoided_lines && contains_insn)
1943 locus.file = NOTE_SOURCE_FILE (a_line_note);
1944 locus.line = NOTE_LINE_NUMBER (a_line_note);
1945 warning ("%Hwill never be executed", &locus);
1949 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1950 NLABEL as a return. Accrue modifications into the change group. */
1953 redirect_exp_1 (loc, olabel, nlabel, insn)
/* Recursively rewrite *LOC (inside INSN) so references to OLABEL become
   references to NLABEL; a null OLABEL/NLABEL stands for RETURN.  All
   edits are queued with validate_change (in_group = 1) and applied later
   by apply_change_group.  NOTE(review): elided listing -- parameter
   declarations and some statements are not visible here.  */
1959 RTX_CODE code = GET_CODE (x);
1963 if (code == LABEL_REF)
1965 if (XEXP (x, 0) == olabel)
1969 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
/* NLABEL == 0 means turn the jump into a RETURN.  */
1971 n = gen_rtx_RETURN (VOIDmode);
1973 validate_change (insn, loc, n, 1);
1977 else if (code == RETURN && olabel == 0)
/* Converse direction: a RETURN becomes a jump to NLABEL; wrap in a
   SET of pc when replacing the whole pattern.  */
1979 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1980 if (loc == &PATTERN (insn))
1981 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1982 validate_change (insn, loc, x, 1);
1986 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1987 && GET_CODE (SET_SRC (x)) == LABEL_REF
1988 && XEXP (SET_SRC (x), 0) == olabel)
/* Collapse (set (pc) (label_ref OLABEL)) directly to (return).  */
1990 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
/* Generic recursion over the operands of X.  */
1994 fmt = GET_RTX_FORMAT (code);
1995 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1998 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1999 else if (fmt[i] == 'E')
2002 for (j = 0; j < XVECLEN (x, i); j++)
2003 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
2008 /* Similar, but apply the change group and report success or failure. */
2011 redirect_exp (olabel, nlabel, insn)
/* Redirect OLABEL to NLABEL throughout INSN's pattern, then apply the
   accumulated change group; returns nonzero on success.  For a PARALLEL,
   only the first element is rewritten.  */
2017 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2018 loc = &XVECEXP (PATTERN (insn), 0, 0);
2020 loc = &PATTERN (insn);
2022 redirect_exp_1 (loc, olabel, nlabel, insn);
/* NOTE(review): the return value when no change was queued is on an
   elided line -- presumably 0; confirm against the full source.  */
2023 if (num_validated_changes () == 0)
2026 return apply_change_group ();
2029 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2030 the modifications into the change group. Return false if we did
2031 not see how to do that. */
2034 redirect_jump_1 (jump, nlabel)
/* Queue (but do not apply) the changes that retarget JUMP at NLABEL;
   returns nonzero iff at least one change was queued.  The caller is
   responsible for apply_change_group / cancel_changes.  */
2037 int ochanges = num_validated_changes ();
2040 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2041 loc = &XVECEXP (PATTERN (jump), 0, 0);
2043 loc = &PATTERN (jump);
2045 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2046 return num_validated_changes () > ochanges;
2049 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2050 jump target label is unused as a result, it and the code following
2053 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2056 The return value will be 1 if the change was made, 0 if it wasn't
2057 (this can only occur for NLABEL == 0). */
2060 redirect_jump (jump, nlabel, delete_unused)
/* Retarget JUMP from its current label to NLABEL, keeping JUMP_LABEL,
   LABEL_NUSES counts and any REG_EQUAL note consistent.  If the old
   label becomes unused and DELETE_UNUSED is set, delete it.
   NOTE(review): elided listing -- early returns and some declarations
   between the numbered lines are not visible here.  */
2064 rtx olabel = JUMP_LABEL (jump);
/* Already pointing at NLABEL: nothing to do.  */
2067 if (nlabel == olabel)
2070 if (! redirect_exp (olabel, nlabel, jump))
2073 JUMP_LABEL (jump) = nlabel;
2075 ++LABEL_NUSES (nlabel);
2077 /* Update labels in any REG_EQUAL note. */
2078 if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
2080 if (nlabel && olabel)
2082 rtx dest = XEXP (note, 0);
2084 if (GET_CODE (dest) == IF_THEN_ELSE)
2086 if (GET_CODE (XEXP (dest, 1)) == LABEL_REF
2087 && XEXP (XEXP (dest, 1), 0) == olabel)
2088 XEXP (XEXP (dest, 1), 0) = nlabel;
2089 if (GET_CODE (XEXP (dest, 2)) == LABEL_REF
2090 && XEXP (XEXP (dest, 2), 0) == olabel)
2091 XEXP (XEXP (dest, 2), 0) = nlabel;
/* A REG_EQUAL note we cannot patch is simply dropped.  */
2094 remove_note (jump, note);
2097 remove_note (jump, note);
2100 /* If we're eliding the jump over exception cleanups at the end of a
2101 function, move the function end note so that -Wreturn-type works. */
2102 if (olabel && nlabel
2103 && NEXT_INSN (olabel)
2104 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2105 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2106 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
2108 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
2109 /* Undefined labels will remain outside the insn stream. */
2110 && INSN_UID (olabel))
2111 delete_related_insns (olabel);
2116 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2117 Accrue the modifications into the change group. */
/* NOTE(review): the defining line of invert_exp_1 is elided from this
   listing; the body below inverts the condition of INSN's pc-set,
   queueing changes into the validate_change group.  */
2124 rtx x = pc_set (insn);
/* X is narrowed to the SET_SRC on an elided line; CODE is then its
   rtx code.  */
2130 code = GET_CODE (x);
2132 if (code == IF_THEN_ELSE)
2134 rtx comp = XEXP (x, 0);
2136 enum rtx_code reversed_code;
2138 /* We can do this in two ways: The preferable way, which can only
2139 be done if this is not an integer comparison, is to reverse
2140 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2141 of the IF_THEN_ELSE. If we can't do either, fail. */
2143 reversed_code = reversed_comparison_code (comp, insn);
2145 if (reversed_code != UNKNOWN)
2147 validate_change (insn, &XEXP (x, 0),
2148 gen_rtx_fmt_ee (reversed_code,
2149 GET_MODE (comp), XEXP (comp, 0),
/* Fallback: swap the THEN and ELSE arms (tem holds the old THEN,
   saved on an elided line).  */
2156 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2157 validate_change (insn, &XEXP (x, 2), tem, 1);
2163 /* Invert the jump condition of conditional jump insn, INSN.
2165 Return 1 if we can do so, 0 if we cannot find a way to do so that
2166 matches a pattern. */
/* NOTE(review): the defining line of invert_exp is elided; this tail
   queues the inversion and applies the change group, returning the
   result of apply_change_group on success.  */
2172 invert_exp_1 (insn);
2173 if (num_validated_changes () == 0)
2176 return apply_change_group ();
2179 /* Invert the condition of the jump JUMP, and make it jump to label
2180 NLABEL instead of where it jumps now. Accrue changes into the
2181 change group. Return false if we didn't see how to perform the
2182 inversion and redirection. */
2185 invert_jump_1 (jump, nlabel)
/* Queue both the condition inversion and the retargeting of JUMP at
   NLABEL into the current change group; returns the result of
   redirect_jump_1.  The inversion must queue at least one change
   (checked below) or the elided branch bails out.  */
2190 ochanges = num_validated_changes ();
2191 invert_exp_1 (jump);
2192 if (num_validated_changes () == ochanges)
2195 return redirect_jump_1 (jump, nlabel);
2198 /* Invert the condition of the jump JUMP, and make it jump to label
2199 NLABEL instead of where it jumps now. Return true if successful. */
2202 invert_jump (jump, nlabel, delete_unused)
/* Invert JUMP's condition and retarget it at NLABEL as one atomic
   operation; on success also drop any stale REG_EQUAL note and flip the
   branch probabilities.  NOTE(review): elided listing -- return
   statements and the final abort/restore path are not fully visible.  */
2206 /* We have to either invert the condition and change the label or
2207 do neither. Either operation could fail. We first try to invert
2208 the jump. If that succeeds, we try changing the label. If that fails,
2209 we invert the jump back to what it was. */
2211 if (! invert_exp (jump))
2214 if (redirect_jump (jump, nlabel, delete_unused))
2216 /* Remove REG_EQUAL note if we have one. */
2217 rtx note = find_reg_note (jump, REG_EQUAL, NULL_RTX)
2219 remove_note (jump, note);
2221 invert_br_probabilities (jump);
/* Redirection failed: undo the inversion.  */
2226 if (! invert_exp (jump))
2227 /* This should just be putting it back the way it was. */
2234 /* Like rtx_equal_p except that it considers two REGs as equal
2235 if they renumber to the same value and considers two commutative
2236 operations to be the same if the order of the operands has been
2239 ??? Addition is not commutative on the PA due to the weird implicit
2240 space register selection rules for memory addresses. Therefore, we
2241 don't consider a + b == b + a.
2243 We could/should make this test a little tighter. Possibly only
2244 disabling it on the PA via some backend macro or only disabling this
2245 case when the PLUS is inside a MEM. */
2248 rtx_renumbered_equal_p (x, y)
/* Like rtx_equal_p, but two REGs (or SUBREGs of REGs) are considered
   equal when they renumber (via reg_renumber) to the same hard register
   and byte offset.  NOTE(review): elided listing -- some declarations,
   the switch over CODE, and several early returns are not visible.  */
2252 RTX_CODE code = GET_CODE (x);
/* Handle the register case specially so renumbering is taken into
   account on both sides.  */
2258 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2259 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2260 && GET_CODE (SUBREG_REG (y)) == REG)))
2262 int reg_x = -1, reg_y = -1;
2263 int byte_x = 0, byte_y = 0;
2265 if (GET_MODE (x) != GET_MODE (y))
2268 /* If we haven't done any renumbering, don't
2269 make any assumptions. */
2270 if (reg_renumber == 0)
2271 return rtx_equal_p (x, y);
/* Resolve X to a (renumbered) register number plus byte offset.  */
2275 reg_x = REGNO (SUBREG_REG (x));
2276 byte_x = SUBREG_BYTE (x);
2278 if (reg_renumber[reg_x] >= 0)
2280 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2281 GET_MODE (SUBREG_REG (x)),
2290 if (reg_renumber[reg_x] >= 0)
2291 reg_x = reg_renumber[reg_x];
/* Likewise for Y.  */
2294 if (GET_CODE (y) == SUBREG)
2296 reg_y = REGNO (SUBREG_REG (y));
2297 byte_y = SUBREG_BYTE (y);
2299 if (reg_renumber[reg_y] >= 0)
2301 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2302 GET_MODE (SUBREG_REG (y)),
2311 if (reg_renumber[reg_y] >= 0)
2312 reg_y = reg_renumber[reg_y];
2315 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2318 /* Now we have disposed of all the cases
2319 in which different rtx codes can match. */
2320 if (code != GET_CODE (y))
/* Elided here: a switch over CODE handling constant and special rtxes;
   only some of its cases are visible below.  */
2332 return INTVAL (x) == INTVAL (y);
2335 /* We can't assume nonlocal labels have their following insns yet. */
2336 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2337 return XEXP (x, 0) == XEXP (y, 0);
2339 /* Two label-refs are equivalent if they point at labels
2340 in the same position in the instruction stream. */
2341 return (next_real_insn (XEXP (x, 0))
2342 == next_real_insn (XEXP (y, 0)));
/* SYMBOL_REF names are shared, so pointer equality suffices.  */
2345 return XSTR (x, 0) == XSTR (y, 0);
2348 /* If we didn't match EQ equality above, they aren't the same. */
2355 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2357 if (GET_MODE (x) != GET_MODE (y))
2360 /* For commutative operations, the RTXs match if the operands match in
2361 either order. Also handle the simple binary and unary cases without
a loop.
2363 ??? Don't consider PLUS a commutative operator; see comments above. */
2364 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2366 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2367 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2368 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2369 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2370 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2371 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2372 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2373 else if (GET_RTX_CLASS (code) == '1')
2374 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2376 /* Compare the elements. If any pair of corresponding elements
2377 fail to match, return 0 for the whole thing. */
2379 fmt = GET_RTX_FORMAT (code);
2380 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2386 if (XWINT (x, i) != XWINT (y, i))
2391 if (XINT (x, i) != XINT (y, i))
2396 if (XTREE (x, i) != XTREE (y, i))
2401 if (strcmp (XSTR (x, i), XSTR (y, i)))
2406 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2411 if (XEXP (x, i) != XEXP (y, i))
2418 if (XVECLEN (x, i) != XVECLEN (y, i))
2420 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2421 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2432 /* If X is a hard register or equivalent to one or a subregister of one,
2433 return the hard register number. If X is a pseudo register that was not
2434 assigned a hard register, return the pseudo register number. Otherwise,
2435 return -1. Any rtx is valid for X. */
/* NOTE(review): the defining line of true_regnum is elided from this
   listing; per the comment above, it returns the hard register number
   for X (following reg_renumber and SUBREGs), the pseudo number for an
   unallocated pseudo, or -1 otherwise.  */
2441 if (GET_CODE (x) == REG)
/* An allocated pseudo answers with its assigned hard register.  */
2443 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2444 return reg_renumber[REGNO (x)];
2447 if (GET_CODE (x) == SUBREG)
/* Recurse on the inner register, then adjust for the subword offset
   when the base landed in a hard register.  */
2449 int base = true_regnum (SUBREG_REG (x));
2450 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2451 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2452 GET_MODE (SUBREG_REG (x)),
2453 SUBREG_BYTE (x), GET_MODE (x));
2458 /* Return regno of the register REG and handle subregs too. */
2460 reg_or_subregno (reg)
2465 if (GET_CODE (reg) == SUBREG)
2466 return REGNO (SUBREG_REG (reg));