1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically a set of utility functions to
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
42 #include "hard-reg-set.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
55 /* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57 /* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
62 static int init_label_info PARAMS ((rtx));
63 static void mark_all_labels PARAMS ((rtx));
64 static int duplicate_loop_exit_test PARAMS ((rtx));
65 static void delete_computation PARAMS ((rtx));
66 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
67 static int redirect_exp PARAMS ((rtx, rtx, rtx));
68 static void invert_exp_1 PARAMS ((rtx));
69 static int invert_exp PARAMS ((rtx));
70 static int returnjump_p_1 PARAMS ((rtx *, void *));
71 static void delete_prior_computation PARAMS ((rtx, rtx));
73 /* Alternate entry into the jump optimizer. This entry point only rebuilds
74 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
77 rebuild_jump_labels (f)
83 max_uid = init_label_info (f) + 1;
87 /* Keep track of labels used from static data; we don't track them
88 closely enough to delete them here, so make sure their reference
89 count doesn't drop to zero. */
91 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
92 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
93 LABEL_NUSES (XEXP (insn, 0))++;
95 /* Keep track of labels used for marking handlers for exception
96 regions; they cannot usually be deleted. */
98 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
99 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
100 LABEL_NUSES (XEXP (insn, 0))++;
104 copy_loop_headers (f)
107 register rtx insn, next;
108 /* Now iterate optimizing jumps until nothing changes over one pass. */
109 for (insn = f; insn; insn = next)
113 next = NEXT_INSN (insn);
115 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
116 jump. Try to optimize by duplicating the loop exit test if so.
117 This is only safe immediately after regscan, because it uses
118 the values of regno_first_uid and regno_last_uid. */
119 if (GET_CODE (insn) == NOTE
120 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
121 && (temp1 = next_nonnote_insn (insn)) != 0
122 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
124 temp = PREV_INSN (insn);
125 if (duplicate_loop_exit_test (insn))
127 next = NEXT_INSN (temp);
134 purge_line_number_notes (f)
139 /* Delete extraneous line number notes.
140 Note that two consecutive notes for different lines are not really
141 extraneous. There should be some indication where that line belonged,
142 even if it became empty. */
144 for (insn = f; insn; insn = NEXT_INSN (insn))
145 if (GET_CODE (insn) == NOTE)
147 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
148 /* Any previous line note was for the prologue; gdb wants a new
149 note after the prologue even if it is for the same line. */
150 last_note = NULL_RTX;
151 else if (NOTE_LINE_NUMBER (insn) >= 0)
153 /* Delete this note if it is identical to previous note. */
155 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
156 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
167 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
168 notes whose labels don't occur in the insn any more. Returns the
169 largest INSN_UID found. */
177 for (insn = f; insn; insn = NEXT_INSN (insn))
179 if (GET_CODE (insn) == CODE_LABEL)
180 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
181 else if (GET_CODE (insn) == JUMP_INSN)
182 JUMP_LABEL (insn) = 0;
183 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
187 for (note = REG_NOTES (insn); note; note = next)
189 next = XEXP (note, 1);
190 if (REG_NOTE_KIND (note) == REG_LABEL
191 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
192 remove_note (insn, note);
195 if (INSN_UID (insn) > largest_uid)
196 largest_uid = INSN_UID (insn);
202 /* Mark the label each jump jumps to.
203 Combine consecutive labels, and count uses of labels. */
211 for (insn = f; insn; insn = NEXT_INSN (insn))
214 if (GET_CODE (insn) == CALL_INSN
215 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
217 mark_all_labels (XEXP (PATTERN (insn), 0));
218 mark_all_labels (XEXP (PATTERN (insn), 1));
219 mark_all_labels (XEXP (PATTERN (insn), 2));
221 /* Canonicalize the tail recursion label attached to the
222 CALL_PLACEHOLDER insn. */
223 if (XEXP (PATTERN (insn), 3))
225 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
226 XEXP (PATTERN (insn), 3));
227 mark_jump_label (label_ref, insn, 0);
228 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
234 mark_jump_label (PATTERN (insn), insn, 0);
235 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
237 /* When we know the LABEL_REF contained in a REG used in
238 an indirect jump, we'll have a REG_LABEL note so that
239 flow can tell where it's going. */
240 if (JUMP_LABEL (insn) == 0)
242 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
245 /* But a LABEL_REF around the REG_LABEL note, so
246 that we can canonicalize it. */
247 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
248 XEXP (label_note, 0));
250 mark_jump_label (label_ref, insn, 0);
251 XEXP (label_note, 0) = XEXP (label_ref, 0);
252 JUMP_LABEL (insn) = XEXP (label_note, 0);
259 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
260 jump. Assume that this unconditional jump is to the exit test code. If
261 the code is sufficiently simple, make a copy of it before INSN,
262 followed by a jump to the exit of the loop. Then delete the unconditional
265 Return 1 if we made the change, else 0.
267 This is only safe immediately after a regscan pass because it uses the
268 values of regno_first_uid and regno_last_uid. */
271 duplicate_loop_exit_test (loop_start)
274 rtx insn, set, reg, p, link;
275 rtx copy = 0, first_copy = 0;
277 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
279 int max_reg = max_reg_num ();
282 /* Scan the exit code. We do not perform this optimization if any insn:
286 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
287 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
288 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
291 We also do not do this if we find an insn with ASM_OPERANDS. While
292 this restriction should not be necessary, copying an insn with
293 ASM_OPERANDS can confuse asm_noperands in some cases.
295 Also, don't do this if the exit code is more than 20 insns. */
297 for (insn = exitcode;
299 && ! (GET_CODE (insn) == NOTE
300 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
301 insn = NEXT_INSN (insn))
303 switch (GET_CODE (insn))
309 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
310 a jump immediately after the loop start that branches outside
311 the loop but within an outer loop, near the exit test.
312 If we copied this exit test and created a phony
313 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
314 before the exit test look like these could be safely moved
315 out of the loop even if they actually may be never executed.
316 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
318 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
319 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
323 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
324 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
325 /* If we were to duplicate this code, we would not move
326 the BLOCK notes, and so debugging the moved code would
327 be difficult. Thus, we only move the code with -O2 or
334 /* The code below would grossly mishandle REG_WAS_0 notes,
335 so get rid of them here. */
336 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
337 remove_note (insn, p);
339 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
340 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
348 /* Unless INSN is zero, we can do the optimization. */
354 /* See if any insn sets a register only used in the loop exit code and
355 not a user variable. If so, replace it with a new register. */
356 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
357 if (GET_CODE (insn) == INSN
358 && (set = single_set (insn)) != 0
359 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
360 || (GET_CODE (reg) == SUBREG
361 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
362 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
363 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
365 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
366 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
371 /* We can do the replacement. Allocate reg_map if this is the
372 first replacement we found. */
374 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
376 REG_LOOP_TEST_P (reg) = 1;
378 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
382 /* Now copy each insn. */
383 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
385 switch (GET_CODE (insn))
388 copy = emit_barrier_before (loop_start);
391 /* Only copy line-number notes. */
392 if (NOTE_LINE_NUMBER (insn) >= 0)
394 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
395 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
400 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
402 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
404 mark_jump_label (PATTERN (copy), copy, 0);
406 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
408 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
409 if (REG_NOTE_KIND (link) != REG_LABEL)
411 if (GET_CODE (link) == EXPR_LIST)
413 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
418 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
423 if (reg_map && REG_NOTES (copy))
424 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
428 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
431 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
432 mark_jump_label (PATTERN (copy), copy, 0);
433 if (REG_NOTES (insn))
435 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
437 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
440 /* Predict conditional jump that do make loop looping as taken.
441 Other jumps are probably exit conditions, so predict
443 if (any_condjump_p (copy))
445 rtx label = JUMP_LABEL (copy);
448 /* The jump_insn after loop_start should be followed
449 by barrier and loopback label. */
450 if (prev_nonnote_insn (label)
451 && (PREV_INSN (prev_nonnote_insn (label))
452 == NEXT_INSN (loop_start)))
453 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
455 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
464 /* Record the first insn we copied. We need it so that we can
465 scan the copied insns for new pseudo registers. */
470 /* Now clean up by emitting a jump to the end label and deleting the jump
471 at the start of the loop. */
472 if (! copy || GET_CODE (copy) != BARRIER)
474 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
477 /* Record the first insn we copied. We need it so that we can
478 scan the copied insns for new pseudo registers. This may not
479 be strictly necessary since we should have copied at least one
480 insn above. But I am going to be safe. */
484 mark_jump_label (PATTERN (copy), copy, 0);
485 emit_barrier_before (loop_start);
488 /* Now scan from the first insn we copied to the last insn we copied
489 (copy) for new pseudo registers. Do this after the code to jump to
490 the end label since that might create a new pseudo too. */
491 reg_scan_update (first_copy, copy, max_reg);
493 /* Mark the exit code as the virtual top of the converted loop. */
494 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
496 delete_insn (next_nonnote_insn (loop_start));
505 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
506 notes between START and END out before START. Assume that END is not
507 such a note. START may be such a note. Returns the value of the new
508 starting insn, which may be different if the original start was such a
512 squeeze_notes (start, end)
518 for (insn = start; insn != end; insn = next)
520 next = NEXT_INSN (insn);
521 if (GET_CODE (insn) == NOTE
522 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
523 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
524 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
525 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
526 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
527 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
533 rtx prev = PREV_INSN (insn);
534 PREV_INSN (insn) = PREV_INSN (start);
535 NEXT_INSN (insn) = start;
536 NEXT_INSN (PREV_INSN (insn)) = insn;
537 PREV_INSN (NEXT_INSN (insn)) = insn;
538 NEXT_INSN (prev) = next;
539 PREV_INSN (next) = prev;
547 /* Return the label before INSN, or put a new label there. */
550 get_label_before (insn)
555 /* Find an existing label at this point
556 or make a new one if there is none. */
557 label = prev_nonnote_insn (insn);
559 if (label == 0 || GET_CODE (label) != CODE_LABEL)
561 rtx prev = PREV_INSN (insn);
563 label = gen_label_rtx ();
564 emit_label_after (label, prev);
565 LABEL_NUSES (label) = 0;
570 /* Return the label after INSN, or put a new label there. */
573 get_label_after (insn)
578 /* Find an existing label at this point
579 or make a new one if there is none. */
580 label = next_nonnote_insn (insn);
582 if (label == 0 || GET_CODE (label) != CODE_LABEL)
584 label = gen_label_rtx ();
585 emit_label_after (label, insn);
586 LABEL_NUSES (label) = 0;
591 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
592 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
593 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
594 know whether it's source is floating point or integer comparison. Machine
595 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
596 to help this function avoid overhead in these cases. */
598 reversed_comparison_code_parts (code, arg0, arg1, insn)
599 rtx insn, arg0, arg1;
602 enum machine_mode mode;
604 /* If this is not actually a comparison, we can't reverse it. */
605 if (GET_RTX_CLASS (code) != '<')
608 mode = GET_MODE (arg0);
609 if (mode == VOIDmode)
610 mode = GET_MODE (arg1);
612 /* First see if machine description supply us way to reverse the comparison.
613 Give it priority over everything else to allow machine description to do
615 #ifdef REVERSIBLE_CC_MODE
616 if (GET_MODE_CLASS (mode) == MODE_CC
617 && REVERSIBLE_CC_MODE (mode))
619 #ifdef REVERSE_CONDITION
620 return REVERSE_CONDITION (code, mode);
622 return reverse_condition (code);
626 /* Try a few special cases based on the comparison code. */
635 /* It is always safe to reverse EQ and NE, even for the floating
636 point. Similary the unsigned comparisons are never used for
637 floating point so we can reverse them in the default way. */
638 return reverse_condition (code);
643 /* In case we already see unordered comparison, we can be sure to
644 be dealing with floating point so we don't need any more tests. */
645 return reverse_condition_maybe_unordered (code);
650 /* We don't have safe way to reverse these yet. */
656 /* In case we give up IEEE compatibility, all comparisons are reversible. */
657 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
658 || flag_unsafe_math_optimizations)
659 return reverse_condition (code);
661 if (GET_MODE_CLASS (mode) == MODE_CC
668 /* Try to search for the comparison to determine the real mode.
669 This code is expensive, but with sane machine description it
670 will be never used, since REVERSIBLE_CC_MODE will return true
675 for (prev = prev_nonnote_insn (insn);
676 prev != 0 && GET_CODE (prev) != CODE_LABEL;
677 prev = prev_nonnote_insn (prev))
679 rtx set = set_of (arg0, prev);
680 if (set && GET_CODE (set) == SET
681 && rtx_equal_p (SET_DEST (set), arg0))
683 rtx src = SET_SRC (set);
685 if (GET_CODE (src) == COMPARE)
687 rtx comparison = src;
688 arg0 = XEXP (src, 0);
689 mode = GET_MODE (arg0);
690 if (mode == VOIDmode)
691 mode = GET_MODE (XEXP (comparison, 1));
694 /* We can get past reg-reg moves. This may be usefull for model
695 of i387 comparisons that first move flag registers around. */
702 /* If register is clobbered in some ununderstandable way,
709 /* An integer condition. */
710 if (GET_CODE (arg0) == CONST_INT
711 || (GET_MODE (arg0) != VOIDmode
712 && GET_MODE_CLASS (mode) != MODE_CC
713 && ! FLOAT_MODE_P (mode)))
714 return reverse_condition (code);
719 /* An wrapper around the previous function to take COMPARISON as rtx
720 expression. This simplifies many callers. */
722 reversed_comparison_code (comparison, insn)
723 rtx comparison, insn;
725 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
727 return reversed_comparison_code_parts (GET_CODE (comparison),
728 XEXP (comparison, 0),
729 XEXP (comparison, 1), insn);
732 /* Given an rtx-code for a comparison, return the code for the negated
733 comparison. If no such code exists, return UNKNOWN.
735 WATCH OUT! reverse_condition is not safe to use on a jump that might
736 be acting on the results of an IEEE floating point comparison, because
737 of the special treatment of non-signaling nans in comparisons.
738 Use reversed_comparison_code instead. */
741 reverse_condition (code)
784 /* Similar, but we're allowed to generate unordered comparisons, which
785 makes it safe for IEEE floating-point. Of course, we have to recognize
786 that the target will support them too... */
789 reverse_condition_maybe_unordered (code)
792 /* Non-IEEE formats don't have unordered conditions. */
793 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
794 return reverse_condition (code);
832 /* Similar, but return the code when two operands of a comparison are swapped.
833 This IS safe for IEEE floating-point. */
836 swap_condition (code)
879 /* Given a comparison CODE, return the corresponding unsigned comparison.
880 If CODE is an equality comparison or already an unsigned comparison,
884 unsigned_condition (code)
911 /* Similarly, return the signed version of a comparison. */
914 signed_condition (code)
941 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
942 truth of CODE1 implies the truth of CODE2. */
945 comparison_dominates_p (code1, code2)
946 enum rtx_code code1, code2;
948 /* UNKNOWN comparison codes can happen as a result of trying to revert
950 They can't match anything, so we have to reject them here. */
951 if (code1 == UNKNOWN || code2 == UNKNOWN)
960 if (code2 == UNLE || code2 == UNGE)
965 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
971 if (code2 == UNLE || code2 == NE)
976 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
981 if (code2 == UNGE || code2 == NE)
986 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
992 if (code2 == ORDERED)
997 if (code2 == NE || code2 == ORDERED)
1002 if (code2 == LEU || code2 == NE)
1007 if (code2 == GEU || code2 == NE)
1012 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1013 || code2 == UNGE || code2 == UNGT)
1024 /* Return 1 if INSN is an unconditional jump and nothing else. */
1030 return (GET_CODE (insn) == JUMP_INSN
1031 && GET_CODE (PATTERN (insn)) == SET
1032 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1033 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1036 /* Return nonzero if INSN is a (possibly) conditional jump
1039 Use this function is deprecated, since we need to support combined
1040 branch and compare insns. Use any_condjump_p instead whenever possible. */
1046 register rtx x = PATTERN (insn);
1048 if (GET_CODE (x) != SET
1049 || GET_CODE (SET_DEST (x)) != PC)
1053 if (GET_CODE (x) == LABEL_REF)
1056 return (GET_CODE (x) == IF_THEN_ELSE
1057 && ((GET_CODE (XEXP (x, 2)) == PC
1058 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1059 || GET_CODE (XEXP (x, 1)) == RETURN))
1060 || (GET_CODE (XEXP (x, 1)) == PC
1061 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1062 || GET_CODE (XEXP (x, 2)) == RETURN))));
1067 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1070 Use this function is deprecated, since we need to support combined
1071 branch and compare insns. Use any_condjump_p instead whenever possible. */
1074 condjump_in_parallel_p (insn)
1077 register rtx x = PATTERN (insn);
1079 if (GET_CODE (x) != PARALLEL)
1082 x = XVECEXP (x, 0, 0);
1084 if (GET_CODE (x) != SET)
1086 if (GET_CODE (SET_DEST (x)) != PC)
1088 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1090 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1092 if (XEXP (SET_SRC (x), 2) == pc_rtx
1093 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1094 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1096 if (XEXP (SET_SRC (x), 1) == pc_rtx
1097 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1098 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1103 /* Return set of PC, otherwise NULL. */
1110 if (GET_CODE (insn) != JUMP_INSN)
1112 pat = PATTERN (insn);
1114 /* The set is allowed to appear either as the insn pattern or
1115 the first set in a PARALLEL. */
1116 if (GET_CODE (pat) == PARALLEL)
1117 pat = XVECEXP (pat, 0, 0);
1118 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1124 /* Return true when insn is an unconditional direct jump,
1125 possibly bundled inside a PARALLEL. */
1128 any_uncondjump_p (insn)
1131 rtx x = pc_set (insn);
1134 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1139 /* Return true when insn is a conditional jump. This function works for
1140 instructions containing PC sets in PARALLELs. The instruction may have
1141 various other effects so before removing the jump you must verify
1144 Note that unlike condjump_p it returns false for unconditional jumps. */
1147 any_condjump_p (insn)
1150 rtx x = pc_set (insn);
1155 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1158 a = GET_CODE (XEXP (SET_SRC (x), 1));
1159 b = GET_CODE (XEXP (SET_SRC (x), 2));
1161 return ((b == PC && (a == LABEL_REF || a == RETURN))
1162 || (a == PC && (b == LABEL_REF || b == RETURN)));
1165 /* Return the label of a conditional jump. */
1168 condjump_label (insn)
1171 rtx x = pc_set (insn);
1176 if (GET_CODE (x) == LABEL_REF)
1178 if (GET_CODE (x) != IF_THEN_ELSE)
1180 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1182 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1187 /* Return true if INSN is a (possibly conditional) return insn. */
1190 returnjump_p_1 (loc, data)
1192 void *data ATTRIBUTE_UNUSED;
1195 return x && GET_CODE (x) == RETURN;
1202 if (GET_CODE (insn) != JUMP_INSN)
1204 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1207 /* Return true if INSN is a jump that only transfers control and
1216 if (GET_CODE (insn) != JUMP_INSN)
1219 set = single_set (insn);
1222 if (GET_CODE (SET_DEST (set)) != PC)
1224 if (side_effects_p (SET_SRC (set)))
1232 /* Return 1 if X is an RTX that does nothing but set the condition codes
1233 and CLOBBER or USE registers.
1234 Return -1 if X does explicitly set the condition codes,
1235 but also does other things. */
1239 rtx x ATTRIBUTE_UNUSED;
1241 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1243 if (GET_CODE (x) == PARALLEL)
1247 int other_things = 0;
1248 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1250 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1251 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1253 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1256 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1262 /* Follow any unconditional jump at LABEL;
1263 return the ultimate label reached by any such chain of jumps.
1264 If LABEL is not followed by a jump, return LABEL.
1265 If the chain loops or we can't find end, return LABEL,
1266 since that tells caller to avoid changing the insn.
1268 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1269 a USE or CLOBBER. */
1272 follow_jumps (label)
1277 register rtx value = label;
1282 && (insn = next_active_insn (value)) != 0
1283 && GET_CODE (insn) == JUMP_INSN
1284 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1285 && onlyjump_p (insn))
1286 || GET_CODE (PATTERN (insn)) == RETURN)
1287 && (next = NEXT_INSN (insn))
1288 && GET_CODE (next) == BARRIER);
1291 /* Don't chain through the insn that jumps into a loop
1292 from outside the loop,
1293 since that would create multiple loop entry jumps
1294 and prevent loop optimization. */
1296 if (!reload_completed)
1297 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1298 if (GET_CODE (tem) == NOTE
1299 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1300 /* ??? Optional. Disables some optimizations, but makes
1301 gcov output more accurate with -O. */
1302 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1305 /* If we have found a cycle, make the insn jump to itself. */
1306 if (JUMP_LABEL (insn) == label)
1309 tem = next_active_insn (JUMP_LABEL (insn));
1310 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1311 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1314 value = JUMP_LABEL (insn);
1322 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1323 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1324 in INSN, then store one of them in JUMP_LABEL (INSN).
1325 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1326 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1327 Also, when there are consecutive labels, canonicalize on the last of them.
1329 Note that two labels separated by a loop-beginning note
1330 must be kept distinct if we have not yet done loop-optimization,
1331 because the gap between them is where loop-optimize
1332 will want to move invariant code to. CROSS_JUMP tells us
1333 that loop-optimization is done with. */
1336 mark_jump_label (x, insn, in_mem)
1341 register RTX_CODE code = GET_CODE (x);
1343 register const char *fmt;
1365 /* If this is a constant-pool reference, see if it is a label. */
1366 if (CONSTANT_POOL_ADDRESS_P (x))
1367 mark_jump_label (get_pool_constant (x), insn, in_mem);
1372 rtx label = XEXP (x, 0);
1374 /* Ignore remaining references to unreachable labels that
1375 have been deleted. */
1376 if (GET_CODE (label) == NOTE
1377 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1380 if (GET_CODE (label) != CODE_LABEL)
1383 /* Ignore references to labels of containing functions. */
1384 if (LABEL_REF_NONLOCAL_P (x))
1387 XEXP (x, 0) = label;
1388 if (! insn || ! INSN_DELETED_P (insn))
1389 ++LABEL_NUSES (label);
1393 if (GET_CODE (insn) == JUMP_INSN)
1394 JUMP_LABEL (insn) = label;
1397 /* Add a REG_LABEL note for LABEL unless there already
1398 is one. All uses of a label, except for labels
1399 that are the targets of jumps, must have a
1401 if (! find_reg_note (insn, REG_LABEL, label))
1402 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1409 /* Do walk the labels in a vector, but not the first operand of an
1410 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1413 if (! INSN_DELETED_P (insn))
1415 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1417 for (i = 0; i < XVECLEN (x, eltnum); i++)
1418 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1426 fmt = GET_RTX_FORMAT (code);
1427 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1430 mark_jump_label (XEXP (x, i), insn, in_mem);
1431 else if (fmt[i] == 'E')
1434 for (j = 0; j < XVECLEN (x, i); j++)
1435 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1440 /* If all INSN does is set the pc, delete it,
1441 and delete the insn that set the condition codes for it
1442 if that's what the previous thing was. */
1448 register rtx set = single_set (insn);
1450 if (set && GET_CODE (SET_DEST (set)) == PC)
1451 delete_computation (insn);
1454 /* Verify INSN is a BARRIER and delete it. */
1457 delete_barrier (insn)
1460 if (GET_CODE (insn) != BARRIER)
1466 /* Recursively delete prior insns that compute the value (used only by INSN
1467 which the caller is deleting) stored in the register mentioned by NOTE
1468 which is a REG_DEAD note associated with INSN. */
1471 delete_prior_computation (note, insn)
1476 rtx reg = XEXP (note, 0);
1478 for (our_prev = prev_nonnote_insn (insn);
1479 our_prev && (GET_CODE (our_prev) == INSN
1480 || GET_CODE (our_prev) == CALL_INSN);
1481 our_prev = prev_nonnote_insn (our_prev))
1483 rtx pat = PATTERN (our_prev);
1485 /* If we reach a CALL which is not calling a const function
1486 or the callee pops the arguments, then give up. */
1487 if (GET_CODE (our_prev) == CALL_INSN
1488 && (! CONST_CALL_P (our_prev)
1489 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1492 /* If we reach a SEQUENCE, it is too complex to try to
1493 do anything with it, so give up. */
1494 if (GET_CODE (pat) == SEQUENCE)
1497 if (GET_CODE (pat) == USE
1498 && GET_CODE (XEXP (pat, 0)) == INSN)
1499 /* reorg creates USEs that look like this. We leave them
1500 alone because reorg needs them for its own purposes. */
1503 if (reg_set_p (reg, pat))
1505 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1508 if (GET_CODE (pat) == PARALLEL)
1510 /* If we find a SET of something else, we can't
1515 for (i = 0; i < XVECLEN (pat, 0); i++)
1517 rtx part = XVECEXP (pat, 0, i);
1519 if (GET_CODE (part) == SET
1520 && SET_DEST (part) != reg)
1524 if (i == XVECLEN (pat, 0))
1525 delete_computation (our_prev);
1527 else if (GET_CODE (pat) == SET
1528 && GET_CODE (SET_DEST (pat)) == REG)
1530 int dest_regno = REGNO (SET_DEST (pat));
1533 + (dest_regno < FIRST_PSEUDO_REGISTER
1534 ? HARD_REGNO_NREGS (dest_regno,
1535 GET_MODE (SET_DEST (pat))) : 1));
1536 int regno = REGNO (reg);
1539 + (regno < FIRST_PSEUDO_REGISTER
1540 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
1542 if (dest_regno >= regno
1543 && dest_endregno <= endregno)
1544 delete_computation (our_prev);
1546 /* We may have a multi-word hard register and some, but not
1547 all, of the words of the register are needed in subsequent
1548 insns. Write REG_UNUSED notes for those parts that were not
1550 else if (dest_regno <= regno
1551 && dest_endregno >= endregno)
1555 REG_NOTES (our_prev)
1556 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1557 REG_NOTES (our_prev));
1559 for (i = dest_regno; i < dest_endregno; i++)
1560 if (! find_regno_note (our_prev, REG_UNUSED, i))
1563 if (i == dest_endregno)
1564 delete_computation (our_prev);
1571 /* If PAT references the register that dies here, it is an
1572 additional use. Hence any prior SET isn't dead. However, this
1573 insn becomes the new place for the REG_DEAD note. */
1574 if (reg_overlap_mentioned_p (reg, pat))
1576 XEXP (note, 1) = REG_NOTES (our_prev);
1577 REG_NOTES (our_prev) = note;
1583 /* Delete INSN and recursively delete insns that compute values used only
1584 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1585 If we are running before flow.c, we need do nothing since flow.c will
1586 delete dead code. We also can't know if the registers being used are
1587 dead or not at this point.
1589 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1590 nothing other than set a register that dies in this insn, we can delete
1593 On machines with CC0, if CC0 is used in this insn, we may be able to
1594 delete the insn that set it. */
/* NOTE(review): this extract appears to be missing lines (the parameter
   declaration, braces, and any trailing statements are not visible);
   compare against the complete jump.c before modifying this function.  */
1597 delete_computation (insn)
/* On CC0 targets, an insn that consumes cc0 may allow the cc0-setting
   insn immediately before it to be deleted too.  */
1603 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1605 rtx prev = prev_nonnote_insn (insn);
1606 /* We assume that at this stage
1607 CC's are always set explicitly
1608 and always immediately before the jump that
1609 will use them. So if the previous insn
1610 exists to set the CC's, delete it
1611 (unless it performs auto-increments, etc.). */
1612 if (prev && GET_CODE (prev) == INSN
1613 && sets_cc0_p (PATTERN (prev)))
1615 if (sets_cc0_p (PATTERN (prev)) > 0
1616 && ! side_effects_p (PATTERN (prev)))
1617 delete_computation (prev);
1619 /* Otherwise, show that cc0 won't be used. */
1620 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1621 cc0_rtx, REG_NOTES (prev));
/* Walk INSN's REG_DEAD notes; each names a register whose prior
   computation may now be dead, handled by delete_prior_computation.  */
1626 for (note = REG_NOTES (insn); note; note = next)
1628 next = XEXP (note, 1);
1630 if (REG_NOTE_KIND (note) != REG_DEAD
1631 /* Verify that the REG_NOTE is legitimate. */
1632 || GET_CODE (XEXP (note, 0)) != REG)
1635 delete_prior_computation (note, insn);
1641 /* Delete insn INSN from the chain of insns and update label ref counts.
1642 May delete some following insns as a consequence; may even delete
1643 a label elsewhere and insns that follow it.
1645 Returns the first insn after INSN that was not deleted. */
/* NOTE(review): this extract appears to be missing lines (the function's
   declarator, braces, and several statements are not visible); verify
   against the complete jump.c before modifying.  */
1651 register rtx next = NEXT_INSN (insn);
1652 register rtx prev = PREV_INSN (insn);
1653 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1654 register int dont_really_delete = 0;
/* Skip over anything already marked deleted to find the return value.  */
1657 while (next && INSN_DELETED_P (next))
1658 next = NEXT_INSN (next);
1660 /* This insn is already deleted => return first following nondeleted. */
1661 if (INSN_DELETED_P (insn))
1665 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1667 /* Don't delete user-declared labels. When optimizing, convert them
1668 to special NOTEs instead. When not optimizing, leave them alone. */
1669 if (was_code_label && LABEL_NAME (insn) != 0)
1673 const char *name = LABEL_NAME (insn);
1674 PUT_CODE (insn, NOTE);
1675 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
1676 NOTE_SOURCE_FILE (insn) = name;
1679 dont_really_delete = 1;
1682 /* Mark this insn as deleted. */
1683 INSN_DELETED_P (insn) = 1;
1685 /* If instruction is followed by a barrier,
1686 delete the barrier too. */
1688 if (next != 0 && GET_CODE (next) == BARRIER)
1690 INSN_DELETED_P (next) = 1;
1691 next = NEXT_INSN (next);
1694 /* Patch out INSN (and the barrier if any) */
1696 if (! dont_really_delete)
/* Unlink from the doubly-linked insn chain; SEQUENCEs need their
   interior insns' links patched as well.  */
1700 NEXT_INSN (prev) = next;
1701 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
1702 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
1703 XVECLEN (PATTERN (prev), 0) - 1)) = next;
1708 PREV_INSN (next) = prev;
1709 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
1710 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
1713 if (prev && NEXT_INSN (prev) == 0)
1714 set_last_insn (prev);
1717 /* If deleting a jump, decrement the count of the label,
1718 and delete the label if it is now unused. */
1720 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1722 rtx lab = JUMP_LABEL (insn), lab_next;
1724 if (--LABEL_NUSES (lab) == 0)
1726 /* This can delete NEXT or PREV,
1727 either directly if NEXT is JUMP_LABEL (INSN),
1728 or indirectly through more levels of jumps. */
1731 /* I feel a little doubtful about this loop,
1732 but I see no clean and sure alternative way
1733 to find the first insn after INSN that is not now deleted.
1734 I hope this works. */
1735 while (next && INSN_DELETED_P (next))
1736 next = NEXT_INSN (next);
1739 else if ((lab_next = next_nonnote_insn (lab)) != NULL
1740 && GET_CODE (lab_next) == JUMP_INSN
1741 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
1742 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
1744 /* If we're deleting the tablejump, delete the dispatch table.
1745 We may not be able to kill the label immediately preceding
1746 just yet, as it might be referenced in code leading up to
1748 delete_insn (lab_next);
1752 /* Likewise if we're deleting a dispatch table. */
1754 if (GET_CODE (insn) == JUMP_INSN
1755 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1756 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1758 rtx pat = PATTERN (insn);
1759 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1760 int len = XVECLEN (pat, diff_vec_p);
/* Drop one reference per table entry; delete labels that hit zero.  */
1762 for (i = 0; i < len; i++)
1763 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1764 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1765 while (next && INSN_DELETED_P (next))
1766 next = NEXT_INSN (next);
1770 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1771 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1772 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1773 if (REG_NOTE_KIND (note) == REG_LABEL
1774 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1775 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1776 if (--LABEL_NUSES (XEXP (note, 0)) == 0)
1777 delete_insn (XEXP (note, 0));
/* Back up over deleted insns and NOTEs to find the real predecessor.  */
1779 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1780 prev = PREV_INSN (prev);
1782 /* If INSN was a label and a dispatch table follows it,
1783 delete the dispatch table. The tablejump must have gone already.
1784 It isn't useful to fall through into a table. */
1787 && NEXT_INSN (insn) != 0
1788 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1789 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1790 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1791 next = delete_insn (NEXT_INSN (insn));
1793 /* If INSN was a label, delete insns following it if now unreachable. */
1795 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1797 register RTX_CODE code;
1799 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1800 || code == NOTE || code == BARRIER
1801 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1804 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1805 next = NEXT_INSN (next);
1806 /* Keep going past other deleted labels to delete what follows. */
1807 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1808 next = NEXT_INSN (next);
1810 /* Note: if this deletes a jump, it can cause more
1811 deletion of unreachable code, after a different label.
1812 As long as the value from this recursive call is correct,
1813 this invocation functions correctly. */
1814 next = delete_insn (next);
1821 /* Advance from INSN till reaching something not deleted
1822 then return that. May return INSN itself. */
/* NOTE(review): the declarator line and return statement are not visible
   in this extract.  */
1825 next_nondeleted_insn (insn)
1828 while (INSN_DELETED_P (insn))
1829 insn = NEXT_INSN (insn);
1833 /* Delete a range of insns from FROM to TO, inclusive.
1834 This is for the sake of peephole optimization, so assume
1835 that whatever these insns do will still be done by a new
1836 peephole insn that will replace them. */
/* NOTE(review): braces and the loop-termination logic are not visible in
   this extract; verify against the complete jump.c before modifying.  */
1839 delete_for_peephole (from, to)
1840 register rtx from, to;
1842 register rtx insn = from;
1846 register rtx next = NEXT_INSN (insn);
1847 register rtx prev = PREV_INSN (insn);
/* NOTEs are deliberately kept in the chain; only real insns are
   marked deleted and unlinked.  */
1849 if (GET_CODE (insn) != NOTE)
1851 INSN_DELETED_P (insn) = 1;
1853 /* Patch this insn out of the chain. */
1854 /* We don't do this all at once, because we
1855 must preserve all NOTEs. */
1857 NEXT_INSN (prev) = next;
1860 PREV_INSN (next) = prev;
1868 /* Note that if TO is an unconditional jump
1869 we *do not* delete the BARRIER that follows,
1870 since the peephole that replaces this sequence
1871 is also an unconditional jump in that case. */
1874 /* We have determined that INSN is never reached, and are about to
1875 delete it. Print a warning if the user asked for one.
1877 To try to make this warning more useful, this should only be called
1878 once per basic block not reached, and it only warns when the basic
1879 block contains more than one line from the current function, and
1880 contains at least one operation. CSE and inlining can duplicate insns,
1881 so it's possible to get spurious warnings from this. */
/* NOTE(review): declarator details, braces, and some statements are not
   visible in this extract.  */
1884 never_reached_warning (avoided_insn)
1888 rtx a_line_note = NULL;
1889 int two_avoided_lines = 0;
1890 int contains_insn = 0;
/* Warning is gated on -Wunreachable-code (warn_notreached).  */
1892 if (! warn_notreached)
1895 /* Scan forwards, looking at LINE_NUMBER notes, until
1896 we hit a LABEL or we run out of insns. */
1898 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
1900 if (GET_CODE (insn) == CODE_LABEL)
1902 else if (GET_CODE (insn) == NOTE /* A line number note? */
1903 && NOTE_LINE_NUMBER (insn) >= 0)
1905 if (a_line_note == NULL)
/* Record whether the skipped region spans more than one source line.  */
1908 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1909 != NOTE_LINE_NUMBER (insn));
1911 else if (INSN_P (insn))
1914 if (two_avoided_lines && contains_insn)
1915 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
1916 NOTE_LINE_NUMBER (a_line_note),
1917 "will never be executed");
1920 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1921 NLABEL as a return. Accrue modifications into the change group. */
/* NOTE(review): declarator/parameter lines and braces are not fully
   visible in this extract.  */
1924 redirect_exp_1 (loc, olabel, nlabel, insn)
1929 register rtx x = *loc;
1930 register RTX_CODE code = GET_CODE (x);
1932 register const char *fmt;
/* A LABEL_REF to OLABEL becomes a LABEL_REF to NLABEL, or a RETURN when
   NLABEL is null; changes are queued, not applied (last arg 1).  */
1934 if (code == LABEL_REF)
1936 if (XEXP (x, 0) == olabel)
1940 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1942 n = gen_rtx_RETURN (VOIDmode);
1944 validate_change (insn, loc, n, 1);
1948 else if (code == RETURN && olabel == 0)
1950 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1951 if (loc == &PATTERN (insn))
1952 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1953 validate_change (insn, loc, x, 1);
/* (set (pc) (label_ref OLABEL)) with null NLABEL collapses to RETURN.  */
1957 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1958 && GET_CODE (SET_SRC (x)) == LABEL_REF
1959 && XEXP (SET_SRC (x), 0) == olabel)
1961 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
/* Recurse over all sub-rtxes ('e') and vectors ('E') of X.  */
1965 fmt = GET_RTX_FORMAT (code);
1966 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1969 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1970 else if (fmt[i] == 'E')
1973 for (j = 0; j < XVECLEN (x, i); j++)
1974 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1979 /* Similar, but apply the change group and report success or failure. */
/* NOTE(review): declarator/parameter lines are not visible in this
   extract.  */
1982 redirect_exp (olabel, nlabel, insn)
/* For a PARALLEL, only the first element (the jump SET) is rewritten.  */
1988 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1989 loc = &XVECEXP (PATTERN (insn), 0, 0);
1991 loc = &PATTERN (insn);
1993 redirect_exp_1 (loc, olabel, nlabel, insn);
1994 if (num_validated_changes () == 0)
1997 return apply_change_group ();
2000 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2001 the modifications into the change group. Return false if we did
2002 not see how to do that. */
/* NOTE(review): declarator/parameter lines are not visible in this
   extract.  */
2005 redirect_jump_1 (jump, nlabel)
/* Success means redirect_exp_1 queued at least one new change.  */
2008 int ochanges = num_validated_changes ();
2011 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2012 loc = &XVECEXP (PATTERN (jump), 0, 0);
2014 loc = &PATTERN (jump);
2016 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2017 return num_validated_changes () > ochanges;
2020 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2021 jump target label is unused as a result, it and the code following
2024 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2027 The return value will be 1 if the change was made, 0 if it wasn't
2028 (this can only occur for NLABEL == 0). */
/* NOTE(review): declarator/parameter lines and some statements are not
   visible in this extract.  */
2031 redirect_jump (jump, nlabel, delete_unused)
2035 register rtx olabel = JUMP_LABEL (jump);
/* Redirecting to the current target is a trivial success.  */
2037 if (nlabel == olabel)
2040 if (! redirect_exp (olabel, nlabel, jump))
/* Keep JUMP_LABEL and label reference counts consistent.  */
2043 JUMP_LABEL (jump) = nlabel;
2045 ++LABEL_NUSES (nlabel);
2047 /* If we're eliding the jump over exception cleanups at the end of a
2048 function, move the function end note so that -Wreturn-type works. */
2049 if (olabel && nlabel
2050 && NEXT_INSN (olabel)
2051 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2052 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2053 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
2055 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
2056 delete_insn (olabel);
2061 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2062 Accrue the modifications into the change group. */
/* NOTE(review): the declarator line, braces, and trailing statements are
   not visible in this extract.  */
2068 register RTX_CODE code;
2069 rtx x = pc_set (insn);
2075 code = GET_CODE (x);
2077 if (code == IF_THEN_ELSE)
2079 register rtx comp = XEXP (x, 0);
2081 enum rtx_code reversed_code;
2083 /* We can do this in two ways: The preferable way, which can only
2084 be done if this is not an integer comparison, is to reverse
2085 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2086 of the IF_THEN_ELSE. If we can't do either, fail. */
2088 reversed_code = reversed_comparison_code (comp, insn);
2090 if (reversed_code != UNKNOWN)
2092 validate_change (insn, &XEXP (x, 0),
2093 gen_rtx_fmt_ee (reversed_code,
2094 GET_MODE (comp), XEXP (comp, 0),
/* Fallback: swap the THEN and ELSE arms of the IF_THEN_ELSE.  */
2101 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2102 validate_change (insn, &XEXP (x, 2), tem, 1);
2108 /* Invert the jump condition of conditional jump insn, INSN.
2110 Return 1 if we can do so, 0 if we cannot find a way to do so that
2111 matches a pattern. */
/* NOTE(review): the declarator line is not visible in this extract.  */
2117 invert_exp_1 (insn);
2118 if (num_validated_changes () == 0)
2121 return apply_change_group ();
2124 /* Invert the condition of the jump JUMP, and make it jump to label
2125 NLABEL instead of where it jumps now. Accrue changes into the
2126 change group. Return false if we didn't see how to perform the
2127 inversion and redirection. */
/* NOTE(review): declarator/parameter lines are not visible in this
   extract.  */
2130 invert_jump_1 (jump, nlabel)
/* Inversion must queue at least one change before we try redirecting.  */
2135 ochanges = num_validated_changes ();
2136 invert_exp_1 (jump);
2137 if (num_validated_changes () == ochanges)
2140 return redirect_jump_1 (jump, nlabel);
2143 /* Invert the condition of the jump JUMP, and make it jump to label
2144 NLABEL instead of where it jumps now. Return true if successful. */
/* NOTE(review): declarator/parameter lines and some statements are not
   visible in this extract.  */
2147 invert_jump (jump, nlabel, delete_unused)
2151 /* We have to either invert the condition and change the label or
2152 do neither. Either operation could fail. We first try to invert
2153 the jump. If that succeeds, we try changing the label. If that fails,
2154 we invert the jump back to what it was. */
2156 if (! invert_exp (jump))
2159 if (redirect_jump (jump, nlabel, delete_unused))
/* On success, branch-probability notes must be flipped to match the
   inverted condition.  */
2161 invert_br_probabilities (jump);
2166 if (! invert_exp (jump))
2167 /* This should just be putting it back the way it was. */
2174 /* Like rtx_equal_p except that it considers two REGs as equal
2175 if they renumber to the same value and considers two commutative
2176 operations to be the same if the order of the operands has been
2179 ??? Addition is not commutative on the PA due to the weird implicit
2180 space register selection rules for memory addresses. Therefore, we
2181 don't consider a + b == b + a.
2183 We could/should make this test a little tighter. Possibly only
2184 disabling it on the PA via some backend macro or only disabling this
2185 case when the PLUS is inside a MEM. */
/* NOTE(review): declarator/parameter lines, braces, and several case
   labels are not visible in this extract; verify against the complete
   jump.c before modifying.  */
2188 rtx_renumbered_equal_p (x, y)
2192 register RTX_CODE code = GET_CODE (x);
2193 register const char *fmt;
/* REG/SUBREG-of-REG pairs are compared by their post-reload hard
   register numbers (via reg_renumber) and subreg byte offsets.  */
2198 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2199 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2200 && GET_CODE (SUBREG_REG (y)) == REG)))
2202 int reg_x = -1, reg_y = -1;
2203 int byte_x = 0, byte_y = 0;
2205 if (GET_MODE (x) != GET_MODE (y))
2208 /* If we haven't done any renumbering, don't
2209 make any assumptions. */
2210 if (reg_renumber == 0)
2211 return rtx_equal_p (x, y);
2215 reg_x = REGNO (SUBREG_REG (x));
2216 byte_x = SUBREG_BYTE (x);
2218 if (reg_renumber[reg_x] >= 0)
2220 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2221 GET_MODE (SUBREG_REG (x)),
2230 if (reg_renumber[reg_x] >= 0)
2231 reg_x = reg_renumber[reg_x];
2234 if (GET_CODE (y) == SUBREG)
2236 reg_y = REGNO (SUBREG_REG (y));
2237 byte_y = SUBREG_BYTE (y);
2239 if (reg_renumber[reg_y] >= 0)
2241 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2242 GET_MODE (SUBREG_REG (y)),
2251 if (reg_renumber[reg_y] >= 0)
2252 reg_y = reg_renumber[reg_y];
2255 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2258 /* Now we have disposed of all the cases
2259 in which different rtx codes can match. */
2260 if (code != GET_CODE (y))
2272 return INTVAL (x) == INTVAL (y);
2275 /* We can't assume nonlocal labels have their following insns yet. */
2276 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2277 return XEXP (x, 0) == XEXP (y, 0);
2279 /* Two label-refs are equivalent if they point at labels
2280 in the same position in the instruction stream. */
2281 return (next_real_insn (XEXP (x, 0))
2282 == next_real_insn (XEXP (y, 0)));
2285 return XSTR (x, 0) == XSTR (y, 0);
2288 /* If we didn't match EQ equality above, they aren't the same. */
2295 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2297 if (GET_MODE (x) != GET_MODE (y))
2300 /* For commutative operations, the RTX match if the operand match in any
2301 order. Also handle the simple binary and unary cases without a loop.
2303 ??? Don't consider PLUS a commutative operator; see comments above. */
2304 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2306 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2307 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2308 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2309 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2310 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2311 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2312 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2313 else if (GET_RTX_CLASS (code) == '1')
2314 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2316 /* Compare the elements. If any pair of corresponding elements
2317 fail to match, return 0 for the whole things. */
2319 fmt = GET_RTX_FORMAT (code);
2320 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2326 if (XWINT (x, i) != XWINT (y, i))
2331 if (XINT (x, i) != XINT (y, i))
2336 if (XTREE (x, i) != XTREE (y, i))
2341 if (strcmp (XSTR (x, i), XSTR (y, i)))
2346 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2351 if (XEXP (x, i) != XEXP (y, i))
2358 if (XVECLEN (x, i) != XVECLEN (y, i))
2360 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2361 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2372 /* If X is a hard register or equivalent to one or a subregister of one,
2373 return the hard register number. If X is a pseudo register that was not
2374 assigned a hard register, return the pseudo register number. Otherwise,
2375 return -1. Any rtx is valid for X. */
/* NOTE(review): declarator line, braces, and fall-through returns are not
   visible in this extract.  */
2381 if (GET_CODE (x) == REG)
/* A renumbered pseudo maps to its assigned hard register.  */
2383 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2384 return reg_renumber[REGNO (x)];
2387 if (GET_CODE (x) == SUBREG)
2389 int base = true_regnum (SUBREG_REG (x));
2390 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2391 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2392 GET_MODE (SUBREG_REG (x)),
2393 SUBREG_BYTE (x), GET_MODE (x));
2398 /* Optimize code of the form:
2400 for (x = a[i]; x; ...)
2402 for (x = a[i]; x; ...)
2406 Loop optimize will change the above code into
2410 { ...; if (! (x = ...)) break; }
2413 { ...; if (! (x = ...)) break; }
2416 In general, if the first test fails, the program can branch
2417 directly to `foo' and skip the second try which is doomed to fail.
2418 We run this after loop optimization and before flow analysis. */
2420 /* When comparing the insn patterns, we track the fact that different
2421 pseudo-register numbers may have been used in each computation.
2422 The following array stores an equivalence -- same_regs[I] == J means
2423 that pseudo register I was used in the first set of tests in a context
2424 where J was used in the second set. We also count the number of such
2425 pending equivalences. If nonzero, the expressions really aren't the
/* File-scope state shared by thread_jumps, mark_modified_reg, and
   rtx_equal_for_thread_p below.  */
2428 static int *same_regs;
2430 static int num_same_regs;
2432 /* Track any registers modified between the target of the first jump and
2433 the second jump. They never compare equal. */
2435 static char *modified_regs;
2437 /* Record if memory was modified. */
2439 static int modified_mem;
2441 /* Called via note_stores on each insn between the target of the first
2442 branch and the second branch. It marks any changed registers. */
/* NOTE(review): parameter declarations for DEST and X, braces, and the
   store to modified_mem are not fully visible in this extract.  */
2445 mark_modified_reg (dest, x, data)
2448 void *data ATTRIBUTE_UNUSED;
/* Strip SUBREG to reach the underlying register being stored.  */
2453 if (GET_CODE (dest) == SUBREG)
2454 dest = SUBREG_REG (dest);
2456 if (GET_CODE (dest) == MEM)
2459 if (GET_CODE (dest) != REG)
2462 regno = REGNO (dest);
2463 if (regno >= FIRST_PSEUDO_REGISTER)
2464 modified_regs[regno] = 1;
2465 /* Don't consider a hard condition code register as modified,
2466 if it is only being set. thread_jumps will check if it is set
2467 to the same value. */
2468 else if (GET_MODE_CLASS (GET_MODE (dest)) != MODE_CC
2469 || GET_CODE (x) != SET
2470 || ! rtx_equal_p (dest, SET_DEST (x))
2471 || HARD_REGNO_NREGS (regno, GET_MODE (dest)) != 1)
/* Mark every hard register word the store touches.  */
2472 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
2473 modified_regs[regno + i] = 1;
2476 /* F is the first insn in the chain of insns. */
/* NOTE(review): this extract appears to be missing lines throughout
   (variable declarations such as `set'/`set2'/`all_reset'/`new_label',
   braces, and the final frees/return are not visible); verify against
   the complete jump.c before modifying.  */
2479 thread_jumps (f, max_reg, flag_before_loop)
2482 int flag_before_loop;
2484 /* Basic algorithm is to find a conditional branch,
2485 the label it may branch to, and the branch after
2486 that label. If the two branches test the same condition,
2487 walk back from both branch paths until the insn patterns
2488 differ, or code labels are hit. If we make it back to
2489 the target of the first branch, then we know that the first branch
2490 will either always succeed or always fail depending on the relative
2491 senses of the two branches. So adjust the first branch accordingly
2494 rtx label, b1, b2, t1, t2;
2495 enum rtx_code code1, code2;
2496 rtx b1op0, b1op1, b2op0, b2op1;
2500 enum rtx_code reversed_code1, reversed_code2;
2502 /* Allocate register tables and quick-reset table. */
2503 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
2504 same_regs = (int *) xmalloc (max_reg * sizeof (int));
2505 all_reset = (int *) xmalloc (max_reg * sizeof (int));
2506 for (i = 0; i < max_reg; i++)
/* Scan every insn for candidate first branches B1.  */
2513 for (b1 = f; b1; b1 = NEXT_INSN (b1))
2518 /* Get to a candidate branch insn. */
2519 if (GET_CODE (b1) != JUMP_INSN
2520 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
/* Reset per-candidate state: no registers modified, no pending
   same_regs equivalences.  */
2523 memset (modified_regs, 0, max_reg * sizeof (char));
2526 memcpy (same_regs, all_reset, max_reg * sizeof (int));
2529 label = JUMP_LABEL (b1);
2531 /* Look for a branch after the target. Record any registers and
2532 memory modified between the target and the branch. Stop when we
2533 get to a label since we can't know what was changed there. */
2534 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
2536 if (GET_CODE (b2) == CODE_LABEL)
2539 else if (GET_CODE (b2) == JUMP_INSN)
2541 /* If this is an unconditional jump and is the only use of
2542 its target label, we can follow it. */
2543 if (any_uncondjump_p (b2)
2545 && JUMP_LABEL (b2) != 0
2546 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
2548 b2 = JUMP_LABEL (b2);
2555 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
/* A call clobbers all call-used, non-fixed hard registers except
   the stack/frame/arg pointers.  */
2558 if (GET_CODE (b2) == CALL_INSN)
2561 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2562 if (call_used_regs[i] && ! fixed_regs[i]
2563 && i != STACK_POINTER_REGNUM
2564 && i != FRAME_POINTER_REGNUM
2565 && i != HARD_FRAME_POINTER_REGNUM
2566 && i != ARG_POINTER_REGNUM)
2567 modified_regs[i] = 1;
2570 note_stores (PATTERN (b2), mark_modified_reg, NULL);
2573 /* Check the next candidate branch insn from the label
2576 || GET_CODE (b2) != JUMP_INSN
2578 || !any_condjump_p (b2)
2579 || !onlyjump_p (b2)
2584 /* Get the comparison codes and operands, reversing the
2585 codes if appropriate. If we don't have comparison codes,
2586 we can't do anything. */
2587 b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
2588 b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
2589 code1 = GET_CODE (XEXP (SET_SRC (set), 0));
2590 reversed_code1 = code1;
2591 if (XEXP (SET_SRC (set), 1) == pc_rtx)
2592 code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
2594 reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
2596 b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
2597 b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
2598 code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
2599 reversed_code2 = code2;
2600 if (XEXP (SET_SRC (set2), 1) == pc_rtx)
2601 code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
2603 reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
2605 /* If they test the same things and knowing that B1 branches
2606 tells us whether or not B2 branches, check if we
2607 can thread the branch. */
2608 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
2609 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
2610 && (comparison_dominates_p (code1, code2)
2611 || comparison_dominates_p (code1, reversed_code2)))
/* Walk backward in lockstep from both branches, requiring the
   two instruction streams to match insn for insn.  */
2614 t1 = prev_nonnote_insn (b1);
2615 t2 = prev_nonnote_insn (b2);
2617 while (t1 != 0 && t2 != 0)
2621 /* We have reached the target of the first branch.
2622 If there are no pending register equivalents,
2623 we know that this branch will either always
2624 succeed (if the senses of the two branches are
2625 the same) or always fail (if not). */
2628 if (num_same_regs != 0)
2631 if (comparison_dominates_p (code1, code2))
2632 new_label = JUMP_LABEL (b2);
2634 new_label = get_label_after (b2);
2636 if (JUMP_LABEL (b1) != new_label)
2638 rtx prev = PREV_INSN (new_label);
2640 if (flag_before_loop
2641 && GET_CODE (prev) == NOTE
2642 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
2644 /* Don't thread to the loop label. If a loop
2645 label is reused, loop optimization will
2646 be disabled for that loop. */
2647 new_label = gen_label_rtx ();
2648 emit_label_after (new_label, PREV_INSN (prev));
2650 changed |= redirect_jump (b1, new_label, 1);
2655 /* If either of these is not a normal insn (it might be
2656 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
2657 have already been skipped above.) Similarly, fail
2658 if the insns are different. */
2659 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
2660 || recog_memoized (t1) != recog_memoized (t2)
2661 || ! rtx_equal_for_thread_p (PATTERN (t1),
2665 t1 = prev_nonnote_insn (t1);
2666 t2 = prev_nonnote_insn (t2);
2673 free (modified_regs);
2678 /* This is like RTX_EQUAL_P except that it knows about our handling of
2679 possibly equivalent registers and knows to consider volatile and
2680 modified objects as not equal.
2682 YINSN is the insn containing Y. */
2685 rtx_equal_for_thread_p (x, y, yinsn)
2691 register enum rtx_code code;
2692 register const char *fmt;
2694 code = GET_CODE (x);
2695 /* Rtx's of different codes cannot be equal. */
2696 if (code != GET_CODE (y))
2699 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
2700 (REG:SI x) and (REG:HI x) are NOT equivalent. */
2702 if (GET_MODE (x) != GET_MODE (y))
2705 /* For floating-point, consider everything unequal. This is a bit
2706 pessimistic, but this pass would only rarely do anything for FP
2708 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
2709 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
2712 /* For commutative operations, the RTX match if the operand match in any
2713 order. Also handle the simple binary and unary cases without a loop. */
2714 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2715 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
2716 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
2717 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
2718 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
2719 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2720 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
2721 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
2722 else if (GET_RTX_CLASS (code) == '1')
2723 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
2725 /* Handle special-cases first. */
2729 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
2732 /* If neither is user variable or hard register, check for possible
2734 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
2735 || REGNO (x) < FIRST_PSEUDO_REGISTER
2736 || REGNO (y) < FIRST_PSEUDO_REGISTER)
2739 if (same_regs[REGNO (x)] == -1)
2741 same_regs[REGNO (x)] = REGNO (y);
2744 /* If this is the first time we are seeing a register on the `Y'
2745 side, see if it is the last use. If not, we can't thread the
2746 jump, so mark it as not equivalent. */
2747 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
2753 return (same_regs[REGNO (x)] == (int) REGNO (y));
2758 /* If memory modified or either volatile, not equivalent.
2759 Else, check address. */
2760 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2763 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
2766 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2772 /* Cancel a pending `same_regs' if setting equivalenced registers.
2773 Then process source. */
2774 if (GET_CODE (SET_DEST (x)) == REG
2775 && GET_CODE (SET_DEST (y)) == REG)
2777 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
2779 same_regs[REGNO (SET_DEST (x))] = -1;
2782 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
2787 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
2791 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
2794 return XEXP (x, 0) == XEXP (y, 0);
2797 return XSTR (x, 0) == XSTR (y, 0);
2806 fmt = GET_RTX_FORMAT (code);
2807 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2812 if (XWINT (x, i) != XWINT (y, i))
2818 if (XINT (x, i) != XINT (y, i))
2824 /* Two vectors must have the same length. */
2825 if (XVECLEN (x, i) != XVECLEN (y, i))
2828 /* And the corresponding elements must match. */
2829 for (j = 0; j < XVECLEN (x, i); j++)
2830 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
2831 XVECEXP (y, i, j), yinsn) == 0)
2836 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
2842 if (strcmp (XSTR (x, i), XSTR (y, i)))
2847 /* These are just backpointers, so they don't matter. */
2854 /* It is believed that rtx's at this level will never
2855 contain anything but integers and other rtx's,
2856 except for within LABEL_REFs and SYMBOL_REFs. */