1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically a set of utility functions to
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
42 #include "hard-reg-set.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
55 /* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57 /* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
62 static int init_label_info PARAMS ((rtx));
63 static void mark_all_labels PARAMS ((rtx));
64 static int duplicate_loop_exit_test PARAMS ((rtx));
65 static void delete_computation PARAMS ((rtx));
66 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
67 static int redirect_exp PARAMS ((rtx, rtx, rtx));
68 static void invert_exp_1 PARAMS ((rtx));
69 static int invert_exp PARAMS ((rtx));
70 static int returnjump_p_1 PARAMS ((rtx *, void *));
71 static void delete_prior_computation PARAMS ((rtx, rtx));
72 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
74 /* Alternate entry into the jump optimizer. This entry point only rebuilds
75 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
78 rebuild_jump_labels (f)
84 max_uid = init_label_info (f) + 1;
88 /* Keep track of labels used from static data; we don't track them
89 closely enough to delete them here, so make sure their reference
90 count doesn't drop to zero. */
92 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
93 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
94 LABEL_NUSES (XEXP (insn, 0))++;
96 /* Keep track of labels used for marking handlers for exception
97 regions; they cannot usually be deleted. */
99 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
100 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
101 LABEL_NUSES (XEXP (insn, 0))++;
104 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
105 non-fallthru insn. This is not generally true, as multiple barriers
106 may have crept in, or the BARRIER may be separated from the last
107 real insn by one or more NOTEs.
109 This simple pass moves barriers and removes duplicates so that the
115 rtx insn, next, prev;
116 for (insn = get_insns (); insn; insn = next)
118 next = NEXT_INSN (insn);
119 if (GET_CODE (insn) == BARRIER)
121 prev = prev_nonnote_insn (insn);
122 if (GET_CODE (prev) == BARRIER)
123 delete_barrier (insn);
124 else if (prev != PREV_INSN (insn))
125 reorder_insns (insn, insn, prev);
131 copy_loop_headers (f)
134 register rtx insn, next;
135 /* Now iterate optimizing jumps until nothing changes over one pass. */
136 for (insn = f; insn; insn = next)
140 next = NEXT_INSN (insn);
142 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
143 jump. Try to optimize by duplicating the loop exit test if so.
144 This is only safe immediately after regscan, because it uses
145 the values of regno_first_uid and regno_last_uid. */
146 if (GET_CODE (insn) == NOTE
147 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
148 && (temp1 = next_nonnote_insn (insn)) != 0
149 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
151 temp = PREV_INSN (insn);
152 if (duplicate_loop_exit_test (insn))
154 next = NEXT_INSN (temp);
161 purge_line_number_notes (f)
166 /* Delete extraneous line number notes.
167 Note that two consecutive notes for different lines are not really
168 extraneous. There should be some indication where that line belonged,
169 even if it became empty. */
171 for (insn = f; insn; insn = NEXT_INSN (insn))
172 if (GET_CODE (insn) == NOTE)
174 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
175 /* Any previous line note was for the prologue; gdb wants a new
176 note after the prologue even if it is for the same line. */
177 last_note = NULL_RTX;
178 else if (NOTE_LINE_NUMBER (insn) >= 0)
180 /* Delete this note if it is identical to previous note. */
182 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
183 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
194 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
195 notes whose labels don't occur in the insn any more. Returns the
196 largest INSN_UID found. */
204 for (insn = f; insn; insn = NEXT_INSN (insn))
206 if (GET_CODE (insn) == CODE_LABEL)
207 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
208 else if (GET_CODE (insn) == JUMP_INSN)
209 JUMP_LABEL (insn) = 0;
210 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
214 for (note = REG_NOTES (insn); note; note = next)
216 next = XEXP (note, 1);
217 if (REG_NOTE_KIND (note) == REG_LABEL
218 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
219 remove_note (insn, note);
222 if (INSN_UID (insn) > largest_uid)
223 largest_uid = INSN_UID (insn);
229 /* Mark the label each jump jumps to.
230 Combine consecutive labels, and count uses of labels. */
238 for (insn = f; insn; insn = NEXT_INSN (insn))
241 if (GET_CODE (insn) == CALL_INSN
242 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
244 mark_all_labels (XEXP (PATTERN (insn), 0));
245 mark_all_labels (XEXP (PATTERN (insn), 1));
246 mark_all_labels (XEXP (PATTERN (insn), 2));
248 /* Canonicalize the tail recursion label attached to the
249 CALL_PLACEHOLDER insn. */
250 if (XEXP (PATTERN (insn), 3))
252 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
253 XEXP (PATTERN (insn), 3));
254 mark_jump_label (label_ref, insn, 0);
255 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
261 mark_jump_label (PATTERN (insn), insn, 0);
262 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
264 /* When we know the LABEL_REF contained in a REG used in
265 an indirect jump, we'll have a REG_LABEL note so that
266 flow can tell where it's going. */
267 if (JUMP_LABEL (insn) == 0)
269 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
272 /* But a LABEL_REF around the REG_LABEL note, so
273 that we can canonicalize it. */
274 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
275 XEXP (label_note, 0));
277 mark_jump_label (label_ref, insn, 0);
278 XEXP (label_note, 0) = XEXP (label_ref, 0);
279 JUMP_LABEL (insn) = XEXP (label_note, 0);
286 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
287 jump. Assume that this unconditional jump is to the exit test code. If
288 the code is sufficiently simple, make a copy of it before INSN,
289 followed by a jump to the exit of the loop. Then delete the unconditional
292 Return 1 if we made the change, else 0.
294 This is only safe immediately after a regscan pass because it uses the
295 values of regno_first_uid and regno_last_uid. */
298 duplicate_loop_exit_test (loop_start)
301 rtx insn, set, reg, p, link;
302 rtx copy = 0, first_copy = 0;
304 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
306 int max_reg = max_reg_num ();
309 /* Scan the exit code. We do not perform this optimization if any insn:
313 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
314 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
315 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
318 We also do not do this if we find an insn with ASM_OPERANDS. While
319 this restriction should not be necessary, copying an insn with
320 ASM_OPERANDS can confuse asm_noperands in some cases.
322 Also, don't do this if the exit code is more than 20 insns. */
324 for (insn = exitcode;
326 && ! (GET_CODE (insn) == NOTE
327 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
328 insn = NEXT_INSN (insn))
330 switch (GET_CODE (insn))
336 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
337 a jump immediately after the loop start that branches outside
338 the loop but within an outer loop, near the exit test.
339 If we copied this exit test and created a phony
340 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
341 before the exit test look like these could be safely moved
342 out of the loop even if they actually may be never executed.
343 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
345 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
346 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
350 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
351 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
352 /* If we were to duplicate this code, we would not move
353 the BLOCK notes, and so debugging the moved code would
354 be difficult. Thus, we only move the code with -O2 or
361 /* The code below would grossly mishandle REG_WAS_0 notes,
362 so get rid of them here. */
363 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
364 remove_note (insn, p);
366 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
367 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
375 /* Unless INSN is zero, we can do the optimization. */
381 /* See if any insn sets a register only used in the loop exit code and
382 not a user variable. If so, replace it with a new register. */
383 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
384 if (GET_CODE (insn) == INSN
385 && (set = single_set (insn)) != 0
386 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
387 || (GET_CODE (reg) == SUBREG
388 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
389 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
390 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
392 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
393 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
398 /* We can do the replacement. Allocate reg_map if this is the
399 first replacement we found. */
401 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
403 REG_LOOP_TEST_P (reg) = 1;
405 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
409 /* Now copy each insn. */
410 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
412 switch (GET_CODE (insn))
415 copy = emit_barrier_before (loop_start);
418 /* Only copy line-number notes. */
419 if (NOTE_LINE_NUMBER (insn) >= 0)
421 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
422 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
427 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
429 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
431 mark_jump_label (PATTERN (copy), copy, 0);
433 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
435 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
436 if (REG_NOTE_KIND (link) != REG_LABEL)
438 if (GET_CODE (link) == EXPR_LIST)
440 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
445 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
450 if (reg_map && REG_NOTES (copy))
451 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
455 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
458 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
459 mark_jump_label (PATTERN (copy), copy, 0);
460 if (REG_NOTES (insn))
462 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
464 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
467 /* Predict conditional jump that do make loop looping as taken.
468 Other jumps are probably exit conditions, so predict
470 if (any_condjump_p (copy))
472 rtx label = JUMP_LABEL (copy);
475 /* The jump_insn after loop_start should be followed
476 by barrier and loopback label. */
477 if (prev_nonnote_insn (label)
478 && (PREV_INSN (prev_nonnote_insn (label))
479 == NEXT_INSN (loop_start)))
480 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
482 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
491 /* Record the first insn we copied. We need it so that we can
492 scan the copied insns for new pseudo registers. */
497 /* Now clean up by emitting a jump to the end label and deleting the jump
498 at the start of the loop. */
499 if (! copy || GET_CODE (copy) != BARRIER)
501 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
504 /* Record the first insn we copied. We need it so that we can
505 scan the copied insns for new pseudo registers. This may not
506 be strictly necessary since we should have copied at least one
507 insn above. But I am going to be safe. */
511 mark_jump_label (PATTERN (copy), copy, 0);
512 emit_barrier_before (loop_start);
515 /* Now scan from the first insn we copied to the last insn we copied
516 (copy) for new pseudo registers. Do this after the code to jump to
517 the end label since that might create a new pseudo too. */
518 reg_scan_update (first_copy, copy, max_reg);
520 /* Mark the exit code as the virtual top of the converted loop. */
521 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
523 delete_insn (next_nonnote_insn (loop_start));
532 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
533 notes between START and END out before START. Assume that END is not
534 such a note. START may be such a note. Returns the value of the new
535 starting insn, which may be different if the original start was such a
539 squeeze_notes (start, end)
545 for (insn = start; insn != end; insn = next)
547 next = NEXT_INSN (insn);
548 if (GET_CODE (insn) == NOTE
549 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
550 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
551 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
552 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
553 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
554 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
560 rtx prev = PREV_INSN (insn);
561 PREV_INSN (insn) = PREV_INSN (start);
562 NEXT_INSN (insn) = start;
563 NEXT_INSN (PREV_INSN (insn)) = insn;
564 PREV_INSN (NEXT_INSN (insn)) = insn;
565 NEXT_INSN (prev) = next;
566 PREV_INSN (next) = prev;
574 /* Return the label before INSN, or put a new label there. */
577 get_label_before (insn)
582 /* Find an existing label at this point
583 or make a new one if there is none. */
584 label = prev_nonnote_insn (insn);
586 if (label == 0 || GET_CODE (label) != CODE_LABEL)
588 rtx prev = PREV_INSN (insn);
590 label = gen_label_rtx ();
591 emit_label_after (label, prev);
592 LABEL_NUSES (label) = 0;
597 /* Return the label after INSN, or put a new label there. */
600 get_label_after (insn)
605 /* Find an existing label at this point
606 or make a new one if there is none. */
607 label = next_nonnote_insn (insn);
609 if (label == 0 || GET_CODE (label) != CODE_LABEL)
611 label = gen_label_rtx ();
612 emit_label_after (label, insn);
613 LABEL_NUSES (label) = 0;
618 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
619 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
620 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
621 know whether its source is a floating point or integer comparison. Machine
622 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
623 to help this function avoid overhead in these cases. */
625 reversed_comparison_code_parts (code, arg0, arg1, insn)
626 rtx insn, arg0, arg1;
629 enum machine_mode mode;
631 /* If this is not actually a comparison, we can't reverse it. */
632 if (GET_RTX_CLASS (code) != '<')
635 mode = GET_MODE (arg0);
636 if (mode == VOIDmode)
637 mode = GET_MODE (arg1);
639 /* First see if machine description supply us way to reverse the comparison.
640 Give it priority over everything else to allow machine description to do
642 #ifdef REVERSIBLE_CC_MODE
643 if (GET_MODE_CLASS (mode) == MODE_CC
644 && REVERSIBLE_CC_MODE (mode))
646 #ifdef REVERSE_CONDITION
647 return REVERSE_CONDITION (code, mode);
649 return reverse_condition (code);
653 /* Try a few special cases based on the comparison code. */
662 /* It is always safe to reverse EQ and NE, even for the floating
663 point. Similary the unsigned comparisons are never used for
664 floating point so we can reverse them in the default way. */
665 return reverse_condition (code);
670 /* In case we already see unordered comparison, we can be sure to
671 be dealing with floating point so we don't need any more tests. */
672 return reverse_condition_maybe_unordered (code);
677 /* We don't have safe way to reverse these yet. */
683 /* In case we give up IEEE compatibility, all comparisons are reversible. */
684 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
685 || flag_unsafe_math_optimizations)
686 return reverse_condition (code);
688 if (GET_MODE_CLASS (mode) == MODE_CC
695 /* Try to search for the comparison to determine the real mode.
696 This code is expensive, but with sane machine description it
697 will be never used, since REVERSIBLE_CC_MODE will return true
702 for (prev = prev_nonnote_insn (insn);
703 prev != 0 && GET_CODE (prev) != CODE_LABEL;
704 prev = prev_nonnote_insn (prev))
706 rtx set = set_of (arg0, prev);
707 if (set && GET_CODE (set) == SET
708 && rtx_equal_p (SET_DEST (set), arg0))
710 rtx src = SET_SRC (set);
712 if (GET_CODE (src) == COMPARE)
714 rtx comparison = src;
715 arg0 = XEXP (src, 0);
716 mode = GET_MODE (arg0);
717 if (mode == VOIDmode)
718 mode = GET_MODE (XEXP (comparison, 1));
721 /* We can get past reg-reg moves. This may be usefull for model
722 of i387 comparisons that first move flag registers around. */
729 /* If register is clobbered in some ununderstandable way,
736 /* An integer condition. */
737 if (GET_CODE (arg0) == CONST_INT
738 || (GET_MODE (arg0) != VOIDmode
739 && GET_MODE_CLASS (mode) != MODE_CC
740 && ! FLOAT_MODE_P (mode)))
741 return reverse_condition (code);
746 /* A wrapper around the previous function that takes COMPARISON as an rtx
747 expression. This simplifies many callers. */
749 reversed_comparison_code (comparison, insn)
750 rtx comparison, insn;
752 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
754 return reversed_comparison_code_parts (GET_CODE (comparison),
755 XEXP (comparison, 0),
756 XEXP (comparison, 1), insn);
759 /* Given an rtx-code for a comparison, return the code for the negated
760 comparison. If no such code exists, return UNKNOWN.
762 WATCH OUT! reverse_condition is not safe to use on a jump that might
763 be acting on the results of an IEEE floating point comparison, because
764 of the special treatment of non-signaling nans in comparisons.
765 Use reversed_comparison_code instead. */
768 reverse_condition (code)
811 /* Similar, but we're allowed to generate unordered comparisons, which
812 makes it safe for IEEE floating-point. Of course, we have to recognize
813 that the target will support them too... */
816 reverse_condition_maybe_unordered (code)
819 /* Non-IEEE formats don't have unordered conditions. */
820 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
821 return reverse_condition (code);
859 /* Similar, but return the code when two operands of a comparison are swapped.
860 This IS safe for IEEE floating-point. */
863 swap_condition (code)
906 /* Given a comparison CODE, return the corresponding unsigned comparison.
907 If CODE is an equality comparison or already an unsigned comparison,
911 unsigned_condition (code)
938 /* Similarly, return the signed version of a comparison. */
941 signed_condition (code)
968 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
969 truth of CODE1 implies the truth of CODE2. */
972 comparison_dominates_p (code1, code2)
973 enum rtx_code code1, code2;
975 /* UNKNOWN comparison codes can happen as a result of trying to revert
977 They can't match anything, so we have to reject them here. */
978 if (code1 == UNKNOWN || code2 == UNKNOWN)
987 if (code2 == UNLE || code2 == UNGE)
992 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
998 if (code2 == UNLE || code2 == NE)
1003 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1008 if (code2 == UNGE || code2 == NE)
1013 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1019 if (code2 == ORDERED)
1024 if (code2 == NE || code2 == ORDERED)
1029 if (code2 == LEU || code2 == NE)
1034 if (code2 == GEU || code2 == NE)
1039 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1040 || code2 == UNGE || code2 == UNGT)
1051 /* Return 1 if INSN is an unconditional jump and nothing else. */
1057 return (GET_CODE (insn) == JUMP_INSN
1058 && GET_CODE (PATTERN (insn)) == SET
1059 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1060 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1063 /* Return nonzero if INSN is a (possibly) conditional jump
1066 Use of this function is deprecated, since we need to support combined
1067 branch and compare insns. Use any_condjump_p instead whenever possible. */
1073 register rtx x = PATTERN (insn);
1075 if (GET_CODE (x) != SET
1076 || GET_CODE (SET_DEST (x)) != PC)
1080 if (GET_CODE (x) == LABEL_REF)
1083 return (GET_CODE (x) == IF_THEN_ELSE
1084 && ((GET_CODE (XEXP (x, 2)) == PC
1085 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1086 || GET_CODE (XEXP (x, 1)) == RETURN))
1087 || (GET_CODE (XEXP (x, 1)) == PC
1088 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1089 || GET_CODE (XEXP (x, 2)) == RETURN))));
1094 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1098 Use of this function is deprecated, since we need to support combined
1098 branch and compare insns. Use any_condjump_p instead whenever possible. */
1101 condjump_in_parallel_p (insn)
1104 register rtx x = PATTERN (insn);
1106 if (GET_CODE (x) != PARALLEL)
1109 x = XVECEXP (x, 0, 0);
1111 if (GET_CODE (x) != SET)
1113 if (GET_CODE (SET_DEST (x)) != PC)
1115 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1117 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1119 if (XEXP (SET_SRC (x), 2) == pc_rtx
1120 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1121 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1123 if (XEXP (SET_SRC (x), 1) == pc_rtx
1124 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1125 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1130 /* Return set of PC, otherwise NULL. */
1137 if (GET_CODE (insn) != JUMP_INSN)
1139 pat = PATTERN (insn);
1141 /* The set is allowed to appear either as the insn pattern or
1142 the first set in a PARALLEL. */
1143 if (GET_CODE (pat) == PARALLEL)
1144 pat = XVECEXP (pat, 0, 0);
1145 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1151 /* Return true when insn is an unconditional direct jump,
1152 possibly bundled inside a PARALLEL. */
1155 any_uncondjump_p (insn)
1158 rtx x = pc_set (insn);
1161 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1166 /* Return true when insn is a conditional jump. This function works for
1167 instructions containing PC sets in PARALLELs. The instruction may have
1168 various other effects so before removing the jump you must verify
1171 Note that unlike condjump_p it returns false for unconditional jumps. */
1174 any_condjump_p (insn)
1177 rtx x = pc_set (insn);
1182 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1185 a = GET_CODE (XEXP (SET_SRC (x), 1));
1186 b = GET_CODE (XEXP (SET_SRC (x), 2));
1188 return ((b == PC && (a == LABEL_REF || a == RETURN))
1189 || (a == PC && (b == LABEL_REF || b == RETURN)));
1192 /* Return the label of a conditional jump. */
1195 condjump_label (insn)
1198 rtx x = pc_set (insn);
1203 if (GET_CODE (x) == LABEL_REF)
1205 if (GET_CODE (x) != IF_THEN_ELSE)
1207 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1209 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1214 /* Return true if INSN is a (possibly conditional) return insn. */
1217 returnjump_p_1 (loc, data)
1219 void *data ATTRIBUTE_UNUSED;
1222 return x && GET_CODE (x) == RETURN;
1229 if (GET_CODE (insn) != JUMP_INSN)
1231 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1234 /* Return true if INSN is a jump that only transfers control and
1243 if (GET_CODE (insn) != JUMP_INSN)
1246 set = single_set (insn);
1249 if (GET_CODE (SET_DEST (set)) != PC)
1251 if (side_effects_p (SET_SRC (set)))
1259 /* Return 1 if X is an RTX that does nothing but set the condition codes
1260 and CLOBBER or USE registers.
1261 Return -1 if X does explicitly set the condition codes,
1262 but also does other things. */
1266 rtx x ATTRIBUTE_UNUSED;
1268 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1270 if (GET_CODE (x) == PARALLEL)
1274 int other_things = 0;
1275 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1277 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1278 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1280 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1283 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1289 /* Follow any unconditional jump at LABEL;
1290 return the ultimate label reached by any such chain of jumps.
1291 If LABEL is not followed by a jump, return LABEL.
1292 If the chain loops or we can't find end, return LABEL,
1293 since that tells caller to avoid changing the insn.
1295 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1296 a USE or CLOBBER. */
1299 follow_jumps (label)
1304 register rtx value = label;
1309 && (insn = next_active_insn (value)) != 0
1310 && GET_CODE (insn) == JUMP_INSN
1311 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1312 && onlyjump_p (insn))
1313 || GET_CODE (PATTERN (insn)) == RETURN)
1314 && (next = NEXT_INSN (insn))
1315 && GET_CODE (next) == BARRIER);
1318 /* Don't chain through the insn that jumps into a loop
1319 from outside the loop,
1320 since that would create multiple loop entry jumps
1321 and prevent loop optimization. */
1323 if (!reload_completed)
1324 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1325 if (GET_CODE (tem) == NOTE
1326 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1327 /* ??? Optional. Disables some optimizations, but makes
1328 gcov output more accurate with -O. */
1329 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1332 /* If we have found a cycle, make the insn jump to itself. */
1333 if (JUMP_LABEL (insn) == label)
1336 tem = next_active_insn (JUMP_LABEL (insn));
1337 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1338 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1341 value = JUMP_LABEL (insn);
1349 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1350 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1351 in INSN, then store one of them in JUMP_LABEL (INSN).
1352 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1353 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1354 Also, when there are consecutive labels, canonicalize on the last of them.
1356 Note that two labels separated by a loop-beginning note
1357 must be kept distinct if we have not yet done loop-optimization,
1358 because the gap between them is where loop-optimize
1359 will want to move invariant code to. CROSS_JUMP tells us
1360 that loop-optimization is done with. */
1363 mark_jump_label (x, insn, in_mem)
1368 register RTX_CODE code = GET_CODE (x);
1370 register const char *fmt;
1392 /* If this is a constant-pool reference, see if it is a label. */
1393 if (CONSTANT_POOL_ADDRESS_P (x))
1394 mark_jump_label (get_pool_constant (x), insn, in_mem);
1399 rtx label = XEXP (x, 0);
1401 /* Ignore remaining references to unreachable labels that
1402 have been deleted. */
1403 if (GET_CODE (label) == NOTE
1404 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1407 if (GET_CODE (label) != CODE_LABEL)
1410 /* Ignore references to labels of containing functions. */
1411 if (LABEL_REF_NONLOCAL_P (x))
1414 XEXP (x, 0) = label;
1415 if (! insn || ! INSN_DELETED_P (insn))
1416 ++LABEL_NUSES (label);
1420 if (GET_CODE (insn) == JUMP_INSN)
1421 JUMP_LABEL (insn) = label;
1424 /* Add a REG_LABEL note for LABEL unless there already
1425 is one. All uses of a label, except for labels
1426 that are the targets of jumps, must have a
1428 if (! find_reg_note (insn, REG_LABEL, label))
1429 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1436 /* Do walk the labels in a vector, but not the first operand of an
1437 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1440 if (! INSN_DELETED_P (insn))
1442 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1444 for (i = 0; i < XVECLEN (x, eltnum); i++)
1445 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1453 fmt = GET_RTX_FORMAT (code);
1454 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1457 mark_jump_label (XEXP (x, i), insn, in_mem);
1458 else if (fmt[i] == 'E')
1461 for (j = 0; j < XVECLEN (x, i); j++)
1462 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1467 /* If all INSN does is set the pc, delete it,
1468 and delete the insn that set the condition codes for it
1469 if that's what the previous thing was. */
1475 register rtx set = single_set (insn);
1477 if (set && GET_CODE (SET_DEST (set)) == PC)
1478 delete_computation (insn);
1481 /* Verify INSN is a BARRIER and delete it. */
1484 delete_barrier (insn)
1487 if (GET_CODE (insn) != BARRIER)
1493 /* Recursively delete prior insns that compute the value (used only by INSN
1494 which the caller is deleting) stored in the register mentioned by NOTE
1495 which is a REG_DEAD note associated with INSN. */
1498 delete_prior_computation (note, insn)
1503 rtx reg = XEXP (note, 0);
/* REG is the register named by the REG_DEAD note we are chasing backwards.  */
/* Scan backwards over real insns (notes skipped); stop at anything that is
   not an ordinary INSN or CALL_INSN, since labels/jumps end the window in
   which a prior computation can safely be deleted.  */
1505 for (our_prev = prev_nonnote_insn (insn);
1506 our_prev && (GET_CODE (our_prev) == INSN
1507 || GET_CODE (our_prev) == CALL_INSN);
1508 our_prev = prev_nonnote_insn (our_prev))
1510 rtx pat = PATTERN (our_prev);
1512 /* If we reach a CALL which is not calling a const function
1513 or the callee pops the arguments, then give up. */
1514 if (GET_CODE (our_prev) == CALL_INSN
1515 && (! CONST_CALL_P (our_prev)
1516 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1519 /* If we reach a SEQUENCE, it is too complex to try to
1520 do anything with it, so give up. */
1521 if (GET_CODE (pat) == SEQUENCE)
1524 if (GET_CODE (pat) == USE
1525 && GET_CODE (XEXP (pat, 0)) == INSN)
1526 /* reorg creates USEs that look like this. We leave them
1527 alone because reorg needs them for its own purposes. */
/* OUR_PREV writes REG here; decide whether its whole result is now dead.  */
1530 if (reg_set_p (reg, pat))
1532 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1535 if (GET_CODE (pat) == PARALLEL)
1537 /* If we find a SET of something else, we can't
1542 for (i = 0; i < XVECLEN (pat, 0); i++)
1544 rtx part = XVECEXP (pat, 0, i);
1546 if (GET_CODE (part) == SET
1547 && SET_DEST (part) != reg)
1551 if (i == XVECLEN (pat, 0))
1552 delete_computation (our_prev);
/* Single SET of a REG: compare the hard-register spans of the SET's
   destination and REG so multi-word hard registers are handled piecewise.  */
1554 else if (GET_CODE (pat) == SET
1555 && GET_CODE (SET_DEST (pat)) == REG)
1557 int dest_regno = REGNO (SET_DEST (pat));
1560 + (dest_regno < FIRST_PSEUDO_REGISTER
1561 ? HARD_REGNO_NREGS (dest_regno,
1562 GET_MODE (SET_DEST (pat))) : 1));
1563 int regno = REGNO (reg);
1566 + (regno < FIRST_PSEUDO_REGISTER
1567 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
/* The destination lies wholly within the dying register: the whole
   computation is dead.  */
1569 if (dest_regno >= regno
1570 && dest_endregno <= endregno)
1571 delete_computation (our_prev);
1573 /* We may have a multi-word hard register and some, but not
1574 all, of the words of the register are needed in subsequent
1575 insns. Write REG_UNUSED notes for those parts that were not
1577 else if (dest_regno <= regno
1578 && dest_endregno >= endregno)
1582 REG_NOTES (our_prev)
1583 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1584 REG_NOTES (our_prev));
/* Only delete OUR_PREV once every word it sets is marked unused.  */
1586 for (i = dest_regno; i < dest_endregno; i++)
1587 if (! find_regno_note (our_prev, REG_UNUSED, i))
1590 if (i == dest_endregno)
1591 delete_computation (our_prev);
1598 /* If PAT references the register that dies here, it is an
1599 additional use. Hence any prior SET isn't dead. However, this
1600 insn becomes the new place for the REG_DEAD note. */
1601 if (reg_overlap_mentioned_p (reg, pat))
1603 XEXP (note, 1) = REG_NOTES (our_prev);
1604 REG_NOTES (our_prev) = note;
1610 /* Delete INSN and recursively delete insns that compute values used only
1611 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1612 If we are running before flow.c, we need do nothing since flow.c will
1613 delete dead code. We also can't know if the registers being used are
1614 dead or not at this point.
1616 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1617 nothing other than set a register that dies in this insn, we can delete
1620 On machines with CC0, if CC0 is used in this insn, we may be able to
1621 delete the insn that set it. */
1624 delete_computation (insn)
/* On CC0 machines, the insn that set cc0 for this insn may now be dead
   as well; handle that case first.  */
1630 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1632 rtx prev = prev_nonnote_insn (insn);
1633 /* We assume that at this stage
1634 CC's are always set explicitly
1635 and always immediately before the jump that
1636 will use them. So if the previous insn
1637 exists to set the CC's, delete it
1638 (unless it performs auto-increments, etc.). */
1639 if (prev && GET_CODE (prev) == INSN
1640 && sets_cc0_p (PATTERN (prev)))
1642 if (sets_cc0_p (PATTERN (prev)) > 0
1643 && ! side_effects_p (PATTERN (prev)))
1644 delete_computation (prev);
1646 /* Otherwise, show that cc0 won't be used. */
1647 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1648 cc0_rtx, REG_NOTES (prev));
/* For each REG_DEAD note naming a genuine REG, try to delete the prior
   computation of that register.  NEXT is saved first because the note may
   be moved to another insn by delete_prior_computation.  */
1653 for (note = REG_NOTES (insn); note; note = next)
1655 next = XEXP (note, 1);
1657 if (REG_NOTE_KIND (note) != REG_DEAD
1658 /* Verify that the REG_NOTE is legitimate. */
1659 || GET_CODE (XEXP (note, 0)) != REG)
1662 delete_prior_computation (note, insn);
1668 /* Delete insn INSN from the chain of insns and update label ref counts.
1669 May delete some following insns as a consequence; may even delete
1670 a label elsewhere and insns that follow it.
1672 Returns the first insn after INSN that was not deleted. */
1678 register rtx next = NEXT_INSN (insn);
1679 register rtx prev = PREV_INSN (insn);
1680 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1681 register int dont_really_delete = 0;
/* Step over insns already marked deleted to find the live successor.  */
1684 while (next && INSN_DELETED_P (next))
1685 next = NEXT_INSN (next);
1687 /* This insn is already deleted => return first following nondeleted. */
1688 if (INSN_DELETED_P (insn))
1692 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1694 /* Don't delete user-declared labels. When optimizing, convert them
1695 to special NOTEs instead. When not optimizing, leave them alone. */
1696 if (was_code_label && LABEL_NAME (insn) != 0)
1700 const char *name = LABEL_NAME (insn);
1701 PUT_CODE (insn, NOTE);
1702 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
1703 NOTE_SOURCE_FILE (insn) = name;
1706 dont_really_delete = 1;
1709 /* Mark this insn as deleted. */
1710 INSN_DELETED_P (insn) = 1;
1712 /* If instruction is followed by a barrier,
1713 delete the barrier too. */
1715 if (next != 0 && GET_CODE (next) == BARRIER)
1717 INSN_DELETED_P (next) = 1;
1718 next = NEXT_INSN (next);
1721 /* Patch out INSN (and the barrier if any) */
1723 if (! dont_really_delete)
1727 NEXT_INSN (prev) = next;
1728 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
1729 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
1730 XVECLEN (PATTERN (prev), 0) - 1)) = next;
1735 PREV_INSN (next) = prev;
1736 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
1737 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
1740 if (prev && NEXT_INSN (prev) == 0)
1741 set_last_insn (prev);
1744 /* If deleting a jump, decrement the count of the label,
1745 and delete the label if it is now unused. */
1747 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1749 rtx lab = JUMP_LABEL (insn), lab_next;
1751 if (--LABEL_NUSES (lab) == 0)
1753 /* This can delete NEXT or PREV,
1754 either directly if NEXT is JUMP_LABEL (INSN),
1755 or indirectly through more levels of jumps. */
1758 /* I feel a little doubtful about this loop,
1759 but I see no clean and sure alternative way
1760 to find the first insn after INSN that is not now deleted.
1761 I hope this works. */
1762 while (next && INSN_DELETED_P (next))
1763 next = NEXT_INSN (next);
1766 else if ((lab_next = next_nonnote_insn (lab)) != NULL
1767 && GET_CODE (lab_next) == JUMP_INSN
1768 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
1769 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
1771 /* If we're deleting the tablejump, delete the dispatch table.
1772 We may not be able to kill the label immediately preceding
1773 just yet, as it might be referenced in code leading up to
1775 delete_insn (lab_next);
1779 /* Likewise if we're deleting a dispatch table. */
1781 if (GET_CODE (insn) == JUMP_INSN
1782 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1783 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1785 rtx pat = PATTERN (insn);
1786 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1787 int len = XVECLEN (pat, diff_vec_p);
/* Every entry in the dispatch table is a label use; drop each count and
   delete labels that become unused.  */
1789 for (i = 0; i < len; i++)
1790 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1791 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1792 while (next && INSN_DELETED_P (next))
1793 next = NEXT_INSN (next);
1797 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1798 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1799 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1800 if (REG_NOTE_KIND (note) == REG_LABEL
1801 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1802 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1803 if (--LABEL_NUSES (XEXP (note, 0)) == 0)
1804 delete_insn (XEXP (note, 0));
/* Back up over deleted insns and notes to find the real predecessor.  */
1806 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1807 prev = PREV_INSN (prev);
1809 /* If INSN was a label and a dispatch table follows it,
1810 delete the dispatch table. The tablejump must have gone already.
1811 It isn't useful to fall through into a table. */
1814 && NEXT_INSN (insn) != 0
1815 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1816 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1817 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1818 next = delete_insn (NEXT_INSN (insn));
1820 /* If INSN was a label, delete insns following it if now unreachable. */
1822 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1824 register RTX_CODE code;
1826 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1827 || code == NOTE || code == BARRIER
1828 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1831 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1832 next = NEXT_INSN (next);
1833 /* Keep going past other deleted labels to delete what follows. */
1834 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1835 next = NEXT_INSN (next);
1837 /* Note: if this deletes a jump, it can cause more
1838 deletion of unreachable code, after a different label.
1839 As long as the value from this recursive call is correct,
1840 this invocation functions correctly. */
1841 next = delete_insn (next);
1848 /* Advance from INSN till reaching something not deleted
1849 then return that. May return INSN itself. */
1852 next_nondeleted_insn (insn)
/* NOTE(review): assumes some non-deleted insn follows before the chain
   ends; a fully deleted tail would walk off the end of the chain --
   confirm callers guarantee this.  */
1855 while (INSN_DELETED_P (insn))
1856 insn = NEXT_INSN (insn);
1860 /* Delete a range of insns from FROM to TO, inclusive.
1861 This is for the sake of peephole optimization, so assume
1862 that whatever these insns do will still be done by a new
1863 peephole insn that will replace them. */
1866 delete_for_peephole (from, to)
1867 register rtx from, to;
1869 register rtx insn = from;
1873 register rtx next = NEXT_INSN (insn);
1874 register rtx prev = PREV_INSN (insn);
/* Only real insns are marked deleted and unlinked; NOTEs in the range
   stay in the chain (see comment below).  */
1876 if (GET_CODE (insn) != NOTE)
1878 INSN_DELETED_P (insn) = 1;
1880 /* Patch this insn out of the chain. */
1881 /* We don't do this all at once, because we
1882 must preserve all NOTEs. */
1884 NEXT_INSN (prev) = next;
1887 PREV_INSN (next) = prev;
1895 /* Note that if TO is an unconditional jump
1896 we *do not* delete the BARRIER that follows,
1897 since the peephole that replaces this sequence
1898 is also an unconditional jump in that case. */
1901 /* We have determined that INSN is never reached, and are about to
1902 delete it. Print a warning if the user asked for one.
1904 To try to make this warning more useful, this should only be called
1905 once per basic block not reached, and it only warns when the basic
1906 block contains more than one line from the current function, and
1907 contains at least one operation. CSE and inlining can duplicate insns,
1908 so it's possible to get spurious warnings from this. */
1911 never_reached_warning (avoided_insn)
1915 rtx a_line_note = NULL;
1916 int two_avoided_lines = 0;
1917 int contains_insn = 0;
/* Nothing to do unless the user enabled the unreachable-code warning.  */
1919 if (! warn_notreached)
1922 /* Scan forwards, looking at LINE_NUMBER notes, until
1923 we hit a LABEL or we run out of insns. */
1925 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
1927 if (GET_CODE (insn) == CODE_LABEL)
1929 else if (GET_CODE (insn) == NOTE /* A line number note? */
1930 && NOTE_LINE_NUMBER (insn) >= 0)
/* Remember the first line note; flag when a second, different line is
   seen in the same unreachable block.  */
1932 if (a_line_note == NULL)
1935 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1936 != NOTE_LINE_NUMBER (insn));
1938 else if (INSN_P (insn))
1941 if (two_avoided_lines && contains_insn)
1942 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
1943 NOTE_LINE_NUMBER (a_line_note),
1944 "will never be executed");
1947 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1948 NLABEL as a return. Accrue modifications into the change group. */
1951 redirect_exp_1 (loc, olabel, nlabel, insn)
1956 register rtx x = *loc;
1957 register RTX_CODE code = GET_CODE (x);
1959 register const char *fmt;
/* A direct reference to OLABEL: replace it with NLABEL, or with RETURN
   when NLABEL is null.  */
1961 if (code == LABEL_REF)
1963 if (XEXP (x, 0) == olabel)
1967 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1969 n = gen_rtx_RETURN (VOIDmode);
1971 validate_change (insn, loc, n, 1);
/* Conversely, a RETURN with null OLABEL becomes a jump to NLABEL;
   wrap it in a SET of the pc when replacing the whole pattern.  */
1975 else if (code == RETURN && olabel == 0)
1977 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1978 if (loc == &PATTERN (insn))
1979 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1980 validate_change (insn, loc, x, 1);
/* (set (pc) (label_ref OLABEL)) with null NLABEL collapses to RETURN.  */
1984 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1985 && GET_CODE (SET_SRC (x)) == LABEL_REF
1986 && XEXP (SET_SRC (x), 0) == olabel)
1988 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
/* Recurse over all sub-rtxs ('e') and rtx vectors ('E') of X.  */
1992 fmt = GET_RTX_FORMAT (code);
1993 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1996 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1997 else if (fmt[i] == 'E')
2000 for (j = 0; j < XVECLEN (x, i); j++)
2001 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
2006 /* Similar, but apply the change group and report success or failure. */
2009 redirect_exp (olabel, nlabel, insn)
/* Operate on the first element of a PARALLEL, else on the whole
   pattern (same convention as redirect_jump_1 below).  */
2015 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2016 loc = &XVECEXP (PATTERN (insn), 0, 0);
2018 loc = &PATTERN (insn);
2020 redirect_exp_1 (loc, olabel, nlabel, insn);
2021 if (num_validated_changes () == 0)
/* Commit all accrued changes; nonzero on success.  */
2024 return apply_change_group ();
2027 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2028 the modifications into the change group. Return false if we did
2029 not see how to do that. */
2032 redirect_jump_1 (jump, nlabel)
/* Remember how many changes were already pending so we can tell whether
   the redirection below registered anything new.  */
2035 int ochanges = num_validated_changes ();
2038 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2039 loc = &XVECEXP (PATTERN (jump), 0, 0);
2041 loc = &PATTERN (jump);
2043 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2044 return num_validated_changes () > ochanges;
2047 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2048 jump target label is unused as a result, it and the code following
2051 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2054 The return value will be 1 if the change was made, 0 if it wasn't
2055 (this can only occur for NLABEL == 0). */
2058 redirect_jump (jump, nlabel, delete_unused)
2062 register rtx olabel = JUMP_LABEL (jump);
/* Nothing to do if the target is unchanged.  */
2064 if (nlabel == olabel)
2067 if (! redirect_exp (olabel, nlabel, jump))
/* Update bookkeeping: record the new target and count its new use.  */
2070 JUMP_LABEL (jump) = nlabel;
2072 ++LABEL_NUSES (nlabel);
2074 /* If we're eliding the jump over exception cleanups at the end of a
2075 function, move the function end note so that -Wreturn-type works. */
2076 if (olabel && nlabel
2077 && NEXT_INSN (olabel)
2078 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2079 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2080 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
/* Drop the old label's use count; delete it if it became unused and the
   caller permits deletion.  */
2082 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
2083 delete_insn (olabel);
2088 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2089 Accrue the modifications into the change group. */
2095 register RTX_CODE code;
2096 rtx x = pc_set (insn);
/* NOTE(review): lines are elided here; X is presumably narrowed to the
   SET_SRC of the pc-set before its code is examined -- confirm against
   the original jump.c.  */
2102 code = GET_CODE (x);
2104 if (code == IF_THEN_ELSE)
2106 register rtx comp = XEXP (x, 0);
2108 enum rtx_code reversed_code;
2110 /* We can do this in two ways: The preferable way, which can only
2111 be done if this is not an integer comparison, is to reverse
2112 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2113 of the IF_THEN_ELSE. If we can't do either, fail. */
2115 reversed_code = reversed_comparison_code (comp, insn);
2117 if (reversed_code != UNKNOWN)
2119 validate_change (insn, &XEXP (x, 0),
2120 gen_rtx_fmt_ee (reversed_code,
2121 GET_MODE (comp), XEXP (comp, 0),
/* Fallback: swap the two arms of the IF_THEN_ELSE.  */
2128 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2129 validate_change (insn, &XEXP (x, 2), tem, 1);
2135 /* Invert the jump condition of conditional jump insn, INSN.
2137 Return 1 if we can do so, 0 if we cannot find a way to do so that
2138 matches a pattern. */
2144 invert_exp_1 (insn);
2145 if (num_validated_changes () == 0)
/* Commit the accrued inversion changes as a group.  */
2148 return apply_change_group ();
2151 /* Invert the condition of the jump JUMP, and make it jump to label
2152 NLABEL instead of where it jumps now. Accrue changes into the
2153 change group. Return false if we didn't see how to perform the
2154 inversion and redirection. */
2157 invert_jump_1 (jump, nlabel)
2162 ochanges = num_validated_changes ();
2163 invert_exp_1 (jump);
/* If the inversion registered no new changes, it found nothing to do.  */
2164 if (num_validated_changes () == ochanges)
2167 return redirect_jump_1 (jump, nlabel);
2170 /* Invert the condition of the jump JUMP, and make it jump to label
2171 NLABEL instead of where it jumps now. Return true if successful. */
2174 invert_jump (jump, nlabel, delete_unused)
2178 /* We have to either invert the condition and change the label or
2179 do neither. Either operation could fail. We first try to invert
2180 the jump. If that succeeds, we try changing the label. If that fails,
2181 we invert the jump back to what it was. */
2183 if (! invert_exp (jump))
2186 if (redirect_jump (jump, nlabel, delete_unused))
2188 invert_br_probabilities (jump);
/* Redirection failed: invert once more to restore the original sense.  */
2193 if (! invert_exp (jump))
2194 /* This should just be putting it back the way it was. */
2201 /* Like rtx_equal_p except that it considers two REGs as equal
2202 if they renumber to the same value and considers two commutative
2203 operations to be the same if the order of the operands has been
2206 ??? Addition is not commutative on the PA due to the weird implicit
2207 space register selection rules for memory addresses. Therefore, we
2208 don't consider a + b == b + a.
2210 We could/should make this test a little tighter. Possibly only
2211 disabling it on the PA via some backend macro or only disabling this
2212 case when the PLUS is inside a MEM. */
2215 rtx_renumbered_equal_p (x, y)
2219 register RTX_CODE code = GET_CODE (x);
2220 register const char *fmt;
/* REGs (and SUBREGs of REGs) compare by the hard register they were
   renumbered to, plus the subreg byte offset.  */
2225 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2226 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2227 && GET_CODE (SUBREG_REG (y)) == REG)))
2229 int reg_x = -1, reg_y = -1;
2230 int byte_x = 0, byte_y = 0;
2232 if (GET_MODE (x) != GET_MODE (y))
2235 /* If we haven't done any renumbering, don't
2236 make any assumptions. */
2237 if (reg_renumber == 0)
2238 return rtx_equal_p (x, y);
2242 reg_x = REGNO (SUBREG_REG (x));
2243 byte_x = SUBREG_BYTE (x);
2245 if (reg_renumber[reg_x] >= 0)
2247 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2248 GET_MODE (SUBREG_REG (x)),
2257 if (reg_renumber[reg_x] >= 0)
2258 reg_x = reg_renumber[reg_x];
2261 if (GET_CODE (y) == SUBREG)
2263 reg_y = REGNO (SUBREG_REG (y));
2264 byte_y = SUBREG_BYTE (y);
2266 if (reg_renumber[reg_y] >= 0)
2268 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2269 GET_MODE (SUBREG_REG (y)),
2278 if (reg_renumber[reg_y] >= 0)
2279 reg_y = reg_renumber[reg_y];
2282 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2285 /* Now we have disposed of all the cases
2286 in which different rtx codes can match. */
2287 if (code != GET_CODE (y))
2299 return INTVAL (x) == INTVAL (y);
2302 /* We can't assume nonlocal labels have their following insns yet. */
2303 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2304 return XEXP (x, 0) == XEXP (y, 0);
2306 /* Two label-refs are equivalent if they point at labels
2307 in the same position in the instruction stream. */
2308 return (next_real_insn (XEXP (x, 0))
2309 == next_real_insn (XEXP (y, 0)));
2312 return XSTR (x, 0) == XSTR (y, 0);
2315 /* If we didn't match EQ equality above, they aren't the same. */
2322 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2324 if (GET_MODE (x) != GET_MODE (y))
2327 /* For commutative operations, the RTX match if the operand match in any
2328 order. Also handle the simple binary and unary cases without a loop.
2330 ??? Don't consider PLUS a commutative operator; see comments above. */
2331 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2333 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2334 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2335 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2336 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2337 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2338 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2339 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2340 else if (GET_RTX_CLASS (code) == '1')
2341 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2343 /* Compare the elements. If any pair of corresponding elements
2344 fail to match, return 0 for the whole things. */
2346 fmt = GET_RTX_FORMAT (code);
2347 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2353 if (XWINT (x, i) != XWINT (y, i))
2358 if (XINT (x, i) != XINT (y, i))
2363 if (XTREE (x, i) != XTREE (y, i))
2368 if (strcmp (XSTR (x, i), XSTR (y, i)))
2373 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2378 if (XEXP (x, i) != XEXP (y, i))
2385 if (XVECLEN (x, i) != XVECLEN (y, i))
2387 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2388 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2399 /* If X is a hard register or equivalent to one or a subregister of one,
2400 return the hard register number. If X is a pseudo register that was not
2401 assigned a hard register, return the pseudo register number. Otherwise,
2402 return -1. Any rtx is valid for X. */
2408 if (GET_CODE (x) == REG)
/* A pseudo that reload assigned to a hard register reports that
   hard register's number.  */
2410 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2411 return reg_renumber[REGNO (x)];
2414 if (GET_CODE (x) == SUBREG)
2416 int base = true_regnum (SUBREG_REG (x));
/* A SUBREG of a hard register maps to an offset within that register.  */
2417 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2418 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2419 GET_MODE (SUBREG_REG (x)),
2420 SUBREG_BYTE (x), GET_MODE (x));
2425 /* Optimize code of the form:
2427 for (x = a[i]; x; ...)
2429 for (x = a[i]; x; ...)
2433 Loop optimize will change the above code into
2437 { ...; if (! (x = ...)) break; }
2440 { ...; if (! (x = ...)) break; }
2443 In general, if the first test fails, the program can branch
2444 directly to `foo' and skip the second try which is doomed to fail.
2445 We run this after loop optimization and before flow analysis. */
2447 /* When comparing the insn patterns, we track the fact that different
2448 pseudo-register numbers may have been used in each computation.
2449 The following array stores an equivalence -- same_regs[I] == J means
2450 that pseudo register I was used in the first set of tests in a context
2451 where J was used in the second set. We also count the number of such
2452 pending equivalences. If nonzero, the expressions really aren't the
/* Pending register equivalences for thread_jumps: same_regs[I] == J means
   pseudo I in the first test sequence corresponds to pseudo J in the
   second (see the comment block above).  */
2455 static int *same_regs;
/* Number of pending equivalences currently recorded in same_regs.  */
2457 static int num_same_regs;
2459 /* Track any registers modified between the target of the first jump and
2460 the second jump. They never compare equal. */
2462 static char *modified_regs;
2464 /* Record if memory was modified. */
2466 static int modified_mem;
2468 /* Called via note_stores on each insn between the target of the first
2469 branch and the second branch. It marks any changed registers. */
2472 mark_modified_reg (dest, x, data)
2475 void *data ATTRIBUTE_UNUSED;
/* Strip a SUBREG to find the underlying destination.  */
2480 if (GET_CODE (dest) == SUBREG)
2481 dest = SUBREG_REG (dest);
/* Stores through memory are tracked separately via modified_mem.  */
2483 if (GET_CODE (dest) == MEM)
2486 if (GET_CODE (dest) != REG)
2489 regno = REGNO (dest);
2490 if (regno >= FIRST_PSEUDO_REGISTER)
2491 modified_regs[regno] = 1;
2492 /* Don't consider a hard condition code register as modified,
2493 if it is only being set. thread_jumps will check if it is set
2494 to the same value. */
2495 else if (GET_MODE_CLASS (GET_MODE (dest)) != MODE_CC
2496 || GET_CODE (x) != SET
2497 || ! rtx_equal_p (dest, SET_DEST (x))
2498 || HARD_REGNO_NREGS (regno, GET_MODE (dest)) != 1)
/* Mark every hard register the (possibly multi-word) store touches.  */
2499 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
2500 modified_regs[regno + i] = 1;
2503 /* F is the first insn in the chain of insns. */
2506 thread_jumps (f, max_reg, flag_before_loop)
2509 int flag_before_loop;
2511 /* Basic algorithm is to find a conditional branch,
2512 the label it may branch to, and the branch after
2513 that label. If the two branches test the same condition,
2514 walk back from both branch paths until the insn patterns
2515 differ, or code labels are hit. If we make it back to
2516 the target of the first branch, then we know that the first branch
2517 will either always succeed or always fail depending on the relative
2518 senses of the two branches. So adjust the first branch accordingly
2521 rtx label, b1, b2, t1, t2;
2522 enum rtx_code code1, code2;
2523 rtx b1op0, b1op1, b2op0, b2op1;
2527 enum rtx_code reversed_code1, reversed_code2;
2529 /* Allocate register tables and quick-reset table. */
2530 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
2531 same_regs = (int *) xmalloc (max_reg * sizeof (int));
2532 all_reset = (int *) xmalloc (max_reg * sizeof (int));
2533 for (i = 0; i < max_reg; i++)
2540 for (b1 = f; b1; b1 = NEXT_INSN (b1))
2545 /* Get to a candidate branch insn. */
2546 if (GET_CODE (b1) != JUMP_INSN
2547 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
/* Fresh per-candidate state: no registers modified and no pending
   register equivalences.  */
2550 memset (modified_regs, 0, max_reg * sizeof (char));
2553 memcpy (same_regs, all_reset, max_reg * sizeof (int));
2556 label = JUMP_LABEL (b1);
2558 /* Look for a branch after the target. Record any registers and
2559 memory modified between the target and the branch. Stop when we
2560 get to a label since we can't know what was changed there. */
2561 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
2563 if (GET_CODE (b2) == CODE_LABEL)
2566 else if (GET_CODE (b2) == JUMP_INSN)
2568 /* If this is an unconditional jump and is the only use of
2569 its target label, we can follow it. */
2570 if (any_uncondjump_p (b2)
2572 && JUMP_LABEL (b2) != 0
2573 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
2575 b2 = JUMP_LABEL (b2);
2582 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
/* A call clobbers the usual call-used registers (minus the fixed
   stack/frame/arg pointers, which survive calls).  */
2585 if (GET_CODE (b2) == CALL_INSN)
2588 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2589 if (call_used_regs[i] && ! fixed_regs[i]
2590 && i != STACK_POINTER_REGNUM
2591 && i != FRAME_POINTER_REGNUM
2592 && i != HARD_FRAME_POINTER_REGNUM
2593 && i != ARG_POINTER_REGNUM)
2594 modified_regs[i] = 1;
2597 note_stores (PATTERN (b2), mark_modified_reg, NULL);
2600 /* Check the next candidate branch insn from the label
2603 || GET_CODE (b2) != JUMP_INSN
2605 || !any_condjump_p (b2)
2606 || !onlyjump_p (b2))
2611 /* Get the comparison codes and operands, reversing the
2612 codes if appropriate. If we don't have comparison codes,
2613 we can't do anything. */
2614 b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
2615 b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
2616 code1 = GET_CODE (XEXP (SET_SRC (set), 0));
2617 reversed_code1 = code1;
2618 if (XEXP (SET_SRC (set), 1) == pc_rtx)
2619 code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
2621 reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
2623 b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
2624 b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
2625 code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
2626 reversed_code2 = code2;
2627 if (XEXP (SET_SRC (set2), 1) == pc_rtx)
2628 code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
2630 reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
2632 /* If they test the same things and knowing that B1 branches
2633 tells us whether or not B2 branches, check if we
2634 can thread the branch. */
2635 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
2636 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
2637 && (comparison_dominates_p (code1, code2)
2638 || comparison_dominates_p (code1, reversed_code2)))
/* Walk both predecessor chains in lock step, comparing insns.  */
2641 t1 = prev_nonnote_insn (b1);
2642 t2 = prev_nonnote_insn (b2);
2644 while (t1 != 0 && t2 != 0)
2648 /* We have reached the target of the first branch.
2649 If there are no pending register equivalents,
2650 we know that this branch will either always
2651 succeed (if the senses of the two branches are
2652 the same) or always fail (if not). */
2655 if (num_same_regs != 0)
2658 if (comparison_dominates_p (code1, code2))
2659 new_label = JUMP_LABEL (b2);
2661 new_label = get_label_after (b2);
2663 if (JUMP_LABEL (b1) != new_label)
2665 rtx prev = PREV_INSN (new_label);
2667 if (flag_before_loop
2668 && GET_CODE (prev) == NOTE
2669 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
2671 /* Don't thread to the loop label. If a loop
2672 label is reused, loop optimization will
2673 be disabled for that loop. */
2674 new_label = gen_label_rtx ();
2675 emit_label_after (new_label, PREV_INSN (prev));
2677 changed |= redirect_jump (b1, new_label, 1);
2682 /* If either of these is not a normal insn (it might be
2683 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
2684 have already been skipped above.) Similarly, fail
2685 if the insns are different. */
2686 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
2687 || recog_memoized (t1) != recog_memoized (t2)
2688 || ! rtx_equal_for_thread_p (PATTERN (t1),
2692 t1 = prev_nonnote_insn (t1);
2693 t2 = prev_nonnote_insn (t2);
/* Release the per-pass tables allocated above.  */
2700 free (modified_regs);
2705 /* This is like RTX_EQUAL_P except that it knows about our handling of
2706 possibly equivalent registers and knows to consider volatile and
2707 modified objects as not equal.
2709 YINSN is the insn containing Y. */
2712 rtx_equal_for_thread_p (x, y, yinsn)
2718 register enum rtx_code code;
2719 register const char *fmt;
2721 code = GET_CODE (x);
2722 /* Rtx's of different codes cannot be equal. */
2723 if (code != GET_CODE (y))
2726 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
2727 (REG:SI x) and (REG:HI x) are NOT equivalent. */
2729 if (GET_MODE (x) != GET_MODE (y))
2732 /* For floating-point, consider everything unequal. This is a bit
2733 pessimistic, but this pass would only rarely do anything for FP
2735 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
2736 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
2739 /* For commutative operations, the RTX match if the operand match in any
2740 order. Also handle the simple binary and unary cases without a loop. */
2741 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2742 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
2743 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
2744 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
2745 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
2746 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2747 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
2748 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
2749 else if (GET_RTX_CLASS (code) == '1')
2750 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
2752 /* Handle special-cases first. */
2756 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
2759 /* If neither is user variable or hard register, check for possible
2761 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
2762 || REGNO (x) < FIRST_PSEUDO_REGISTER
2763 || REGNO (y) < FIRST_PSEUDO_REGISTER)
2766 if (same_regs[REGNO (x)] == -1)
2768 same_regs[REGNO (x)] = REGNO (y);
2771 /* If this is the first time we are seeing a register on the `Y'
2772 side, see if it is the last use. If not, we can't thread the
2773 jump, so mark it as not equivalent. */
2774 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
2780 return (same_regs[REGNO (x)] == (int) REGNO (y));
2785 /* If memory modified or either volatile, not equivalent.
2786 Else, check address. */
2787 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2790 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
2793 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2799 /* Cancel a pending `same_regs' if setting equivalenced registers.
2800 Then process source. */
2801 if (GET_CODE (SET_DEST (x)) == REG
2802 && GET_CODE (SET_DEST (y)) == REG)
2804 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
2806 same_regs[REGNO (SET_DEST (x))] = -1;
2809 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
2814 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
2818 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
2821 return XEXP (x, 0) == XEXP (y, 0);
2824 return XSTR (x, 0) == XSTR (y, 0);
2833 fmt = GET_RTX_FORMAT (code);
2834 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2839 if (XWINT (x, i) != XWINT (y, i))
2845 if (XINT (x, i) != XINT (y, i))
2851 /* Two vectors must have the same length. */
2852 if (XVECLEN (x, i) != XVECLEN (y, i))
2855 /* And the corresponding elements must match. */
2856 for (j = 0; j < XVECLEN (x, i); j++)
2857 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
2858 XVECEXP (y, i, j), yinsn) == 0)
2863 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
2869 if (strcmp (XSTR (x, i), XSTR (y, i)))
2874 /* These are just backpointers, so they don't matter. */
2881 /* It is believed that rtx's at this level will never
2882 contain anything but integers and other rtx's,
2883 except for within LABEL_REFs and SYMBOL_REFs. */