1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically set of utility function to
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
42 #include "hard-reg-set.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
55 /* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57 /* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
62 static int init_label_info PARAMS ((rtx));
63 static void mark_all_labels PARAMS ((rtx));
64 static int duplicate_loop_exit_test PARAMS ((rtx));
65 static void delete_computation PARAMS ((rtx));
66 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
67 static int redirect_exp PARAMS ((rtx, rtx, rtx));
68 static void invert_exp_1 PARAMS ((rtx));
69 static int invert_exp PARAMS ((rtx));
70 static int returnjump_p_1 PARAMS ((rtx *, void *));
71 static void delete_prior_computation PARAMS ((rtx, rtx));
73 /* Alternate entry into the jump optimizer. This entry point only rebuilds
74 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
/* Rebuild JUMP_LABEL fields and LABEL_NUSES reference counts for the
   insn chain F (see the header comment above this function).  It first
   re-initializes label info via init_label_info, then bumps the use
   count of labels referenced from static data and exception-handler
   tables so they are never deleted as unused.
   NOTE(review): declarations and some statements are elided in this
   excerpt — presumably mark_all_labels is invoked as well; confirm
   against the full source.  */
77 rebuild_jump_labels (f)
83 max_uid = init_label_info (f) + 1;
87 /* Keep track of labels used from static data; we don't track them
88 closely enough to delete them here, so make sure their reference
89 count doesn't drop to zero. */
91 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
92 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
93 LABEL_NUSES (XEXP (insn, 0))++;
95 /* Keep track of labels used for marking handlers for exception
96 regions; they cannot usually be deleted. */
98 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
99 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
100 LABEL_NUSES (XEXP (insn, 0))++;
103 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
104 non-fallthru insn. This is not generally true, as multiple barriers
105 may have crept in, or the BARRIER may be separated from the last
106 real insn by one or more NOTEs.
108 This simple pass moves barriers and removes duplicates so that the
/* Body of the barrier-normalization pass described in the comment
   above: walk every insn; when a BARRIER's previous non-note insn is
   itself a BARRIER the new one is a duplicate and is deleted, otherwise
   the BARRIER is moved up so it directly follows that insn.
   NOTE(review): the function header is elided in this excerpt, so the
   function name is inferred from context — verify.  */
114 rtx insn, next, prev;
115 for (insn = get_insns (); insn; insn = next)
117 next = NEXT_INSN (insn);
118 if (GET_CODE (insn) == BARRIER)
120 prev = prev_nonnote_insn (insn);
121 if (GET_CODE (prev) == BARRIER)
122 delete_barrier (insn);
123 else if (prev != PREV_INSN (insn))
124 reorder_insns (insn, insn, prev);
/* Scan the insn chain F for a NOTE_INSN_LOOP_BEG that is immediately
   followed (ignoring notes) by an unconditional jump, and try to
   duplicate the loop exit test before the loop via
   duplicate_loop_exit_test.  Only valid immediately after regscan,
   since regno_first_uid/regno_last_uid are consulted.  */
130 copy_loop_headers (f)
133 register rtx insn, next;
134 /* Now iterate optimizing jumps until nothing changes over one pass. */
135 for (insn = f; insn; insn = next)
139 next = NEXT_INSN (insn);
141 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
142 jump. Try to optimize by duplicating the loop exit test if so.
143 This is only safe immediately after regscan, because it uses
144 the values of regno_first_uid and regno_last_uid. */
145 if (GET_CODE (insn) == NOTE
146 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
147 && (temp1 = next_nonnote_insn (insn)) != 0
148 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
/* On success, resume scanning right after the saved predecessor so the
   newly emitted copies are not rescanned from the wrong place.  */
150 temp = PREV_INSN (insn);
151 if (duplicate_loop_exit_test (insn))
153 next = NEXT_INSN (temp);
/* Delete line-number notes in F that merely repeat the previous line
   note.  A NOTE_INSN_FUNCTION_BEG resets the tracked "last note" so
   gdb still receives a fresh line note after the prologue even when
   the line number is unchanged.  */
160 purge_line_number_notes (f)
165 /* Delete extraneous line number notes.
166 Note that two consecutive notes for different lines are not really
167 extraneous. There should be some indication where that line belonged,
168 even if it became empty. */
170 for (insn = f; insn; insn = NEXT_INSN (insn))
171 if (GET_CODE (insn) == NOTE)
173 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
174 /* Any previous line note was for the prologue; gdb wants a new
175 note after the prologue even if it is for the same line. */
176 last_note = NULL_RTX;
177 else if (NOTE_LINE_NUMBER (insn) >= 0)
179 /* Delete this note if it is identical to previous note. */
181 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
182 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
193 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
194 notes whose labels don't occur in the insn any more. Returns the
195 largest INSN_UID found. */
/* Main loop of init_label_info (header elided in this excerpt): reset
   each label's use count (preserved labels start at 1, others at 0),
   clear JUMP_LABEL on every jump, remove REG_LABEL notes whose label no
   longer appears in the insn pattern, and record the largest INSN_UID
   seen, which the function returns.  */
203 for (insn = f; insn; insn = NEXT_INSN (insn))
205 if (GET_CODE (insn) == CODE_LABEL)
206 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
207 else if (GET_CODE (insn) == JUMP_INSN)
208 JUMP_LABEL (insn) = 0;
209 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
213 for (note = REG_NOTES (insn); note; note = next)
215 next = XEXP (note, 1);
216 if (REG_NOTE_KIND (note) == REG_LABEL
217 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
218 remove_note (insn, note);
221 if (INSN_UID (insn) > largest_uid)
222 largest_uid = INSN_UID (insn);
228 /* Mark the label each jump jumps to.
229 Combine consecutive labels, and count uses of labels. */
/* Main loop of mark_all_labels (header elided in this excerpt): for
   every insn, mark the labels it references via mark_jump_label.  A
   CALL_PLACEHOLDER is handled specially: its three alternative insn
   sequences are walked recursively and its tail-recursion label is
   canonicalized.  For jumps that still lack a JUMP_LABEL (indirect
   jumps through a REG), the label is recovered from the REG_LABEL
   note and canonicalized the same way.  */
237 for (insn = f; insn; insn = NEXT_INSN (insn))
240 if (GET_CODE (insn) == CALL_INSN
241 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
243 mark_all_labels (XEXP (PATTERN (insn), 0));
244 mark_all_labels (XEXP (PATTERN (insn), 1));
245 mark_all_labels (XEXP (PATTERN (insn), 2));
247 /* Canonicalize the tail recursion label attached to the
248 CALL_PLACEHOLDER insn. */
249 if (XEXP (PATTERN (insn), 3))
/* Wrap the bare label in a LABEL_REF so mark_jump_label can
   canonicalize it, then unwrap the (possibly replaced) label.  */
251 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
252 XEXP (PATTERN (insn), 3));
253 mark_jump_label (label_ref, insn, 0);
254 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
260 mark_jump_label (PATTERN (insn), insn, 0);
261 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
263 /* When we know the LABEL_REF contained in a REG used in
264 an indirect jump, we'll have a REG_LABEL note so that
265 flow can tell where it's going. */
266 if (JUMP_LABEL (insn) == 0)
268 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
271 /* But a LABEL_REF around the REG_LABEL note, so
272 that we can canonicalize it. */
273 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
274 XEXP (label_note, 0));
276 mark_jump_label (label_ref, insn, 0);
277 XEXP (label_note, 0) = XEXP (label_ref, 0);
278 JUMP_LABEL (insn) = XEXP (label_note, 0);
285 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
286 jump. Assume that this unconditional jump is to the exit test code. If
287 the code is sufficiently simple, make a copy of it before INSN,
288 followed by a jump to the exit of the loop. Then delete the unconditional
291 Return 1 if we made the change, else 0.
293 This is only safe immediately after a regscan pass because it uses the
294 values of regno_first_uid and regno_last_uid. */
/* Duplicate the loop exit test before LOOP_START (a NOTE_INSN_LOOP_BEG
   followed by an unconditional jump to the exit test); see the header
   comment above.  Returns 1 on success, 0 if the exit code is too
   complex or too long to copy.  Three phases are visible below:
   (1) scan and validate the exit code, (2) rename registers that are
   live only inside the exit code, (3) copy the insns before
   LOOP_START, fix up notes/labels/predictions, and delete the original
   jump.  NOTE(review): many interior lines (case labels, braces, some
   statements) are elided in this excerpt.  */
297 duplicate_loop_exit_test (loop_start)
300 rtx insn, set, reg, p, link;
301 rtx copy = 0, first_copy = 0;
303 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
305 int max_reg = max_reg_num ();
308 /* Scan the exit code. We do not perform this optimization if any insn:
312 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
313 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
314 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
317 We also do not do this if we find an insn with ASM_OPERANDS. While
318 this restriction should not be necessary, copying an insn with
319 ASM_OPERANDS can confuse asm_noperands in some cases.
321 Also, don't do this if the exit code is more than 20 insns. */
323 for (insn = exitcode;
325 && ! (GET_CODE (insn) == NOTE
326 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
327 insn = NEXT_INSN (insn))
329 switch (GET_CODE (insn))
335 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
336 a jump immediately after the loop start that branches outside
337 the loop but within an outer loop, near the exit test.
338 If we copied this exit test and created a phony
339 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
340 before the exit test look like these could be safely moved
341 out of the loop even if they actually may be never executed.
342 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
344 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
345 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
349 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
350 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
351 /* If we were to duplicate this code, we would not move
352 the BLOCK notes, and so debugging the moved code would
353 be difficult. Thus, we only move the code with -O2 or
360 /* The code below would grossly mishandle REG_WAS_0 notes,
361 so get rid of them here. */
362 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
363 remove_note (insn, p);
365 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
366 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
374 /* Unless INSN is zero, we can do the optimization. */
380 /* See if any insn sets a register only used in the loop exit code and
381 not a user variable. If so, replace it with a new register. */
382 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
383 if (GET_CODE (insn) == INSN
384 && (set = single_set (insn)) != 0
385 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
386 || (GET_CODE (reg) == SUBREG
387 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
388 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
389 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
391 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
392 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
397 /* We can do the replacement. Allocate reg_map if this is the
398 first replacement we found. */
400 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
402 REG_LOOP_TEST_P (reg) = 1;
404 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
408 /* Now copy each insn. */
409 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
411 switch (GET_CODE (insn))
414 copy = emit_barrier_before (loop_start);
417 /* Only copy line-number notes. */
418 if (NOTE_LINE_NUMBER (insn) >= 0)
420 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
421 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
426 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
428 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
430 mark_jump_label (PATTERN (copy), copy, 0);
432 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
434 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
435 if (REG_NOTE_KIND (link) != REG_LABEL)
437 if (GET_CODE (link) == EXPR_LIST)
439 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
444 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
449 if (reg_map && REG_NOTES (copy))
450 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
454 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
457 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
458 mark_jump_label (PATTERN (copy), copy, 0);
459 if (REG_NOTES (insn))
461 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
463 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
466 /* Predict conditional jump that do make loop looping as taken.
467 Other jumps are probably exit conditions, so predict
469 if (any_condjump_p (copy))
471 rtx label = JUMP_LABEL (copy);
474 /* The jump_insn after loop_start should be followed
475 by barrier and loopback label. */
476 if (prev_nonnote_insn (label)
477 && (PREV_INSN (prev_nonnote_insn (label))
478 == NEXT_INSN (loop_start)))
479 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
481 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
490 /* Record the first insn we copied. We need it so that we can
491 scan the copied insns for new pseudo registers. */
496 /* Now clean up by emitting a jump to the end label and deleting the jump
497 at the start of the loop. */
498 if (! copy || GET_CODE (copy) != BARRIER)
500 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
503 /* Record the first insn we copied. We need it so that we can
504 scan the copied insns for new pseudo registers. This may not
505 be strictly necessary since we should have copied at least one
506 insn above. But I am going to be safe. */
510 mark_jump_label (PATTERN (copy), copy, 0);
511 emit_barrier_before (loop_start);
514 /* Now scan from the first insn we copied to the last insn we copied
515 (copy) for new pseudo registers. Do this after the code to jump to
516 the end label since that might create a new pseudo too. */
517 reg_scan_update (first_copy, copy, max_reg);
519 /* Mark the exit code as the virtual top of the converted loop. */
520 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
522 delete_insn (next_nonnote_insn (loop_start));
531 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
532 notes between START and END out before START. Assume that END is not
533 such a note. START may be such a note. Returns the value of the new
534 starting insn, which may be different if the original start was such a
/* Move block/loop boundary notes found between START and END out in
   front of START by hand-splicing the doubly linked insn chain (see the
   header comment above).  Returns the possibly-updated start insn.  */
538 squeeze_notes (start, end)
544 for (insn = start; insn != end; insn = next)
546 next = NEXT_INSN (insn);
547 if (GET_CODE (insn) == NOTE
548 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
549 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
550 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
551 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
552 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
553 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
/* Unlink INSN from its current position and relink it just before
   START; then close the gap it left between PREV and NEXT.  */
559 rtx prev = PREV_INSN (insn);
560 PREV_INSN (insn) = PREV_INSN (start);
561 NEXT_INSN (insn) = start;
562 NEXT_INSN (PREV_INSN (insn)) = insn;
563 PREV_INSN (NEXT_INSN (insn)) = insn;
564 NEXT_INSN (prev) = next;
565 PREV_INSN (next) = prev;
573 /* Return the label before INSN, or put a new label there. */
/* Return the CODE_LABEL immediately before INSN (ignoring notes),
   creating and emitting a fresh zero-use label there if none exists.  */
576 get_label_before (insn)
581 /* Find an existing label at this point
582 or make a new one if there is none. */
583 label = prev_nonnote_insn (insn);
585 if (label == 0 || GET_CODE (label) != CODE_LABEL)
587 rtx prev = PREV_INSN (insn);
589 label = gen_label_rtx ();
590 emit_label_after (label, prev);
591 LABEL_NUSES (label) = 0;
596 /* Return the label after INSN, or put a new label there. */
/* Return the CODE_LABEL immediately after INSN (ignoring notes),
   creating and emitting a fresh zero-use label there if none exists.  */
599 get_label_after (insn)
604 /* Find an existing label at this point
605 or make a new one if there is none. */
606 label = next_nonnote_insn (insn);
608 if (label == 0 || GET_CODE (label) != CODE_LABEL)
610 label = gen_label_rtx ();
611 emit_label_after (label, insn);
612 LABEL_NUSES (label) = 0;
617 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
618 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
619 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
620 know whether it's source is floating point or integer comparison. Machine
621 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
622 to help this function avoid overhead in these cases. */
/* Return the code of the reversed comparison (CODE ARG0 ARG1) inside
   INSN, or UNKNOWN when reversal cannot be proven safe (see the header
   comment above).  Strategy, in priority order: target-supplied
   REVERSIBLE_CC_MODE/REVERSE_CONDITION macros; per-code special cases
   (EQ/NE and unsigned codes always reverse; unordered codes imply FP);
   a non-IEEE or -funsafe-math-optimizations escape hatch; and finally a
   backward scan from INSN for the COMPARE that set ARG0, to discover
   the real comparison mode.  NOTE(review): several case labels and
   fallthrough returns are elided in this excerpt.  */
624 reversed_comparison_code_parts (code, arg0, arg1, insn)
625 rtx insn, arg0, arg1;
628 enum machine_mode mode;
630 /* If this is not actually a comparison, we can't reverse it. */
631 if (GET_RTX_CLASS (code) != '<')
634 mode = GET_MODE (arg0);
635 if (mode == VOIDmode)
636 mode = GET_MODE (arg1);
638 /* First see if machine description supply us way to reverse the comparison.
639 Give it priority over everything else to allow machine description to do
641 #ifdef REVERSIBLE_CC_MODE
642 if (GET_MODE_CLASS (mode) == MODE_CC
643 && REVERSIBLE_CC_MODE (mode))
645 #ifdef REVERSE_CONDITION
646 return REVERSE_CONDITION (code, mode);
648 return reverse_condition (code);
652 /* Try a few special cases based on the comparison code. */
661 /* It is always safe to reverse EQ and NE, even for the floating
662 point. Similary the unsigned comparisons are never used for
663 floating point so we can reverse them in the default way. */
664 return reverse_condition (code);
669 /* In case we already see unordered comparison, we can be sure to
670 be dealing with floating point so we don't need any more tests. */
671 return reverse_condition_maybe_unordered (code);
676 /* We don't have safe way to reverse these yet. */
682 /* In case we give up IEEE compatibility, all comparisons are reversible. */
683 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
684 || flag_unsafe_math_optimizations)
685 return reverse_condition (code);
687 if (GET_MODE_CLASS (mode) == MODE_CC
694 /* Try to search for the comparison to determine the real mode.
695 This code is expensive, but with sane machine description it
696 will be never used, since REVERSIBLE_CC_MODE will return true
701 for (prev = prev_nonnote_insn (insn);
702 prev != 0 && GET_CODE (prev) != CODE_LABEL;
703 prev = prev_nonnote_insn (prev))
705 rtx set = set_of (arg0, prev);
706 if (set && GET_CODE (set) == SET
707 && rtx_equal_p (SET_DEST (set), arg0))
709 rtx src = SET_SRC (set);
711 if (GET_CODE (src) == COMPARE)
713 rtx comparison = src;
714 arg0 = XEXP (src, 0);
715 mode = GET_MODE (arg0);
716 if (mode == VOIDmode)
717 mode = GET_MODE (XEXP (comparison, 1));
720 /* We can get past reg-reg moves. This may be usefull for model
721 of i387 comparisons that first move flag registers around. */
728 /* If register is clobbered in some ununderstandable way,
735 /* An integer condition. */
736 if (GET_CODE (arg0) == CONST_INT
737 || (GET_MODE (arg0) != VOIDmode
738 && GET_MODE_CLASS (mode) != MODE_CC
739 && ! FLOAT_MODE_P (mode)))
740 return reverse_condition (code);
745 /* An wrapper around the previous function to take COMPARISON as rtx
746 expression. This simplifies many callers. */
/* Convenience wrapper: reverse the comparison given as a full rtx
   COMPARISON inside INSN by unpacking its code and both operands for
   reversed_comparison_code_parts.  Non-comparison rtx is rejected
   (the elided branch presumably returns UNKNOWN — confirm).  */
748 reversed_comparison_code (comparison, insn)
749 rtx comparison, insn;
751 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
753 return reversed_comparison_code_parts (GET_CODE (comparison),
754 XEXP (comparison, 0),
755 XEXP (comparison, 1), insn);
758 /* Given an rtx-code for a comparison, return the code for the negated
759 comparison. If no such code exists, return UNKNOWN.
761 WATCH OUT! reverse_condition is not safe to use on a jump that might
762 be acting on the results of an IEEE floating point comparison, because
763 of the special treatment of non-signaling nans in comparisons.
764 Use reversed_comparison_code instead. */
/* Comparison-code mapping helpers.  Bodies (switch statements over
   enum rtx_code) are almost entirely elided in this excerpt; only the
   headers and a few guard lines remain — consult the full source
   before editing.  */
767 reverse_condition (code)
810 /* Similar, but we're allowed to generate unordered comparisons, which
811 makes it safe for IEEE floating-point. Of course, we have to recognize
812 that the target will support them too... */
815 reverse_condition_maybe_unordered (code)
818 /* Non-IEEE formats don't have unordered conditions. */
819 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
820 return reverse_condition (code);
858 /* Similar, but return the code when two operands of a comparison are swapped.
859 This IS safe for IEEE floating-point. */
862 swap_condition (code)
905 /* Given a comparison CODE, return the corresponding unsigned comparison.
906 If CODE is an equality comparison or already an unsigned comparison,
910 unsigned_condition (code)
937 /* Similarly, return the signed version of a comparison. */
940 signed_condition (code)
967 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
968 truth of CODE1 implies the truth of CODE2. */
/* Return nonzero iff the truth of comparison CODE1 implies the truth
   of CODE2 (CODE1 is "more strict").  Implemented as a case analysis
   on CODE1; the case labels themselves are elided in this excerpt, so
   each `if (code2 == ...)` line below belongs to a different CODE1
   case — consult the full source before editing.  */
971 comparison_dominates_p (code1, code2)
972 enum rtx_code code1, code2;
974 /* UNKNOWN comparison codes can happen as a result of trying to revert
976 They can't match anything, so we have to reject them here. */
977 if (code1 == UNKNOWN || code2 == UNKNOWN)
986 if (code2 == UNLE || code2 == UNGE)
991 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
997 if (code2 == UNLE || code2 == NE)
1002 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1007 if (code2 == UNGE || code2 == NE)
1012 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1018 if (code2 == ORDERED)
1023 if (code2 == NE || code2 == ORDERED)
1028 if (code2 == LEU || code2 == NE)
1033 if (code2 == GEU || code2 == NE)
1038 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1039 || code2 == UNGE || code2 == UNGT)
1050 /* Return 1 if INSN is an unconditional jump and nothing else. */
/* Body of simplejump_p (header elided): true only for a bare
   (set (pc) (label_ref ...)) jump insn — no conditions, no PARALLEL.  */
1056 return (GET_CODE (insn) == JUMP_INSN
1057 && GET_CODE (PATTERN (insn)) == SET
1058 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1059 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1062 /* Return nonzero if INSN is a (possibly) conditional jump
1065 Use this function is deprecated, since we need to support combined
1066 branch and compare insns. Use any_condjump_p instead whenever possible. */
/* Body of condjump_p (header elided): recognize a jump whose pattern
   is a plain SET of pc — either an unconditional (label_ref) jump or
   an IF_THEN_ELSE with pc on one arm and a label/return on the other.
   Deprecated in favor of any_condjump_p (see comment above).  */
1072 register rtx x = PATTERN (insn);
1074 if (GET_CODE (x) != SET
1075 || GET_CODE (SET_DEST (x)) != PC)
1079 if (GET_CODE (x) == LABEL_REF)
1082 return (GET_CODE (x) == IF_THEN_ELSE
1083 && ((GET_CODE (XEXP (x, 2)) == PC
1084 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1085 || GET_CODE (XEXP (x, 1)) == RETURN))
1086 || (GET_CODE (XEXP (x, 1)) == PC
1087 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1088 || GET_CODE (XEXP (x, 2)) == RETURN))));
1093 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1096 Use this function is deprecated, since we need to support combined
1097 branch and compare insns. Use any_condjump_p instead whenever possible. */
/* Like condjump_p but for a jump wrapped in a PARALLEL: examine only
   the first element, which must be a SET of pc whose source is a
   label_ref (unconditional) or an IF_THEN_ELSE with pc on one arm.
   Deprecated in favor of any_condjump_p (see comment above).  */
1100 condjump_in_parallel_p (insn)
1103 register rtx x = PATTERN (insn);
1105 if (GET_CODE (x) != PARALLEL)
1108 x = XVECEXP (x, 0, 0);
1110 if (GET_CODE (x) != SET)
1112 if (GET_CODE (SET_DEST (x)) != PC)
1114 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1116 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1118 if (XEXP (SET_SRC (x), 2) == pc_rtx
1119 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1120 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1122 if (XEXP (SET_SRC (x), 1) == pc_rtx
1123 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1124 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1129 /* Return set of PC, otherwise NULL. */
/* Body of pc_set (header elided): return the SET of pc in a jump insn,
   whether it is the whole pattern or the first element of a PARALLEL;
   NULL for anything else.  */
1136 if (GET_CODE (insn) != JUMP_INSN)
1138 pat = PATTERN (insn);
1140 /* The set is allowed to appear either as the insn pattern or
1141 the first set in a PARALLEL. */
1142 if (GET_CODE (pat) == PARALLEL)
1143 pat = XVECEXP (pat, 0, 0);
1144 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1150 /* Return true when insn is an unconditional direct jump,
1151 possibly bundled inside a PARALLEL. */
/* True when INSN is an unconditional direct jump, even inside a
   PARALLEL: its pc-set source must be a plain LABEL_REF.  */
1154 any_uncondjump_p (insn)
1157 rtx x = pc_set (insn);
1160 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1165 /* Return true when insn is a conditional jump. This function works for
1166 instructions containing PC sets in PARALLELs. The instruction may have
1167 various other effects so before removing the jump you must verify
1170 Note that unlike condjump_p it returns false for unconditional jumps. */
/* True when INSN is a conditional jump (PARALLELs handled via pc_set):
   the pc-set source is an IF_THEN_ELSE with pc on exactly one arm and
   a label/return on the other.  Unlike condjump_p, returns false for
   unconditional jumps (see comment above).  */
1173 any_condjump_p (insn)
1176 rtx x = pc_set (insn);
1181 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1184 a = GET_CODE (XEXP (SET_SRC (x), 1));
1185 b = GET_CODE (XEXP (SET_SRC (x), 2));
1187 return ((b == PC && (a == LABEL_REF || a == RETURN))
1188 || (a == PC && (b == LABEL_REF || b == RETURN)));
1191 /* Return the label of a conditional jump. */
/* Return the LABEL_REF a (possibly conditional) jump targets: either
   the direct label of an unconditional jump or the label arm of its
   IF_THEN_ELSE.  (Elided branches presumably return NULL — confirm.)  */
1194 condjump_label (insn)
1197 rtx x = pc_set (insn);
1202 if (GET_CODE (x) == LABEL_REF)
1204 if (GET_CODE (x) != IF_THEN_ELSE)
1206 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1208 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1213 /* Return true if INSN is a (possibly conditional) return insn. */
/* for_each_rtx callback: nonzero when the pointed-to rtx is a RETURN.  */
1216 returnjump_p_1 (loc, data)
1218 void *data ATTRIBUTE_UNUSED;
1221 return x && GET_CODE (x) == RETURN;
/* returnjump_p: true if jump insn INSN contains a RETURN anywhere in
   its pattern (covers conditional returns too).  */
1228 if (GET_CODE (insn) != JUMP_INSN)
1230 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1233 /* Return true if INSN is a jump that only transfers control and
/* Body of onlyjump_p (header elided): true when INSN is a jump that
   only transfers control — a single SET of pc with a side-effect-free
   source — so it can be deleted without losing other effects.  */
1242 if (GET_CODE (insn) != JUMP_INSN)
1245 set = single_set (insn);
1248 if (GET_CODE (SET_DEST (set)) != PC)
1250 if (side_effects_p (SET_SRC (set)))
1258 /* Return 1 if X is an RTX that does nothing but set the condition codes
1259 and CLOBBER or USE registers.
1260 Return -1 if X does explicitly set the condition codes,
1261 but also does other things. */
/* Body of sets_cc0_p (header elided; contract in the comment above):
   1 if X only sets cc0 (plus CLOBBER/USE), -1 if it sets cc0 but also
   does other SETs, 0 if it does not set cc0 at all.  */
1265 rtx x ATTRIBUTE_UNUSED;
1267 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1269 if (GET_CODE (x) == PARALLEL)
1273 int other_things = 0;
1274 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1276 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1277 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1279 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1282 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1288 /* Follow any unconditional jump at LABEL;
1289 return the ultimate label reached by any such chain of jumps.
1290 If LABEL is not followed by a jump, return LABEL.
1291 If the chain loops or we can't find end, return LABEL,
1292 since that tells caller to avoid changing the insn.
1294 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1295 a USE or CLOBBER. */
/* Follow a chain of unconditional jumps starting at LABEL and return
   the ultimate target label; return LABEL itself on a cycle or when
   chaining is unsafe (see the header comment above).  The elided outer
   loop is presumably bounded to prevent runaway chains — confirm.  */
1298 follow_jumps (label)
1303 register rtx value = label;
/* Chain only through a clean unconditional jump (or RETURN) that is
   the next active insn and is properly terminated by a BARRIER.  */
1308 && (insn = next_active_insn (value)) != 0
1309 && GET_CODE (insn) == JUMP_INSN
1310 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1311 && onlyjump_p (insn))
1312 || GET_CODE (PATTERN (insn)) == RETURN)
1313 && (next = NEXT_INSN (insn))
1314 && GET_CODE (next) == BARRIER);
1317 /* Don't chain through the insn that jumps into a loop
1318 from outside the loop,
1319 since that would create multiple loop entry jumps
1320 and prevent loop optimization. */
1322 if (!reload_completed)
1323 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1324 if (GET_CODE (tem) == NOTE
1325 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1326 /* ??? Optional. Disables some optimizations, but makes
1327 gcov output more accurate with -O. */
1328 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1331 /* If we have found a cycle, make the insn jump to itself. */
1332 if (JUMP_LABEL (insn) == label)
/* Don't chain into a dispatch table (ADDR_VEC/ADDR_DIFF_VEC).  */
1335 tem = next_active_insn (JUMP_LABEL (insn));
1336 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1337 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1340 value = JUMP_LABEL (insn);
1348 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1349 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1350 in INSN, then store one of them in JUMP_LABEL (INSN).
1351 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1352 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1353 Also, when there are consecutive labels, canonicalize on the last of them.
1355 Note that two labels separated by a loop-beginning note
1356 must be kept distinct if we have not yet done loop-optimization,
1357 because the gap between them is where loop-optimize
1358 will want to move invariant code to. CROSS_JUMP tells us
1359 that loop-optimization is done with. */
/* Recursively walk X counting every CODE_LABEL use (see the header
   comment above): bump LABEL_NUSES, set JUMP_LABEL on jump insns, add
   REG_LABEL notes for non-jump label uses, and walk jump-table vectors
   and constant-pool references.  IN_MEM is propagated to recursive
   calls.  NOTE(review): several case labels and early returns are
   elided in this excerpt.  */
1362 mark_jump_label (x, insn, in_mem)
1367 register RTX_CODE code = GET_CODE (x);
1369 register const char *fmt;
1391 /* If this is a constant-pool reference, see if it is a label. */
1392 if (CONSTANT_POOL_ADDRESS_P (x))
1393 mark_jump_label (get_pool_constant (x), insn, in_mem);
/* LABEL_REF handling: canonicalize, count, and record the label.  */
1398 rtx label = XEXP (x, 0);
1400 /* Ignore remaining references to unreachable labels that
1401 have been deleted. */
1402 if (GET_CODE (label) == NOTE
1403 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1406 if (GET_CODE (label) != CODE_LABEL)
1409 /* Ignore references to labels of containing functions. */
1410 if (LABEL_REF_NONLOCAL_P (x))
1413 XEXP (x, 0) = label;
1414 if (! insn || ! INSN_DELETED_P (insn))
1415 ++LABEL_NUSES (label);
1419 if (GET_CODE (insn) == JUMP_INSN)
1420 JUMP_LABEL (insn) = label;
1423 /* Add a REG_LABEL note for LABEL unless there already
1424 is one. All uses of a label, except for labels
1425 that are the targets of jumps, must have a
1427 if (! find_reg_note (insn, REG_LABEL, label))
1428 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1435 /* Do walk the labels in a vector, but not the first operand of an
1436 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1439 if (! INSN_DELETED_P (insn))
1441 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1443 for (i = 0; i < XVECLEN (x, eltnum); i++)
1444 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
/* Generic recursion over the rtx format string.  */
1452 fmt = GET_RTX_FORMAT (code);
1453 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1456 mark_jump_label (XEXP (x, i), insn, in_mem);
1457 else if (fmt[i] == 'E')
1460 for (j = 0; j < XVECLEN (x, i); j++)
1461 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1466 /* If all INSN does is set the pc, delete it,
1467 and delete the insn that set the condition codes for it
1468 if that's what the previous thing was. */
/* Body of delete_jump (header elided): if INSN's single set merely
   assigns pc, delete it — and, transitively, the dead computation
   feeding it — via delete_computation.  */
1474 register rtx set = single_set (insn);
1476 if (set && GET_CODE (SET_DEST (set)) == PC)
1477 delete_computation (insn);
1480 /* Verify INSN is a BARRIER and delete it. */
/* Sanity-checked BARRIER deletion: verify INSN really is a BARRIER
   before deleting it (the abort/delete lines are elided here).  */
1483 delete_barrier (insn)
1486 if (GET_CODE (insn) != BARRIER)
1492 /* Recursively delete prior insns that compute the value (used only by INSN
1493 which the caller is deleting) stored in the register mentioned by NOTE
1494 which is a REG_DEAD note associated with INSN. */
/* Recursively delete prior insns that compute the value in the
   register of REG_DEAD note NOTE, used only by INSN which the caller
   is deleting (see the header comment above).  Scans backward over
   plain INSNs/CALL_INSNs, giving up on non-const calls, SEQUENCEs and
   side effects; a PARALLEL or SET that fully covers the dead register
   is deleted via delete_computation, a partially-covering multi-word
   hard-reg SET gets REG_UNUSED notes instead, and any other use of the
   register moves the REG_DEAD note to that insn.
   NOTE(review): the `break`/`continue` statements and some declarations
   (our_prev, dest_endregno, endregno, i) are elided in this excerpt.  */
1497 delete_prior_computation (note, insn)
1502 rtx reg = XEXP (note, 0);
1504 for (our_prev = prev_nonnote_insn (insn);
1505 our_prev && (GET_CODE (our_prev) == INSN
1506 || GET_CODE (our_prev) == CALL_INSN);
1507 our_prev = prev_nonnote_insn (our_prev))
1509 rtx pat = PATTERN (our_prev);
1511 /* If we reach a CALL which is not calling a const function
1512 or the callee pops the arguments, then give up. */
1513 if (GET_CODE (our_prev) == CALL_INSN
1514 && (! CONST_CALL_P (our_prev)
1515 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1518 /* If we reach a SEQUENCE, it is too complex to try to
1519 do anything with it, so give up. */
1520 if (GET_CODE (pat) == SEQUENCE)
1523 if (GET_CODE (pat) == USE
1524 && GET_CODE (XEXP (pat, 0)) == INSN)
1525 /* reorg creates USEs that look like this. We leave them
1526 alone because reorg needs them for its own purposes. */
1529 if (reg_set_p (reg, pat))
1531 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1534 if (GET_CODE (pat) == PARALLEL)
1536 /* If we find a SET of something else, we can't
1541 for (i = 0; i < XVECLEN (pat, 0); i++)
1543 rtx part = XVECEXP (pat, 0, i);
1545 if (GET_CODE (part) == SET
1546 && SET_DEST (part) != reg)
1550 if (i == XVECLEN (pat, 0))
1551 delete_computation (our_prev);
1553 else if (GET_CODE (pat) == SET
1554 && GET_CODE (SET_DEST (pat)) == REG)
/* Compare the hard-register span set by PAT with the span of
   the dying register to decide full vs. partial coverage.  */
1556 int dest_regno = REGNO (SET_DEST (pat));
1559 + (dest_regno < FIRST_PSEUDO_REGISTER
1560 ? HARD_REGNO_NREGS (dest_regno,
1561 GET_MODE (SET_DEST (pat))) : 1));
1562 int regno = REGNO (reg);
1565 + (regno < FIRST_PSEUDO_REGISTER
1566 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
1568 if (dest_regno >= regno
1569 && dest_endregno <= endregno)
1570 delete_computation (our_prev);
1572 /* We may have a multi-word hard register and some, but not
1573 all, of the words of the register are needed in subsequent
1574 insns. Write REG_UNUSED notes for those parts that were not
1576 else if (dest_regno <= regno
1577 && dest_endregno >= endregno)
1581 REG_NOTES (our_prev)
1582 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1583 REG_NOTES (our_prev));
/* If every word set by OUR_PREV is now marked unused, the
   whole insn is dead.  */
1585 for (i = dest_regno; i < dest_endregno; i++)
1586 if (! find_regno_note (our_prev, REG_UNUSED, i))
1589 if (i == dest_endregno)
1590 delete_computation (our_prev);
1597 /* If PAT references the register that dies here, it is an
1598 additional use. Hence any prior SET isn't dead. However, this
1599 insn becomes the new place for the REG_DEAD note. */
1600 if (reg_overlap_mentioned_p (reg, pat))
1602 XEXP (note, 1) = REG_NOTES (our_prev);
1603 REG_NOTES (our_prev) = note;
1609 /* Delete INSN and recursively delete insns that compute values used only
1610 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1611 If we are running before flow.c, we need do nothing since flow.c will
1612 delete dead code. We also can't know if the registers being used are
1613 dead or not at this point.
1615 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1616 nothing other than set a register that dies in this insn, we can delete
1619 On machines with CC0, if CC0 is used in this insn, we may be able to
1620 delete the insn that set it. */
1623 delete_computation (insn)
/* cc0 handling — presumably guarded by #ifdef HAVE_cc0 in the full file;
   the preprocessor lines were elided in this excerpt — TODO confirm.  */
1629 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1631 rtx prev = prev_nonnote_insn (insn);
1632 /* We assume that at this stage
1633 CC's are always set explicitly
1634 and always immediately before the jump that
1635 will use them. So if the previous insn
1636 exists to set the CC's, delete it
1637 (unless it performs auto-increments, etc.). */
1638 if (prev && GET_CODE (prev) == INSN
1639 && sets_cc0_p (PATTERN (prev)))
/* sets_cc0_p > 0 means PREV does nothing besides setting cc0.  */
1641 if (sets_cc0_p (PATTERN (prev)) > 0
1642 && ! side_effects_p (PATTERN (prev)))
1643 delete_computation (prev);
1645 /* Otherwise, show that cc0 won't be used. */
1646 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1647 cc0_rtx, REG_NOTES (prev));
/* For each legitimate REG_DEAD note on INSN, try to delete the insns
   that computed the dying register.  NEXT is saved first because
   delete_prior_computation may re-link NOTE onto another insn.  */
1652 for (note = REG_NOTES (insn); note; note = next)
1654 next = XEXP (note, 1);
1656 if (REG_NOTE_KIND (note) != REG_DEAD
1657 /* Verify that the REG_NOTE is legitimate. */
1658 || GET_CODE (XEXP (note, 0)) != REG)
1661 delete_prior_computation (note, insn);
1667 /* Delete insn INSN from the chain of insns and update label ref counts.
1668 May delete some following insns as a consequence; may even delete
1669 a label elsewhere and insns that follow it.
1671 Returns the first insn after INSN that was not deleted. */
/* NOTE(review): the `delete_insn (insn)` definition line and several
   interior lines (braces, early returns, a CODE_LABEL test before the
   dispatch-table check) were elided by extraction.  */
1677 register rtx next = NEXT_INSN (insn);
1678 register rtx prev = PREV_INSN (insn);
1679 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1680 register int dont_really_delete = 0;
/* Advance NEXT past insns already marked deleted.  */
1683 while (next && INSN_DELETED_P (next))
1684 next = NEXT_INSN (next);
1686 /* This insn is already deleted => return first following nondeleted. */
1687 if (INSN_DELETED_P (insn))
1691 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1693 /* Don't delete user-declared labels. When optimizing, convert them
1694 to special NOTEs instead. When not optimizing, leave them alone. */
1695 if (was_code_label && LABEL_NAME (insn) != 0)
1699 const char *name = LABEL_NAME (insn);
1700 PUT_CODE (insn, NOTE);
1701 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
1702 NOTE_SOURCE_FILE (insn) = name;
1705 dont_really_delete = 1;
1708 /* Mark this insn as deleted. */
1709 INSN_DELETED_P (insn) = 1;
1711 /* If instruction is followed by a barrier,
1712 delete the barrier too. */
1714 if (next != 0 && GET_CODE (next) == BARRIER)
1716 INSN_DELETED_P (next) = 1;
1717 next = NEXT_INSN (next);
1720 /* Patch out INSN (and the barrier if any) */
1722 if (! dont_really_delete)
/* Unlink INSN from the doubly-linked insn chain; when a neighbor is a
   SEQUENCE, also fix the link of the insn inside it.  */
1726 NEXT_INSN (prev) = next;
1727 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
1728 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
1729 XVECLEN (PATTERN (prev), 0) - 1)) = next;
1734 PREV_INSN (next) = prev;
1735 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
1736 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
1739 if (prev && NEXT_INSN (prev) == 0)
1740 set_last_insn (prev);
1743 /* If deleting a jump, decrement the count of the label,
1744 and delete the label if it is now unused. */
1746 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1748 rtx lab = JUMP_LABEL (insn), lab_next;
1750 if (--LABEL_NUSES (lab) == 0)
1752 /* This can delete NEXT or PREV,
1753 either directly if NEXT is JUMP_LABEL (INSN),
1754 or indirectly through more levels of jumps. */
1757 /* I feel a little doubtful about this loop,
1758 but I see no clean and sure alternative way
1759 to find the first insn after INSN that is not now deleted.
1760 I hope this works. */
1761 while (next && INSN_DELETED_P (next))
1762 next = NEXT_INSN (next);
1765 else if ((lab_next = next_nonnote_insn (lab)) != NULL
1766 && GET_CODE (lab_next) == JUMP_INSN
1767 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
1768 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
1770 /* If we're deleting the tablejump, delete the dispatch table.
1771 We may not be able to kill the label immediately preceeding
1772 just yet, as it might be referenced in code leading up to
1774 delete_insn (lab_next);
1778 /* Likewise if we're deleting a dispatch table. */
1780 if (GET_CODE (insn) == JUMP_INSN
1781 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1782 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1784 rtx pat = PATTERN (insn);
/* ADDR_DIFF_VEC keeps its labels in operand 1, ADDR_VEC in operand 0.  */
1785 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1786 int len = XVECLEN (pat, diff_vec_p);
1788 for (i = 0; i < len; i++)
1789 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1790 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1791 while (next && INSN_DELETED_P (next))
1792 next = NEXT_INSN (next);
1796 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1797 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1798 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1799 if (REG_NOTE_KIND (note) == REG_LABEL
1800 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1801 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1802 if (--LABEL_NUSES (XEXP (note, 0)) == 0)
1803 delete_insn (XEXP (note, 0));
/* Back PREV up past deleted insns and notes so the BARRIER test below
   sees the real preceding instruction.  */
1805 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1806 prev = PREV_INSN (prev);
1808 /* If INSN was a label and a dispatch table follows it,
1809 delete the dispatch table. The tablejump must have gone already.
1810 It isn't useful to fall through into a table. */
1813 && NEXT_INSN (insn) != 0
1814 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1815 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1816 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1817 next = delete_insn (NEXT_INSN (insn));
1819 /* If INSN was a label, delete insns following it if now unreachable. */
1821 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1823 register RTX_CODE code;
/* Delete forward while we see insns/notes/barriers or already-deleted
   labels; a live label or the end of the chain stops the sweep.  */
1825 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1826 || code == NOTE || code == BARRIER
1827 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1830 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1831 next = NEXT_INSN (next);
1832 /* Keep going past other deleted labels to delete what follows. */
1833 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1834 next = NEXT_INSN (next);
1836 /* Note: if this deletes a jump, it can cause more
1837 deletion of unreachable code, after a different label.
1838 As long as the value from this recursive call is correct,
1839 this invocation functions correctly. */
1840 next = delete_insn (next);
1847 /* Advance from INSN till reaching something not deleted
1848 then return that. May return INSN itself. */
1851 next_nondeleted_insn (insn)
1854 while (INSN_DELETED_P (insn))
1855 insn = NEXT_INSN (insn);
1859 /* Delete a range of insns from FROM to TO, inclusive.
1860 This is for the sake of peephole optimization, so assume
1861 that whatever these insns do will still be done by a new
1862 peephole insn that will replace them. */
/* NOTE(review): the loop header and its termination test on TO were
   elided from this excerpt; only the per-insn body is visible.  */
1865 delete_for_peephole (from, to)
1866 register rtx from, to;
1868 register rtx insn = from;
1872 register rtx next = NEXT_INSN (insn);
1873 register rtx prev = PREV_INSN (insn);
/* NOTEs are kept in the chain; only real insns are marked deleted
   and unlinked.  */
1875 if (GET_CODE (insn) != NOTE)
1877 INSN_DELETED_P (insn) = 1;
1879 /* Patch this insn out of the chain. */
1880 /* We don't do this all at once, because we
1881 must preserve all NOTEs. */
1883 NEXT_INSN (prev) = next;
1886 PREV_INSN (next) = prev;
1894 /* Note that if TO is an unconditional jump
1895 we *do not* delete the BARRIER that follows,
1896 since the peephole that replaces this sequence
1897 is also an unconditional jump in that case. */
1900 /* We have determined that INSN is never reached, and are about to
1901 delete it. Print a warning if the user asked for one.
1903 To try to make this warning more useful, this should only be called
1904 once per basic block not reached, and it only warns when the basic
1905 block contains more than one line from the current function, and
1906 contains at least one operation. CSE and inlining can duplicate insns,
1907 so it's possible to get spurious warnings from this. */
1910 never_reached_warning (avoided_insn)
/* A_LINE_NOTE remembers the first line-number note seen, so the
   warning can point at the start of the unreachable region.  */
1914 rtx a_line_note = NULL;
1915 int two_avoided_lines = 0;
1916 int contains_insn = 0;
/* Nothing to do unless -Wunreachable-code style warnings were asked for.  */
1918 if (! warn_notreached)
1921 /* Scan forwards, looking at LINE_NUMBER notes, until
1922 we hit a LABEL or we run out of insns. */
1924 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
1926 if (GET_CODE (insn) == CODE_LABEL)
1928 else if (GET_CODE (insn) == NOTE /* A line number note? */
1929 && NOTE_LINE_NUMBER (insn) >= 0)
1931 if (a_line_note == NULL)
/* A second, different line number makes the region "interesting".  */
1934 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1935 != NOTE_LINE_NUMBER (insn));
1937 else if (INSN_P (insn))
/* Warn only if the skipped block spans >1 source line and contains
   at least one real operation (reduces spurious warnings).  */
1940 if (two_avoided_lines && contains_insn)
1941 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
1942 NOTE_LINE_NUMBER (a_line_note),
1943 "will never be executed");
1946 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1947 NLABEL as a return. Accrue modifications into the change group. */
1950 redirect_exp_1 (loc, olabel, nlabel, insn)
1955 register rtx x = *loc;
1956 register RTX_CODE code = GET_CODE (x);
1958 register const char *fmt;
/* Case 1: a LABEL_REF to OLABEL becomes a LABEL_REF to NLABEL, or a
   RETURN when NLABEL is null.  Changes are queued, not applied.  */
1960 if (code == LABEL_REF)
1962 if (XEXP (x, 0) == olabel)
1966 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1968 n = gen_rtx_RETURN (VOIDmode);
1970 validate_change (insn, loc, n, 1);
/* Case 2: a RETURN with null OLABEL becomes a jump to NLABEL; wrap in
   a SET of pc when replacing the whole pattern.  */
1974 else if (code == RETURN && olabel == 0)
1976 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1977 if (loc == &PATTERN (insn))
1978 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1979 validate_change (insn, loc, x, 1);
/* Case 3: (set (pc) (label_ref OLABEL)) with null NLABEL collapses to
   a bare RETURN.  */
1983 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1984 && GET_CODE (SET_SRC (x)) == LABEL_REF
1985 && XEXP (SET_SRC (x), 0) == olabel)
1987 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
/* Otherwise recurse over all sub-rtxs and vectors.  */
1991 fmt = GET_RTX_FORMAT (code);
1992 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1995 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1996 else if (fmt[i] == 'E')
1999 for (j = 0; j < XVECLEN (x, i); j++)
2000 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
2005 /* Similar, but apply the change group and report success or failure. */
2008 redirect_exp (olabel, nlabel, insn)
/* For a PARALLEL, only the first element holds the jump SET.  */
2014 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2015 loc = &XVECEXP (PATTERN (insn), 0, 0);
2017 loc = &PATTERN (insn);
2019 redirect_exp_1 (loc, olabel, nlabel, insn);
/* No queued changes means there was nothing to redirect.  */
2020 if (num_validated_changes () == 0)
2023 return apply_change_group ();
2026 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2027 the modifications into the change group. Return false if we did
2028 not see how to do that. */
2031 redirect_jump_1 (jump, nlabel)
/* Remember how many changes were already pending so we can tell
   whether this call queued anything new.  */
2034 int ochanges = num_validated_changes ();
2037 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2038 loc = &XVECEXP (PATTERN (jump), 0, 0);
2040 loc = &PATTERN (jump);
2042 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2043 return num_validated_changes () > ochanges;
2046 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2047 jump target label is unused as a result, it and the code following
2050 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2053 The return value will be 1 if the change was made, 0 if it wasn't
2054 (this can only occur for NLABEL == 0). */
2057 redirect_jump (jump, nlabel, delete_unused)
2061 register rtx olabel = JUMP_LABEL (jump);
/* Redirecting to the current target is a trivial success.  */
2063 if (nlabel == olabel)
2066 if (! redirect_exp (olabel, nlabel, jump))
/* Keep JUMP_LABEL and the label use counts in sync with the new target.  */
2069 JUMP_LABEL (jump) = nlabel;
2071 ++LABEL_NUSES (nlabel);
2073 /* If we're eliding the jump over exception cleanups at the end of a
2074 function, move the function end note so that -Wreturn-type works. */
2075 if (olabel && nlabel
2076 && NEXT_INSN (olabel)
2077 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2078 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2079 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
/* Drop the old label (and dead code after it) once nothing uses it.  */
2081 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
2082 delete_insn (olabel);
2087 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2088 Accrue the modifications into the change group. */
/* NOTE(review): the `invert_exp_1 (insn)` definition line was elided
   from this excerpt; the name is taken from the call in invert_exp.  */
2094 register RTX_CODE code;
/* pc_set extracts the (set (pc) ...) from INSN's pattern.  */
2095 rtx x = pc_set (insn);
2101 code = GET_CODE (x);
2103 if (code == IF_THEN_ELSE)
2105 register rtx comp = XEXP (x, 0);
2107 enum rtx_code reversed_code;
2109 /* We can do this in two ways: The preferable way, which can only
2110 be done if this is not an integer comparison, is to reverse
2111 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2112 of the IF_THEN_ELSE. If we can't do either, fail. */
2114 reversed_code = reversed_comparison_code (comp, insn);
2116 if (reversed_code != UNKNOWN)
2118 validate_change (insn, &XEXP (x, 0),
2119 gen_rtx_fmt_ee (reversed_code,
2120 GET_MODE (comp), XEXP (comp, 0),
/* Fallback: swap the two arms of the IF_THEN_ELSE (via a temporary,
   whose declaration was elided from this excerpt).  */
2127 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2128 validate_change (insn, &XEXP (x, 2), tem, 1);
2134 /* Invert the jump condition of conditional jump insn, INSN.
2136 Return 1 if we can do so, 0 if we cannot find a way to do so that
2137 matches a pattern. */
/* NOTE(review): the `invert_exp (insn)` definition line was elided
   from this excerpt.  Queue the inversion, then commit it.  */
2143 invert_exp_1 (insn);
2144 if (num_validated_changes () == 0)
2147 return apply_change_group ();
2150 /* Invert the condition of the jump JUMP, and make it jump to label
2151 NLABEL instead of where it jumps now. Accrue changes into the
2152 change group. Return false if we didn't see how to perform the
2153 inversion and redirection. */
2156 invert_jump_1 (jump, nlabel)
/* The inversion must actually queue a change before we try to
   redirect; otherwise report failure.  */
2161 ochanges = num_validated_changes ();
2162 invert_exp_1 (jump);
2163 if (num_validated_changes () == ochanges)
2166 return redirect_jump_1 (jump, nlabel);
2169 /* Invert the condition of the jump JUMP, and make it jump to label
2170 NLABEL instead of where it jumps now. Return true if successful. */
2173 invert_jump (jump, nlabel, delete_unused)
2177 /* We have to either invert the condition and change the label or
2178 do neither. Either operation could fail. We first try to invert
2179 the jump. If that succeeds, we try changing the label. If that fails,
2180 we invert the jump back to what it was. */
2182 if (! invert_exp (jump))
2185 if (redirect_jump (jump, nlabel, delete_unused))
/* Inverting the branch also flips its taken/not-taken probabilities.  */
2187 invert_br_probabilities (jump);
/* Redirection failed: undo the inversion to restore the original jump.  */
2192 if (! invert_exp (jump))
2193 /* This should just be putting it back the way it was. */
2200 /* Like rtx_equal_p except that it considers two REGs as equal
2201 if they renumber to the same value and considers two commutative
2202 operations to be the same if the order of the operands has been
2205 ??? Addition is not commutative on the PA due to the weird implicit
2206 space register selection rules for memory addresses. Therefore, we
2207 don't consider a + b == b + a.
2209 We could/should make this test a little tighter. Possibly only
2210 disabling it on the PA via some backend macro or only disabling this
2211 case when the PLUS is inside a MEM. */
2214 rtx_renumbered_equal_p (x, y)
2218 register RTX_CODE code = GET_CODE (x);
2219 register const char *fmt;
/* Special case: both sides are a REG or a SUBREG of a REG.  Resolve
   each through reg_renumber to a (hard regno, byte offset) pair and
   compare those.  */
2224 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2225 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2226 && GET_CODE (SUBREG_REG (y)) == REG)))
2228 int reg_x = -1, reg_y = -1;
2229 int byte_x = 0, byte_y = 0;
2231 if (GET_MODE (x) != GET_MODE (y))
2234 /* If we haven't done any renumbering, don't
2235 make any assumptions. */
2236 if (reg_renumber == 0)
2237 return rtx_equal_p (x, y);
/* X is a SUBREG here (the enclosing `if` was elided from this
   excerpt); fold the renumbered hard reg and subreg byte together.  */
2241 reg_x = REGNO (SUBREG_REG (x));
2242 byte_x = SUBREG_BYTE (x);
2244 if (reg_renumber[reg_x] >= 0)
2246 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2247 GET_MODE (SUBREG_REG (x)),
/* Plain REG case for X.  */
2256 if (reg_renumber[reg_x] >= 0)
2257 reg_x = reg_renumber[reg_x];
2260 if (GET_CODE (y) == SUBREG)
2262 reg_y = REGNO (SUBREG_REG (y));
2263 byte_y = SUBREG_BYTE (y);
2265 if (reg_renumber[reg_y] >= 0)
2267 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2268 GET_MODE (SUBREG_REG (y)),
2277 if (reg_renumber[reg_y] >= 0)
2278 reg_y = reg_renumber[reg_y];
2281 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2284 /* Now we have disposed of all the cases
2285 in which different rtx codes can match. */
2286 if (code != GET_CODE (y))
/* Per-code special cases (the switch framing was elided from this
   excerpt): CONST_INT compares by value...  */
2298 return INTVAL (x) == INTVAL (y);
2301 /* We can't assume nonlocal labels have their following insns yet. */
2302 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2303 return XEXP (x, 0) == XEXP (y, 0);
2305 /* Two label-refs are equivalent if they point at labels
2306 in the same position in the instruction stream. */
2307 return (next_real_insn (XEXP (x, 0))
2308 == next_real_insn (XEXP (y, 0)));
/* ...SYMBOL_REF compares by interned string pointer.  */
2311 return XSTR (x, 0) == XSTR (y, 0);
2314 /* If we didn't match EQ equality above, they aren't the same. */
2321 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2323 if (GET_MODE (x) != GET_MODE (y))
2326 /* For commutative operations, the RTX match if the operand match in any
2327 order. Also handle the simple binary and unary cases without a loop.
2329 ??? Don't consider PLUS a commutative operator; see comments above. */
2330 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2332 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2333 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2334 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2335 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2336 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2337 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2338 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2339 else if (GET_RTX_CLASS (code) == '1')
2340 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2342 /* Compare the elements. If any pair of corresponding elements
2343 fail to match, return 0 for the whole things. */
/* Generic walk driven by the rtx format string; each case below is a
   format-character arm of a switch whose framing was elided.  */
2345 fmt = GET_RTX_FORMAT (code);
2346 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2352 if (XWINT (x, i) != XWINT (y, i))
2357 if (XINT (x, i) != XINT (y, i))
2362 if (XTREE (x, i) != XTREE (y, i))
2367 if (strcmp (XSTR (x, i), XSTR (y, i)))
2372 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2377 if (XEXP (x, i) != XEXP (y, i))
2384 if (XVECLEN (x, i) != XVECLEN (y, i))
2386 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2387 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2398 /* If X is a hard register or equivalent to one or a subregister of one,
2399 return the hard register number. If X is a pseudo register that was not
2400 assigned a hard register, return the pseudo register number. Otherwise,
2401 return -1. Any rtx is valid for X. */
/* NOTE(review): the definition line (presumably `true_regnum (x)`) was
   elided from this excerpt — confirm against the full file.  */
2407 if (GET_CODE (x) == REG)
/* A renumbered pseudo resolves to its assigned hard register.  */
2409 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2410 return reg_renumber[REGNO (x)];
2413 if (GET_CODE (x) == SUBREG)
/* Recurse on the inner reg; only hard-register bases get the subreg
   byte offset folded in.  */
2415 int base = true_regnum (SUBREG_REG (x));
2416 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2417 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2418 GET_MODE (SUBREG_REG (x)),
2419 SUBREG_BYTE (x), GET_MODE (x));
2424 /* Optimize code of the form:
2426 for (x = a[i]; x; ...)
2428 for (x = a[i]; x; ...)
2432 Loop optimize will change the above code into
2436 { ...; if (! (x = ...)) break; }
2439 { ...; if (! (x = ...)) break; }
2442 In general, if the first test fails, the program can branch
2443 directly to `foo' and skip the second try which is doomed to fail.
2444 We run this after loop optimization and before flow analysis. */
2446 /* When comparing the insn patterns, we track the fact that different
2447 pseudo-register numbers may have been used in each computation.
2448 The following array stores an equivalence -- same_regs[I] == J means
2449 that pseudo register I was used in the first set of tests in a context
2450 where J was used in the second set. We also count the number of such
2451 pending equivalences. If nonzero, the expressions really aren't the
2454 static int *same_regs;
/* Count of pending equivalences in same_regs.  */
2456 static int num_same_regs;
2458 /* Track any registers modified between the target of the first jump and
2459 the second jump. They never compare equal. */
2461 static char *modified_regs;
2463 /* Record if memory was modified. */
2465 static int modified_mem;
2467 /* Called via note_stores on each insn between the target of the first
2468 branch and the second branch. It marks any changed registers. */
2467 /* Called via note_stores on each insn between the target of the first
2468 branch and the second branch. It marks any changed registers. */
2471 mark_modified_reg (dest, x, data)
2474 void *data ATTRIBUTE_UNUSED;
2479 if (GET_CODE (dest) == SUBREG)
2480 dest = SUBREG_REG (dest);
2482 if (GET_CODE (dest) == MEM)
2485 if (GET_CODE (dest) != REG)
2488 regno = REGNO (dest);
2489 if (regno >= FIRST_PSEUDO_REGISTER)
2490 modified_regs[regno] = 1;
2491 /* Don't consider a hard condition code register as modified,
2492 if it is only being set. thread_jumps will check if it is set
2493 to the same value. */
2494 else if (GET_MODE_CLASS (GET_MODE (dest)) != MODE_CC
2495 || GET_CODE (x) != SET
2496 || ! rtx_equal_p (dest, SET_DEST (x))
2497 || HARD_REGNO_NREGS (regno, GET_MODE (dest)) != 1)
2498 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
2499 modified_regs[regno + i] = 1;
2502 /* F is the first insn in the chain of insns. */
/* NOTE(review): lines were elided from this excerpt (declarations of
   i/changed/all_reset/set/set2/new_label, several continue/break
   statements, the all_reset initialization, and the trailing frees of
   same_regs/all_reset).  Comments describe only the visible code.  */
2505 thread_jumps (f, max_reg, flag_before_loop)
2508 int flag_before_loop;
2510 /* Basic algorithm is to find a conditional branch,
2511 the label it may branch to, and the branch after
2512 that label. If the two branches test the same condition,
2513 walk back from both branch paths until the insn patterns
2514 differ, or code labels are hit. If we make it back to
2515 the target of the first branch, then we know that the first branch
2516 will either always succeed or always fail depending on the relative
2517 senses of the two branches. So adjust the first branch accordingly
2520 rtx label, b1, b2, t1, t2;
2521 enum rtx_code code1, code2;
2522 rtx b1op0, b1op1, b2op0, b2op1;
2526 enum rtx_code reversed_code1, reversed_code2;
2528 /* Allocate register tables and quick-reset table. */
2529 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
2530 same_regs = (int *) xmalloc (max_reg * sizeof (int));
2531 all_reset = (int *) xmalloc (max_reg * sizeof (int));
2532 for (i = 0; i < max_reg; i++)
/* Outer scan over the whole insn chain, looking for the first branch
   of a candidate pair.  */
2539 for (b1 = f; b1; b1 = NEXT_INSN (b1))
2544 /* Get to a candidate branch insn. */
2545 if (GET_CODE (b1) != JUMP_INSN
2546 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
/* Reset the per-candidate modification/equivalence state.  */
2549 memset (modified_regs, 0, max_reg * sizeof (char));
2552 memcpy (same_regs, all_reset, max_reg * sizeof (int));
2555 label = JUMP_LABEL (b1);
2557 /* Look for a branch after the target. Record any registers and
2558 memory modified between the target and the branch. Stop when we
2559 get to a label since we can't know what was changed there. */
2560 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
2562 if (GET_CODE (b2) == CODE_LABEL)
2565 else if (GET_CODE (b2) == JUMP_INSN)
2567 /* If this is an unconditional jump and is the only use of
2568 its target label, we can follow it. */
2569 if (any_uncondjump_p (b2)
2571 && JUMP_LABEL (b2) != 0
2572 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
2574 b2 = JUMP_LABEL (b2);
2581 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
/* A call clobbers all call-used registers except the fixed and
   frame/argument pointer registers.  */
2584 if (GET_CODE (b2) == CALL_INSN)
2587 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2588 if (call_used_regs[i] && ! fixed_regs[i]
2589 && i != STACK_POINTER_REGNUM
2590 && i != FRAME_POINTER_REGNUM
2591 && i != HARD_FRAME_POINTER_REGNUM
2592 && i != ARG_POINTER_REGNUM)
2593 modified_regs[i] = 1;
2596 note_stores (PATTERN (b2), mark_modified_reg, NULL);
2599 /* Check the next candidate branch insn from the label
2602 || GET_CODE (b2) != JUMP_INSN
2604 || !any_condjump_p (b2)
2605 || !onlyjump_p (b2))
2610 /* Get the comparison codes and operands, reversing the
2611 codes if appropriate. If we don't have comparison codes,
2612 we can't do anything. */
2613 b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
2614 b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
2615 code1 = GET_CODE (XEXP (SET_SRC (set), 0));
2616 reversed_code1 = code1;
/* A branch whose THEN-arm is pc falls through when taken, so its
   effective comparison is the reverse of the written one.  */
2617 if (XEXP (SET_SRC (set), 1) == pc_rtx)
2618 code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
2620 reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
2622 b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
2623 b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
2624 code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
2625 reversed_code2 = code2;
2626 if (XEXP (SET_SRC (set2), 1) == pc_rtx)
2627 code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
2629 reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
2631 /* If they test the same things and knowing that B1 branches
2632 tells us whether or not B2 branches, check if we
2633 can thread the branch. */
2634 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
2635 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
2636 && (comparison_dominates_p (code1, code2)
2637 || comparison_dominates_p (code1, reversed_code2)))
/* Walk both paths backwards in lockstep comparing insns.  */
2640 t1 = prev_nonnote_insn (b1);
2641 t2 = prev_nonnote_insn (b2);
2643 while (t1 != 0 && t2 != 0)
2647 /* We have reached the target of the first branch.
2648 If there are no pending register equivalents,
2649 we know that this branch will either always
2650 succeed (if the senses of the two branches are
2651 the same) or always fail (if not). */
2654 if (num_same_regs != 0)
2657 if (comparison_dominates_p (code1, code2))
2658 new_label = JUMP_LABEL (b2);
2660 new_label = get_label_after (b2);
2662 if (JUMP_LABEL (b1) != new_label)
2664 rtx prev = PREV_INSN (new_label);
2666 if (flag_before_loop
2667 && GET_CODE (prev) == NOTE
2668 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
2670 /* Don't thread to the loop label. If a loop
2671 label is reused, loop optimization will
2672 be disabled for that loop. */
2673 new_label = gen_label_rtx ();
2674 emit_label_after (new_label, PREV_INSN (prev));
2676 changed |= redirect_jump (b1, new_label, 1);
2681 /* If either of these is not a normal insn (it might be
2682 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
2683 have already been skipped above.) Similarly, fail
2684 if the insns are different. */
2685 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
2686 || recog_memoized (t1) != recog_memoized (t2)
2687 || ! rtx_equal_for_thread_p (PATTERN (t1),
2691 t1 = prev_nonnote_insn (t1);
2692 t2 = prev_nonnote_insn (t2);
2699 free (modified_regs);
2704 /* This is like RTX_EQUAL_P except that it knows about our handling of
2705 possibly equivalent registers and knows to consider volatile and
2706 modified objects as not equal.
2708 YINSN is the insn containing Y. */
2711 rtx_equal_for_thread_p (x, y, yinsn)
2717 register enum rtx_code code;
2718 register const char *fmt;
2720 code = GET_CODE (x);
2721 /* Rtx's of different codes cannot be equal. */
2722 if (code != GET_CODE (y))
2725 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
2726 (REG:SI x) and (REG:HI x) are NOT equivalent. */
2728 if (GET_MODE (x) != GET_MODE (y))
2731 /* For floating-point, consider everything unequal. This is a bit
2732 pessimistic, but this pass would only rarely do anything for FP
2734 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
2735 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
2738 /* For commutative operations, the RTX match if the operand match in any
2739 order. Also handle the simple binary and unary cases without a loop. */
2740 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2741 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
2742 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
2743 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
2744 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
2745 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2746 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
2747 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
2748 else if (GET_RTX_CLASS (code) == '1')
2749 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
2751 /* Handle special-cases first. */
2755 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
2758 /* If neither is user variable or hard register, check for possible
2760 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
2761 || REGNO (x) < FIRST_PSEUDO_REGISTER
2762 || REGNO (y) < FIRST_PSEUDO_REGISTER)
2765 if (same_regs[REGNO (x)] == -1)
2767 same_regs[REGNO (x)] = REGNO (y);
2770 /* If this is the first time we are seeing a register on the `Y'
2771 side, see if it is the last use. If not, we can't thread the
2772 jump, so mark it as not equivalent. */
2773 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
2779 return (same_regs[REGNO (x)] == (int) REGNO (y));
2784 /* If memory modified or either volatile, not equivalent.
2785 Else, check address. */
2786 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2789 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
2792 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2798 /* Cancel a pending `same_regs' if setting equivalenced registers.
2799 Then process source. */
2800 if (GET_CODE (SET_DEST (x)) == REG
2801 && GET_CODE (SET_DEST (y)) == REG)
2803 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
2805 same_regs[REGNO (SET_DEST (x))] = -1;
2808 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
2813 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
2817 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
2820 return XEXP (x, 0) == XEXP (y, 0);
2823 return XSTR (x, 0) == XSTR (y, 0);
2832 fmt = GET_RTX_FORMAT (code);
2833 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2838 if (XWINT (x, i) != XWINT (y, i))
2844 if (XINT (x, i) != XINT (y, i))
2850 /* Two vectors must have the same length. */
2851 if (XVECLEN (x, i) != XVECLEN (y, i))
2854 /* And the corresponding elements must match. */
2855 for (j = 0; j < XVECLEN (x, i); j++)
2856 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
2857 XVECEXP (y, i, j), yinsn) == 0)
2862 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
2868 if (strcmp (XSTR (x, i), XSTR (y, i)))
2873 /* These are just backpointers, so they don't matter. */
2880 /* It is believed that rtx's at this level will never
2881 contain anything but integers and other rtx's,
2882 except for within LABEL_REFs and SYMBOL_REFs. */