1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
at directly by other passes as well.
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
59 #include "hard-reg-set.h"
61 #include "insn-config.h"
62 #include "insn-attr.h"
71 /* ??? Eventually must record somehow the labels used by jumps
72 from nested functions. */
73 /* Pre-record the next or previous real insn for each label?
74 No, this pass is very fast anyway. */
75 /* Condense consecutive labels?
76 This would make life analysis faster, maybe. */
77 /* Optimize jump y; x: ... y: jumpif... x?
78 Don't know if it is worth bothering with. */
79 /* Optimize two cases of conditional jump to conditional jump?
80 This can never delete any instruction or make anything dead,
81 or even change what is live at any point.
82 So perhaps let combiner do it. */
84 /* Vector indexed by uid.
85 For each CODE_LABEL, index by its uid to get first unconditional jump
86 that jumps to the label.
87 For each JUMP_INSN, index by its uid to get the next unconditional jump
88 that jumps to the same label.
89 Element 0 is the start of a chain of all return insns.
90 (It is safe to use element 0 because insn uid 0 is not used.) */
92 static rtx *jump_chain;
94 /* Maximum index in jump_chain. */
96 static int max_jump_chain;
98 /* Indicates whether death notes are significant in cross jump analysis.
99 Normally they are not significant, because if A and B jump to C,
100 and R dies in A, it must die in B. But this might not be true after
101 stack register conversion, and we must compare death notes in that
104 static int cross_jump_death_matters = 0;
106 static int init_label_info PARAMS ((rtx));
107 static void delete_barrier_successors PARAMS ((rtx));
108 static void mark_all_labels PARAMS ((rtx, int));
109 static rtx delete_unreferenced_labels PARAMS ((rtx));
110 static void delete_noop_moves PARAMS ((rtx));
111 static int duplicate_loop_exit_test PARAMS ((rtx));
112 static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
113 static void do_cross_jump PARAMS ((rtx, rtx, rtx));
114 static int jump_back_p PARAMS ((rtx, rtx));
115 static int tension_vector_labels PARAMS ((rtx, int));
116 static void delete_computation PARAMS ((rtx));
117 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
118 static int redirect_exp PARAMS ((rtx, rtx, rtx));
119 static void invert_exp_1 PARAMS ((rtx));
120 static int invert_exp PARAMS ((rtx));
121 static void delete_from_jump_chain PARAMS ((rtx));
122 static int delete_labelref_insn PARAMS ((rtx, rtx, int));
123 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
124 static void redirect_tablejump PARAMS ((rtx, rtx));
125 static void jump_optimize_1 PARAMS ((rtx, int, int, int, int, int));
126 static int returnjump_p_1 PARAMS ((rtx *, void *));
127 static void delete_prior_computation PARAMS ((rtx, rtx));
129 /* Main external entry point into the jump optimizer. See comments before
130 jump_optimize_1 for descriptions of the arguments. */
/* Full-optimization entry point: runs jump_optimize_1 with
   MARK_LABELS_ONLY == 0 and MINIMAL == 0.
   NOTE(review): the parameter declarations and braces of this function are
   elided in this extract (original lines 133-137); compare against the
   original file before editing.  */
132 jump_optimize (f, cross_jump, noop_moves, after_regscan)
138 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
141 /* Alternate entry into the jump optimizer. This entry point only rebuilds
142 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
/* Calls jump_optimize_1 with MARK_LABELS_ONLY == 1: label bookkeeping is
   recomputed but no code is changed.
   NOTE(review): intervening original lines are elided in this extract.  */
145 rebuild_jump_labels (f)
148 jump_optimize_1 (f, 0, 0, 0, 1, 0);
151 /* Alternate entry into the jump optimizer. Do only trivial optimizations. */
/* Calls jump_optimize_1 with MINIMAL == 1; see the list of trivial
   transformations in the comment before jump_optimize_1 below.
   NOTE(review): parameter declarations/braces elided in this extract.  */
154 jump_optimize_minimal (f)
157 jump_optimize_1 (f, 0, 0, 0, 0, 1);
160 /* Delete no-op jumps and optimize jumps to jumps
161 and jumps around jumps.
162 Delete unused labels and unreachable code.
164 If CROSS_JUMP is 1, detect matching code
165 before a jump and its destination and unify them.
166 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
168 If NOOP_MOVES is nonzero, delete no-op move insns.
170 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
171 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
173 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
174 and JUMP_LABEL field for jumping insns.
176 If `optimize' is zero, don't change any code,
177 just determine whether control drops off the end of the function.
178 This case occurs when we have -W and not -O.
179 It works because `delete_insn' checks the value of `optimize'
180 and refrains from actually deleting when that is 0.
182 If MINIMAL is nonzero, then we only perform trivial optimizations:
184 * Removal of unreachable code after BARRIERs.
185 * Removal of unreferenced CODE_LABELs.
186 * Removal of a jump to the next instruction.
187 * Removal of a conditional jump followed by an unconditional jump
188 to the same target as the conditional jump.
189 * Simplify a conditional jump around an unconditional jump.
190 * Simplify a jump to a jump.
191 * Delete extraneous line number notes.
/* NOTE(review): this extract elides many original lines of jump_optimize_1
   (declarations, braces, and whole statements — the embedded original line
   numbers skip).  Do not treat this fragment as compilable; consult the
   original file for the full body.  */
195 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
196 mark_labels_only, minimal)
201 int mark_labels_only;
204 register rtx insn, next;
211 enum rtx_code reversed_code;
/* CROSS_JUMP == 2 means cross-jump analysis must honor death notes
   (see cross_jump_death_matters above).  */
214 cross_jump_death_matters = (cross_jump == 2);
215 max_uid = init_label_info (f) + 1;
217 /* Leave some extra room for labels and duplicate exit test insns
219 max_jump_chain = max_uid * 14 / 10;
220 jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));
222 mark_all_labels (f, cross_jump);
224 /* Keep track of labels used from static data; we don't track them
225 closely enough to delete them here, so make sure their reference
226 count doesn't drop to zero. */
228 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
229 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
230 LABEL_NUSES (XEXP (insn, 0))++;
232 /* Keep track of labels used for marking handlers for exception
233 regions; they cannot usually be deleted. */
235 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
236 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
237 LABEL_NUSES (XEXP (insn, 0))++;
239 if (! mark_labels_only)
240 delete_barrier_successors (f);
242 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
243 notes and recompute LABEL_NUSES. */
244 if (mark_labels_only)
247 last_insn = delete_unreferenced_labels (f);
250 delete_noop_moves (f);
252 /* Now iterate optimizing jumps until nothing changes over one pass. */
254 old_max_reg = max_reg_num ();
/* Main scan: walk every insn; NEXT is recomputed by several of the
   transformations below, so the loop uses it rather than NEXT_INSN.  */
259 for (insn = f; insn; insn = next)
262 rtx temp, temp1, temp2 = NULL_RTX;
263 rtx temp4 ATTRIBUTE_UNUSED;
265 int this_is_any_uncondjump;
266 int this_is_any_condjump;
267 int this_is_onlyjump;
269 next = NEXT_INSN (insn);
271 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
272 jump. Try to optimize by duplicating the loop exit test if so.
273 This is only safe immediately after regscan, because it uses
274 the values of regno_first_uid and regno_last_uid. */
275 if (after_regscan && GET_CODE (insn) == NOTE
276 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
277 && (temp1 = next_nonnote_insn (insn)) != 0
278 && any_uncondjump_p (temp1)
279 && onlyjump_p (temp1))
281 temp = PREV_INSN (insn);
282 if (duplicate_loop_exit_test (insn))
285 next = NEXT_INSN (temp);
/* Everything below this point only applies to jump insns.  */
290 if (GET_CODE (insn) != JUMP_INSN)
293 this_is_any_condjump = any_condjump_p (insn);
294 this_is_any_uncondjump = any_uncondjump_p (insn);
295 this_is_onlyjump = onlyjump_p (insn);
297 /* Tension the labels in dispatch tables. */
299 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
300 changed |= tension_vector_labels (PATTERN (insn), 0);
301 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
302 changed |= tension_vector_labels (PATTERN (insn), 1);
304 /* See if this jump goes to another jump and redirect if so. */
305 nlabel = follow_jumps (JUMP_LABEL (insn));
306 if (nlabel != JUMP_LABEL (insn))
307 changed |= redirect_jump (insn, nlabel, 1);
/* The trivial optimizations above are all that MINIMAL (or -O0) allows.  */
309 if (! optimize || minimal)
312 /* If a dispatch table always goes to the same place,
313 get rid of it and replace the insn that uses it. */
315 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
316 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
319 rtx pat = PATTERN (insn);
320 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
321 int len = XVECLEN (pat, diff_vec_p);
322 rtx dispatch = prev_real_insn (insn);
325 for (i = 0; i < len; i++)
326 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
327 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
332 && GET_CODE (dispatch) == JUMP_INSN
333 && JUMP_LABEL (dispatch) != 0
334 /* Don't mess with a casesi insn.
335 XXX according to the comment before computed_jump_p(),
336 all casesi insns should be a parallel of the jump
337 and a USE of a LABEL_REF. */
338 && ! ((set = single_set (dispatch)) != NULL
339 && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
340 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
342 redirect_tablejump (dispatch,
343 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
348 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
350 /* Detect jump to following insn. */
351 if (reallabelprev == insn
352 && (this_is_any_condjump || this_is_any_uncondjump)
355 next = next_real_insn (JUMP_LABEL (insn));
358 /* Remove the "inactive" but "real" insns (i.e. uses and
359 clobbers) in between here and there. */
361 while ((temp = next_real_insn (temp)) != next)
368 /* Detect a conditional jump going to the same place
369 as an immediately following unconditional jump. */
370 else if (this_is_any_condjump && this_is_onlyjump
371 && (temp = next_active_insn (insn)) != 0
372 && simplejump_p (temp)
373 && (next_active_insn (JUMP_LABEL (insn))
374 == next_active_insn (JUMP_LABEL (temp))))
376 /* Don't mess up test coverage analysis. */
378 if (flag_test_coverage && !reload_completed)
379 for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
380 if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
385 /* Ensure that we jump to the later of the two labels.
396 If we leave the goto L1, we'll incorrectly leave
397 return-reg dead for TEST true. */
399 temp2 = next_active_insn (JUMP_LABEL (insn));
401 temp2 = get_last_insn ();
402 if (GET_CODE (temp2) != CODE_LABEL)
403 temp2 = prev_label (temp2);
404 if (temp2 != JUMP_LABEL (temp))
405 redirect_jump (temp, temp2, 1);
413 /* Detect a conditional jump jumping over an unconditional jump. */
415 else if (this_is_any_condjump
416 && reallabelprev != 0
417 && GET_CODE (reallabelprev) == JUMP_INSN
418 && prev_active_insn (reallabelprev) == insn
419 && no_labels_between_p (insn, reallabelprev)
420 && any_uncondjump_p (reallabelprev)
421 && onlyjump_p (reallabelprev))
423 /* When we invert the unconditional jump, we will be
424 decrementing the usage count of its old label.
425 Make sure that we don't delete it now because that
426 might cause the following code to be deleted. */
427 rtx prev_uses = prev_nonnote_insn (reallabelprev);
428 rtx prev_label = JUMP_LABEL (insn);
431 ++LABEL_NUSES (prev_label);
433 if (invert_jump (insn, JUMP_LABEL (reallabelprev), 1))
435 /* It is very likely that if there are USE insns before
436 this jump, they hold REG_DEAD notes. These REG_DEAD
437 notes are no longer valid due to this optimization,
438 and will cause the life-analysis that following passes
439 (notably delayed-branch scheduling) to think that
440 these registers are dead when they are not.
442 To prevent this trouble, we just remove the USE insns
443 from the insn chain. */
445 while (prev_uses && GET_CODE (prev_uses) == INSN
446 && GET_CODE (PATTERN (prev_uses)) == USE)
448 rtx useless = prev_uses;
449 prev_uses = prev_nonnote_insn (prev_uses);
450 delete_insn (useless);
453 delete_insn (reallabelprev);
457 /* We can now safely delete the label if it is unreferenced
458 since the delete_insn above has deleted the BARRIER. */
459 if (prev_label && --LABEL_NUSES (prev_label) == 0)
460 delete_insn (prev_label);
462 next = NEXT_INSN (insn);
465 /* If we have an unconditional jump preceded by a USE, try to put
466 the USE before the target and jump there. This simplifies many
467 of the optimizations below since we don't have to worry about
468 dealing with these USE insns. We only do this if the label
469 being branch to already has the identical USE or if code
470 never falls through to that label. */
472 else if (this_is_any_uncondjump
473 && (temp = prev_nonnote_insn (insn)) != 0
474 && GET_CODE (temp) == INSN
475 && GET_CODE (PATTERN (temp)) == USE
476 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
477 && (GET_CODE (temp1) == BARRIER
478 || (GET_CODE (temp1) == INSN
479 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
480 /* Don't do this optimization if we have a loop containing
481 only the USE instruction, and the loop start label has
482 a usage count of 1. This is because we will redo this
483 optimization everytime through the outer loop, and jump
484 opt will never exit. */
485 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
486 && temp2 == JUMP_LABEL (insn)
487 && LABEL_NUSES (temp2) == 1))
489 if (GET_CODE (temp1) == BARRIER)
491 emit_insn_after (PATTERN (temp), temp1);
492 temp1 = NEXT_INSN (temp1);
496 redirect_jump (insn, get_label_before (temp1), 1);
497 reallabelprev = prev_real_insn (temp1);
499 next = NEXT_INSN (insn);
503 /* Detect a conditional jump jumping over an unconditional trap. */
505 && this_is_any_condjump && this_is_onlyjump
506 && reallabelprev != 0
507 && GET_CODE (reallabelprev) == INSN
508 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
509 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
510 && prev_active_insn (reallabelprev) == insn
511 && no_labels_between_p (insn, reallabelprev)
512 && (temp2 = get_condition (insn, &temp4))
513 && ((reversed_code = reversed_comparison_code (temp2, insn))
516 rtx new = gen_cond_trap (reversed_code,
517 XEXP (temp2, 0), XEXP (temp2, 1),
518 TRAP_CODE (PATTERN (reallabelprev)));
522 emit_insn_before (new, temp4);
523 delete_insn (reallabelprev);
529 /* Detect a jump jumping to an unconditional trap. */
530 else if (HAVE_trap && this_is_onlyjump
531 && (temp = next_active_insn (JUMP_LABEL (insn)))
532 && GET_CODE (temp) == INSN
533 && GET_CODE (PATTERN (temp)) == TRAP_IF
534 && (this_is_any_uncondjump
535 || (this_is_any_condjump
536 && (temp2 = get_condition (insn, &temp4)))))
538 rtx tc = TRAP_CONDITION (PATTERN (temp));
540 if (tc == const_true_rtx
541 || (! this_is_any_uncondjump && rtx_equal_p (temp2, tc)))
544 /* Replace an unconditional jump to a trap with a trap. */
545 if (this_is_any_uncondjump)
547 emit_barrier_after (emit_insn_before (gen_trap (), insn));
552 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
554 TRAP_CODE (PATTERN (temp)));
557 emit_insn_before (new, temp4);
563 /* If the trap condition and jump condition are mutually
564 exclusive, redirect the jump to the following insn. */
565 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
566 && this_is_any_condjump
567 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
568 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
569 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
570 && redirect_jump (insn, get_label_after (temp), 1))
579 /* Now that the jump has been tensioned,
580 try cross jumping: check for identical code
581 before the jump and before its target label. */
583 /* First, cross jumping of conditional jumps: */
585 if (cross_jump && condjump_p (insn))
587 rtx newjpos, newlpos;
588 rtx x = prev_real_insn (JUMP_LABEL (insn));
590 /* A conditional jump may be crossjumped
591 only if the place it jumps to follows
592 an opposing jump that comes back here. */
594 if (x != 0 && ! jump_back_p (x, insn))
595 /* We have no opposing jump;
596 cannot cross jump this insn. */
600 /* TARGET is nonzero if it is ok to cross jump
601 to code before TARGET. If so, see if matches. */
603 find_cross_jump (insn, x, 2,
608 do_cross_jump (insn, newjpos, newlpos);
609 /* Make the old conditional jump
610 into an unconditional one. */
611 SET_SRC (PATTERN (insn))
612 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
613 INSN_CODE (insn) = -1;
614 emit_barrier_after (insn);
615 /* Add to jump_chain unless this is a new label
616 whose UID is too large. */
617 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
619 jump_chain[INSN_UID (insn)]
620 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
621 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
628 /* Cross jumping of unconditional jumps:
629 a few differences. */
631 if (cross_jump && simplejump_p (insn))
633 rtx newjpos, newlpos;
638 /* TARGET is nonzero if it is ok to cross jump
639 to code before TARGET. If so, see if matches. */
640 find_cross_jump (insn, JUMP_LABEL (insn), 1,
643 /* If cannot cross jump to code before the label,
644 see if we can cross jump to another jump to
646 /* Try each other jump to this label. */
647 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
648 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
649 target != 0 && newjpos == 0;
650 target = jump_chain[INSN_UID (target)])
652 && JUMP_LABEL (target) == JUMP_LABEL (insn)
653 /* Ignore TARGET if it's deleted. */
654 && ! INSN_DELETED_P (target))
655 find_cross_jump (insn, target, 2,
660 do_cross_jump (insn, newjpos, newlpos);
666 /* This code was dead in the previous jump.c! */
667 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
669 /* Return insns all "jump to the same place"
670 so we can cross-jump between any two of them. */
672 rtx newjpos, newlpos, target;
676 /* If cannot cross jump to code before the label,
677 see if we can cross jump to another jump to
679 /* Try each other jump to this label. */
/* jump_chain[0] heads the chain of all RETURN insns (see the comment
   on jump_chain above).  */
680 for (target = jump_chain[0];
681 target != 0 && newjpos == 0;
682 target = jump_chain[INSN_UID (target)])
684 && ! INSN_DELETED_P (target)
685 && GET_CODE (PATTERN (target)) == RETURN)
686 find_cross_jump (insn, target, 2,
691 do_cross_jump (insn, newjpos, newlpos);
702 /* Delete extraneous line number notes.
703 Note that two consecutive notes for different lines are not really
704 extraneous. There should be some indication where that line belonged,
705 even if it became empty. */
710 for (insn = f; insn; insn = NEXT_INSN (insn))
711 if (GET_CODE (insn) == NOTE)
713 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
714 /* Any previous line note was for the prologue; gdb wants a new
715 note after the prologue even if it is for the same line. */
716 last_note = NULL_RTX;
717 else if (NOTE_LINE_NUMBER (insn) >= 0)
719 /* Delete this note if it is identical to previous note. */
721 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
722 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
739 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
740 notes whose labels don't occur in the insn any more. Returns the
741 largest INSN_UID found. */
/* NOTE(review): this is the body of init_label_info (see the forward
   declaration `static int init_label_info PARAMS ((rtx))` above); the
   function header and some lines are elided in this extract.  */
749 for (insn = f; insn; insn = NEXT_INSN (insn))
751 if (GET_CODE (insn) == CODE_LABEL)
752 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
753 else if (GET_CODE (insn) == JUMP_INSN)
754 JUMP_LABEL (insn) = 0;
755 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
/* Strip REG_LABEL notes whose label no longer appears in the pattern.  */
759 for (note = REG_NOTES (insn); note; note = next)
761 next = XEXP (note, 1);
762 if (REG_NOTE_KIND (note) == REG_LABEL
763 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
764 remove_note (insn, note);
767 if (INSN_UID (insn) > largest_uid)
768 largest_uid = INSN_UID (insn);
774 /* Delete insns following barriers, up to next label.
776 Also delete no-op jumps created by gcse. */
/* NOTE(review): several original lines (declarations, braces) are elided
   in this extract.  */
779 delete_barrier_successors (f)
785 for (insn = f; insn;)
787 if (GET_CODE (insn) == BARRIER)
789 insn = NEXT_INSN (insn);
791 never_reached_warning (insn);
/* Delete everything from the BARRIER up to the next CODE_LABEL.  */
793 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
795 if (GET_CODE (insn) == JUMP_INSN)
797 /* Detect when we're deleting a tablejump; get rid of
798 the jump table as well. */
799 rtx next1 = next_nonnote_insn (insn);
800 rtx next2 = next1 ? next_nonnote_insn (next1) : 0;
801 if (next2 && GET_CODE (next1) == CODE_LABEL
802 && GET_CODE (next2) == JUMP_INSN
803 && (GET_CODE (PATTERN (next2)) == ADDR_VEC
804 || GET_CODE (PATTERN (next2)) == ADDR_DIFF_VEC))
810 insn = delete_insn (insn);
812 else if (GET_CODE (insn) == NOTE
813 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
814 insn = NEXT_INSN (insn);
816 insn = delete_insn (insn);
818 /* INSN is now the code_label. */
821 /* Also remove (set (pc) (pc)) insns which can be created by
822 gcse. We eliminate such insns now to avoid having them
823 cause problems later. */
824 else if (GET_CODE (insn) == JUMP_INSN
825 && (set = pc_set (insn)) != NULL
826 && SET_SRC (set) == pc_rtx
827 && SET_DEST (set) == pc_rtx
828 && onlyjump_p (insn))
829 insn = delete_insn (insn);
832 insn = NEXT_INSN (insn);
836 /* Mark the label each jump jumps to.
837 Combine consecutive labels, and count uses of labels.
839 For each label, make a chain (using `jump_chain')
840 of all the *unconditional* jumps that jump to it;
841 also make a chain of all returns.
843 CROSS_JUMP indicates whether we are doing cross jumping
844 and if we are whether we will be paying attention to
845 death notes or not. */
/* NOTE(review): declarations/braces elided in this extract.  */
848 mark_all_labels (f, cross_jump)
854 for (insn = f; insn; insn = NEXT_INSN (insn))
/* CALL_PLACEHOLDERs hold alternate insn sequences; recurse into each.  */
857 if (GET_CODE (insn) == CALL_INSN
858 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
860 mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
861 mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
862 mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);
864 /* Canonicalize the tail recursion label attached to the
865 CALL_PLACEHOLDER insn. */
866 if (XEXP (PATTERN (insn), 3))
868 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
869 XEXP (PATTERN (insn), 3));
870 mark_jump_label (label_ref, insn, cross_jump, 0);
871 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
877 mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
878 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
880 /* When we know the LABEL_REF contained in a REG used in
881 an indirect jump, we'll have a REG_LABEL note so that
882 flow can tell where it's going. */
883 if (JUMP_LABEL (insn) == 0)
885 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
888 /* But a LABEL_REF around the REG_LABEL note, so
889 that we can canonicalize it. */
890 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
891 XEXP (label_note, 0));
893 mark_jump_label (label_ref, insn, cross_jump, 0);
894 XEXP (label_note, 0) = XEXP (label_ref, 0);
895 JUMP_LABEL (insn) = XEXP (label_note, 0);
/* Thread simple jumps onto their target label's jump_chain entry.  */
898 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
900 jump_chain[INSN_UID (insn)]
901 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
902 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
/* RETURN insns chain through jump_chain[0] (uid 0 is never used).  */
904 if (GET_CODE (PATTERN (insn)) == RETURN)
906 jump_chain[INSN_UID (insn)] = jump_chain[0];
907 jump_chain[0] = insn;
913 /* Delete all labels already not referenced.
914 Also find and return the last insn. */
/* NOTE(review): function header/braces elided in this extract; the forward
   declaration above gives `static rtx delete_unreferenced_labels (rtx)`.  */
917 delete_unreferenced_labels (f)
920 rtx final = NULL_RTX;
923 for (insn = f; insn;)
/* A label with an alternate name must be kept even if unreferenced.  */
925 if (GET_CODE (insn) == CODE_LABEL
926 && LABEL_NUSES (insn) == 0
927 && LABEL_ALTERNATE_NAME (insn) == NULL)
928 insn = delete_insn (insn);
932 insn = NEXT_INSN (insn);
939 /* Delete various simple forms of moves which have no necessary
/* NOTE(review): the comment above and parts of the function body are
   truncated in this extract; compare against the original file.  */
943 delete_noop_moves (f)
948 for (insn = f; insn;)
950 next = NEXT_INSN (insn);
952 if (GET_CODE (insn) == INSN)
954 register rtx body = PATTERN (insn);
956 /* Detect and delete no-op move instructions
957 resulting from not allocating a parameter in a register. */
959 if (GET_CODE (body) == SET && set_noop_p (body))
960 delete_computation (insn);
962 /* Detect and ignore no-op move instructions
963 resulting from smart or fortuitous register allocation. */
965 else if (GET_CODE (body) == SET)
967 int sreg = true_regnum (SET_SRC (body));
968 int dreg = true_regnum (SET_DEST (body));
970 if (sreg == dreg && sreg >= 0)
972 else if (sreg >= 0 && dreg >= 0)
975 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
977 GET_MODE (SET_SRC (body)));
980 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
982 /* DREG may have been the target of a REG_DEAD note in
983 the insn which makes INSN redundant. If so, reorg
984 would still think it is dead. So search for such a
985 note and delete it if we find it. */
986 if (! find_regno_note (insn, REG_UNUSED, dreg))
987 for (trial = prev_nonnote_insn (insn);
988 trial && GET_CODE (trial) != CODE_LABEL;
989 trial = prev_nonnote_insn (trial))
990 if (find_regno_note (trial, REG_DEAD, dreg))
992 remove_death (dreg, trial);
996 /* Deleting insn could lose a death-note for SREG. */
997 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
999 /* Change this into a USE so that we won't emit
1000 code for it, but still can keep the note. */
1002 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
1003 INSN_CODE (insn) = -1;
1004 /* Remove all reg notes but the REG_DEAD one. */
1005 REG_NOTES (insn) = trial;
1006 XEXP (trial, 1) = NULL_RTX;
1012 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
1013 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
1014 NULL, 0, GET_MODE (SET_DEST (body))))
1016 /* This handles the case where we have two consecutive
1017 assignments of the same constant to pseudos that didn't
1018 get a hard reg. Each SET from the constant will be
1019 converted into a SET of the spill register and an
1020 output reload will be made following it. This produces
1021 two loads of the same constant into the same spill
1026 /* Look back for a death note for the first reg.
1027 If there is one, it is no longer accurate. */
1028 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
1030 if ((GET_CODE (in_insn) == INSN
1031 || GET_CODE (in_insn) == JUMP_INSN)
1032 && find_regno_note (in_insn, REG_DEAD, dreg))
1034 remove_death (dreg, in_insn);
1037 in_insn = PREV_INSN (in_insn);
1040 /* Delete the second load of the value. */
1044 else if (GET_CODE (body) == PARALLEL)
1046 /* If each part is a set between two identical registers or
1047 a USE or CLOBBER, delete the insn. */
1051 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1053 tem = XVECEXP (body, 0, i);
1054 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
1057 if (GET_CODE (tem) != SET
1058 || (sreg = true_regnum (SET_SRC (tem))) < 0
1059 || (dreg = true_regnum (SET_DEST (tem))) < 0
1072 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
1073 jump. Assume that this unconditional jump is to the exit test code. If
1074 the code is sufficiently simple, make a copy of it before INSN,
1075 followed by a jump to the exit of the loop. Then delete the unconditional
1078 Return 1 if we made the change, else 0.
1080 This is only safe immediately after a regscan pass because it uses the
1081 values of regno_first_uid and regno_last_uid. */
/* NOTE(review): many lines of this function are elided in this extract,
   and its tail continues beyond the end of the visible text.  */
1084 duplicate_loop_exit_test (loop_start)
1087 rtx insn, set, reg, p, link;
1088 rtx copy = 0, first_copy = 0;
1090 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
1092 int max_reg = max_reg_num ();
1095 /* Scan the exit code. We do not perform this optimization if any insn:
1099 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
1100 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
1101 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
1104 We also do not do this if we find an insn with ASM_OPERANDS. While
1105 this restriction should not be necessary, copying an insn with
1106 ASM_OPERANDS can confuse asm_noperands in some cases.
1108 Also, don't do this if the exit code is more than 20 insns. */
1110 for (insn = exitcode;
1112 && ! (GET_CODE (insn) == NOTE
1113 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
1114 insn = NEXT_INSN (insn))
1116 switch (GET_CODE (insn))
1122 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
1123 a jump immediately after the loop start that branches outside
1124 the loop but within an outer loop, near the exit test.
1125 If we copied this exit test and created a phony
1126 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
1127 before the exit test look like these could be safely moved
1128 out of the loop even if they actually may be never executed.
1129 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
1131 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1132 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
1136 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1137 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1138 /* If we were to duplicate this code, we would not move
1139 the BLOCK notes, and so debugging the moved code would
1140 be difficult. Thus, we only move the code with -O2 or
1147 /* The code below would grossly mishandle REG_WAS_0 notes,
1148 so get rid of them here. */
1149 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
1150 remove_note (insn, p);
1151 if (++num_insns > 20
1152 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1153 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
1161 /* Unless INSN is zero, we can do the optimization. */
1167 /* See if any insn sets a register only used in the loop exit code and
1168 not a user variable. If so, replace it with a new register. */
1169 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1170 if (GET_CODE (insn) == INSN
1171 && (set = single_set (insn)) != 0
1172 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
1173 || (GET_CODE (reg) == SUBREG
1174 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
1175 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1176 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
1178 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
1179 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
1184 /* We can do the replacement. Allocate reg_map if this is the
1185 first replacement we found. */
1187 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
1189 REG_LOOP_TEST_P (reg) = 1;
1191 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
1195 /* Now copy each insn. */
1196 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1198 switch (GET_CODE (insn))
1201 copy = emit_barrier_before (loop_start);
1204 /* Only copy line-number notes. */
1205 if (NOTE_LINE_NUMBER (insn) >= 0)
1207 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
1208 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
1213 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
1215 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1217 mark_jump_label (PATTERN (copy), copy, 0, 0);
1219 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
1221 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1222 if (REG_NOTE_KIND (link) != REG_LABEL)
1224 if (GET_CODE (link) == EXPR_LIST)
1226 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
1231 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
1236 if (reg_map && REG_NOTES (copy))
1237 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1241 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
1244 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1245 mark_jump_label (PATTERN (copy), copy, 0, 0);
1246 if (REG_NOTES (insn))
1248 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
1250 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1253 /* If this is a simple jump, add it to the jump chain. */
1255 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
1256 && simplejump_p (copy))
1258 jump_chain[INSN_UID (copy)]
1259 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1260 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1268 /* Record the first insn we copied. We need it so that we can
1269 scan the copied insns for new pseudo registers. */
1274 /* Now clean up by emitting a jump to the end label and deleting the jump
1275 at the start of the loop. */
1276 if (! copy || GET_CODE (copy) != BARRIER)
1278 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
1281 /* Record the first insn we copied. We need it so that we can
1282 scan the copied insns for new pseudo registers. This may not
1283 be strictly necessary since we should have copied at least one
1284 insn above. But I am going to be safe. */
1288 mark_jump_label (PATTERN (copy), copy, 0, 0);
1289 if (INSN_UID (copy) < max_jump_chain
1290 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
1292 jump_chain[INSN_UID (copy)]
1293 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1294 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1296 emit_barrier_before (loop_start);
1299 /* Now scan from the first insn we copied to the last insn we copied
1300 (copy) for new pseudo registers. Do this after the code to jump to
1301 the end label since that might create a new pseudo too. */
1302 reg_scan_update (first_copy, copy, max_reg);
1304 /* Mark the exit code as the virtual top of the converted loop. */
1305 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
1307 delete_insn (next_nonnote_insn (loop_start));
1316 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
1317 notes between START and END out before START. Assume that END is not
1318 such a note. START may be such a note. Returns the value of the new
1319 starting insn, which may be different if the original start was such a
/* NOTE(review): the listing skips several source lines here (the rest of
   this comment, parameter declarations, and the function's tail); only the
   visible fragment is annotated.  */
1323 squeeze_notes (start, end)
1329 for (insn = start; insn != end; insn = next)
/* Record the successor first, since INSN may be re-linked below.  */
1331 next = NEXT_INSN (insn);
1332 if (GET_CODE (insn) == NOTE
1333 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
1334 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1335 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1336 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
1337 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
1338 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
/* Unlink the note from its current position and splice it into the
   doubly linked insn chain immediately before START, keeping all four
   PREV/NEXT pointers consistent.  */
1344 rtx prev = PREV_INSN (insn);
1345 PREV_INSN (insn) = PREV_INSN (start);
1346 NEXT_INSN (insn) = start;
1347 NEXT_INSN (PREV_INSN (insn)) = insn;
1348 PREV_INSN (NEXT_INSN (insn)) = insn;
1349 NEXT_INSN (prev) = next;
1350 PREV_INSN (next) = prev;
1358 /* Compare the instructions before insn E1 with those before E2
1359 to find an opportunity for cross jumping.
1360 (This means detecting identical sequences of insns followed by
1361 jumps to the same place, or followed by a label and a jump
1362 to that label, and replacing one with a jump to the other.)
1364 Assume E1 is a jump that jumps to label E2
1365 (that is not always true but it might as well be).
1366 Find the longest possible equivalent sequences
1367 and store the first insns of those sequences into *F1 and *F2.
1368 Store zero there if no equivalent preceding instructions are found.
1370 We give up if we find a label in stream 1.
1371 Actually we could transfer that label into stream 2. */
/* NOTE(review): the listing skips lines throughout this function
   (declarations, loop headers, braces and some bodies); comments below
   annotate only the visible fragment.  */
1374 find_cross_jump (e1, e2, minimum, f1, f2)
/* I1/I2 walk backwards from E1/E2; P1/P2 hold their patterns.  */
1379 register rtx i1 = e1, i2 = e2;
1380 register rtx p1, p2;
/* LAST1/LAST2 mark the start of the longest match found so far;
   AFTERLAST1/AFTERLAST2 remember the previous candidates so a match can
   be retracted by one insn (see the sets_cc0_p handling below).  */
1383 rtx last1 = 0, last2 = 0;
1384 rtx afterlast1 = 0, afterlast2 = 0;
1391 i1 = prev_nonnote_insn (i1);
/* In stream 2, skip backward over notes and labels as well.  */
1393 i2 = PREV_INSN (i2);
1394 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
1395 i2 = PREV_INSN (i2);
1400 /* Don't allow the range of insns preceding E1 or E2
1401 to include the other (E2 or E1). */
1402 if (i2 == e1 || i1 == e2)
1405 /* If we will get to this code by jumping, those jumps will be
1406 tensioned to go directly to the new label (before I2),
1407 so this cross-jumping won't cost extra. So reduce the minimum. */
1408 if (GET_CODE (i1) == CODE_LABEL)
/* Streams must stay in lockstep: same rtx code on both sides.  */
1414 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
1420 /* If this is a CALL_INSN, compare register usage information.
1421 If we don't check this on stack register machines, the two
1422 CALL_INSNs might be merged leaving reg-stack.c with mismatching
1423 numbers of stack registers in the same basic block.
1424 If we don't check this on machines with delay slots, a delay slot may
1425 be filled that clobbers a parameter expected by the subroutine.
1427 ??? We take the simple route for now and assume that if they're
1428 equal, they were constructed identically. */
1430 if (GET_CODE (i1) == CALL_INSN
1431 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
1432 CALL_INSN_FUNCTION_USAGE (i2)))
1436 /* If cross_jump_death_matters is not 0, the insn's mode
1437 indicates whether or not the insn contains any stack-like
1440 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
1442 /* If register stack conversion has already been done, then
1443 death notes must also be compared before it is certain that
1444 the two instruction streams match. */
1447 HARD_REG_SET i1_regset, i2_regset;
1449 CLEAR_HARD_REG_SET (i1_regset);
1450 CLEAR_HARD_REG_SET (i2_regset);
/* Collect the stack registers that die in each insn and require
   the two death sets to be equal.  */
1452 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
1453 if (REG_NOTE_KIND (note) == REG_DEAD
1454 && STACK_REG_P (XEXP (note, 0)))
1455 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
1457 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
1458 if (REG_NOTE_KIND (note) == REG_DEAD
1459 && STACK_REG_P (XEXP (note, 0)))
1460 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
1462 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
1471 /* Don't allow old-style asm or volatile extended asms to be accepted
1472 for cross jumping purposes. It is conceptually correct to allow
1473 them, since cross-jumping preserves the dynamic instruction order
1474 even though it is changing the static instruction order. However,
1475 if an asm is being used to emit an assembler pseudo-op, such as
1476 the MIPS `.set reorder' pseudo-op, then the static instruction order
1477 matters and it must be preserved. */
1478 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
1479 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
1480 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
/* The core equivalence test: patterns must match modulo register
   renumbering (we run after register allocation).  */
1483 if (lose || GET_CODE (p1) != GET_CODE (p2)
1484 || ! rtx_renumbered_equal_p (p1, p2))
1486 /* The following code helps take care of G++ cleanups. */
/* If both insns have matching constant REG_EQUAL/REG_EQUIV notes,
   try substituting the constant for each SET_SRC; that may make the
   patterns match even though the original sources differed.  */
1490 if (!lose && GET_CODE (p1) == GET_CODE (p2)
1491 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
1492 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
1493 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
1494 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
1495 /* If the equivalences are not to a constant, they may
1496 reference pseudos that no longer exist, so we can't
1498 && CONSTANT_P (XEXP (equiv1, 0))
1499 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
1501 rtx s1 = single_set (i1);
1502 rtx s2 = single_set (i2);
1503 if (s1 != 0 && s2 != 0
1504 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
/* Queue both replacements; keep them only if the patterns then
   match (apply_change_group), otherwise they are discarded.  */
1506 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
1507 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
1508 if (! rtx_renumbered_equal_p (p1, p2))
1510 else if (apply_change_group ())
1515 /* Insns fail to match; cross jumping is limited to the following
1519 /* Don't allow the insn after a compare to be shared by
1520 cross-jumping unless the compare is also shared.
1521 Here, if either of these non-matching insns is a compare,
1522 exclude the following insn from possible cross-jumping. */
1523 if (sets_cc0_p (p1) || sets_cc0_p (p2))
1524 last1 = afterlast1, last2 = afterlast2, ++minimum;
1527 /* If cross-jumping here will feed a jump-around-jump
1528 optimization, this jump won't cost extra, so reduce
1530 if (GET_CODE (i1) == JUMP_INSN
1532 && prev_real_insn (JUMP_LABEL (i1)) == e1)
/* USE and CLOBBER insns match trivially and don't count toward
   the minimum-length requirement.  */
1538 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
1540 /* Ok, this insn is potentially includable in a cross-jump here. */
1541 afterlast1 = last1, afterlast2 = last2;
1542 last1 = i1, last2 = i2, --minimum;
/* Report the match only if it is long enough and nonempty.  */
1546 if (minimum <= 0 && last1 != 0 && last1 != e1)
1547 *f1 = last1, *f2 = last2;
/* Perform the cross-jump found by find_cross_jump: make INSN jump to a
   label before NEWLPOS and delete the duplicate insns starting at
   NEWJPOS.  NOTE(review): the listing skips a few lines of this function
   (braces and an else); the visible fragment is annotated as-is.  */
1551 do_cross_jump (insn, newjpos, newlpos)
1552 rtx insn, newjpos, newlpos;
1554 /* Find an existing label at this point
1555 or make a new one if there is none. */
1556 register rtx label = get_label_before (newlpos);
1558 /* Make the same jump insn jump to the new point. */
1559 if (GET_CODE (PATTERN (insn)) == RETURN)
1561 /* Remove from jump chain of returns. */
1562 delete_from_jump_chain (insn);
1563 /* Change the insn. */
1564 PATTERN (insn) = gen_jump (label);
/* Force re-recognition of the rewritten pattern.  */
1565 INSN_CODE (insn) = -1;
1566 JUMP_LABEL (insn) = label;
1567 LABEL_NUSES (label)++;
1568 /* Add to new the jump chain. */
1569 if (INSN_UID (label) < max_jump_chain
1570 && INSN_UID (insn) < max_jump_chain)
1572 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
1573 jump_chain[INSN_UID (label)] = insn;
/* Non-RETURN jumps are simply redirected to the new label.  */
1577 redirect_jump (insn, label, 1);
1579 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
1580 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
1581 the NEWJPOS stream. */
1583 while (newjpos != insn)
1587 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
1588 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
1589 || REG_NOTE_KIND (lnote) == REG_EQUIV)
1590 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
1591 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
1592 remove_note (newlpos, lnote);
/* Advance both streams in lockstep, deleting the jump-side copy.  */
1594 delete_insn (newjpos);
1595 newjpos = next_real_insn (newjpos);
1596 newlpos = next_real_insn (newlpos);
1600 /* Return the label before INSN, or put a new label there. */
/* NOTE(review): the listing omits this function's declarations, return
   statement and closing brace.  */
1603 get_label_before (insn)
1608 /* Find an existing label at this point
1609 or make a new one if there is none. */
1610 label = prev_nonnote_insn (insn);
1612 if (label == 0 || GET_CODE (label) != CODE_LABEL)
1614 rtx prev = PREV_INSN (insn);
/* No label immediately precedes INSN: create one and emit it there.  */
1616 label = gen_label_rtx ();
1617 emit_label_after (label, prev);
1618 LABEL_NUSES (label) = 0;
1623 /* Return the label after INSN, or put a new label there. */
/* NOTE(review): the listing omits this function's declarations, return
   statement and closing brace.  */
1626 get_label_after (insn)
1631 /* Find an existing label at this point
1632 or make a new one if there is none. */
1633 label = next_nonnote_insn (insn);
1635 if (label == 0 || GET_CODE (label) != CODE_LABEL)
/* No label immediately follows INSN: create one and emit it there.  */
1637 label = gen_label_rtx ();
1638 emit_label_after (label, insn);
1639 LABEL_NUSES (label) = 0;
1644 /* Return 1 if INSN is a jump that jumps to right after TARGET
1645 only on the condition that TARGET itself would drop through.
1646 Assumes that TARGET is a conditional jump. */
1649 jump_back_p (insn, target)
1653 enum rtx_code codei, codet;
/* Both insns must be simple conditional/unconditional pc-sets and
   TARGET must be the insn just before INSN's jump target.  */
1656 if (! any_condjump_p (insn)
1657 || any_uncondjump_p (target)
1658 || target != prev_real_insn (JUMP_LABEL (insn)))
1660 set = pc_set (insn);
1661 tset = pc_set (target);
/* Extract the comparison from each IF_THEN_ELSE condition.  */
1663 cinsn = XEXP (SET_SRC (set), 0);
1664 ctarget = XEXP (SET_SRC (tset), 0);
1666 codei = GET_CODE (cinsn);
1667 codet = GET_CODE (ctarget);
/* If the branch is taken on the "else" arm, reverse the comparison
   so both codes describe the branch-taken condition.  */
1669 if (XEXP (SET_SRC (set), 1) == pc_rtx)
1671 codei = reversed_comparison_code (cinsn, insn);
1672 if (codei == UNKNOWN)
1676 if (XEXP (SET_SRC (tset), 2) == pc_rtx)
1678 codet = reversed_comparison_code (ctarget, target);
/* NOTE(review): this tests CODEI, but CODET was just computed on the
   line above -- the parallel branch at 1671-1672 tests the code it
   computed.  Presumably this should be `codet == UNKNOWN'; confirm
   against a later revision before changing.  */
1679 if (codei == UNKNOWN)
1683 return (codei == codet
1684 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
1685 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
1688 /* Given a comparison (CODE ARG0 ARG1), inside a insn, INSN, return an code
1689 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
1690 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
1691 know whether it's source is floating point or integer comparison. Machine
1692 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
1693 to help this function avoid overhead in these cases. */
/* NOTE(review): the listing skips many lines of this function (the CODE
   parameter declaration, switch labels, braces and several branches);
   the visible fragment is annotated as-is.  */
1695 reversed_comparison_code_parts (code, arg0, arg1, insn)
1696 rtx insn, arg0, arg1;
1699 enum machine_mode mode;
1701 /* If this is not actually a comparison, we can't reverse it. */
1702 if (GET_RTX_CLASS (code) != '<')
/* Prefer ARG0's mode; fall back to ARG1 (e.g. when ARG0 is const).  */
1705 mode = GET_MODE (arg0);
1706 if (mode == VOIDmode)
1707 mode = GET_MODE (arg1);
1709 /* First see if machine description supply us way to reverse the comparison.
1710 Give it priority over everything else to allow machine description to do
1712 #ifdef REVERSIBLE_CC_MODE
1713 if (GET_MODE_CLASS (mode) == MODE_CC
1714 && REVERSIBLE_CC_MODE (mode))
1716 #ifdef REVERSE_CONDITION
1717 return REVERSE_CONDITION (code, mode);
1719 return reverse_condition (code);
1723 /* Try few special cases based on the comparison code. */
1732 /* It is always safe to reverse EQ and NE, even for the floating
1733 point. Similary the unsigned comparisons are never used for
1734 floating point so we can reverse them in the default way. */
1735 return reverse_condition (code);
1740 /* In case we already see unordered comparison, we can be sure to
1741 be dealing with floating point so we don't need any more tests. */
1742 return reverse_condition_maybe_unordered (code);
1747 /* We don't have safe way to reverse these yet. */
1753 /* In case we give up IEEE compatibility, all comparisons are reversible. */
1754 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
1755 || flag_unsafe_math_optimizations)
1756 return reverse_condition (code);
1758 if (GET_MODE_CLASS (mode) == MODE_CC
1765 /* Try to search for the comparison to determine the real mode.
1766 This code is expensive, but with sane machine description it
1767 will be never used, since REVERSIBLE_CC_MODE will return true
/* Scan backwards within the basic block (stop at a CODE_LABEL) for
   the insn that set ARG0, to discover the mode being compared.  */
1772 for (prev = prev_nonnote_insn (insn);
1773 prev != 0 && GET_CODE (prev) != CODE_LABEL;
1774 prev = prev_nonnote_insn (prev))
1776 rtx set = set_of (arg0, prev);
1777 if (set && GET_CODE (set) == SET
1778 && rtx_equal_p (SET_DEST (set), arg0))
1780 rtx src = SET_SRC (set);
1782 if (GET_CODE (src) == COMPARE)
1784 rtx comparison = src;
1785 arg0 = XEXP (src, 0);
1786 mode = GET_MODE (arg0);
1787 if (mode == VOIDmode)
1788 mode = GET_MODE (XEXP (comparison, 1));
1791 /* We can get past reg-reg moves. This may be usefull for model
1792 of i387 comparisons that first move flag registers around. */
1799 /* If register is clobbered in some ununderstandable way,
1806 /* An integer condition. */
1807 if (GET_CODE (arg0) == CONST_INT
1808 || (GET_MODE (arg0) != VOIDmode
1809 && GET_MODE_CLASS (mode) != MODE_CC
1810 && ! FLOAT_MODE_P (mode)))
1811 return reverse_condition (code);
1816 /* An wrapper around the previous function to take COMPARISON as rtx
1817 expression. This simplifies many callers. */
1819 reversed_comparison_code (comparison, insn)
1820 rtx comparison, insn;
/* Non-comparison rtxes cannot be reversed (returns UNKNOWN on the
   omitted line after this check, presumably -- the listing skips it).  */
1822 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
1824 return reversed_comparison_code_parts (GET_CODE (comparison),
1825 XEXP (comparison, 0),
1826 XEXP (comparison, 1), insn);
1829 /* Given an rtx-code for a comparison, return the code for the negated
1830 comparison. If no such code exists, return UNKNOWN.
1832 WATCH OUT! reverse_condition is not safe to use on a jump that might
1833 be acting on the results of an IEEE floating point comparison, because
1834 of the special treatment of non-signaling nans in comparisons.
1835 Use reversed_comparison_code instead. */
/* NOTE(review): the body of this function (a switch over CODE) is
   entirely missing from this listing.  */
1838 reverse_condition (code)
1881 /* Similar, but we're allowed to generate unordered comparisons, which
1882 makes it safe for IEEE floating-point. Of course, we have to recognize
1883 that the target will support them too... */
/* NOTE(review): the rest of this function's body (switch over CODE) is
   missing from this listing.  */
1886 reverse_condition_maybe_unordered (code)
1889 /* Non-IEEE formats don't have unordered conditions. */
1890 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
1891 return reverse_condition (code);
1929 /* Similar, but return the code when two operands of a comparison are swapped.
1930 This IS safe for IEEE floating-point. */
/* NOTE(review): this function's body is missing from the listing.  */
1933 swap_condition (code)
1976 /* Given a comparison CODE, return the corresponding unsigned comparison.
1977 If CODE is an equality comparison or already an unsigned comparison,
1978 CODE is returned. */
/* NOTE(review): this function's body is missing from the listing.  */
1981 unsigned_condition (code)
2008 /* Similarly, return the signed version of a comparison. */
/* NOTE(review): this function's body is missing from the listing.  */
2011 signed_condition (code)
2038 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2039 truth of CODE1 implies the truth of CODE2. */
/* NOTE(review): the listing omits the switch/case labels for CODE1;
   each visible `if' below tests implied CODE2 values for one CODE1 case
   (the case labels themselves are among the missing lines).  */
2042 comparison_dominates_p (code1, code2)
2043 enum rtx_code code1, code2;
2045 /* UNKNOWN comparison codes can happen as a result of trying to revert
2047 They can't match anything, so we have to reject them here. */
2048 if (code1 == UNKNOWN || code2 == UNKNOWN)
2057 if (code2 == UNLE || code2 == UNGE)
2062 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
2063 || code2 == ORDERED)
2068 if (code2 == UNLE || code2 == NE)
2073 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2078 if (code2 == UNGE || code2 == NE)
2083 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2089 if (code2 == ORDERED)
2094 if (code2 == NE || code2 == ORDERED)
2099 if (code2 == LEU || code2 == NE)
2104 if (code2 == GEU || code2 == NE)
2109 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
2110 || code2 == UNGE || code2 == UNGT)
2121 /* Return 1 if INSN is an unconditional jump and nothing else. */
/* NOTE(review): the function header line is missing from the listing;
   only the return expression is visible.  A simple jump is exactly
   (set (pc) (label_ref ...)).  */
2127 return (GET_CODE (insn) == JUMP_INSN
2128 && GET_CODE (PATTERN (insn)) == SET
2129 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2130 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2133 /* Return nonzero if INSN is a (possibly) conditional jump
2136 Use this function is deprecated, since we need to support combined
2137 branch and compare insns. Use any_condjump_p instead whenever possible. */
/* NOTE(review): the function header line is missing from the listing.  */
2143 register rtx x = PATTERN (insn);
/* Must be a plain (set (pc) ...) -- PARALLELs are not handled here.  */
2145 if (GET_CODE (x) != SET
2146 || GET_CODE (SET_DEST (x)) != PC)
/* An unconditional jump to a label also counts (return on the omitted
   line, presumably).  */
2150 if (GET_CODE (x) == LABEL_REF)
/* Otherwise require IF_THEN_ELSE with one arm falling through (PC) and
   the other a LABEL_REF or RETURN.  */
2153 return (GET_CODE (x) == IF_THEN_ELSE
2154 && ((GET_CODE (XEXP (x, 2)) == PC
2155 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
2156 || GET_CODE (XEXP (x, 1)) == RETURN))
2157 || (GET_CODE (XEXP (x, 1)) == PC
2158 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
2159 || GET_CODE (XEXP (x, 2)) == RETURN))));
2164 /* Return nonzero if INSN is a (possibly) conditional jump inside a
2167 Use this function is deprecated, since we need to support combined
2168 branch and compare insns. Use any_condjump_p instead whenever possible. */
2171 condjump_in_parallel_p (insn)
2174 register rtx x = PATTERN (insn);
2176 if (GET_CODE (x) != PARALLEL)
/* Only the first element of the PARALLEL is examined.  */
2179 x = XVECEXP (x, 0, 0);
2181 if (GET_CODE (x) != SET)
2183 if (GET_CODE (SET_DEST (x)) != PC)
/* An unconditional jump to a label qualifies.  */
2185 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2187 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
/* Conditional: one arm is fall-through (pc), the other a label or
   return; either orientation is accepted.  */
2189 if (XEXP (SET_SRC (x), 2) == pc_rtx
2190 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2191 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2193 if (XEXP (SET_SRC (x), 1) == pc_rtx
2194 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2195 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2200 /* Return set of PC, otherwise NULL. */
/* NOTE(review): the function header line is missing from the listing.  */
2207 if (GET_CODE (insn) != JUMP_INSN)
2209 pat = PATTERN (insn);
2211 /* The set is allowed to appear either as the insn pattern or
2212 the first set in a PARALLEL. */
2213 if (GET_CODE (pat) == PARALLEL)
2214 pat = XVECEXP (pat, 0, 0);
2215 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
2221 /* Return true when insn is an unconditional direct jump,
2222 possibly bundled inside a PARALLEL. */
2225 any_uncondjump_p (insn)
/* Uses pc_set so PARALLEL-wrapped jumps are handled too.  */
2228 rtx x = pc_set (insn);
2231 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
2236 /* Return true when insn is a conditional jump. This function works for
2237 instructions containing PC sets in PARALLELs. The instruction may have
2238 various other effects so before removing the jump you must verify
2241 Note that unlike condjump_p it returns false for unconditional jumps. */
2244 any_condjump_p (insn)
2247 rtx x = pc_set (insn);
2252 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
/* Exactly one arm must fall through (PC) and the other must be a
   label or return, in either order.  */
2255 a = GET_CODE (XEXP (SET_SRC (x), 1));
2256 b = GET_CODE (XEXP (SET_SRC (x), 2));
2258 return ((b == PC && (a == LABEL_REF || a == RETURN))
2259 || (a == PC && (b == LABEL_REF || b == RETURN)));
2262 /* Return the label of a conditional jump. */
2265 condjump_label (insn)
2268 rtx x = pc_set (insn);
/* X is narrowed to the SET_SRC on an omitted line; handle a direct
   label or either arm of an IF_THEN_ELSE being the label.  */
2273 if (GET_CODE (x) == LABEL_REF)
2275 if (GET_CODE (x) != IF_THEN_ELSE)
2277 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
2279 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
2284 /* Return true if INSN is a (possibly conditional) return insn. */
/* Helper for for_each_rtx: true iff *LOC is a RETURN rtx.  */
2287 returnjump_p_1 (loc, data)
2289 void *data ATTRIBUTE_UNUSED;
2292 return x && GET_CODE (x) == RETURN;
/* returnjump_p proper (its header line is missing from the listing):
   scan the whole pattern for any embedded RETURN.  */
2299 if (GET_CODE (insn) != JUMP_INSN)
2301 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
2304 /* Return true if INSN is a jump that only transfers control and
/* NOTE(review): the function header line is missing from the listing.
   The jump must be a single set of PC whose source has no side
   effects.  */
2313 if (GET_CODE (insn) != JUMP_INSN)
2316 set = single_set (insn);
2319 if (GET_CODE (SET_DEST (set)) != PC)
2321 if (side_effects_p (SET_SRC (set)))
2329 /* Return 1 if X is an RTX that does nothing but set the condition codes
2330 and CLOBBER or USE registers.
2331 Return -1 if X does explicitly set the condition codes,
2332 but also does other things. */
/* NOTE(review): the function header line is missing from the listing.  */
2336 rtx x ATTRIBUTE_UNUSED;
2338 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
2340 if (GET_CODE (x) == PARALLEL)
2344 int other_things = 0;
/* Scan the PARALLEL: count cc0 sets vs. any other SET.  */
2345 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2347 if (GET_CODE (XVECEXP (x, 0, i)) == SET
2348 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
2350 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
/* 0 = no cc0 set; 1 = cc0 set only; -1 = cc0 set plus other effects.  */
2353 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
2359 /* Follow any unconditional jump at LABEL;
2360 return the ultimate label reached by any such chain of jumps.
2361 If LABEL is not followed by a jump, return LABEL.
2362 If the chain loops or we can't find end, return LABEL,
2363 since that tells caller to avoid changing the insn.
2365 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2366 a USE or CLOBBER. */
/* NOTE(review): the listing omits this function's loop header and some
   surrounding lines; the conditions below are the loop's guard.  */
2369 follow_jumps (label)
2374 register rtx value = label;
/* Chain only through simple unconditional jumps (or RETURNs) that are
   immediately followed by a BARRIER.  */
2379 && (insn = next_active_insn (value)) != 0
2380 && GET_CODE (insn) == JUMP_INSN
2381 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
2382 && onlyjump_p (insn))
2383 || GET_CODE (PATTERN (insn)) == RETURN)
2384 && (next = NEXT_INSN (insn))
2385 && GET_CODE (next) == BARRIER);
2388 /* Don't chain through the insn that jumps into a loop
2389 from outside the loop,
2390 since that would create multiple loop entry jumps
2391 and prevent loop optimization. */
2393 if (!reload_completed)
2394 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
2395 if (GET_CODE (tem) == NOTE
2396 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
2397 /* ??? Optional. Disables some optimizations, but makes
2398 gcov output more accurate with -O. */
2399 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
2402 /* If we have found a cycle, make the insn jump to itself. */
2403 if (JUMP_LABEL (insn) == label)
/* Don't follow a jump that targets a dispatch table.  */
2406 tem = next_active_insn (JUMP_LABEL (insn));
2407 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2408 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
/* Advance to the next label in the chain and iterate.  */
2411 value = JUMP_LABEL (insn);
2418 /* Assuming that field IDX of X is a vector of label_refs,
2419 replace each of them by the ultimate label reached by it.
2420 Return nonzero if a change is made.
2421 If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG. */
2424 tension_vector_labels (x, idx)
2430 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
2432 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
2433 register rtx nlabel = follow_jumps (olabel);
2434 if (nlabel && nlabel != olabel)
/* Redirect the entry and fix both labels' use counts; delete the
   old label if it just became unused.  */
2436 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
2437 ++LABEL_NUSES (nlabel);
2438 if (--LABEL_NUSES (olabel) == 0)
2439 delete_insn (olabel);
2446 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2447 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2448 in INSN, then store one of them in JUMP_LABEL (INSN).
2449 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2450 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2451 Also, when there are consecutive labels, canonicalize on the last of them.
2453 Note that two labels separated by a loop-beginning note
2454 must be kept distinct if we have not yet done loop-optimization,
2455 because the gap between them is where loop-optimize
2456 will want to move invariant code to. CROSS_JUMP tells us
2457 that loop-optimization is done with.
2459 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2460 two labels distinct if they are separated by only USE or CLOBBER insns. */
/* NOTE(review): the listing skips lines throughout (the switch statement
   and several case labels are among the missing lines); the visible
   fragments are annotated as-is.  */
2463 mark_jump_label (x, insn, cross_jump, in_mem)
2469 register RTX_CODE code = GET_CODE (x);
2471 register const char *fmt;
2493 /* If this is a constant-pool reference, see if it is a label. */
2494 if (CONSTANT_POOL_ADDRESS_P (x))
2495 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
/* LABEL_REF handling: canonicalize and count the referenced label.  */
2500 rtx label = XEXP (x, 0);
2505 /* Ignore remaining references to unreachable labels that
2506 have been deleted. */
2507 if (GET_CODE (label) == NOTE
2508 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
2511 if (GET_CODE (label) != CODE_LABEL)
2514 /* Ignore references to labels of containing functions. */
2515 if (LABEL_REF_NONLOCAL_P (x))
2518 /* If there are other labels following this one,
2519 replace it with the last of the consecutive labels. */
2520 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
2522 if (GET_CODE (next) == CODE_LABEL)
/* After reload, USE/CLOBBER insns do not separate labels.  */
2524 else if (cross_jump && GET_CODE (next) == INSN
2525 && (GET_CODE (PATTERN (next)) == USE
2526 || GET_CODE (PATTERN (next)) == CLOBBER))
2528 else if (GET_CODE (next) != NOTE)
/* Before loop optimization, certain notes keep labels distinct.  */
2530 else if (! cross_jump
2531 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
2532 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
2533 /* ??? Optional. Disables some optimizations, but
2534 makes gcov output more accurate with -O. */
2535 || (flag_test_coverage
2536 && NOTE_LINE_NUMBER (next) > 0)))
/* Install the canonical label and bump its use count (unless INSN
   has already been deleted).  */
2540 XEXP (x, 0) = label;
2541 if (! insn || ! INSN_DELETED_P (insn))
2542 ++LABEL_NUSES (label);
2546 if (GET_CODE (insn) == JUMP_INSN)
2547 JUMP_LABEL (insn) = label;
2549 /* If we've changed OLABEL and we had a REG_LABEL note
2550 for it, update it as well. */
2551 else if (label != olabel
2552 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
2553 XEXP (note, 0) = label;
2555 /* Otherwise, add a REG_LABEL note for LABEL unless there already
2557 else if (! find_reg_note (insn, REG_LABEL, label))
2559 /* This code used to ignore labels which refered to dispatch
2560 tables to avoid flow.c generating worse code.
2562 However, in the presense of global optimizations like
2563 gcse which call find_basic_blocks without calling
2564 life_analysis, not recording such labels will lead
2565 to compiler aborts because of inconsistencies in the
2566 flow graph. So we go ahead and record the label.
2568 It may also be the case that the optimization argument
2569 is no longer valid because of the more accurate cfg
2570 we build in find_basic_blocks -- it no longer pessimizes
2571 code when it finds a REG_LABEL note. */
2572 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
2579 /* Do walk the labels in a vector, but not the first operand of an
2580 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
2583 if (! INSN_DELETED_P (insn))
/* ADDR_DIFF_VEC keeps its base label in operand 0; the label vector
   is operand 1 there, operand 0 for ADDR_VEC.  */
2585 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
2587 for (i = 0; i < XVECLEN (x, eltnum); i++)
2588 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
2589 cross_jump, in_mem);
/* Generic recursion over X's operands via the rtx format string.  */
2597 fmt = GET_RTX_FORMAT (code);
2598 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2601 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
2602 else if (fmt[i] == 'E')
2605 for (j = 0; j < XVECLEN (x, i); j++)
2606 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
2611 /* If all INSN does is set the pc, delete it,
2612 and delete the insn that set the condition codes for it
2613 if that's what the previous thing was. */
/* NOTE(review): the function header line is missing from the listing.  */
2619 register rtx set = single_set (insn);
2621 if (set && GET_CODE (SET_DEST (set)) == PC)
2622 delete_computation (insn);
2625 /* Verify INSN is a BARRIER and delete it. */
/* NOTE(review): the abort/delete body lines are missing from the
   listing; only the type check is visible.  */
2628 delete_barrier (insn)
2631 if (GET_CODE (insn) != BARRIER)
2637 /* Recursively delete prior insns that compute the value (used only by INSN
2638 which the caller is deleting) stored in the register mentioned by NOTE
2639 which is a REG_DEAD note associated with INSN. */
/* NOTE(review): the listing skips occasional lines (declarations, braces,
   a few statements); the visible fragment is annotated as-is.  */
2642 delete_prior_computation (note, insn)
2647 rtx reg = XEXP (note, 0);
/* Walk backwards over ordinary insns and calls, looking for the setter
   of REG.  */
2649 for (our_prev = prev_nonnote_insn (insn);
2650 our_prev && (GET_CODE (our_prev) == INSN
2651 || GET_CODE (our_prev) == CALL_INSN);
2652 our_prev = prev_nonnote_insn (our_prev))
2654 rtx pat = PATTERN (our_prev);
2656 /* If we reach a CALL which is not calling a const function
2657 or the callee pops the arguments, then give up. */
2658 if (GET_CODE (our_prev) == CALL_INSN
2659 && (! CONST_CALL_P (our_prev)
2660 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
2663 /* If we reach a SEQUENCE, it is too complex to try to
2664 do anything with it, so give up. */
2665 if (GET_CODE (pat) == SEQUENCE)
2668 if (GET_CODE (pat) == USE
2669 && GET_CODE (XEXP (pat, 0)) == INSN)
2670 /* reorg creates USEs that look like this. We leave them
2671 alone because reorg needs them for its own purposes. */
/* Found an insn that sets REG.  */
2674 if (reg_set_p (reg, pat))
2676 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
2679 if (GET_CODE (pat) == PARALLEL)
2681 /* If we find a SET of something else, we can't
2686 for (i = 0; i < XVECLEN (pat, 0); i++)
2688 rtx part = XVECEXP (pat, 0, i);
2690 if (GET_CODE (part) == SET
2691 && SET_DEST (part) != reg)
/* Every SET in the PARALLEL targets REG: the whole insn is dead.  */
2695 if (i == XVECLEN (pat, 0))
2696 delete_computation (our_prev);
2698 else if (GET_CODE (pat) == SET
2699 && GET_CODE (SET_DEST (pat)) == REG)
/* Compute the hard-register ranges covered by the SET_DEST and by
   REG; pseudos occupy a single "slot".  */
2701 int dest_regno = REGNO (SET_DEST (pat));
2704 + (dest_regno < FIRST_PSEUDO_REGISTER
2705 ? HARD_REGNO_NREGS (dest_regno,
2706 GET_MODE (SET_DEST (pat))) : 1));
2707 int regno = REGNO (reg);
2710 + (regno < FIRST_PSEUDO_REGISTER
2711 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
/* The set is fully covered by the dying register: delete it.  */
2713 if (dest_regno >= regno
2714 && dest_endregno <= endregno)
2715 delete_computation (our_prev);
2717 /* We may have a multi-word hard register and some, but not
2718 all, of the words of the register are needed in subsequent
2719 insns. Write REG_UNUSED notes for those parts that were not
2721 else if (dest_regno <= regno
2722 && dest_endregno >= endregno)
2726 REG_NOTES (our_prev)
2727 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
2728 REG_NOTES (our_prev));
/* Delete only if every word of the destination is now unused.  */
2730 for (i = dest_regno; i < dest_endregno; i++)
2731 if (! find_regno_note (our_prev, REG_UNUSED, i))
2734 if (i == dest_endregno)
2735 delete_computation (our_prev);
2742 /* If PAT references the register that dies here, it is an
2743 additional use. Hence any prior SET isn't dead. However, this
2744 insn becomes the new place for the REG_DEAD note. */
2745 if (reg_overlap_mentioned_p (reg, pat))
/* Move NOTE onto OUR_PREV's note list and stop scanning.  */
2747 XEXP (note, 1) = REG_NOTES (our_prev);
2748 REG_NOTES (our_prev) = note;
2754 /* Delete INSN and recursively delete insns that compute values used only
2755 by INSN. This uses the REG_DEAD notes computed during flow analysis.
2756 If we are running before flow.c, we need do nothing since flow.c will
2757 delete dead code. We also can't know if the registers being used are
2758 dead or not at this point.
2760 Otherwise, look at all our REG_DEAD notes. If a previous insn does
2761 nothing other than set a register that dies in this insn, we can delete
2764 On machines with CC0, if CC0 is used in this insn, we may be able to
2765 delete the insn that set it. */
/* NOTE(review): the left-hand column is the original file's line numbering;
   gaps in it mean interior lines were elided from this excerpt.  The code
   below is kept byte-identical to the listing. */
2768 delete_computation (insn)
2774 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2776 rtx prev = prev_nonnote_insn (insn);
2777 /* We assume that at this stage
2778 CC's are always set explicitly
2779 and always immediately before the jump that
2780 will use them. So if the previous insn
2781 exists to set the CC's, delete it
2782 (unless it performs auto-increments, etc.). */
2783 if (prev && GET_CODE (prev) == INSN
2784 && sets_cc0_p (PATTERN (prev)))
/* sets_cc0_p > 0 means PREV sets cc0 and nothing else; only then is the
   whole setter dead (and even then only if it has no side effects). */
2786 if (sets_cc0_p (PATTERN (prev)) > 0
2787 && ! side_effects_p (PATTERN (prev)))
2788 delete_computation (prev);
2790 /* Otherwise, show that cc0 won't be used. */
2791 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
2792 cc0_rtx, REG_NOTES (prev));
/* Walk the REG_DEAD notes of INSN; each legitimate one names a register
   whose prior computation may now be deletable. */
2797 for (note = REG_NOTES (insn); note; note = next)
2799 next = XEXP (note, 1);
2801 if (REG_NOTE_KIND (note) != REG_DEAD
2802 /* Verify that the REG_NOTE is legitimate. */
2803 || GET_CODE (XEXP (note, 0)) != REG)
2806 delete_prior_computation (note, insn);
2812 /* Delete insn INSN from the chain of insns and update label ref counts.
2813 May delete some following insns as a consequence; may even delete
2814 a label elsewhere and insns that follow it.
2816 Returns the first insn after INSN that was not deleted. */
/* NOTE(review): interior lines are elided in this listing (gaps in the
   left-hand numbering column); code kept byte-identical. */
2822 register rtx next = NEXT_INSN (insn);
2823 register rtx prev = PREV_INSN (insn);
2824 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
2825 register int dont_really_delete = 0;
/* Skip over insns already marked deleted so NEXT is a live insn. */
2828 while (next && INSN_DELETED_P (next))
2829 next = NEXT_INSN (next);
2831 /* This insn is already deleted => return first following nondeleted. */
2832 if (INSN_DELETED_P (insn))
2836 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2838 /* Don't delete user-declared labels. When optimizing, convert them
2839 to special NOTEs instead. When not optimizing, leave them alone. */
2840 if (was_code_label && LABEL_NAME (insn) != 0)
2844 const char *name = LABEL_NAME (insn);
2845 PUT_CODE (insn, NOTE);
2846 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
2847 NOTE_SOURCE_FILE (insn) = name;
2850 dont_really_delete = 1;
2853 /* Mark this insn as deleted. */
2854 INSN_DELETED_P (insn) = 1;
2856 /* If this is an unconditional jump, delete it from the jump chain. */
2857 if (simplejump_p (insn))
2858 delete_from_jump_chain (insn);
2860 /* If instruction is followed by a barrier,
2861 delete the barrier too. */
2863 if (next != 0 && GET_CODE (next) == BARRIER)
2865 INSN_DELETED_P (next) = 1;
2866 next = NEXT_INSN (next);
2869 /* Patch out INSN (and the barrier if any) */
2871 if (! dont_really_delete)
2875 NEXT_INSN (prev) = next;
/* If PREV is a SEQUENCE (delay-slot group), fix the chain pointer of its
   last member too, since that is the real predecessor in the stream. */
2876 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2877 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
2878 XVECLEN (PATTERN (prev), 0) - 1)) = next;
2883 PREV_INSN (next) = prev;
2884 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2885 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2888 if (prev && NEXT_INSN (prev) == 0)
2889 set_last_insn (prev);
2892 /* If deleting a jump, decrement the count of the label,
2893 and delete the label if it is now unused. */
2895 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2897 rtx lab = JUMP_LABEL (insn), lab_next;
2899 if (--LABEL_NUSES (lab) == 0)
2901 /* This can delete NEXT or PREV,
2902 either directly if NEXT is JUMP_LABEL (INSN),
2903 or indirectly through more levels of jumps. */
2906 /* I feel a little doubtful about this loop,
2907 but I see no clean and sure alternative way
2908 to find the first insn after INSN that is not now deleted.
2909 I hope this works. */
2910 while (next && INSN_DELETED_P (next))
2911 next = NEXT_INSN (next);
2914 else if ((lab_next = next_nonnote_insn (lab)) != NULL
2915 && GET_CODE (lab_next) == JUMP_INSN
2916 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
2917 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
2919 /* If we're deleting the tablejump, delete the dispatch table.
2920 We may not be able to kill the label immediately preceding
2921 just yet, as it might be referenced in code leading up to
2923 delete_insn (lab_next);
2927 /* Likewise if we're deleting a dispatch table. */
2929 if (GET_CODE (insn) == JUMP_INSN
2930 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2931 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2933 rtx pat = PATTERN (insn);
2934 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
2935 int len = XVECLEN (pat, diff_vec_p);
/* Every label in the dispatch table loses one use; delete those that
   become unused. */
2937 for (i = 0; i < len; i++)
2938 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
2939 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
2940 while (next && INSN_DELETED_P (next))
2941 next = NEXT_INSN (next);
2945 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
2946 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2947 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2948 if (REG_NOTE_KIND (note) == REG_LABEL
2949 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
2950 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2951 if (--LABEL_NUSES (XEXP (note, 0)) == 0)
2952 delete_insn (XEXP (note, 0));
2954 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
2955 prev = PREV_INSN (prev);
2957 /* If INSN was a label and a dispatch table follows it,
2958 delete the dispatch table. The tablejump must have gone already.
2959 It isn't useful to fall through into a table. */
2962 && NEXT_INSN (insn) != 0
2963 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
2964 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
2965 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
2966 next = delete_insn (NEXT_INSN (insn));
2968 /* If INSN was a label, delete insns following it if now unreachable. */
2970 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
2972 register RTX_CODE code;
2974 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
2975 || code == NOTE || code == BARRIER
2976 || (code == CODE_LABEL && INSN_DELETED_P (next))))
2979 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
2980 next = NEXT_INSN (next);
2981 /* Keep going past other deleted labels to delete what follows. */
2982 else if (code == CODE_LABEL && INSN_DELETED_P (next))
2983 next = NEXT_INSN (next);
2985 /* Note: if this deletes a jump, it can cause more
2986 deletion of unreachable code, after a different label.
2987 As long as the value from this recursive call is correct,
2988 this invocation functions correctly. */
2989 next = delete_insn (next);
2996 /* Advance from INSN till reaching something not deleted
2997 then return that. May return INSN itself. */
/* NOTE(review): assumes the chain is terminated by a non-deleted insn;
   a fully-deleted tail would walk off the end — confirm against callers. */
3000 next_nondeleted_insn (insn)
3003 while (INSN_DELETED_P (insn))
3004 insn = NEXT_INSN (insn);
3008 /* Delete a range of insns from FROM to TO, inclusive.
3009 This is for the sake of peephole optimization, so assume
3010 that whatever these insns do will still be done by a new
3011 peephole insn that will replace them. */
/* NOTE(review): loop-control lines are elided in this listing; the visible
   body marks each non-NOTE insn deleted and unlinks it one at a time. */
3014 delete_for_peephole (from, to)
3015 register rtx from, to;
3017 register rtx insn = from;
3021 register rtx next = NEXT_INSN (insn);
3022 register rtx prev = PREV_INSN (insn);
/* NOTEs are preserved: only real insns are marked deleted and spliced out. */
3024 if (GET_CODE (insn) != NOTE
3026 INSN_DELETED_P (insn) = 1;
3028 /* Patch this insn out of the chain. */
3029 /* We don't do this all at once, because we
3030 must preserve all NOTEs. */
3032 NEXT_INSN (prev) = next;
3035 PREV_INSN (next) = prev;
3043 /* Note that if TO is an unconditional jump
3044 we *do not* delete the BARRIER that follows,
3045 since the peephole that replaces this sequence
3046 is also an unconditional jump in that case. */
3049 /* We have determined that INSN is never reached, and are about to
3050 delete it. Print a warning if the user asked for one.
3052 To try to make this warning more useful, this should only be called
3053 once per basic block not reached, and it only warns when the basic
3054 block contains more than one line from the current function, and
3055 contains at least one operation. CSE and inlining can duplicate insns,
3056 so it's possible to get spurious warnings from this. */
3059 never_reached_warning (avoided_insn)
3063 rtx a_line_note = NULL;
3064 int two_avoided_lines = 0;
3065 int contains_insn = 0;
/* Bail out immediately unless -Wunreachable-code was requested. */
3067 if (! warn_notreached)
3070 /* Scan forwards, looking at LINE_NUMBER notes, until
3071 we hit a LABEL or we run out of insns. */
3073 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
3075 if (GET_CODE (insn) == CODE_LABEL)
3077 else if (GET_CODE (insn) == NOTE /* A line number note? */
3078 && NOTE_LINE_NUMBER (insn) >= 0)
/* Remember the first line note; flag when a second, different line shows up. */
3080 if (a_line_note == NULL)
3083 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
3084 != NOTE_LINE_NUMBER (insn));
3086 else if (INSN_P (insn))
3089 if (two_avoided_lines && contains_insn)
3090 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
3091 NOTE_LINE_NUMBER (a_line_note),
3092 "will never be executed");
3095 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
3096 NLABEL as a return. Accrue modifications into the change group. */
3099 redirect_exp_1 (loc, olabel, nlabel, insn)
3104 register rtx x = *loc;
3105 register RTX_CODE code = GET_CODE (x);
3107 register const char *fmt;
/* LABEL_REF to OLABEL: replace with a LABEL_REF to NLABEL, or with a
   RETURN rtx when NLABEL is null. */
3109 if (code == LABEL_REF)
3111 if (XEXP (x, 0) == olabel)
3115 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3117 n = gen_rtx_RETURN (VOIDmode);
3119 validate_change (insn, loc, n, 1);
/* The inverse direction: turn a RETURN into a jump to NLABEL. */
3123 else if (code == RETURN && olabel == 0)
3125 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3126 if (loc == &PATTERN (insn))
3127 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3128 validate_change (insn, loc, x, 1);
/* (set (pc) (label_ref OLABEL)) with null NLABEL becomes a bare RETURN. */
3132 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
3133 && GET_CODE (SET_SRC (x)) == LABEL_REF
3134 && XEXP (SET_SRC (x), 0) == olabel)
3136 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
/* Recurse over all sub-rtxs and vectors of X. */
3140 fmt = GET_RTX_FORMAT (code);
3141 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3144 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
3145 else if (fmt[i] == 'E')
3148 for (j = 0; j < XVECLEN (x, i); j++)
3149 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
3154 /* Similar, but apply the change group and report success or failure. */
3157 redirect_exp (olabel, nlabel, insn)
/* For a PARALLEL, only the first element (the jump SET) is redirected. */
3163 if (GET_CODE (PATTERN (insn)) == PARALLEL)
3164 loc = &XVECEXP (PATTERN (insn), 0, 0);
3166 loc = &PATTERN (insn);
3168 redirect_exp_1 (loc, olabel, nlabel, insn);
3169 if (num_validated_changes () == 0)
3172 return apply_change_group ();
3175 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
3176 the modifications into the change group. Return false if we did
3177 not see how to do that. */
3180 redirect_jump_1 (jump, nlabel)
/* Record the change count so we can tell whether redirect_exp_1 queued
   anything for this jump. */
3183 int ochanges = num_validated_changes ();
3186 if (GET_CODE (PATTERN (jump)) == PARALLEL)
3187 loc = &XVECEXP (PATTERN (jump), 0, 0);
3189 loc = &PATTERN (jump);
3191 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
3192 return num_validated_changes () > ochanges;
3195 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
3196 jump target label is unused as a result, it and the code following
3199 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3202 The return value will be 1 if the change was made, 0 if it wasn't
3203 (this can only occur for NLABEL == 0). */
3206 redirect_jump (jump, nlabel, delete_unused)
3210 register rtx olabel = JUMP_LABEL (jump);
/* Redirecting to the current target is a trivial success. */
3212 if (nlabel == olabel)
3215 if (! redirect_exp (olabel, nlabel, jump))
3218 /* If this is an unconditional branch, delete it from the jump_chain of
3219 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3220 have UID's in range and JUMP_CHAIN is valid). */
3221 if (jump_chain && (simplejump_p (jump)
3222 || GET_CODE (PATTERN (jump)) == RETURN)
3224 int label_index = nlabel ? INSN_UID (nlabel) : 0;
3226 delete_from_jump_chain (jump);
3227 if (label_index < max_jump_chain
3228 && INSN_UID (jump) < max_jump_chain)
3230 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
3231 jump_chain[label_index] = jump;
3235 JUMP_LABEL (jump) = nlabel;
3237 ++LABEL_NUSES (nlabel);
3239 /* If we're eliding the jump over exception cleanups at the end of a
3240 function, move the function end note so that -Wreturn-type works. */
3241 if (olabel && nlabel
3242 && NEXT_INSN (olabel)
3243 && GET_CODE (NEXT_INSN (olabel)) == NOTE
3244 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
3245 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
/* Drop the old label's use count; delete it if now unused and allowed. */
3247 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
3248 delete_insn (olabel);
3253 /* Invert the jump condition of rtx X contained in jump insn, INSN.
3254 Accrue the modifications into the change group. */
3260 register RTX_CODE code;
3261 rtx x = pc_set (insn);
3267 code = GET_CODE (x);
3269 if (code == IF_THEN_ELSE)
3271 register rtx comp = XEXP (x, 0);
3273 enum rtx_code reversed_code;
3275 /* We can do this in two ways: The preferable way, which can only
3276 be done if this is not an integer comparison, is to reverse
3277 the comparison code. Otherwise, swap the THEN-part and ELSE-part
3278 of the IF_THEN_ELSE. If we can't do either, fail. */
3280 reversed_code = reversed_comparison_code (comp, insn);
3282 if (reversed_code != UNKNOWN)
3284 validate_change (insn, &XEXP (x, 0),
3285 gen_rtx_fmt_ee (reversed_code,
3286 GET_MODE (comp), XEXP (comp, 0),
/* Fallback: swap the THEN and ELSE arms of the IF_THEN_ELSE instead of
   reversing the comparison code itself. */
3293 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
3294 validate_change (insn, &XEXP (x, 2), tem, 1);
3300 /* Invert the jump condition of conditional jump insn, INSN.
3302 Return 1 if we can do so, 0 if we cannot find a way to do so that
3303 matches a pattern. */
/* Queue the inversion, then commit the change group atomically. */
3309 invert_exp_1 (insn);
3310 if (num_validated_changes () == 0)
3313 return apply_change_group ();
3316 /* Invert the condition of the jump JUMP, and make it jump to label
3317 NLABEL instead of where it jumps now. Accrue changes into the
3318 change group. Return false if we didn't see how to perform the
3319 inversion and redirection. */
3322 invert_jump_1 (jump, nlabel)
/* Fail unless invert_exp_1 actually queued an inversion. */
3327 ochanges = num_validated_changes ();
3328 invert_exp_1 (jump);
3329 if (num_validated_changes () == ochanges)
3332 return redirect_jump_1 (jump, nlabel);
3335 /* Invert the condition of the jump JUMP, and make it jump to label
3336 NLABEL instead of where it jumps now. Return true if successful. */
3339 invert_jump (jump, nlabel, delete_unused)
3343 /* We have to either invert the condition and change the label or
3344 do neither. Either operation could fail. We first try to invert
3345 the jump. If that succeeds, we try changing the label. If that fails,
3346 we invert the jump back to what it was. */
3348 if (! invert_exp (jump))
3351 if (redirect_jump (jump, nlabel, delete_unused))
3353 /* An inverted jump means that a probability taken becomes a
3354 probability not taken. Subtract the branch probability from the
3355 probability base to convert it back to a taken probability. */
3357 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3359 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
/* Redirection failed: undo the inversion so JUMP is unchanged overall. */
3364 if (! invert_exp (jump))
3365 /* This should just be putting it back the way it was. */
3371 /* Delete the instruction JUMP from any jump chain it might be on. */
3374 delete_from_jump_chain (jump)
3378 rtx olabel = JUMP_LABEL (jump);
3380 /* Handle unconditional jumps. */
3381 if (jump_chain && olabel != 0
3382 && INSN_UID (olabel) < max_jump_chain
3383 && simplejump_p (jump))
3384 index = INSN_UID (olabel);
3385 /* Handle return insns. */
3386 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
/* JUMP at the head of the chain: just unlink it. */
3391 if (jump_chain[index] == jump)
3392 jump_chain[index] = jump_chain[INSN_UID (jump)];
/* Otherwise scan the chain for the predecessor of JUMP and splice it out. */
3397 for (insn = jump_chain[index];
3399 insn = jump_chain[INSN_UID (insn)])
3400 if (jump_chain[INSN_UID (insn)] == jump)
3402 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3408 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3410 If the old jump target label (before the dispatch table) becomes unused,
3411 it and the dispatch table may be deleted. In that case, find the insn
3412 before the jump references that label and delete it and logical successors
3416 redirect_tablejump (jump, nlabel)
3419 register rtx olabel = JUMP_LABEL (jump);
3420 rtx *notep, note, next;
3422 /* Add this jump to the jump_chain of NLABEL. */
3423 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
3424 && INSN_UID (jump) < max_jump_chain)
3426 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
3427 jump_chain[INSN_UID (nlabel)] = jump;
/* FIX(review): "®_NOTES" was mojibake — the "&REG" of "&REG_NOTES" had
   been corrupted into a lone U+00AE character.  Restored to &REG_NOTES
   so NOTEP starts at the address of JUMP's note list. */
3430 for (notep = &REG_NOTES (jump), note = *notep; note; note = next)
3432 next = XEXP (note, 1);
3434 if (REG_NOTE_KIND (note) != REG_DEAD
3435 /* Verify that the REG_NOTE is legitimate. */
3436 || GET_CODE (XEXP (note, 0)) != REG
3437 || ! reg_mentioned_p (XEXP (note, 0), PATTERN (jump)))
3438 notep = &XEXP (note, 1);
3441 delete_prior_computation (note, jump);
/* Rewrite JUMP as a simple jump to NLABEL and force re-recognition. */
3446 PATTERN (jump) = gen_jump (nlabel);
3447 JUMP_LABEL (jump) = nlabel;
3448 ++LABEL_NUSES (nlabel);
3449 INSN_CODE (jump) = -1;
3451 if (--LABEL_NUSES (olabel) == 0)
3453 delete_labelref_insn (jump, olabel, 0);
3454 delete_insn (olabel);
3458 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3459 If we found one, delete it and then delete this insn if DELETE_THIS is
3460 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3463 delete_labelref_insn (insn, label, delete_this)
/* Direct reference in INSN's own pattern counts. */
3470 if (GET_CODE (insn) != NOTE
3471 && reg_mentioned_p (label, PATTERN (insn)))
/* Otherwise recurse through LOG_LINKS to search logical predecessors. */
3482 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
3483 if (delete_labelref_insn (XEXP (link, 0), label, 1))
3497 /* Like rtx_equal_p except that it considers two REGs as equal
3498 if they renumber to the same value and considers two commutative
3499 operations to be the same if the order of the operands has been
3502 ??? Addition is not commutative on the PA due to the weird implicit
3503 space register selection rules for memory addresses. Therefore, we
3504 don't consider a + b == b + a.
3506 We could/should make this test a little tighter. Possibly only
3507 disabling it on the PA via some backend macro or only disabling this
3508 case when the PLUS is inside a MEM. */
3511 rtx_renumbered_equal_p (x, y)
3515 register RTX_CODE code = GET_CODE (x);
3516 register const char *fmt;
/* REG/SUBREG-of-REG on both sides: compare post-renumbering hard regnos
   and subreg byte offsets. */
3521 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3522 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3523 && GET_CODE (SUBREG_REG (y)) == REG)))
3525 int reg_x = -1, reg_y = -1;
3526 int byte_x = 0, byte_y = 0;
3528 if (GET_MODE (x) != GET_MODE (y))
3531 /* If we haven't done any renumbering, don't
3532 make any assumptions. */
3533 if (reg_renumber == 0)
3534 return rtx_equal_p (x, y);
3538 reg_x = REGNO (SUBREG_REG (x));
3539 byte_x = SUBREG_BYTE (x);
3541 if (reg_renumber[reg_x] >= 0)
3543 reg_x = subreg_regno_offset (reg_renumber[reg_x],
3544 GET_MODE (SUBREG_REG (x)),
3553 if (reg_renumber[reg_x] >= 0)
3554 reg_x = reg_renumber[reg_x];
3557 if (GET_CODE (y) == SUBREG)
3559 reg_y = REGNO (SUBREG_REG (y));
3560 byte_y = SUBREG_BYTE (y);
3562 if (reg_renumber[reg_y] >= 0)
3564 reg_y = subreg_regno_offset (reg_renumber[reg_y],
3565 GET_MODE (SUBREG_REG (y)),
3574 if (reg_renumber[reg_y] >= 0)
3575 reg_y = reg_renumber[reg_y];
3578 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
3581 /* Now we have disposed of all the cases
3582 in which different rtx codes can match. */
3583 if (code != GET_CODE (y))
3595 return INTVAL (x) == INTVAL (y);
3598 /* We can't assume nonlocal labels have their following insns yet. */
3599 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3600 return XEXP (x, 0) == XEXP (y, 0);
3602 /* Two label-refs are equivalent if they point at labels
3603 in the same position in the instruction stream. */
3604 return (next_real_insn (XEXP (x, 0))
3605 == next_real_insn (XEXP (y, 0)));
3608 return XSTR (x, 0) == XSTR (y, 0);
3611 /* If we didn't match EQ equality above, they aren't the same. */
3618 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3620 if (GET_MODE (x) != GET_MODE (y))
3623 /* For commutative operations, the RTXs match if the operands match in any
3624 order. Also handle the simple binary and unary cases without a loop.
3626 ??? Don't consider PLUS a commutative operator; see comments above. */
3627 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3629 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3630 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
3631 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
3632 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
3633 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
3634 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3635 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
3636 else if (GET_RTX_CLASS (code) == '1')
3637 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
3639 /* Compare the elements. If any pair of corresponding elements
3640 fail to match, return 0 for the whole thing. */
3642 fmt = GET_RTX_FORMAT (code);
3643 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3649 if (XWINT (x, i) != XWINT (y, i))
3654 if (XINT (x, i) != XINT (y, i))
3659 if (strcmp (XSTR (x, i), XSTR (y, i)))
3664 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
3669 if (XEXP (x, i) != XEXP (y, i))
3676 if (XVECLEN (x, i) != XVECLEN (y, i))
3678 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3679 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
3690 /* If X is a hard register or equivalent to one or a subregister of one,
3691 return the hard register number. If X is a pseudo register that was not
3692 assigned a hard register, return the pseudo register number. Otherwise,
3693 return -1. Any rtx is valid for X. */
3699 if (GET_CODE (x) == REG)
/* Translate a renumbered pseudo to its assigned hard register. */
3701 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3702 return reg_renumber[REGNO (x)];
3705 if (GET_CODE (x) == SUBREG)
3707 int base = true_regnum (SUBREG_REG (x));
/* Only subregs of hard registers yield a meaningful offset regno. */
3708 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3709 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
3710 GET_MODE (SUBREG_REG (x)),
3711 SUBREG_BYTE (x), GET_MODE (x));
3716 /* Optimize code of the form:
3718 for (x = a[i]; x; ...)
3720 for (x = a[i]; x; ...)
3724 Loop optimize will change the above code into
3728 { ...; if (! (x = ...)) break; }
3731 { ...; if (! (x = ...)) break; }
3734 In general, if the first test fails, the program can branch
3735 directly to `foo' and skip the second try which is doomed to fail.
3736 We run this after loop optimization and before flow analysis. */
3738 /* When comparing the insn patterns, we track the fact that different
3739 pseudo-register numbers may have been used in each computation.
3740 The following array stores an equivalence -- same_regs[I] == J means
3741 that pseudo register I was used in the first set of tests in a context
3742 where J was used in the second set. We also count the number of such
3743 pending equivalences. If nonzero, the expressions really aren't the
3746 static int *same_regs;
3748 static int num_same_regs;
3750 /* Track any registers modified between the target of the first jump and
3751 the second jump. They never compare equal. */
3753 static char *modified_regs;
3755 /* Record if memory was modified. */
3757 static int modified_mem;
3759 /* Called via note_stores on each insn between the target of the first
3760 branch and the second branch. It marks any changed registers. */
3763 mark_modified_reg (dest, x, data)
3765 rtx x ATTRIBUTE_UNUSED;
3766 void *data ATTRIBUTE_UNUSED;
/* Look through a SUBREG wrapper to the underlying register. */
3771 if (GET_CODE (dest) == SUBREG)
3772 dest = SUBREG_REG (dest);
3774 if (GET_CODE (dest) == MEM)
3777 if (GET_CODE (dest) != REG)
3780 regno = REGNO (dest);
3781 if (regno >= FIRST_PSEUDO_REGISTER)
3782 modified_regs[regno] = 1;
/* A hard register store may cover several consecutive regnos; mark all. */
3784 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
3785 modified_regs[regno + i] = 1;
3788 /* F is the first insn in the chain of insns. */
/* NOTE(review): interior lines are elided in this listing (gaps in the
   left-hand numbering column); code kept byte-identical. */
3791 thread_jumps (f, max_reg, flag_before_loop)
3794 int flag_before_loop;
3796 /* Basic algorithm is to find a conditional branch,
3797 the label it may branch to, and the branch after
3798 that label. If the two branches test the same condition,
3799 walk back from both branch paths until the insn patterns
3800 differ, or code labels are hit. If we make it back to
3801 the target of the first branch, then we know that the first branch
3802 will either always succeed or always fail depending on the relative
3803 senses of the two branches. So adjust the first branch accordingly
3806 rtx label, b1, b2, t1, t2;
3807 enum rtx_code code1, code2;
3808 rtx b1op0, b1op1, b2op0, b2op1;
3812 enum rtx_code reversed_code1, reversed_code2;
3814 /* Allocate register tables and quick-reset table. */
3815 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
3816 same_regs = (int *) xmalloc (max_reg * sizeof (int));
3817 all_reset = (int *) xmalloc (max_reg * sizeof (int));
3818 for (i = 0; i < max_reg; i++)
3825 for (b1 = f; b1; b1 = NEXT_INSN (b1))
3830 /* Get to a candidate branch insn. */
3831 if (GET_CODE (b1) != JUMP_INSN
3832 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
/* Reset per-candidate state: no registers modified, no pending pairs. */
3835 memset (modified_regs, 0, max_reg * sizeof (char));
3838 memcpy (same_regs, all_reset, max_reg * sizeof (int));
3841 label = JUMP_LABEL (b1);
3843 /* Look for a branch after the target. Record any registers and
3844 memory modified between the target and the branch. Stop when we
3845 get to a label since we can't know what was changed there. */
3846 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
3848 if (GET_CODE (b2) == CODE_LABEL)
3851 else if (GET_CODE (b2) == JUMP_INSN)
3853 /* If this is an unconditional jump and is the only use of
3854 its target label, we can follow it. */
3855 if (any_uncondjump_p (b2)
3857 && JUMP_LABEL (b2) != 0
3858 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
3860 b2 = JUMP_LABEL (b2);
3867 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
/* A call clobbers every call-used, non-fixed hard register (except the
   various frame/stack/arg pointers); record them all as modified. */
3870 if (GET_CODE (b2) == CALL_INSN)
3873 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3874 if (call_used_regs[i] && ! fixed_regs[i]
3875 && i != STACK_POINTER_REGNUM
3876 && i != FRAME_POINTER_REGNUM
3877 && i != HARD_FRAME_POINTER_REGNUM
3878 && i != ARG_POINTER_REGNUM)
3879 modified_regs[i] = 1;
3882 note_stores (PATTERN (b2), mark_modified_reg, NULL);
3885 /* Check the next candidate branch insn from the label
3888 || GET_CODE (b2) != JUMP_INSN
3890 || !any_condjump_p (b2)
3891 || !onlyjump_p (b2))
3896 /* Get the comparison codes and operands, reversing the
3897 codes if appropriate. If we don't have comparison codes,
3898 we can't do anything. */
3899 b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
3900 b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
3901 code1 = GET_CODE (XEXP (SET_SRC (set), 0));
3902 reversed_code1 = code1;
3903 if (XEXP (SET_SRC (set), 1) == pc_rtx)
3904 code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
3906 reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
3908 b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
3909 b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
3910 code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
3911 reversed_code2 = code2;
3912 if (XEXP (SET_SRC (set2), 1) == pc_rtx)
3913 code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
3915 reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
3917 /* If they test the same things and knowing that B1 branches
3918 tells us whether or not B2 branches, check if we
3919 can thread the branch. */
3920 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
3921 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
3922 && (comparison_dominates_p (code1, code2)
3923 || comparison_dominates_p (code1, reversed_code2)))
/* Walk both paths backwards in lock-step, comparing insns. */
3926 t1 = prev_nonnote_insn (b1);
3927 t2 = prev_nonnote_insn (b2);
3929 while (t1 != 0 && t2 != 0)
3933 /* We have reached the target of the first branch.
3934 If there are no pending register equivalents,
3935 we know that this branch will either always
3936 succeed (if the senses of the two branches are
3937 the same) or always fail (if not). */
3940 if (num_same_regs != 0)
3943 if (comparison_dominates_p (code1, code2))
3944 new_label = JUMP_LABEL (b2);
3946 new_label = get_label_after (b2);
3948 if (JUMP_LABEL (b1) != new_label)
3950 rtx prev = PREV_INSN (new_label);
3952 if (flag_before_loop
3953 && GET_CODE (prev) == NOTE
3954 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
3956 /* Don't thread to the loop label. If a loop
3957 label is reused, loop optimization will
3958 be disabled for that loop. */
3959 new_label = gen_label_rtx ();
3960 emit_label_after (new_label, PREV_INSN (prev));
3962 changed |= redirect_jump (b1, new_label, 1);
3967 /* If either of these is not a normal insn (it might be
3968 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
3969 have already been skipped above.) Similarly, fail
3970 if the insns are different. */
3971 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
3972 || recog_memoized (t1) != recog_memoized (t2)
3973 || ! rtx_equal_for_thread_p (PATTERN (t1),
3977 t1 = prev_nonnote_insn (t1);
3978 t2 = prev_nonnote_insn (t2);
3985 free (modified_regs);
3990 /* This is like RTX_EQUAL_P except that it knows about our handling of
3991 possibly equivalent registers and knows to consider volatile and
3992 modified objects as not equal.
3994 YINSN is the insn containing Y. */
3997 rtx_equal_for_thread_p (x, y, yinsn)
4003 register enum rtx_code code;
4004 register const char *fmt;
4006 code = GET_CODE (x);
4007 /* Rtx's of different codes cannot be equal. */
4008 if (code != GET_CODE (y))
4011 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
4012 (REG:SI x) and (REG:HI x) are NOT equivalent. */
4014 if (GET_MODE (x) != GET_MODE (y))
4017 /* For floating-point, consider everything unequal. This is a bit
4018 pessimistic, but this pass would only rarely do anything for FP
4020 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
4021 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
4024 /* For commutative operations, the RTX match if the operand match in any
4025 order. Also handle the simple binary and unary cases without a loop. */
4026 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4027 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4028 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
4029 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
4030 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
4031 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4032 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4033 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
4034 else if (GET_RTX_CLASS (code) == '1')
4035 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4037 /* Handle special-cases first. */
4041 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
4044 /* If neither is user variable or hard register, check for possible
4046 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
4047 || REGNO (x) < FIRST_PSEUDO_REGISTER
4048 || REGNO (y) < FIRST_PSEUDO_REGISTER)
4051 if (same_regs[REGNO (x)] == -1)
4053 same_regs[REGNO (x)] = REGNO (y);
4056 /* If this is the first time we are seeing a register on the `Y'
4057 side, see if it is the last use. If not, we can't thread the
4058 jump, so mark it as not equivalent. */
4059 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
4065 return (same_regs[REGNO (x)] == (int) REGNO (y));
4070 /* If memory modified or either volatile, not equivalent.
4071 Else, check address. */
4072 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4075 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4078 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4084 /* Cancel a pending `same_regs' if setting equivalenced registers.
4085 Then process source. */
4086 if (GET_CODE (SET_DEST (x)) == REG
4087 && GET_CODE (SET_DEST (y)) == REG)
4089 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
4091 same_regs[REGNO (SET_DEST (x))] = -1;
4094 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
4099 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
4103 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
4106 return XEXP (x, 0) == XEXP (y, 0);
4109 return XSTR (x, 0) == XSTR (y, 0);
4118 fmt = GET_RTX_FORMAT (code);
4119 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4124 if (XWINT (x, i) != XWINT (y, i))
4130 if (XINT (x, i) != XINT (y, i))
4136 /* Two vectors must have the same length. */
4137 if (XVECLEN (x, i) != XVECLEN (y, i))
4140 /* And the corresponding elements must match. */
4141 for (j = 0; j < XVECLEN (x, i); j++)
4142 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
4143 XVECEXP (y, i, j), yinsn) == 0)
4148 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
4154 if (strcmp (XSTR (x, i), XSTR (y, i)))
4159 /* These are just backpointers, so they don't matter. */
4166 /* It is believed that rtx's at this level will never
4167 contain anything but integers and other rtx's,
4168 except for within LABEL_REFs and SYMBOL_REFs. */