1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
59 #include "hard-reg-set.h"
61 #include "insn-config.h"
62 #include "insn-attr.h"
70 /* ??? Eventually must record somehow the labels used by jumps
71 from nested functions. */
72 /* Pre-record the next or previous real insn for each label?
73 No, this pass is very fast anyway. */
74 /* Condense consecutive labels?
75 This would make life analysis faster, maybe. */
76 /* Optimize jump y; x: ... y: jumpif... x?
77 Don't know if it is worth bothering with. */
78 /* Optimize two cases of conditional jump to conditional jump?
79 This can never delete any instruction or make anything dead,
80 or even change what is live at any point.
81 So perhaps let combiner do it. */
83 /* Vector indexed by uid.
84 For each CODE_LABEL, index by its uid to get first unconditional jump
85 that jumps to the label.
86 For each JUMP_INSN, index by its uid to get the next unconditional jump
87 that jumps to the same label.
88 Element 0 is the start of a chain of all return insns.
89 (It is safe to use element 0 because insn uid 0 is not used.) */
91 static rtx *jump_chain;
93 /* Maximum index in jump_chain. */
95 static int max_jump_chain;
97 /* Indicates whether death notes are significant in cross jump analysis.
98 Normally they are not significant, because if A and B jump to C,
99 and R dies in A, it must die in B. But this might not be true after
100 stack register conversion, and we must compare death notes in that
   case.  */
103 static int cross_jump_death_matters = 0;
/* Forward declarations for the static helpers defined later in this
   file (PARAMS is the pre-ISO prototype-compatibility macro).  */
105 static int init_label_info PARAMS ((rtx));
106 static void delete_barrier_successors PARAMS ((rtx));
107 static void mark_all_labels PARAMS ((rtx, int));
108 static rtx delete_unreferenced_labels PARAMS ((rtx));
109 static void delete_noop_moves PARAMS ((rtx));
110 static int duplicate_loop_exit_test PARAMS ((rtx));
111 static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
112 static void do_cross_jump PARAMS ((rtx, rtx, rtx));
113 static int jump_back_p PARAMS ((rtx, rtx));
114 static int tension_vector_labels PARAMS ((rtx, int));
115 static void delete_computation PARAMS ((rtx));
116 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
117 static int redirect_exp PARAMS ((rtx, rtx, rtx));
118 static void invert_exp_1 PARAMS ((rtx));
119 static int invert_exp PARAMS ((rtx));
120 static void delete_from_jump_chain PARAMS ((rtx));
121 static int delete_labelref_insn PARAMS ((rtx, rtx, int));
122 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
123 static void redirect_tablejump PARAMS ((rtx, rtx));
124 static void jump_optimize_1 PARAMS ((rtx, int, int, int, int, int));
125 static int returnjump_p_1 PARAMS ((rtx *, void *));
126 static void delete_prior_computation PARAMS ((rtx, rtx));
128 /* Main external entry point into the jump optimizer. See comments before
129 jump_optimize_1 for descriptions of the arguments. */
131 jump_optimize (f, cross_jump, noop_moves, after_regscan)
/* NOTE(review): the K&R parameter declarations and the body braces of
   this definition are missing from this excerpt; comments only added.
   Delegates to the common worker with mark_labels_only == 0 and
   minimal == 0, i.e. the full set of jump optimizations.  */
137 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
140 /* Alternate entry into the jump optimizer. This entry point only rebuilds
141 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   insns.  */
144 rebuild_jump_labels (f)
/* mark_labels_only == 1: only label bookkeeping is redone; no code is
   changed by jump_optimize_1 in this mode.  */
147 jump_optimize_1 (f, 0, 0, 0, 1, 0);
150 /* Alternate entry into the jump optimizer. Do only trivial optimizations. */
153 jump_optimize_minimal (f)
/* minimal == 1 restricts jump_optimize_1 to the safe subset of
   transformations enumerated in its header comment.  */
156 jump_optimize_1 (f, 0, 0, 0, 0, 1);
159 /* Delete no-op jumps and optimize jumps to jumps
160 and jumps around jumps.
161 Delete unused labels and unreachable code.
163 If CROSS_JUMP is 1, detect matching code
164 before a jump and its destination and unify them.
165 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
167 If NOOP_MOVES is nonzero, delete no-op move insns.
169 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
170 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
172 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
173 and JUMP_LABEL field for jumping insns.
175 If `optimize' is zero, don't change any code,
176 just determine whether control drops off the end of the function.
177 This case occurs when we have -W and not -O.
178 It works because `delete_insn' checks the value of `optimize'
179 and refrains from actually deleting when that is 0.
181 If MINIMAL is nonzero, then we only perform trivial optimizations:
183 * Removal of unreachable code after BARRIERs.
184 * Removal of unreferenced CODE_LABELs.
185 * Removal of a jump to the next instruction.
186 * Removal of a conditional jump followed by an unconditional jump
187 to the same target as the conditional jump.
188 * Simplify a conditional jump around an unconditional jump.
189 * Simplify a jump to a jump.
190 * Delete extraneous line number notes.  */
194 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
195 mark_labels_only, minimal)
/* NOTE(review): this excerpt omits many original lines of this function
   (K&R parameter declarations, braces, and several statements), so the
   text below is not compilable as-is.  Only comments were added.  */
200 int mark_labels_only;
203 register rtx insn, next;
210 enum rtx_code reversed_code;
/* A second-style cross-jump pass must also compare death notes; see the
   comment above cross_jump_death_matters.  */
213 cross_jump_death_matters = (cross_jump == 2);
214 max_uid = init_label_info (f) + 1;
216 if (! mark_labels_only)
217 delete_barrier_successors (f);
219 /* Leave some extra room for labels and duplicate exit test insns
   we may create.  */
221 max_jump_chain = max_uid * 14 / 10;
222 jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));
224 mark_all_labels (f, cross_jump);
226 /* Keep track of labels used from static data; we don't track them
227 closely enough to delete them here, so make sure their reference
228 count doesn't drop to zero. */
230 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
231 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
232 LABEL_NUSES (XEXP (insn, 0))++;
234 /* Keep track of labels used for marking handlers for exception
235 regions; they cannot usually be deleted. */
237 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
238 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
239 LABEL_NUSES (XEXP (insn, 0))++;
241 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
242 notes and recompute LABEL_NUSES. */
243 if (mark_labels_only)
246 last_insn = delete_unreferenced_labels (f);
249 delete_noop_moves (f);
251 /* Now iterate optimizing jumps until nothing changes over one pass. */
253 old_max_reg = max_reg_num ();
258 for (insn = f; insn; insn = next)
261 rtx temp, temp1, temp2 = NULL_RTX;
262 rtx temp4 ATTRIBUTE_UNUSED;
264 int this_is_any_uncondjump;
265 int this_is_any_condjump;
266 int this_is_onlyjump;
268 next = NEXT_INSN (insn);
270 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
271 jump. Try to optimize by duplicating the loop exit test if so.
272 This is only safe immediately after regscan, because it uses
273 the values of regno_first_uid and regno_last_uid. */
274 if (after_regscan && GET_CODE (insn) == NOTE
275 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
276 && (temp1 = next_nonnote_insn (insn)) != 0
277 && any_uncondjump_p (temp1)
278 && onlyjump_p (temp1))
280 temp = PREV_INSN (insn);
281 if (duplicate_loop_exit_test (insn))
284 next = NEXT_INSN (temp);
/* From here on only JUMP_INSNs are of interest.  */
289 if (GET_CODE (insn) != JUMP_INSN)
292 this_is_any_condjump = any_condjump_p (insn);
293 this_is_any_uncondjump = any_uncondjump_p (insn);
294 this_is_onlyjump = onlyjump_p (insn);
296 /* Tension the labels in dispatch tables. */
298 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
299 changed |= tension_vector_labels (PATTERN (insn), 0);
300 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
301 changed |= tension_vector_labels (PATTERN (insn), 1);
303 /* See if this jump goes to another jump and redirect if so. */
304 nlabel = follow_jumps (JUMP_LABEL (insn));
305 if (nlabel != JUMP_LABEL (insn))
306 changed |= redirect_jump (insn, nlabel, 1);
/* The remaining transformations are skipped at -O0 and in minimal
   mode; only the label-tensioning above is always done.  */
308 if (! optimize || minimal)
311 /* If a dispatch table always goes to the same place,
312 get rid of it and replace the insn that uses it. */
314 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
315 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
318 rtx pat = PATTERN (insn);
319 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
320 int len = XVECLEN (pat, diff_vec_p);
321 rtx dispatch = prev_real_insn (insn);
/* Check every table entry against the first one.  */
324 for (i = 0; i < len; i++)
325 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
326 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
331 && GET_CODE (dispatch) == JUMP_INSN
332 && JUMP_LABEL (dispatch) != 0
333 /* Don't mess with a casesi insn.
334 XXX according to the comment before computed_jump_p(),
335 all casesi insns should be a parallel of the jump
336 and a USE of a LABEL_REF. */
337 && ! ((set = single_set (dispatch)) != NULL
338 && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
339 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
341 redirect_tablejump (dispatch,
342 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
347 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
349 /* Detect jump to following insn. */
350 if (reallabelprev == insn
351 && (this_is_any_condjump || this_is_any_uncondjump)
354 next = next_real_insn (JUMP_LABEL (insn));
357 /* Remove the "inactive" but "real" insns (i.e. uses and
358 clobbers) in between here and there. */
360 while ((temp = next_real_insn (temp)) != next)
367 /* Detect a conditional jump going to the same place
368 as an immediately following unconditional jump. */
369 else if (this_is_any_condjump && this_is_onlyjump
370 && (temp = next_active_insn (insn)) != 0
371 && simplejump_p (temp)
372 && (next_active_insn (JUMP_LABEL (insn))
373 == next_active_insn (JUMP_LABEL (temp))))
375 /* Don't mess up test coverage analysis. */
377 if (flag_test_coverage && !reload_completed)
378 for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
379 if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
384 /* Ensure that we jump to the later of the two labels.
395 If we leave the goto L1, we'll incorrectly leave
396 return-reg dead for TEST true. */
398 temp2 = next_active_insn (JUMP_LABEL (insn));
400 temp2 = get_last_insn ();
401 if (GET_CODE (temp2) != CODE_LABEL)
402 temp2 = prev_label (temp2);
403 if (temp2 != JUMP_LABEL (temp))
404 redirect_jump (temp, temp2, 1);
412 /* Detect a conditional jump jumping over an unconditional jump. */
414 else if (this_is_any_condjump
415 && reallabelprev != 0
416 && GET_CODE (reallabelprev) == JUMP_INSN
417 && prev_active_insn (reallabelprev) == insn
418 && no_labels_between_p (insn, reallabelprev)
419 && any_uncondjump_p (reallabelprev)
420 && onlyjump_p (reallabelprev))
422 /* When we invert the unconditional jump, we will be
423 decrementing the usage count of its old label.
424 Make sure that we don't delete it now because that
425 might cause the following code to be deleted. */
426 rtx prev_uses = prev_nonnote_insn (reallabelprev);
427 rtx prev_label = JUMP_LABEL (insn);
430 ++LABEL_NUSES (prev_label);
432 if (invert_jump (insn, JUMP_LABEL (reallabelprev), 1))
434 /* It is very likely that if there are USE insns before
435 this jump, they hold REG_DEAD notes. These REG_DEAD
436 notes are no longer valid due to this optimization,
437 and will cause the life-analysis that following passes
438 (notably delayed-branch scheduling) to think that
439 these registers are dead when they are not.
441 To prevent this trouble, we just remove the USE insns
442 from the insn chain. */
444 while (prev_uses && GET_CODE (prev_uses) == INSN
445 && GET_CODE (PATTERN (prev_uses)) == USE)
447 rtx useless = prev_uses;
448 prev_uses = prev_nonnote_insn (prev_uses);
449 delete_insn (useless);
452 delete_insn (reallabelprev);
456 /* We can now safely delete the label if it is unreferenced
457 since the delete_insn above has deleted the BARRIER. */
458 if (prev_label && --LABEL_NUSES (prev_label) == 0)
459 delete_insn (prev_label);
461 next = NEXT_INSN (insn);
464 /* If we have an unconditional jump preceded by a USE, try to put
465 the USE before the target and jump there. This simplifies many
466 of the optimizations below since we don't have to worry about
467 dealing with these USE insns. We only do this if the label
468 being branch to already has the identical USE or if code
469 never falls through to that label. */
471 else if (this_is_any_uncondjump
472 && (temp = prev_nonnote_insn (insn)) != 0
473 && GET_CODE (temp) == INSN
474 && GET_CODE (PATTERN (temp)) == USE
475 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
476 && (GET_CODE (temp1) == BARRIER
477 || (GET_CODE (temp1) == INSN
478 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
479 /* Don't do this optimization if we have a loop containing
480 only the USE instruction, and the loop start label has
481 a usage count of 1. This is because we will redo this
482 optimization everytime through the outer loop, and jump
483 opt will never exit. */
484 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
485 && temp2 == JUMP_LABEL (insn)
486 && LABEL_NUSES (temp2) == 1))
488 if (GET_CODE (temp1) == BARRIER)
490 emit_insn_after (PATTERN (temp), temp1);
491 temp1 = NEXT_INSN (temp1);
495 redirect_jump (insn, get_label_before (temp1), 1);
496 reallabelprev = prev_real_insn (temp1);
498 next = NEXT_INSN (insn);
502 /* Detect a conditional jump jumping over an unconditional trap. */
504 && this_is_any_condjump && this_is_onlyjump
505 && reallabelprev != 0
506 && GET_CODE (reallabelprev) == INSN
507 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
508 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
509 && prev_active_insn (reallabelprev) == insn
510 && no_labels_between_p (insn, reallabelprev)
511 && (temp2 = get_condition (insn, &temp4))
512 && ((reversed_code = reversed_comparison_code (temp2, insn))
515 rtx new = gen_cond_trap (reversed_code,
516 XEXP (temp2, 0), XEXP (temp2, 1),
517 TRAP_CODE (PATTERN (reallabelprev)));
521 emit_insn_before (new, temp4);
522 delete_insn (reallabelprev);
528 /* Detect a jump jumping to an unconditional trap. */
529 else if (HAVE_trap && this_is_onlyjump
530 && (temp = next_active_insn (JUMP_LABEL (insn)))
531 && GET_CODE (temp) == INSN
532 && GET_CODE (PATTERN (temp)) == TRAP_IF
533 && (this_is_any_uncondjump
534 || (this_is_any_condjump
535 && (temp2 = get_condition (insn, &temp4)))))
537 rtx tc = TRAP_CONDITION (PATTERN (temp));
539 if (tc == const_true_rtx
540 || (! this_is_any_uncondjump && rtx_equal_p (temp2, tc)))
543 /* Replace an unconditional jump to a trap with a trap. */
544 if (this_is_any_uncondjump)
546 emit_barrier_after (emit_insn_before (gen_trap (), insn));
551 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
553 TRAP_CODE (PATTERN (temp)));
556 emit_insn_before (new, temp4);
562 /* If the trap condition and jump condition are mutually
563 exclusive, redirect the jump to the following insn. */
564 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
565 && this_is_any_condjump
566 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
567 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
568 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
569 && redirect_jump (insn, get_label_after (temp), 1))
578 /* Now that the jump has been tensioned,
579 try cross jumping: check for identical code
580 before the jump and before its target label. */
582 /* First, cross jumping of conditional jumps: */
584 if (cross_jump && condjump_p (insn))
586 rtx newjpos, newlpos;
587 rtx x = prev_real_insn (JUMP_LABEL (insn));
589 /* A conditional jump may be crossjumped
590 only if the place it jumps to follows
591 an opposing jump that comes back here. */
593 if (x != 0 && ! jump_back_p (x, insn))
594 /* We have no opposing jump;
595 cannot cross jump this insn. */
599 /* TARGET is nonzero if it is ok to cross jump
600 to code before TARGET. If so, see if matches. */
602 find_cross_jump (insn, x, 2,
607 do_cross_jump (insn, newjpos, newlpos);
608 /* Make the old conditional jump
609 into an unconditional one. */
610 SET_SRC (PATTERN (insn))
611 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
612 INSN_CODE (insn) = -1;
613 emit_barrier_after (insn);
614 /* Add to jump_chain unless this is a new label
615 whose UID is too large. */
616 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
618 jump_chain[INSN_UID (insn)]
619 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
620 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
627 /* Cross jumping of unconditional jumps:
628 a few differences. */
630 if (cross_jump && simplejump_p (insn))
632 rtx newjpos, newlpos;
637 /* TARGET is nonzero if it is ok to cross jump
638 to code before TARGET. If so, see if matches. */
639 find_cross_jump (insn, JUMP_LABEL (insn), 1,
642 /* If cannot cross jump to code before the label,
643 see if we can cross jump to another jump to
645 /* Try each other jump to this label. */
646 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
647 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
648 target != 0 && newjpos == 0;
649 target = jump_chain[INSN_UID (target)])
651 && JUMP_LABEL (target) == JUMP_LABEL (insn)
652 /* Ignore TARGET if it's deleted. */
653 && ! INSN_DELETED_P (target))
654 find_cross_jump (insn, target, 2,
659 do_cross_jump (insn, newjpos, newlpos);
665 /* This code was dead in the previous jump.c! */
666 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
668 /* Return insns all "jump to the same place"
669 so we can cross-jump between any two of them. */
671 rtx newjpos, newlpos, target;
675 /* If cannot cross jump to code before the label,
676 see if we can cross jump to another jump to
678 /* Try each other jump to this label. */
679 for (target = jump_chain[0];
680 target != 0 && newjpos == 0;
681 target = jump_chain[INSN_UID (target)])
683 && ! INSN_DELETED_P (target)
684 && GET_CODE (PATTERN (target)) == RETURN)
685 find_cross_jump (insn, target, 2,
690 do_cross_jump (insn, newjpos, newlpos);
701 /* Delete extraneous line number notes.
702 Note that two consecutive notes for different lines are not really
703 extraneous. There should be some indication where that line belonged,
704 even if it became empty. */
709 for (insn = f; insn; insn = NEXT_INSN (insn))
710 if (GET_CODE (insn) == NOTE)
712 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
713 /* Any previous line note was for the prologue; gdb wants a new
714 note after the prologue even if it is for the same line. */
715 last_note = NULL_RTX;
716 else if (NOTE_LINE_NUMBER (insn) >= 0)
718 /* Delete this note if it is identical to previous note. */
720 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
721 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
738 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
739 notes whose labels don't occur in the insn any more. Returns the
740 largest INSN_UID found. */
/* NOTE(review): the function header line and braces of init_label_info
   are elided from this excerpt; comments only added below.  */
748 for (insn = f; insn; insn = NEXT_INSN (insn))
/* Labels start with a use count of 0 unless they must be preserved.  */
750 if (GET_CODE (insn) == CODE_LABEL)
751 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0)\u003b
752 else if (GET_CODE (insn) == JUMP_INSN)
753 JUMP_LABEL (insn) = 0;
754 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
758 for (note = REG_NOTES (insn); note; note = next)
760 next = XEXP (note, 1);
/* Drop stale REG_LABEL notes whose label is no longer referenced
   by the insn pattern.  */
761 if (REG_NOTE_KIND (note) == REG_LABEL
762 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
763 remove_note (insn, note);
766 if (INSN_UID (insn) > largest_uid)
767 largest_uid = INSN_UID (insn);
773 /* Delete insns following barriers, up to next label.
775 Also delete no-op jumps created by gcse. */
778 delete_barrier_successors (f)
784 for (insn = f; insn;)
786 if (GET_CODE (insn) == BARRIER)
788 insn = NEXT_INSN (insn);
790 never_reached_warning (insn);
/* Everything between a BARRIER and the next CODE_LABEL is
   unreachable; delete it (except NOTE_INSN_FUNCTION_END notes,
   which are skipped over instead).  */
792 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
794 if (GET_CODE (insn) == NOTE
795 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
796 insn = NEXT_INSN (insn);
798 insn = delete_insn (insn);
800 /* INSN is now the code_label. */
803 /* Also remove (set (pc) (pc)) insns which can be created by
804 gcse. We eliminate such insns now to avoid having them
805 cause problems later. */
806 else if (GET_CODE (insn) == JUMP_INSN
807 && (set = pc_set (insn)) != NULL
808 && SET_SRC (set) == pc_rtx
809 && SET_DEST (set) == pc_rtx
810 && onlyjump_p (insn))
811 insn = delete_insn (insn);
814 insn = NEXT_INSN (insn);
818 /* Mark the label each jump jumps to.
819 Combine consecutive labels, and count uses of labels.
821 For each label, make a chain (using `jump_chain')
822 of all the *unconditional* jumps that jump to it;
823 also make a chain of all returns.
825 CROSS_JUMP indicates whether we are doing cross jumping
826 and if we are whether we will be paying attention to
827 death notes or not. */
830 mark_all_labels (f, cross_jump)
836 for (insn = f; insn; insn = NEXT_INSN (insn))
/* A CALL_PLACEHOLDER holds up to three alternative insn sequences;
   recurse into each so their labels are marked too.  */
839 if (GET_CODE (insn) == CALL_INSN
840 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
842 mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
843 mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
844 mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);
848 mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
849 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
851 /* When we know the LABEL_REF contained in a REG used in
852 an indirect jump, we'll have a REG_LABEL note so that
853 flow can tell where it's going. */
854 if (JUMP_LABEL (insn) == 0)
856 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
859 /* Put a LABEL_REF around the REG_LABEL note, so
860 that we can canonicalize it. */
861 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
862 XEXP (label_note, 0));
864 mark_jump_label (label_ref, insn, cross_jump, 0);
865 XEXP (label_note, 0) = XEXP (label_ref, 0);
866 JUMP_LABEL (insn) = XEXP (label_note, 0);
/* Chain unconditional jumps by the label they target
   (see the comment above jump_chain).  */
869 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
871 jump_chain[INSN_UID (insn)]
872 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
873 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
/* Return insns are chained through element 0.  */
875 if (GET_CODE (PATTERN (insn)) == RETURN)
877 jump_chain[INSN_UID (insn)] = jump_chain[0];
878 jump_chain[0] = insn;
884 /* Delete all labels already not referenced.
885 Also find and return the last insn. */
888 delete_unreferenced_labels (f)
891 rtx final = NULL_RTX;
894 for (insn = f; insn;)
/* Labels with an alternate name must be kept even when unused.  */
896 if (GET_CODE (insn) == CODE_LABEL
897 && LABEL_NUSES (insn) == 0
898 && LABEL_ALTERNATE_NAME (insn) == NULL)
899 insn = delete_insn (insn);
903 insn = NEXT_INSN (insn);
910 /* Delete various simple forms of moves which have no necessary
   effect.  */
914 delete_noop_moves (f)
/* NOTE(review): several original lines of this function (braces,
   declarations such as in_insn/trial, and some statements) are elided
   from this excerpt; comments only added below.  */
919 for (insn = f; insn;)
921 next = NEXT_INSN (insn);
923 if (GET_CODE (insn) == INSN)
925 register rtx body = PATTERN (insn);
927 /* Detect and delete no-op move instructions
928 resulting from not allocating a parameter in a register. */
930 if (GET_CODE (body) == SET && set_noop_p (body))
931 delete_computation (insn);
933 /* Detect and ignore no-op move instructions
934 resulting from smart or fortuitous register allocation. */
936 else if (GET_CODE (body) == SET)
938 int sreg = true_regnum (SET_SRC (body));
939 int dreg = true_regnum (SET_DEST (body));
941 if (sreg == dreg && sreg >= 0)
943 else if (sreg >= 0 && dreg >= 0)
/* Source and destination are distinct registers; see if DREG
   already holds the same value so the move is redundant.  */
946 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
947 sreg, NULL_PTR, dreg,
948 GET_MODE (SET_SRC (body)));
951 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
953 /* DREG may have been the target of a REG_DEAD note in
954 the insn which makes INSN redundant. If so, reorg
955 would still think it is dead. So search for such a
956 note and delete it if we find it. */
957 if (! find_regno_note (insn, REG_UNUSED, dreg))
958 for (trial = prev_nonnote_insn (insn);
959 trial && GET_CODE (trial) != CODE_LABEL;
960 trial = prev_nonnote_insn (trial))
961 if (find_regno_note (trial, REG_DEAD, dreg))
963 remove_death (dreg, trial);
967 /* Deleting insn could lose a death-note for SREG. */
968 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
970 /* Change this into a USE so that we won't emit
971 code for it, but still can keep the note. */
973 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
974 INSN_CODE (insn) = -1;
975 /* Remove all reg notes but the REG_DEAD one. */
976 REG_NOTES (insn) = trial;
977 XEXP (trial, 1) = NULL_RTX;
983 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
984 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
986 GET_MODE (SET_DEST (body))))
988 /* This handles the case where we have two consecutive
989 assignments of the same constant to pseudos that didn't
990 get a hard reg. Each SET from the constant will be
991 converted into a SET of the spill register and an
992 output reload will be made following it. This produces
993 two loads of the same constant into the same spill
   register.  */
998 /* Look back for a death note for the first reg.
999 If there is one, it is no longer accurate. */
1000 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
1002 if ((GET_CODE (in_insn) == INSN
1003 || GET_CODE (in_insn) == JUMP_INSN)
1004 && find_regno_note (in_insn, REG_DEAD, dreg))
1006 remove_death (dreg, in_insn);
1009 in_insn = PREV_INSN (in_insn);
1012 /* Delete the second load of the value. */
1016 else if (GET_CODE (body) == PARALLEL)
1018 /* If each part is a set between two identical registers or
1019 a USE or CLOBBER, delete the insn. */
1023 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1025 tem = XVECEXP (body, 0, i);
1026 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
1029 if (GET_CODE (tem) != SET
1030 || (sreg = true_regnum (SET_SRC (tem))) < 0
1031 || (dreg = true_regnum (SET_DEST (tem))) < 0
1044 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
1045 jump. Assume that this unconditional jump is to the exit test code. If
1046 the code is sufficiently simple, make a copy of it before INSN,
1047 followed by a jump to the exit of the loop. Then delete the unconditional
1050 Return 1 if we made the change, else 0.
1052 This is only safe immediately after a regscan pass because it uses the
1053 values of regno_first_uid and regno_last_uid. */
1056 duplicate_loop_exit_test (loop_start)
1059 rtx insn, set, reg, p, link;
1060 rtx copy = 0, first_copy = 0;
1062 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
1064 int max_reg = max_reg_num ();
1067 /* Scan the exit code. We do not perform this optimization if any insn:
1071 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
1072 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
1073 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
1076 We also do not do this if we find an insn with ASM_OPERANDS. While
1077 this restriction should not be necessary, copying an insn with
1078 ASM_OPERANDS can confuse asm_noperands in some cases.
1080 Also, don't do this if the exit code is more than 20 insns. */
1082 for (insn = exitcode;
1084 && ! (GET_CODE (insn) == NOTE
1085 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
1086 insn = NEXT_INSN (insn))
1088 switch (GET_CODE (insn))
1094 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
1095 a jump immediately after the loop start that branches outside
1096 the loop but within an outer loop, near the exit test.
1097 If we copied this exit test and created a phony
1098 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
1099 before the exit test look like these could be safely moved
1100 out of the loop even if they actually may be never executed.
1101 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
1103 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1104 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
1108 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1109 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1110 /* If we were to duplicate this code, we would not move
1111 the BLOCK notes, and so debugging the moved code would
1112 be difficult. Thus, we only move the code with -O2 or
1119 /* The code below would grossly mishandle REG_WAS_0 notes,
1120 so get rid of them here. */
1121 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
1122 remove_note (insn, p);
1123 if (++num_insns > 20
1124 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1125 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
1133 /* Unless INSN is zero, we can do the optimization. */
1139 /* See if any insn sets a register only used in the loop exit code and
1140 not a user variable. If so, replace it with a new register. */
1141 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1142 if (GET_CODE (insn) == INSN
1143 && (set = single_set (insn)) != 0
1144 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
1145 || (GET_CODE (reg) == SUBREG
1146 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
1147 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1148 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
1150 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
1151 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
1156 /* We can do the replacement. Allocate reg_map if this is the
1157 first replacement we found. */
1159 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
1161 REG_LOOP_TEST_P (reg) = 1;
1163 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
1167 /* Now copy each insn. */
1168 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1170 switch (GET_CODE (insn))
1173 copy = emit_barrier_before (loop_start);
1176 /* Only copy line-number notes. */
1177 if (NOTE_LINE_NUMBER (insn) >= 0)
1179 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
1180 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
1185 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
1187 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1189 mark_jump_label (PATTERN (copy), copy, 0, 0);
1191 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
1193 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1194 if (REG_NOTE_KIND (link) != REG_LABEL)
1196 if (GET_CODE (link) == EXPR_LIST)
1198 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
1203 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
1208 if (reg_map && REG_NOTES (copy))
1209 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1213 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
1216 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1217 mark_jump_label (PATTERN (copy), copy, 0, 0);
1218 if (REG_NOTES (insn))
1220 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
1222 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1225 /* If this is a simple jump, add it to the jump chain. */
1227 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
1228 && simplejump_p (copy))
1230 jump_chain[INSN_UID (copy)]
1231 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1232 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1240 /* Record the first insn we copied. We need it so that we can
1241 scan the copied insns for new pseudo registers. */
1246 /* Now clean up by emitting a jump to the end label and deleting the jump
1247 at the start of the loop. */
1248 if (! copy || GET_CODE (copy) != BARRIER)
1250 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
1253 /* Record the first insn we copied. We need it so that we can
1254 scan the copied insns for new pseudo registers. This may not
1255 be strictly necessary since we should have copied at least one
1256 insn above. But I am going to be safe. */
1260 mark_jump_label (PATTERN (copy), copy, 0, 0);
1261 if (INSN_UID (copy) < max_jump_chain
1262 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
1264 jump_chain[INSN_UID (copy)]
1265 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1266 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1268 emit_barrier_before (loop_start);
1271 /* Now scan from the first insn we copied to the last insn we copied
1272 (copy) for new pseudo registers. Do this after the code to jump to
1273 the end label since that might create a new pseudo too. */
1274 reg_scan_update (first_copy, copy, max_reg);
1276 /* Mark the exit code as the virtual top of the converted loop. */
1277 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
1279 delete_insn (next_nonnote_insn (loop_start));
1288 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
1289 notes between START and END out before START. Assume that END is not
1290 such a note. START may be such a note. Returns the value of the new
1291 starting insn, which may be different if the original start was such a
1295 squeeze_notes (start, end)
/* NOTE(review): several lines are elided in this listing (declarations,
   braces, and the final return).  The visible loop walks [START, END) and
   splices qualifying NOTE insns out of their position, relinking them just
   before START.  */
1301 for (insn = start; insn != end; insn = next)
1303 next = NEXT_INSN (insn);
1304 if (GET_CODE (insn) == NOTE
1305 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
1306 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1307 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1308 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
1309 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
1310 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
/* Unlink the note from its current place in the chain and re-insert it
   immediately before START, patching both neighbours on each side.  */
1316 rtx prev = PREV_INSN (insn);
1317 PREV_INSN (insn) = PREV_INSN (start);
1318 NEXT_INSN (insn) = start;
1319 NEXT_INSN (PREV_INSN (insn)) = insn;
1320 PREV_INSN (NEXT_INSN (insn)) = insn;
1321 NEXT_INSN (prev) = next;
1322 PREV_INSN (next) = prev;
1330 /* Compare the instructions before insn E1 with those before E2
1331 to find an opportunity for cross jumping.
1332 (This means detecting identical sequences of insns followed by
1333 jumps to the same place, or followed by a label and a jump
1334 to that label, and replacing one with a jump to the other.)
1336 Assume E1 is a jump that jumps to label E2
1337 (that is not always true but it might as well be).
1338 Find the longest possible equivalent sequences
1339 and store the first insns of those sequences into *F1 and *F2.
1340 Store zero there if no equivalent preceding instructions are found.
1342 We give up if we find a label in stream 1.
1343 Actually we could transfer that label into stream 2. */
1346 find_cross_jump (e1, e2, minimum, f1, f2)
/* NOTE(review): this listing elides many lines of the function (the outer
   loop header, brace structure, and the `lose' flag handling among them).
   The visible code scans the two insn streams backwards in lock step,
   comparing patterns with rtx_renumbered_equal_p, and records in
   LAST1/LAST2 the earliest matched pair once MINIMUM matches are found.  */
1351 register rtx i1 = e1, i2 = e2;
1352 register rtx p1, p2;
1355 rtx last1 = 0, last2 = 0;
1356 rtx afterlast1 = 0, afterlast2 = 0;
/* Step both streams back one real insn; stream 2 also skips labels
   so that an existing label before I2 does not block matching.  */
1363 i1 = prev_nonnote_insn (i1);
1365 i2 = PREV_INSN (i2);
1366 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
1367 i2 = PREV_INSN (i2);
1372 /* Don't allow the range of insns preceding E1 or E2
1373 to include the other (E2 or E1). */
1374 if (i2 == e1 || i1 == e2)
1377 /* If we will get to this code by jumping, those jumps will be
1378 tensioned to go directly to the new label (before I2),
1379 so this cross-jumping won't cost extra. So reduce the minimum. */
1380 if (GET_CODE (i1) == CODE_LABEL)
1386 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
1392 /* If this is a CALL_INSN, compare register usage information.
1393 If we don't check this on stack register machines, the two
1394 CALL_INSNs might be merged leaving reg-stack.c with mismatching
1395 numbers of stack registers in the same basic block.
1396 If we don't check this on machines with delay slots, a delay slot may
1397 be filled that clobbers a parameter expected by the subroutine.
1399 ??? We take the simple route for now and assume that if they're
1400 equal, they were constructed identically. */
1402 if (GET_CODE (i1) == CALL_INSN
1403 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
1404 CALL_INSN_FUNCTION_USAGE (i2)))
1408 /* If cross_jump_death_matters is not 0, the insn's mode
1409 indicates whether or not the insn contains any stack-like
1412 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
1414 /* If register stack conversion has already been done, then
1415 death notes must also be compared before it is certain that
1416 the two instruction streams match. */
/* Collect the set of stack registers dying at each insn and require
   the two sets to be equal before treating the insns as matching.  */
1419 HARD_REG_SET i1_regset, i2_regset;
1421 CLEAR_HARD_REG_SET (i1_regset);
1422 CLEAR_HARD_REG_SET (i2_regset);
1424 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
1425 if (REG_NOTE_KIND (note) == REG_DEAD
1426 && STACK_REG_P (XEXP (note, 0)))
1427 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
1429 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
1430 if (REG_NOTE_KIND (note) == REG_DEAD
1431 && STACK_REG_P (XEXP (note, 0)))
1432 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
1434 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
1443 /* Don't allow old-style asm or volatile extended asms to be accepted
1444 for cross jumping purposes. It is conceptually correct to allow
1445 them, since cross-jumping preserves the dynamic instruction order
1446 even though it is changing the static instruction order. However,
1447 if an asm is being used to emit an assembler pseudo-op, such as
1448 the MIPS `.set reorder' pseudo-op, then the static instruction order
1449 matters and it must be preserved. */
1450 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
1451 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
1452 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
1455 if (lose || GET_CODE (p1) != GET_CODE (p2)
1456 || ! rtx_renumbered_equal_p (p1, p2))
1458 /* The following code helps take care of G++ cleanups. */
/* Patterns differ textually, but if both insns have equal constant
   REG_EQUAL/REG_EQUIV notes and matching destinations, try rewriting
   both sources to the shared constant so they become identical.  */
1462 if (!lose && GET_CODE (p1) == GET_CODE (p2)
1463 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
1464 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
1465 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
1466 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
1467 /* If the equivalences are not to a constant, they may
1468 reference pseudos that no longer exist, so we can't
1470 && CONSTANT_P (XEXP (equiv1, 0))
1471 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
1473 rtx s1 = single_set (i1);
1474 rtx s2 = single_set (i2);
1475 if (s1 != 0 && s2 != 0
1476 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
1478 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
1479 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
1480 if (! rtx_renumbered_equal_p (p1, p2))
1482 else if (apply_change_group ())
1487 /* Insns fail to match; cross jumping is limited to the following
1491 /* Don't allow the insn after a compare to be shared by
1492 cross-jumping unless the compare is also shared.
1493 Here, if either of these non-matching insns is a compare,
1494 exclude the following insn from possible cross-jumping. */
1495 if (sets_cc0_p (p1) || sets_cc0_p (p2))
1496 last1 = afterlast1, last2 = afterlast2, ++minimum;
1499 /* If cross-jumping here will feed a jump-around-jump
1500 optimization, this jump won't cost extra, so reduce
1502 if (GET_CODE (i1) == JUMP_INSN
1504 && prev_real_insn (JUMP_LABEL (i1)) == e1)
1510 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
1512 /* Ok, this insn is potentially includable in a cross-jump here. */
1513 afterlast1 = last1, afterlast2 = last2;
1514 last1 = i1, last2 = i2, --minimum;
/* Report the match only if it reached the required length and is
   non-empty; otherwise *F1/*F2 keep whatever the caller passed in
   (presumably zero -- the elided prologue is not visible here).  */
1518 if (minimum <= 0 && last1 != 0 && last1 != e1)
1519 *f1 = last1, *f2 = last2;
/* Apply a cross-jump found by find_cross_jump: redirect INSN to a label
   before NEWLPOS and delete the duplicate insns starting at NEWJPOS.  */
1523 do_cross_jump (insn, newjpos, newlpos)
1524 rtx insn, newjpos, newlpos;
1526 /* Find an existing label at this point
1527 or make a new one if there is none. */
1528 register rtx label = get_label_before (newlpos);
1530 /* Make the same jump insn jump to the new point. */
1531 if (GET_CODE (PATTERN (insn)) == RETURN)
1533 /* Remove from jump chain of returns. */
1534 delete_from_jump_chain (insn);
1535 /* Change the insn. */
1536 PATTERN (insn) = gen_jump (label);
1537 INSN_CODE (insn) = -1;
1538 JUMP_LABEL (insn) = label;
1539 LABEL_NUSES (label)++;
1540 /* Add to new the jump chain. */
1541 if (INSN_UID (label) < max_jump_chain
1542 && INSN_UID (insn) < max_jump_chain)
1544 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
1545 jump_chain[INSN_UID (label)] = insn;
/* Non-RETURN case (the `else' is elided in this listing): simply
   retarget the existing jump at LABEL.  */
1549 redirect_jump (insn, label, 1);
1551 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
1552 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
1553 the NEWJPOS stream. */
1555 while (newjpos != insn)
1559 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
1560 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
1561 || REG_NOTE_KIND (lnote) == REG_EQUIV)
1562 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
1563 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
1564 remove_note (newlpos, lnote);
1566 delete_insn (newjpos);
1567 newjpos = next_real_insn (newjpos);
1568 newlpos = next_real_insn (newlpos);
1572 /* Return the label before INSN, or put a new label there. */
1575 get_label_before (insn)
1580 /* Find an existing label at this point
1581 or make a new one if there is none. */
1582 label = prev_nonnote_insn (insn);
1584 if (label == 0 || GET_CODE (label) != CODE_LABEL)
1586 rtx prev = PREV_INSN (insn);
/* No label immediately precedes INSN: create a fresh one and emit it
   after PREV (i.e. directly before INSN), starting with zero uses.  */
1588 label = gen_label_rtx ();
1589 emit_label_after (label, prev);
1590 LABEL_NUSES (label) = 0;
1595 /* Return the label after INSN, or put a new label there. */
1598 get_label_after (insn)
1603 /* Find an existing label at this point
1604 or make a new one if there is none. */
1605 label = next_nonnote_insn (insn);
1607 if (label == 0 || GET_CODE (label) != CODE_LABEL)
/* No label immediately follows INSN: create a fresh one, emit it right
   after INSN, and start its use count at zero.  */
1609 label = gen_label_rtx ();
1610 emit_label_after (label, insn);
1611 LABEL_NUSES (label) = 0;
1616 /* Return 1 if INSN is a jump that jumps to right after TARGET
1617 only on the condition that TARGET itself would drop through.
1618 Assumes that TARGET is a conditional jump. */
1621 jump_back_p (insn, target)
1625 enum rtx_code codei, codet;
1628 if (! any_condjump_p (insn)
1629 || any_uncondjump_p (target)
1630 || target != prev_real_insn (JUMP_LABEL (insn)))
1632 set = pc_set (insn);
1633 tset = pc_set (target);
1635 cinsn = XEXP (SET_SRC (set), 0);
1636 ctarget = XEXP (SET_SRC (tset), 0);
/* CODEI/CODET start as the raw comparison codes of the two jump
   conditions; each is reversed below if its jump branches on the
   "else" arm (pc in the taken position).  */
1638 codei = GET_CODE (cinsn);
1639 codet = GET_CODE (ctarget);
1641 if (XEXP (SET_SRC (set), 1) == pc_rtx)
1643 codei = reversed_comparison_code (cinsn, insn);
1644 if (codei == UNKNOWN)
1648 if (XEXP (SET_SRC (tset), 2) == pc_rtx)
1650 codet = reversed_comparison_code (ctarget, target);
/* Fixed: test the code we just computed (CODET), not CODEI, which was
   already checked above and cannot be UNKNOWN here.  */
1651 if (codet == UNKNOWN)
1655 return (codei == codet
1656 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
1657 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
1660 /* Given a comparison (CODE ARG0 ARG1), inside a insn, INSN, return an code
1661 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
1662 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
1663 know whether it's source is floating point or integer comparison. Machine
1664 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
1665 to help this function avoid overhead in these cases. */
1667 reversed_comparison_code_parts (code, arg0, arg1, insn)
1668 rtx insn, arg0, arg1;
1671 enum machine_mode mode;
1673 /* If this is not actually a comparison, we can't reverse it. */
1674 if (GET_RTX_CLASS (code) != '<')
/* Prefer ARG0's mode; fall back to ARG1 when ARG0 is VOIDmode
   (e.g. a constant operand).  */
1677 mode = GET_MODE (arg0);
1678 if (mode == VOIDmode)
1679 mode = GET_MODE (arg1);
1681 /* First see if machine description supply us way to reverse the comparison.
1682 Give it priority over everything else to allow machine description to do
1684 #ifdef REVERSIBLE_CC_MODE
1685 if (GET_MODE_CLASS (mode) == MODE_CC
1686 && REVERSIBLE_CC_MODE (mode))
1688 #ifdef REVERSE_CONDITION
1689 return REVERSE_CONDITION (code, mode);
1691 return reverse_condition (code);
1695 /* Try few special cases based on the comparison code. */
/* NOTE(review): the switch statement and its case labels are elided in
   this listing; the comments below describe the groups of cases.  */
1704 /* It is always safe to reverse EQ and NE, even for the floating
1705 point. Similary the unsigned comparisons are never used for
1706 floating point so we can reverse them in the default way. */
1707 return reverse_condition (code);
1712 /* In case we already see unordered comparison, we can be sure to
1713 be dealing with floating point so we don't need any more tests. */
1714 return reverse_condition_maybe_unordered (code);
1719 /* We don't have safe way to reverse these yet. */
1725 /* In case we give up IEEE compatibility, all comparisons are reversible. */
1726 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
1727 || flag_unsafe_math_optimizations)
1728 return reverse_condition (code);
1730 if (GET_MODE_CLASS (mode) == MODE_CC
1737 /* Try to search for the comparison to determine the real mode.
1738 This code is expensive, but with sane machine description it
1739 will be never used, since REVERSIBLE_CC_MODE will return true
/* Walk backwards from INSN within the basic block (stopping at a
   CODE_LABEL) looking for the insn that set ARG0, to recover the mode
   of the actual comparison feeding the condition-code register.  */
1744 for (prev = prev_nonnote_insn (insn);
1745 prev != 0 && GET_CODE (prev) != CODE_LABEL;
1746 prev = prev_nonnote_insn (prev))
1748 rtx set = set_of (arg0, prev);
1749 if (set && GET_CODE (set) == SET
1750 && rtx_equal_p (SET_DEST (set), arg0))
1752 rtx src = SET_SRC (set);
1754 if (GET_CODE (src) == COMPARE)
1756 rtx comparison = src;
1757 arg0 = XEXP (src, 0);
1758 mode = GET_MODE (arg0);
1759 if (mode == VOIDmode)
1760 mode = GET_MODE (XEXP (comparison, 1));
1763 /* We can get past reg-reg moves. This may be usefull for model
1764 of i387 comparisons that first move flag registers around. */
1771 /* If register is clobbered in some ununderstandable way,
1778 /* An integer condition. */
1779 if (GET_CODE (arg0) == CONST_INT
1780 || (GET_MODE (arg0) != VOIDmode
1781 && GET_MODE_CLASS (mode) != MODE_CC
1782 && ! FLOAT_MODE_P (mode)))
1783 return reverse_condition (code);
1788 /* An wrapper around the previous function to take COMPARISON as rtx
1789 expression. This simplifies many callers. */
1791 reversed_comparison_code (comparison, insn)
1792 rtx comparison, insn;
/* Bail out (the elided line presumably returns UNKNOWN) if COMPARISON is
   not a comparison rtx at all; otherwise delegate to the _parts variant.  */
1794 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
1796 return reversed_comparison_code_parts (GET_CODE (comparison),
1797 XEXP (comparison, 0),
1798 XEXP (comparison, 1), insn);
1801 /* Given an rtx-code for a comparison, return the code for the negated
1802 comparison. If no such code exists, return UNKNOWN.
1804 WATCH OUT! reverse_condition is not safe to use on a jump that might
1805 be acting on the results of an IEEE floating point comparison, because
1806 of the special treatment of non-signaling nans in comparisons.
1807 Use reversed_comparison_code instead. */
1810 reverse_condition (code)
1853 /* Similar, but we're allowed to generate unordered comparisons, which
1854 makes it safe for IEEE floating-point. Of course, we have to recognize
1855 that the target will support them too... */
1858 reverse_condition_maybe_unordered (code)
1861 /* Non-IEEE formats don't have unordered conditions. */
1862 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
1863 return reverse_condition (code);
/* NOTE(review): the IEEE case (a switch mapping each code to its
   unordered-aware reverse) is elided in this listing.  */
1901 /* Similar, but return the code when two operands of a comparison are swapped.
1902 This IS safe for IEEE floating-point. */
1905 swap_condition (code)
1948 /* Given a comparison CODE, return the corresponding unsigned comparison.
1949 If CODE is an equality comparison or already an unsigned comparison,
1950 CODE is returned. */
1953 unsigned_condition (code)
1980 /* Similarly, return the signed version of a comparison. */
1983 signed_condition (code)
2010 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2011 truth of CODE1 implies the truth of CODE2. */
2014 comparison_dominates_p (code1, code2)
2015 enum rtx_code code1, code2;
2017 /* UNKNOWN comparison codes can happen as a result of trying to revert
2019 They can't match anything, so we have to reject them here. */
2020 if (code1 == UNKNOWN || code2 == UNKNOWN)
/* NOTE(review): the switch over CODE1 and its case labels are elided in
   this listing; each visible `if' lists the CODE2 values implied by one
   particular CODE1.  */
2029 if (code2 == UNLE || code2 == UNGE)
2034 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
2035 || code2 == ORDERED)
2040 if (code2 == UNLE || code2 == NE)
2045 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2050 if (code2 == UNGE || code2 == NE)
2055 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2061 if (code2 == ORDERED)
2066 if (code2 == NE || code2 == ORDERED)
2071 if (code2 == LEU || code2 == NE)
2076 if (code2 == GEU || code2 == NE)
2081 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
2082 || code2 == UNGE || code2 == UNGT)
2093 /* Return 1 if INSN is an unconditional jump and nothing else. */
2099 return (GET_CODE (insn) == JUMP_INSN
2100 && GET_CODE (PATTERN (insn)) == SET
2101 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2102 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2105 /* Return nonzero if INSN is a (possibly) conditional jump
2108 Use this function is deprecated, since we need to support combined
2109 branch and compare insns. Use any_condjump_p instead whenever possible. */
2115 register rtx x = PATTERN (insn);
2117 if (GET_CODE (x) != SET
2118 || GET_CODE (SET_DEST (x)) != PC)
2122 if (GET_CODE (x) == LABEL_REF)
2125 return (GET_CODE (x) == IF_THEN_ELSE
2126 && ((GET_CODE (XEXP (x, 2)) == PC
2127 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
2128 || GET_CODE (XEXP (x, 1)) == RETURN))
2129 || (GET_CODE (XEXP (x, 1)) == PC
2130 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
2131 || GET_CODE (XEXP (x, 2)) == RETURN))));
2136 /* Return nonzero if INSN is a (possibly) conditional jump inside a
2139 Use this function is deprecated, since we need to support combined
2140 branch and compare insns. Use any_condjump_p instead whenever possible. */
2143 condjump_in_parallel_p (insn)
2146 register rtx x = PATTERN (insn);
2148 if (GET_CODE (x) != PARALLEL)
/* Only the first element of the PARALLEL is examined; it must be a
   SET of the pc.  */
2151 x = XVECEXP (x, 0, 0);
2153 if (GET_CODE (x) != SET)
2155 if (GET_CODE (SET_DEST (x)) != PC)
2157 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2159 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
/* Accept either arm order: (if_then_else c (label_ref/return) (pc))
   or (if_then_else c (pc) (label_ref/return)).  */
2161 if (XEXP (SET_SRC (x), 2) == pc_rtx
2162 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2163 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2165 if (XEXP (SET_SRC (x), 1) == pc_rtx
2166 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2167 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2172 /* Return set of PC, otherwise NULL. */
2179 if (GET_CODE (insn) != JUMP_INSN)
2181 pat = PATTERN (insn);
2183 /* The set is allowed to appear either as the insn pattern or
2184 the first set in a PARALLEL. */
2185 if (GET_CODE (pat) == PARALLEL)
2186 pat = XVECEXP (pat, 0, 0);
2187 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
2193 /* Return true when insn is an unconditional direct jump,
2194 possibly bundled inside a PARALLEL. */
2197 any_uncondjump_p (insn)
2200 rtx x = pc_set (insn);
/* pc_set returns the pc-setting SET (or NULL -- the NULL check is
   elided in this listing); an unconditional direct jump sets pc
   straight to a LABEL_REF.  */
2203 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
2208 /* Return true when insn is a conditional jump. This function works for
2209 instructions containing PC sets in PARALLELs. The instruction may have
2210 various other effects so before removing the jump you must verify
2213 Note that unlike condjump_p it returns false for unconditional jumps. */
2216 any_condjump_p (insn)
2219 rtx x = pc_set (insn);
2224 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
/* A conditional jump has pc in exactly one arm of the IF_THEN_ELSE and
   a LABEL_REF or RETURN in the other.  */
2227 a = GET_CODE (XEXP (SET_SRC (x), 1));
2228 b = GET_CODE (XEXP (SET_SRC (x), 2));
2230 return ((b == PC && (a == LABEL_REF || a == RETURN))
2231 || (a == PC && (b == LABEL_REF || b == RETURN)));
2234 /* Return the label of a conditional jump. */
2237 condjump_label (insn)
2240 rtx x = pc_set (insn);
/* After the elided NULL check, X is narrowed to the SET_SRC; a direct
   jump yields its LABEL_REF, otherwise the LABEL_REF arm of the
   IF_THEN_ELSE is returned (return statements elided in this listing).  */
2245 if (GET_CODE (x) == LABEL_REF)
2247 if (GET_CODE (x) != IF_THEN_ELSE)
2249 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
2251 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
2256 /* Return true if INSN is a (possibly conditional) return insn. */
/* Callback for for_each_rtx: nonzero when the subexpression is RETURN.  */
2259 returnjump_p_1 (loc, data)
2261 void *data ATTRIBUTE_UNUSED;
2264 return x && GET_CODE (x) == RETURN;
2271 if (GET_CODE (insn) != JUMP_INSN)
2273 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
2276 /* Return true if INSN is a jump that only transfers control and
2285 if (GET_CODE (insn) != JUMP_INSN)
2288 set = single_set (insn);
2291 if (GET_CODE (SET_DEST (set)) != PC)
2293 if (side_effects_p (SET_SRC (set)))
2301 /* Return 1 if X is an RTX that does nothing but set the condition codes
2302 and CLOBBER or USE registers.
2303 Return -1 if X does explicitly set the condition codes,
2304 but also does other things. */
2308 rtx x ATTRIBUTE_UNUSED;
2310 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
2312 if (GET_CODE (x) == PARALLEL)
2316 int other_things = 0;
2317 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2319 if (GET_CODE (XVECEXP (x, 0, i)) == SET
2320 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
2322 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
2325 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
2331 /* Follow any unconditional jump at LABEL;
2332 return the ultimate label reached by any such chain of jumps.
2333 If LABEL is not followed by a jump, return LABEL.
2334 If the chain loops or we can't find end, return LABEL,
2335 since that tells caller to avoid changing the insn.
2337 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2338 a USE or CLOBBER. */
2341 follow_jumps (label)
2346 register rtx value = label;
/* NOTE(review): the enclosing for-loop header (with a depth bound) is
   elided in this listing; the condition below keeps chaining while the
   insn after VALUE is a simple jump or return followed by a BARRIER.  */
2351 && (insn = next_active_insn (value)) != 0
2352 && GET_CODE (insn) == JUMP_INSN
2353 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
2354 && onlyjump_p (insn))
2355 || GET_CODE (PATTERN (insn)) == RETURN)
2356 && (next = NEXT_INSN (insn))
2357 && GET_CODE (next) == BARRIER);
2360 /* Don't chain through the insn that jumps into a loop
2361 from outside the loop,
2362 since that would create multiple loop entry jumps
2363 and prevent loop optimization. */
2365 if (!reload_completed)
2366 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
2367 if (GET_CODE (tem) == NOTE
2368 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
2369 /* ??? Optional. Disables some optimizations, but makes
2370 gcov output more accurate with -O. */
2371 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
2374 /* If we have found a cycle, make the insn jump to itself. */
2375 if (JUMP_LABEL (insn) == label)
/* Don't chain into a dispatch table (ADDR_VEC/ADDR_DIFF_VEC) -- its
   "label" is not a real jump target we can tension through.  */
2378 tem = next_active_insn (JUMP_LABEL (insn));
2379 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2380 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2383 value = JUMP_LABEL (insn);
2390 /* Assuming that field IDX of X is a vector of label_refs,
2391 replace each of them by the ultimate label reached by it.
2392 Return nonzero if a change is made.
2393 If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG. */
2396 tension_vector_labels (x, idx)
2402 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
2404 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
2405 register rtx nlabel = follow_jumps (olabel);
2406 if (nlabel && nlabel != olabel)
/* Retarget the vector entry, keeping the label use counts accurate;
   delete the old label once nothing references it.  */
2408 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
2409 ++LABEL_NUSES (nlabel);
2410 if (--LABEL_NUSES (olabel) == 0)
2411 delete_insn (olabel);
2418 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2419 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2420 in INSN, then store one of them in JUMP_LABEL (INSN).
2421 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2422 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2423 Also, when there are consecutive labels, canonicalize on the last of them.
2425 Note that two labels separated by a loop-beginning note
2426 must be kept distinct if we have not yet done loop-optimization,
2427 because the gap between them is where loop-optimize
2428 will want to move invariant code to. CROSS_JUMP tells us
2429 that loop-optimization is done with.
2431 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2432 two labels distinct if they are separated by only USE or CLOBBER insns. */
2435 mark_jump_label (x, insn, cross_jump, in_mem)
2441 register RTX_CODE code = GET_CODE (x);
2443 register const char *fmt;
/* NOTE(review): the switch over CODE and several case labels are elided
   in this listing; the fragments below handle SYMBOL_REF (constant
   pool), LABEL_REF, and the ADDR_VEC/ADDR_DIFF_VEC cases in turn.  */
2465 /* If this is a constant-pool reference, see if it is a label. */
2466 if (CONSTANT_POOL_ADDRESS_P (x))
2467 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
2472 rtx label = XEXP (x, 0);
2477 /* Ignore remaining references to unreachable labels that
2478 have been deleted. */
2479 if (GET_CODE (label) == NOTE
2480 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
2483 if (GET_CODE (label) != CODE_LABEL)
2486 /* Ignore references to labels of containing functions. */
2487 if (LABEL_REF_NONLOCAL_P (x))
2490 /* If there are other labels following this one,
2491 replace it with the last of the consecutive labels. */
2492 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
2494 if (GET_CODE (next) == CODE_LABEL)
2496 else if (cross_jump && GET_CODE (next) == INSN
2497 && (GET_CODE (PATTERN (next)) == USE
2498 || GET_CODE (PATTERN (next)) == CLOBBER))
2500 else if (GET_CODE (next) != NOTE)
2502 else if (! cross_jump
2503 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
2504 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
2505 /* ??? Optional. Disables some optimizations, but
2506 makes gcov output more accurate with -O. */
2507 || (flag_test_coverage
2508 && NOTE_LINE_NUMBER (next) > 0)))
2512 XEXP (x, 0) = label;
2513 if (! insn || ! INSN_DELETED_P (insn))
2514 ++LABEL_NUSES (label);
2518 if (GET_CODE (insn) == JUMP_INSN)
2519 JUMP_LABEL (insn) = label;
2521 /* If we've changed OLABEL and we had a REG_LABEL note
2522 for it, update it as well. */
2523 else if (label != olabel
2524 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
2525 XEXP (note, 0) = label;
2527 /* Otherwise, add a REG_LABEL note for LABEL unless there already
2529 else if (! find_reg_note (insn, REG_LABEL, label))
2531 /* This code used to ignore labels which refered to dispatch
2532 tables to avoid flow.c generating worse code.
2534 However, in the presense of global optimizations like
2535 gcse which call find_basic_blocks without calling
2536 life_analysis, not recording such labels will lead
2537 to compiler aborts because of inconsistencies in the
2538 flow graph. So we go ahead and record the label.
2540 It may also be the case that the optimization argument
2541 is no longer valid because of the more accurate cfg
2542 we build in find_basic_blocks -- it no longer pessimizes
2543 code when it finds a REG_LABEL note. */
2544 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
2551 /* Do walk the labels in a vector, but not the first operand of an
2552 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
2555 if (! INSN_DELETED_P (insn))
2557 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
2559 for (i = 0; i < XVECLEN (x, eltnum); i++)
2560 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
2561 cross_jump, in_mem);
/* Generic recursion: walk every rtx operand ('e') and vector ('E')
   of X, propagating INSN so labels found deeper are recorded on it.  */
2569 fmt = GET_RTX_FORMAT (code);
2570 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2573 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
2574 else if (fmt[i] == 'E')
2577 for (j = 0; j < XVECLEN (x, i); j++)
2578 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
2583 /* If all INSN does is set the pc, delete it,
2584 and delete the insn that set the condition codes for it
2585 if that's what the previous thing was. */
2591 register rtx set = single_set (insn);
2593 if (set && GET_CODE (SET_DEST (set)) == PC)
2594 delete_computation (insn);
2597 /* Verify INSN is a BARRIER and delete it. */
2600 delete_barrier (insn)
/* The abort on non-BARRIER and the delete_insn call are elided in this
   listing; only the sanity check is visible.  */
2603 if (GET_CODE (insn) != BARRIER)
2609 /* Recursively delete prior insns that compute the value (used only by INSN
2610 which the caller is deleting) stored in the register mentioned by NOTE
2611 which is a REG_DEAD note associated with INSN. */
2614 delete_prior_computation (note, insn)
2619 rtx reg = XEXP (note, 0);
/* Scan backwards over ordinary insns and calls, looking for the insn
   that computed REG; stop at anything we cannot reason about.  */
2621 for (our_prev = prev_nonnote_insn (insn);
2622 our_prev && (GET_CODE (our_prev) == INSN
2623 || GET_CODE (our_prev) == CALL_INSN);
2624 our_prev = prev_nonnote_insn (our_prev))
2626 rtx pat = PATTERN (our_prev);
2628 /* If we reach a CALL which is not calling a const function
2629 or the callee pops the arguments, then give up. */
2630 if (GET_CODE (our_prev) == CALL_INSN
2631 && (! CONST_CALL_P (our_prev)
2632 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
2635 /* If we reach a SEQUENCE, it is too complex to try to
2636 do anything with it, so give up. */
2637 if (GET_CODE (pat) == SEQUENCE)
2640 if (GET_CODE (pat) == USE
2641 && GET_CODE (XEXP (pat, 0)) == INSN)
2642 /* reorg creates USEs that look like this. We leave them
2643 alone because reorg needs them for its own purposes. */
2646 if (reg_set_p (reg, pat))
2648 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
2651 if (GET_CODE (pat) == PARALLEL)
2653 /* If we find a SET of something else, we can't
2658 for (i = 0; i < XVECLEN (pat, 0); i++)
2660 rtx part = XVECEXP (pat, 0, i);
2662 if (GET_CODE (part) == SET
2663 && SET_DEST (part) != reg)
/* Every SET in the PARALLEL targets REG, so the whole insn is dead:
   recurse to delete it and whatever fed it.  */
2667 if (i == XVECLEN (pat, 0))
2668 delete_computation (our_prev);
2670 else if (GET_CODE (pat) == SET
2671 && GET_CODE (SET_DEST (pat)) == REG)
/* Compute the hard-register ranges [dest_regno, dest_endregno) and
   [regno, endregno) so multi-word hard regs are compared as ranges;
   pseudos always span exactly one "register".  */
2673 int dest_regno = REGNO (SET_DEST (pat));
2676 + (dest_regno < FIRST_PSEUDO_REGISTER
2677 ? HARD_REGNO_NREGS (dest_regno,
2678 GET_MODE (SET_DEST (pat))) : 1));
2679 int regno = REGNO (reg);
2682 + (regno < FIRST_PSEUDO_REGISTER
2683 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
2685 if (dest_regno >= regno
2686 && dest_endregno <= endregno)
2687 delete_computation (our_prev);
2689 /* We may have a multi-word hard register and some, but not
2690 all, of the words of the register are needed in subsequent
2691 insns. Write REG_UNUSED notes for those parts that were not
2693 else if (dest_regno <= regno
2694 && dest_endregno >= endregno)
2698 REG_NOTES (our_prev)
2699 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
2700 REG_NOTES (our_prev));
/* If every word the insn sets is now covered by a REG_UNUSED note,
   the whole computation is dead and can be deleted.  */
2702 for (i = dest_regno; i < dest_endregno; i++)
2703 if (! find_regno_note (our_prev, REG_UNUSED, i))
2706 if (i == dest_endregno)
2707 delete_computation (our_prev);
2714 /* If PAT references the register that dies here, it is an
2715 additional use. Hence any prior SET isn't dead. However, this
2716 insn becomes the new place for the REG_DEAD note. */
2717 if (reg_overlap_mentioned_p (reg, pat))
2719 XEXP (note, 1) = REG_NOTES (our_prev);
2720 REG_NOTES (our_prev) = note;
2726 /* Delete INSN and recursively delete insns that compute values used only
2727 by INSN. This uses the REG_DEAD notes computed during flow analysis.
2728 If we are running before flow.c, we need do nothing since flow.c will
2729 delete dead code. We also can't know if the registers being used are
2730 dead or not at this point.
2732 Otherwise, look at all our REG_DEAD notes. If a previous insn does
2733 nothing other than set a register that dies in this insn, we can delete
2736 On machines with CC0, if CC0 is used in this insn, we may be able to
2737 delete the insn that set it. */
2740 delete_computation (insn)
/* NOTE(review): the surrounding #ifdef HAVE_cc0 guard is elided in this
   listing; the cc0 handling below applies only on such targets.  */
2746 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2748 rtx prev = prev_nonnote_insn (insn);
2749 /* We assume that at this stage
2750 CC's are always set explicitly
2751 and always immediately before the jump that
2752 will use them. So if the previous insn
2753 exists to set the CC's, delete it
2754 (unless it performs auto-increments, etc.). */
2755 if (prev && GET_CODE (prev) == INSN
2756 && sets_cc0_p (PATTERN (prev)))
2758 if (sets_cc0_p (PATTERN (prev)) > 0
2759 && ! side_effects_p (PATTERN (prev)))
2760 delete_computation (prev);
2762 /* Otherwise, show that cc0 won't be used. */
2763 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
2764 cc0_rtx, REG_NOTES (prev));
/* For each well-formed REG_DEAD note on INSN, try to delete the prior
   computation of that register.  NEXT is captured first because the
   note may be re-linked onto another insn by the callee.  */
2769 for (note = REG_NOTES (insn); note; note = next)
2771 next = XEXP (note, 1);
2773 if (REG_NOTE_KIND (note) != REG_DEAD
2774 /* Verify that the REG_NOTE is legitimate. */
2775 || GET_CODE (XEXP (note, 0)) != REG)
2778 delete_prior_computation (note, insn);
2784 /* Delete insn INSN from the chain of insns and update label ref counts.
2785 May delete some following insns as a consequence; may even delete
2786 a label elsewhere and insns that follow it.
2788 Returns the first insn after INSN that was not deleted. */
/* NOTE(review): the function header and some interior lines are elided
   from this listing; visible code kept verbatim.  */
2794 register rtx next = NEXT_INSN (insn);
2795 register rtx prev = PREV_INSN (insn);
2796 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
2797 register int dont_really_delete = 0;
/* Skip over already-deleted insns so NEXT names a live candidate return.  */
2800 while (next && INSN_DELETED_P (next))
2801 next = NEXT_INSN (next);
2803 /* This insn is already deleted => return first following nondeleted. */
2804 if (INSN_DELETED_P (insn))
2808 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2810 /* Don't delete user-declared labels. When optimizing, convert them
2811 to special NOTEs instead. When not optimizing, leave them alone. */
2812 if (was_code_label && LABEL_NAME (insn) != 0)
2815 dont_really_delete = 1;
2816 else if (! dont_really_delete)
2818 const char *name = LABEL_NAME (insn);
2819 PUT_CODE (insn, NOTE);
2820 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
2821 NOTE_SOURCE_FILE (insn) = name;
2822 dont_really_delete = 1;
2826 /* Mark this insn as deleted. */
2827 INSN_DELETED_P (insn) = 1;
2829 /* If this is an unconditional jump, delete it from the jump chain. */
2830 if (simplejump_p (insn))
2831 delete_from_jump_chain (insn);
2833 /* If instruction is followed by a barrier,
2834 delete the barrier too. */
2836 if (next != 0 && GET_CODE (next) == BARRIER)
2838 INSN_DELETED_P (next) = 1;
2839 next = NEXT_INSN (next);
2842 /* Patch out INSN (and the barrier if any) */
2844 if (! dont_really_delete)
2848 NEXT_INSN (prev) = next;
/* If PREV ends a SEQUENCE, the chain link lives on its last element.  */
2849 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2850 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
2851 XVECLEN (PATTERN (prev), 0) - 1)) = next;
2856 PREV_INSN (next) = prev;
2857 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2858 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2861 if (prev && NEXT_INSN (prev) == 0)
2862 set_last_insn (prev);
2865 /* If deleting a jump, decrement the count of the label,
2866 and delete the label if it is now unused. */
2868 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2870 rtx lab = JUMP_LABEL (insn), lab_next;
2872 if (--LABEL_NUSES (lab) == 0)
2874 /* This can delete NEXT or PREV,
2875 either directly if NEXT is JUMP_LABEL (INSN),
2876 or indirectly through more levels of jumps. */
2879 /* I feel a little doubtful about this loop,
2880 but I see no clean and sure alternative way
2881 to find the first insn after INSN that is not now deleted.
2882 I hope this works. */
2883 while (next && INSN_DELETED_P (next))
2884 next = NEXT_INSN (next);
2887 else if ((lab_next = next_nonnote_insn (lab)) != NULL
2888 && GET_CODE (lab_next) == JUMP_INSN
2889 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
2890 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
2892 /* If we're deleting the tablejump, delete the dispatch table.
2893 We may not be able to kill the label immediately preceding
2894 just yet, as it might be referenced in code leading up to
2896 delete_insn (lab_next);
2900 /* Likewise if we're deleting a dispatch table. */
2902 if (GET_CODE (insn) == JUMP_INSN
2903 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2904 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2906 rtx pat = PATTERN (insn);
2907 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
2908 int len = XVECLEN (pat, diff_vec_p);
/* Drop one use per table entry; delete labels that become unused.  */
2910 for (i = 0; i < len; i++)
2911 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
2912 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
2913 while (next && INSN_DELETED_P (next))
2914 next = NEXT_INSN (next);
2918 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
2919 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2920 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2921 if (REG_NOTE_KIND (note) == REG_LABEL
2922 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
2923 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2924 if (--LABEL_NUSES (XEXP (note, 0)) == 0)
2925 delete_insn (XEXP (note, 0));
/* Back PREV up past deleted insns and notes before the barrier test.  */
2927 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
2928 prev = PREV_INSN (prev);
2930 /* If INSN was a label and a dispatch table follows it,
2931 delete the dispatch table. The tablejump must have gone already.
2932 It isn't useful to fall through into a table. */
2935 && NEXT_INSN (insn) != 0
2936 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
2937 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
2938 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
2939 next = delete_insn (NEXT_INSN (insn));
2941 /* If INSN was a label, delete insns following it if now unreachable. */
2943 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
2945 register RTX_CODE code;
2947 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
2948 || code == NOTE || code == BARRIER
2949 || (code == CODE_LABEL && INSN_DELETED_P (next))))
2952 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
2953 next = NEXT_INSN (next);
2954 /* Keep going past other deleted labels to delete what follows. */
2955 else if (code == CODE_LABEL && INSN_DELETED_P (next))
2956 next = NEXT_INSN (next);
2958 /* Note: if this deletes a jump, it can cause more
2959 deletion of unreachable code, after a different label.
2960 As long as the value from this recursive call is correct,
2961 this invocation functions correctly. */
2962 next = delete_insn (next);
2969 /* Advance from INSN till reaching something not deleted
2970 then return that. May return INSN itself. */
/* NOTE(review): the trailing return statement is elided in this listing.  */
2973 next_nondeleted_insn (insn)
2976 while (INSN_DELETED_P (insn))
2977 insn = NEXT_INSN (insn);
2981 /* Delete a range of insns from FROM to TO, inclusive.
2982 This is for the sake of peephole optimization, so assume
2983 that whatever these insns do will still be done by a new
2984 peephole insn that will replace them. */
2987 delete_for_peephole (from, to)
2988 register rtx from, to;
2990 register rtx insn = from;
/* The surrounding loop construct is elided in this listing; each
   iteration unlinks one non-NOTE insn, leaving NOTEs in the chain.  */
2994 register rtx next = NEXT_INSN (insn);
2995 register rtx prev = PREV_INSN (insn);
2997 if (GET_CODE (insn) != NOTE)
2999 INSN_DELETED_P (insn) = 1;
3001 /* Patch this insn out of the chain. */
3002 /* We don't do this all at once, because we
3003 must preserve all NOTEs. */
3005 NEXT_INSN (prev) = next;
3008 PREV_INSN (next) = prev;
3016 /* Note that if TO is an unconditional jump
3017 we *do not* delete the BARRIER that follows,
3018 since the peephole that replaces this sequence
3019 is also an unconditional jump in that case. */
3022 /* We have determined that INSN is never reached, and are about to
3023 delete it. Print a warning if the user asked for one.
3025 To try to make this warning more useful, this should only be called
3026 once per basic block not reached, and it only warns when the basic
3027 block contains more than one line from the current function, and
3028 contains at least one operation. CSE and inlining can duplicate insns,
3029 so it's possible to get spurious warnings from this. */
3032 never_reached_warning (avoided_insn)
3036 rtx a_line_note = NULL;
3037 int two_avoided_lines = 0;
3038 int contains_insn = 0;
/* Bail out unless -Wunreachable-code (warn_notreached) is enabled.  */
3040 if (! warn_notreached)
3043 /* Scan forwards, looking at LINE_NUMBER notes, until
3044 we hit a LABEL or we run out of insns. */
3046 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
3048 if (GET_CODE (insn) == CODE_LABEL)
3050 else if (GET_CODE (insn) == NOTE /* A line number note? */
3051 && NOTE_LINE_NUMBER (insn) >= 0)
3053 if (a_line_note == NULL)
/* Set once we see a second, different source line in the dead block.  */
3056 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
3057 != NOTE_LINE_NUMBER (insn));
3059 else if (INSN_P (insn))
3062 if (two_avoided_lines && contains_insn)
3063 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
3064 NOTE_LINE_NUMBER (a_line_note),
3065 "will never be executed");
3068 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
3069 NLABEL as a return. Accrue modifications into the change group. */
3072 redirect_exp_1 (loc, olabel, nlabel, insn)
3077 register rtx x = *loc;
3078 register RTX_CODE code = GET_CODE (x);
3080 register const char *fmt;
3082 if (code == LABEL_REF)
3084 if (XEXP (x, 0) == olabel)
/* Replace the LABEL_REF: either point at NLABEL, or (when NLABEL is
   null) turn it into a RETURN.  Queued via validate_change, in_group=1.  */
3088 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3090 n = gen_rtx_RETURN (VOIDmode);
3092 validate_change (insn, loc, n, 1);
3096 else if (code == RETURN && olabel == 0)
3098 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3099 if (loc == &PATTERN (insn))
3100 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3101 validate_change (insn, loc, x, 1);
3105 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
3106 && GET_CODE (SET_SRC (x)) == LABEL_REF
3107 && XEXP (SET_SRC (x), 0) == olabel)
3109 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
/* Recurse over all sub-rtxs ('e') and rtx vectors ('E') of X.  */
3113 fmt = GET_RTX_FORMAT (code);
3114 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3117 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
3118 else if (fmt[i] == 'E')
3121 for (j = 0; j < XVECLEN (x, i); j++)
3122 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
3127 /* Similar, but apply the change group and report success or failure. */
3130 redirect_exp (olabel, nlabel, insn)
/* For a PARALLEL, redirect only the first element (the jump SET).  */
3136 if (GET_CODE (PATTERN (insn)) == PARALLEL)
3137 loc = &XVECEXP (PATTERN (insn), 0, 0);
3139 loc = &PATTERN (insn);
3141 redirect_exp_1 (loc, olabel, nlabel, insn);
3142 if (num_validated_changes () == 0)
3145 return apply_change_group ();
3148 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
3149 the modifications into the change group. Return false if we did
3150 not see how to do that. */
3153 redirect_jump_1 (jump, nlabel)
3156 int ochanges = num_validated_changes ();
3159 if (GET_CODE (PATTERN (jump)) == PARALLEL)
3160 loc = &XVECEXP (PATTERN (jump), 0, 0);
3162 loc = &PATTERN (jump);
/* Success == at least one change was queued beyond the baseline count;
   the group is NOT applied here (caller decides).  */
3164 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
3165 return num_validated_changes () > ochanges;
3168 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
3169 jump target label is unused as a result, it and the code following
3172 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3175 The return value will be 1 if the change was made, 0 if it wasn't
3176 (this can only occur for NLABEL == 0). */
3179 redirect_jump (jump, nlabel, delete_unused)
3183 register rtx olabel = JUMP_LABEL (jump);
/* Redirecting to the current target is a trivial success.  */
3185 if (nlabel == olabel)
3188 if (! redirect_exp (olabel, nlabel, jump))
3191 /* If this is an unconditional branch, delete it from the jump_chain of
3192 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3193 have UID's in range and JUMP_CHAIN is valid). */
3194 if (jump_chain && (simplejump_p (jump)
3195 || GET_CODE (PATTERN (jump)) == RETURN))
3197 int label_index = nlabel ? INSN_UID (nlabel) : 0;
3199 delete_from_jump_chain (jump);
3200 if (label_index < max_jump_chain
3201 && INSN_UID (jump) < max_jump_chain)
3203 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
3204 jump_chain[label_index] = jump;
3208 JUMP_LABEL (jump) = nlabel;
3210 ++LABEL_NUSES (nlabel);
3212 /* If we're eliding the jump over exception cleanups at the end of a
3213 function, move the function end note so that -Wreturn-type works. */
3214 if (olabel && nlabel
3215 && NEXT_INSN (olabel)
3216 && GET_CODE (NEXT_INSN (olabel)) == NOTE
3217 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
3218 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
/* Drop the old label's use count; reap it if it just became unused.  */
3220 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
3221 delete_insn (olabel);
3226 /* Invert the jump condition of rtx X contained in jump insn, INSN.
3227 Accrue the modifications into the change group. */
/* NOTE(review): the function header line is elided in this listing.  */
3233 register RTX_CODE code;
3234 rtx x = pc_set (insn);
3240 code = GET_CODE (x);
3242 if (code == IF_THEN_ELSE)
3244 register rtx comp = XEXP (x, 0);
3246 enum rtx_code reversed_code;
3248 /* We can do this in two ways: The preferable way, which can only
3249 be done if this is not an integer comparison, is to reverse
3250 the comparison code. Otherwise, swap the THEN-part and ELSE-part
3251 of the IF_THEN_ELSE. If we can't do either, fail. */
3253 reversed_code = reversed_comparison_code (comp, insn);
3255 if (reversed_code != UNKNOWN)
3257 validate_change (insn, &XEXP (x, 0),
3258 gen_rtx_fmt_ee (reversed_code,
3259 GET_MODE (comp), XEXP (comp, 0),
/* Fallback: swap the THEN (XEXP 1) and ELSE (XEXP 2) arms.  */
3266 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
3267 validate_change (insn, &XEXP (x, 2), tem, 1);
3273 /* Invert the jump condition of conditional jump insn, INSN.
3275 Return 1 if we can do so, 0 if we cannot find a way to do so that
3276 matches a pattern. */
/* NOTE(review): header elided.  Queues via invert_exp_1, then commits.  */
3282 invert_exp_1 (insn);
3283 if (num_validated_changes () == 0)
3286 return apply_change_group ();
3289 /* Invert the condition of the jump JUMP, and make it jump to label
3290 NLABEL instead of where it jumps now. Accrue changes into the
3291 change group. Return false if we didn't see how to perform the
3292 inversion and redirection. */
3295 invert_jump_1 (jump, nlabel)
3300 ochanges = num_validated_changes ();
3301 invert_exp_1 (jump);
/* Inversion must have queued at least one change before redirecting.  */
3302 if (num_validated_changes () == ochanges)
3305 return redirect_jump_1 (jump, nlabel);
3308 /* Invert the condition of the jump JUMP, and make it jump to label
3309 NLABEL instead of where it jumps now. Return true if successful. */
3312 invert_jump (jump, nlabel, delete_unused)
3316 /* We have to either invert the condition and change the label or
3317 do neither. Either operation could fail. We first try to invert
3318 the jump. If that succeeds, we try changing the label. If that fails,
3319 we invert the jump back to what it was. */
3321 if (! invert_exp (jump))
3324 if (redirect_jump (jump, nlabel, delete_unused))
3326 /* An inverted jump means that a probability taken becomes a
3327 probability not taken. Subtract the branch probability from the
3328 probability base to convert it back to a taken probability. */
3330 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3332 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
/* Redirect failed: undo the inversion to restore the original insn.  */
3337 if (! invert_exp (jump))
3338 /* This should just be putting it back the way it was. */
3344 /* Delete the instruction JUMP from any jump chain it might be on. */
3347 delete_from_jump_chain (jump)
3351 rtx olabel = JUMP_LABEL (jump);
3353 /* Handle unconditional jumps. */
3354 if (jump_chain && olabel != 0
3355 && INSN_UID (olabel) < max_jump_chain
3356 && simplejump_p (jump))
3357 index = INSN_UID (olabel);
3358 /* Handle return insns. */
/* RETURN insns live in chain slot 0 (no target label).  */
3359 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3364 if (jump_chain[index] == jump)
3365 jump_chain[index] = jump_chain[INSN_UID (jump)];
/* Otherwise walk the singly linked chain and splice JUMP out.  */
3370 for (insn = jump_chain[index];
3372 insn = jump_chain[INSN_UID (insn)])
3373 if (jump_chain[INSN_UID (insn)] == jump)
3375 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3381 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3383 If the old jump target label (before the dispatch table) becomes unused,
3384 it and the dispatch table may be deleted. In that case, find the insn
3385 before the jump references that label and delete it and logical successors
3389 redirect_tablejump (jump, nlabel)
3392 register rtx olabel = JUMP_LABEL (jump);
3393 rtx *notep, note, next;
3395 /* Add this jump to the jump_chain of NLABEL. */
3396 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
3397 && INSN_UID (jump) < max_jump_chain)
3399 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
3400 jump_chain[INSN_UID (nlabel)] = jump;
/* FIX(review): "&REG_NOTES" had been mis-encoded as "(R)_NOTES" (the
   "&reg" prefix was collapsed into a registered-sign character by a
   bad charset conversion); restored to the correct token below.  */
3403 for (notep = &REG_NOTES (jump), note = *notep; note; note = next)
3405 next = XEXP (note, 1);
3407 if (REG_NOTE_KIND (note) != REG_DEAD
3408 /* Verify that the REG_NOTE is legitimate. */
3409 || GET_CODE (XEXP (note, 0)) != REG
3410 || ! reg_mentioned_p (XEXP (note, 0), PATTERN (jump)))
3411 notep = &XEXP (note, 1);
3414 delete_prior_computation (note, jump);
/* Replace the tablejump with a simple jump to NLABEL and force
   re-recognition (INSN_CODE = -1).  */
3419 PATTERN (jump) = gen_jump (nlabel);
3420 JUMP_LABEL (jump) = nlabel;
3421 ++LABEL_NUSES (nlabel);
3422 INSN_CODE (jump) = -1;
3424 if (--LABEL_NUSES (olabel) == 0)
3426 delete_labelref_insn (jump, olabel, 0);
3427 delete_insn (olabel);
3431 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3432 If we found one, delete it and then delete this insn if DELETE_THIS is
3433 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
/* NOTE(review): several lines of this function are elided in this listing
   (deletion and return paths); visible code kept verbatim.  */
3436 delete_labelref_insn (insn, label, delete_this)
3443 if (GET_CODE (insn) != NOTE
3444 && reg_mentioned_p (label, PATTERN (insn)))
/* Recurse over LOG_LINKS to search logical predecessors for LABEL.  */
3455 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
3456 if (delete_labelref_insn (XEXP (link, 0), label, 1))
3470 /* Like rtx_equal_p except that it considers two REGs as equal
3471 if they renumber to the same value and considers two commutative
3472 operations to be the same if the order of the operands has been
3475 ??? Addition is not commutative on the PA due to the weird implicit
3476 space register selection rules for memory addresses. Therefore, we
3477 don't consider a + b == b + a.
3479 We could/should make this test a little tighter. Possibly only
3480 disabling it on the PA via some backend macro or only disabling this
3481 case when the PLUS is inside a MEM. */
3484 rtx_renumbered_equal_p (x, y)
3488 register RTX_CODE code = GET_CODE (x);
3489 register const char *fmt;
/* REG/SUBREG-of-REG vs. REG/SUBREG-of-REG: compare post-reload hard
   register numbers (via reg_renumber) plus subreg byte offsets.  */
3494 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3495 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3496 && GET_CODE (SUBREG_REG (y)) == REG)))
3498 int reg_x = -1, reg_y = -1;
3499 int byte_x = 0, byte_y = 0;
3501 if (GET_MODE (x) != GET_MODE (y))
3504 /* If we haven't done any renumbering, don't
3505 make any assumptions. */
3506 if (reg_renumber == 0)
3507 return rtx_equal_p (x, y);
3511 reg_x = REGNO (SUBREG_REG (x));
3512 byte_x = SUBREG_BYTE (x);
3514 if (reg_renumber[reg_x] >= 0)
3516 reg_x = subreg_regno_offset (reg_renumber[reg_x],
3517 GET_MODE (SUBREG_REG (x)),
3526 if (reg_renumber[reg_x] >= 0)
3527 reg_x = reg_renumber[reg_x];
3530 if (GET_CODE (y) == SUBREG)
3532 reg_y = REGNO (SUBREG_REG (y));
3533 byte_y = SUBREG_BYTE (y);
3535 if (reg_renumber[reg_y] >= 0)
3537 reg_y = subreg_regno_offset (reg_renumber[reg_y],
3538 GET_MODE (SUBREG_REG (y)),
3547 if (reg_renumber[reg_y] >= 0)
3548 reg_y = reg_renumber[reg_y];
3551 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
3554 /* Now we have disposed of all the cases
3555 in which different rtx codes can match. */
3556 if (code != GET_CODE (y))
3568 return INTVAL (x) == INTVAL (y);
3571 /* We can't assume nonlocal labels have their following insns yet. */
3572 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3573 return XEXP (x, 0) == XEXP (y, 0);
3575 /* Two label-refs are equivalent if they point at labels
3576 in the same position in the instruction stream. */
3577 return (next_real_insn (XEXP (x, 0))
3578 == next_real_insn (XEXP (y, 0)));
3581 return XSTR (x, 0) == XSTR (y, 0);
3584 /* If we didn't match EQ equality above, they aren't the same. */
3591 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3593 if (GET_MODE (x) != GET_MODE (y))
3596 /* For commutative operations, the RTX match if the operand match in any
3597 order. Also handle the simple binary and unary cases without a loop.
3599 ??? Don't consider PLUS a commutative operator; see comments above. */
3600 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3602 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3603 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
3604 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
3605 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
3606 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
3607 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3608 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
3609 else if (GET_RTX_CLASS (code) == '1')
3610 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
3612 /* Compare the elements. If any pair of corresponding elements
3613 fail to match, return 0 for the whole things. */
3615 fmt = GET_RTX_FORMAT (code);
3616 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3622 if (XWINT (x, i) != XWINT (y, i))
3627 if (XINT (x, i) != XINT (y, i))
3632 if (strcmp (XSTR (x, i), XSTR (y, i)))
3637 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
3642 if (XEXP (x, i) != XEXP (y, i))
3649 if (XVECLEN (x, i) != XVECLEN (y, i))
3651 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3652 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
3663 /* If X is a hard register or equivalent to one or a subregister of one,
3664 return the hard register number. If X is a pseudo register that was not
3665 assigned a hard register, return the pseudo register number. Otherwise,
3666 return -1. Any rtx is valid for X. */
/* NOTE(review): the function header line and fall-through return are
   elided in this listing.  */
3672 if (GET_CODE (x) == REG)
3674 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3675 return reg_renumber[REGNO (x)];
3678 if (GET_CODE (x) == SUBREG)
/* Recurse on the inner reg; adjust by the subreg's word/byte offset
   only when the base resolved to a hard register.  */
3680 int base = true_regnum (SUBREG_REG (x));
3681 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3682 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
3683 GET_MODE (SUBREG_REG (x)),
3684 SUBREG_BYTE (x), GET_MODE (x));
3689 /* Optimize code of the form:
3691 for (x = a[i]; x; ...)
3693 for (x = a[i]; x; ...)
3697 Loop optimize will change the above code into
3701 { ...; if (! (x = ...)) break; }
3704 { ...; if (! (x = ...)) break; }
3707 In general, if the first test fails, the program can branch
3708 directly to `foo' and skip the second try which is doomed to fail.
3709 We run this after loop optimization and before flow analysis. */
3711 /* When comparing the insn patterns, we track the fact that different
3712 pseudo-register numbers may have been used in each computation.
3713 The following array stores an equivalence -- same_regs[I] == J means
3714 that pseudo register I was used in the first set of tests in a context
3715 where J was used in the second set. We also count the number of such
3716 pending equivalences. If nonzero, the expressions really aren't the
3719 static int *same_regs;
3721 static int num_same_regs;
3723 /* Track any registers modified between the target of the first jump and
3724 the second jump. They never compare equal. */
3726 static char *modified_regs;
3728 /* Record if memory was modified. */
3730 static int modified_mem;
3732 /* Called via note_stores on each insn between the target of the first
3733 branch and the second branch. It marks any changed registers. */
3736 mark_modified_reg (dest, x, data)
3738 rtx x ATTRIBUTE_UNUSED;
3739 void *data ATTRIBUTE_UNUSED;
/* Look through SUBREGs to the underlying register being stored.  */
3744 if (GET_CODE (dest) == SUBREG)
3745 dest = SUBREG_REG (dest);
3747 if (GET_CODE (dest) == MEM)
3750 if (GET_CODE (dest) != REG)
3753 regno = REGNO (dest);
3754 if (regno >= FIRST_PSEUDO_REGISTER)
3755 modified_regs[regno] = 1;
/* Hard register: mark every constituent hard reg of a multi-reg value.  */
3757 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
3758 modified_regs[regno + i] = 1;
3761 /* F is the first insn in the chain of insns. */
3764 thread_jumps (f, max_reg, flag_before_loop)
3767 int flag_before_loop;
3769 /* Basic algorithm is to find a conditional branch,
3770 the label it may branch to, and the branch after
3771 that label. If the two branches test the same condition,
3772 walk back from both branch paths until the insn patterns
3773 differ, or code labels are hit. If we make it back to
3774 the target of the first branch, then we know that the first branch
3775 will either always succeed or always fail depending on the relative
3776 senses of the two branches. So adjust the first branch accordingly
3779 rtx label, b1, b2, t1, t2;
3780 enum rtx_code code1, code2;
3781 rtx b1op0, b1op1, b2op0, b2op1;
3785 enum rtx_code reversed_code1, reversed_code2;
3787 /* Allocate register tables and quick-reset table. */
3788 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
3789 same_regs = (int *) xmalloc (max_reg * sizeof (int));
3790 all_reset = (int *) xmalloc (max_reg * sizeof (int));
3791 for (i = 0; i < max_reg; i++)
3798 for (b1 = f; b1; b1 = NEXT_INSN (b1))
3803 /* Get to a candidate branch insn. */
3804 if (GET_CODE (b1) != JUMP_INSN
3805 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
/* Reset the per-candidate tracking state (cleared regs, no pending
   pseudo-register equivalences).  */
3808 memset (modified_regs, 0, max_reg * sizeof (char));
3811 memcpy (same_regs, all_reset, max_reg * sizeof (int));
3814 label = JUMP_LABEL (b1);
3816 /* Look for a branch after the target. Record any registers and
3817 memory modified between the target and the branch. Stop when we
3818 get to a label since we can't know what was changed there. */
3819 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
3821 if (GET_CODE (b2) == CODE_LABEL)
3824 else if (GET_CODE (b2) == JUMP_INSN)
3826 /* If this is an unconditional jump and is the only use of
3827 its target label, we can follow it. */
3828 if (any_uncondjump_p (b2)
3830 && JUMP_LABEL (b2) != 0
3831 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
3833 b2 = JUMP_LABEL (b2);
3840 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
/* A call clobbers all call-used, non-fixed hard registers except the
   stack/frame/arg pointers.  */
3843 if (GET_CODE (b2) == CALL_INSN)
3846 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3847 if (call_used_regs[i] && ! fixed_regs[i]
3848 && i != STACK_POINTER_REGNUM
3849 && i != FRAME_POINTER_REGNUM
3850 && i != HARD_FRAME_POINTER_REGNUM
3851 && i != ARG_POINTER_REGNUM)
3852 modified_regs[i] = 1;
3855 note_stores (PATTERN (b2), mark_modified_reg, NULL);
3858 /* Check the next candidate branch insn from the label
3861 || GET_CODE (b2) != JUMP_INSN
3863 || !any_condjump_p (b2)
3864 || !onlyjump_p (b2))
3869 /* Get the comparison codes and operands, reversing the
3870 codes if appropriate. If we don't have comparison codes,
3871 we can't do anything. */
3872 b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
3873 b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
3874 code1 = GET_CODE (XEXP (SET_SRC (set), 0));
3875 reversed_code1 = code1;
3876 if (XEXP (SET_SRC (set), 1) == pc_rtx)
3877 code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
3879 reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
3881 b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
3882 b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
3883 code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
3884 reversed_code2 = code2;
3885 if (XEXP (SET_SRC (set2), 1) == pc_rtx)
3886 code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
3888 reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
3890 /* If they test the same things and knowing that B1 branches
3891 tells us whether or not B2 branches, check if we
3892 can thread the branch. */
3893 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
3894 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
3895 && (comparison_dominates_p (code1, code2)
3896 || comparison_dominates_p (code1, reversed_code2)))
/* Walk the two predecessor chains in lockstep, comparing insns.  */
3899 t1 = prev_nonnote_insn (b1);
3900 t2 = prev_nonnote_insn (b2);
3902 while (t1 != 0 && t2 != 0)
3906 /* We have reached the target of the first branch.
3907 If there are no pending register equivalents,
3908 we know that this branch will either always
3909 succeed (if the senses of the two branches are
3910 the same) or always fail (if not). */
3913 if (num_same_regs != 0)
3916 if (comparison_dominates_p (code1, code2))
3917 new_label = JUMP_LABEL (b2);
3919 new_label = get_label_after (b2);
3921 if (JUMP_LABEL (b1) != new_label)
3923 rtx prev = PREV_INSN (new_label);
3925 if (flag_before_loop
3926 && GET_CODE (prev) == NOTE
3927 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
3929 /* Don't thread to the loop label. If a loop
3930 label is reused, loop optimization will
3931 be disabled for that loop. */
3932 new_label = gen_label_rtx ();
3933 emit_label_after (new_label, PREV_INSN (prev));
3935 changed |= redirect_jump (b1, new_label, 1);
3940 /* If either of these is not a normal insn (it might be
3941 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
3942 have already been skipped above.) Similarly, fail
3943 if the insns are different. */
3944 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
3945 || recog_memoized (t1) != recog_memoized (t2)
3946 || ! rtx_equal_for_thread_p (PATTERN (t1),
3950 t1 = prev_nonnote_insn (t1);
3951 t2 = prev_nonnote_insn (t2);
3958 free (modified_regs);
3963 /* This is like RTX_EQUAL_P except that it knows about our handling of
3964 possibly equivalent registers and knows to consider volatile and
3965 modified objects as not equal.
3967 YINSN is the insn containing Y. */
3970 rtx_equal_for_thread_p (x, y, yinsn)
3976 register enum rtx_code code;
3977 register const char *fmt;
3979 code = GET_CODE (x);
3980 /* Rtx's of different codes cannot be equal. */
3981 if (code != GET_CODE (y))
3984 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
3985 (REG:SI x) and (REG:HI x) are NOT equivalent. */
3987 if (GET_MODE (x) != GET_MODE (y))
3990 /* For floating-point, consider everything unequal. This is a bit
3991 pessimistic, but this pass would only rarely do anything for FP
3993 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3994 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
3997 /* For commutative operations, the RTX match if the operand match in any
3998 order. Also handle the simple binary and unary cases without a loop. */
3999 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4000 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4001 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
4002 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
4003 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
4004 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4005 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4006 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
4007 else if (GET_RTX_CLASS (code) == '1')
4008 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4010 /* Handle special-cases first. */
4014 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
4017 /* If neither is user variable or hard register, check for possible
4019 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
4020 || REGNO (x) < FIRST_PSEUDO_REGISTER
4021 || REGNO (y) < FIRST_PSEUDO_REGISTER)
4024 if (same_regs[REGNO (x)] == -1)
4026 same_regs[REGNO (x)] = REGNO (y);
4029 /* If this is the first time we are seeing a register on the `Y'
4030 side, see if it is the last use. If not, we can't thread the
4031 jump, so mark it as not equivalent. */
4032 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
4038 return (same_regs[REGNO (x)] == (int) REGNO (y));
4043 /* If memory modified or either volatile, not equivalent.
4044 Else, check address. */
4045 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4048 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4051 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4057 /* Cancel a pending `same_regs' if setting equivalenced registers.
4058 Then process source. */
4059 if (GET_CODE (SET_DEST (x)) == REG
4060 && GET_CODE (SET_DEST (y)) == REG)
4062 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
4064 same_regs[REGNO (SET_DEST (x))] = -1;
4067 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
4072 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
4076 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
4079 return XEXP (x, 0) == XEXP (y, 0);
4082 return XSTR (x, 0) == XSTR (y, 0);
4091 fmt = GET_RTX_FORMAT (code);
4092 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4097 if (XWINT (x, i) != XWINT (y, i))
4103 if (XINT (x, i) != XINT (y, i))
4109 /* Two vectors must have the same length. */
4110 if (XVECLEN (x, i) != XVECLEN (y, i))
4113 /* And the corresponding elements must match. */
4114 for (j = 0; j < XVECLEN (x, i); j++)
4115 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
4116 XVECEXP (y, i, j), yinsn) == 0)
4121 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
4127 if (strcmp (XSTR (x, i), XSTR (y, i)))
4132 /* These are just backpointers, so they don't matter. */
4139 /* It is believed that rtx's at this level will never
4140 contain anything but integers and other rtx's,
4141 except for within LABEL_REFs and SYMBOL_REFs. */