/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS has a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.
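
   For illustration only (a hand-written sketch, not output of this
   pass), filling a MIPS-style delay slot with an independent insn
   taken from before the branch:

	addu $4,$4,1		 beq $2,$3,L1
	beq $2,$3,L1	  ==>	 addu $4,$4,1	; executes in the slot
	nop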
   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   has been taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.
   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops.  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.
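
   A rough sketch of (2) on an annulling machine (illustrative syntax
   only): the first insn at the guessed target is copied into the slot
   and annulled in the other direction, so a wrong guess costs only a
   wasted slot rather than a wrong result:

	Bcc L1			Bcc,a L1'
	...		==>	insn1		; annulled if not taken
      L1:		      L1:
	insn1			insn1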
   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "expr.h"
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
#include "resource.h"
#include "except.h"
#include "params.h"
#include "target.h"
#include "tree-pass.h"
#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
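
/* For example (a sketch of the idiom used throughout this file), an insn
   that still needs its slots filled is registered with

	obstack_ptr_grow (&unfilled_slots_obstack, insn);

   and the filling passes then index the current list as
   unfilled_slots_base[0 .. unfilled_slots_next - unfilled_slots_base - 1],
   recomputing both macros on every use as required above.  */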
/* Points to the label before the end of the function, or before a
   return insn.  */
static rtx function_return_label;
/* Likewise for a simple_return.  */
static rtx function_simple_return_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
static int stop_search_p (rtx, int);
static int resource_conflicts_p (struct resources *, struct resources *);
static int insn_references_resource_p (rtx, struct resources *, bool);
static int insn_sets_resource_p (rtx, struct resources *, bool);
static rtx find_end_label (rtx);
static rtx emit_delay_sequence (rtx, rtx, int);
static rtx add_to_delay_list (rtx, rtx);
static rtx delete_from_delay_slot (rtx);
static void delete_scheduled_jump (rtx);
static void note_delay_statistics (int, int);
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx optimize_skip (rtx);
#endif
static int get_jump_flags (rtx, rtx);
static int rare_destination (rtx);
static int mostly_true_jump (rtx, rtx);
static rtx get_branch_condition (rtx, rtx);
static int condition_dominates_p (rtx, rtx);
static int redirect_with_delay_slots_safe_p (rtx, rtx, rtx);
static int redirect_with_delay_list_safe_p (rtx, rtx, rtx);
static int check_annul_list_true_false (int, rtx);
static rtx steal_delay_list_from_target (rtx, rtx, rtx, rtx,
					 struct resources *,
					 struct resources *,
					 struct resources *,
					 int, int *, int *, rtx *);
static rtx steal_delay_list_from_fallthrough (rtx, rtx, rtx, rtx,
					      struct resources *,
					      struct resources *,
					      struct resources *,
					      int, int *, int *);
static void try_merge_delay_insns (rtx, rtx);
static rtx redundant_insn (rtx, rtx, rtx);
static int own_thread_p (rtx, rtx, int);
static void update_block (rtx, rtx);
static int reorg_redirect_jump (rtx, rtx);
static void update_reg_dead_notes (rtx, rtx);
static void fix_reg_dead_note (rtx, rtx);
static void update_reg_unused_notes (rtx, rtx);
static void fill_simple_delay_slots (int);
static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx,
				   int, int, int, int,
				   int *, rtx);
static void fill_eager_delay_slots (void);
static void relax_delay_slots (rtx);
#ifdef HAVE_return
static void make_return_insns (rtx);
#endif
/* A wrapper around next_active_insn which takes care to return ret_rtx
   unchanged.  */

static rtx
first_active_target_insn (rtx insn)
{
  if (ANY_RETURN_P (insn))
    return insn;
  return next_active_insn (insn);
}
/* Return true iff INSN is a simplejump, or any kind of return insn.  */

static bool
simplejump_or_return_p (rtx insn)
{
  return (JUMP_P (insn)
	  && (simplejump_p (insn) || ANY_RETURN_P (PATTERN (insn))));
}
/* Return TRUE if this insn should stop the search for insn to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (rtx insn, int labels_p)
{
  if (insn == 0)
    return 1;

  /* If the insn can throw an exception that is caught within the function,
     it may effectively perform a jump from the viewpoint of the function.
     Therefore act like for a jump.  */
  if (can_throw_internal (insn))
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      gcc_unreachable ();
    }
}
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

static int
resource_conflicts_p (struct resources *res1, struct resources *res2)
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}
/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return if the called
   routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (rtx insn, struct resources *res,
			    bool include_delayed_effects)
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}
/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (rtx insn, struct resources *res,
		      bool include_delayed_effects)
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0,
		      (include_delayed_effects
		       ? MARK_SRC_DEST_CALL
		       : MARK_SRC_DEST));
  return resource_conflicts_p (&insn_sets, res);
}
/* Find a label at the end of the function or before a RETURN.  If there
   is none, try to make one.  If that fails, returns 0.

   The property of such a label is that it is placed just before the
   epilogue or a bare RETURN insn, so that another bare RETURN can be
   turned into a jump to the label unconditionally.  In particular, the
   label cannot be placed before a RETURN insn with a filled delay slot.

   ??? There may be a problem with the current implementation.  Suppose
   we start with a bare RETURN insn and call find_end_label.  It may set
   function_return_label just before the RETURN.  Suppose the machinery
   is able to fill the delay slot of the RETURN insn afterwards.  Then
   function_return_label is no longer valid according to the property
   described above and find_end_label will still return it unmodified.
   Note that this is probably mitigated by the following observation:
   once function_return_label is made, it is very likely the target of
   a jump, so filling the delay slot of the RETURN will be much more
   difficult.

   KIND is either simple_return_rtx or ret_rtx, indicating which type of
   return we're looking for.  */
static rtx
find_end_label (rtx kind)
{
  rtx insn;
  rtx *plabel;

  if (kind == ret_rtx)
    plabel = &function_return_label;
  else
    {
      gcc_assert (kind == simple_return_rtx);
      plabel = &function_simple_return_label;
    }

  /* If we found one previously, return it.  */
  if (*plabel)
    return *plabel;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (NOTE_P (insn)
	 || (NONJUMP_INSN_P (insn)
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     function_return_label.  */
  if (BARRIER_P (insn)
      && JUMP_P (PREV_INSN (insn))
      && PATTERN (PREV_INSN (insn)) == kind)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      rtx label = gen_label_rtx ();
      LABEL_NUSES (label) = 0;

      /* Put the label before any USE insns that may precede the RETURN
	 insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (label, temp);
      *plabel = label;
    }

  else if (LABEL_P (insn))
    *plabel = insn;
  else
    {
      rtx label = gen_label_rtx ();
      LABEL_NUSES (label) = 0;
      /* If the basic block reorder pass moves the return insn to
	 some other place try to locate it again and put our
	 function_return_label there.  */
      while (insn && ! (JUMP_P (insn) && (PATTERN (insn) == kind)))
	insn = PREV_INSN (insn);
      if (insn)
	{
	  insn = PREV_INSN (insn);

	  /* Put the label before any USE insns that may precede the
	     RETURN insn.  */
	  while (GET_CODE (insn) == USE)
	    insn = PREV_INSN (insn);

	  emit_label_after (label, insn);
	}
      else
	{
#ifdef HAVE_epilogue
	  if (HAVE_epilogue
#ifdef HAVE_return
	      && ! HAVE_return
#endif
	      )
	    /* The RETURN insn has its delay slot filled so we cannot
	       emit the label just before it.  Since we already have
	       an epilogue and cannot emit a new RETURN, we cannot
	       emit the label at all.  */
	    return NULL_RTX;
#endif /* HAVE_epilogue */

	  /* Otherwise, make a new label and emit a RETURN and BARRIER,
	     if needed.  */
	  emit_label (label);
#ifdef HAVE_return
	  /* We don't bother trying to create a return insn if the
	     epilogue has filled delay-slots; we would have to try and
	     move the delay-slot fillers to the delay-slots for the new
	     return insn or in front of the new return insn.  */
	  if (crtl->epilogue_delay_list == NULL
	      && HAVE_return)
	    {
	      /* The return we make may have delay slots too.  */
	      rtx insn = gen_return ();
	      insn = emit_jump_insn (insn);
	      JUMP_LABEL (insn) = ret_rtx;
	      emit_barrier ();
	      if (num_delay_slots (insn) > 0)
		obstack_ptr_grow (&unfilled_slots_obstack, insn);
	    }
#endif
	}
      *plabel = label;
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (*plabel);

  return *plabel;
}
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
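
/* Schematically (a sketch, not exact RTL syntax), a branch with two
   filled slots becomes one insn whose pattern is a SEQUENCE:

	(insn (sequence [jump_insn	;; XVECEXP (seq, 0, 0), copy of INSN
			 slot-insn-1
			 slot-insn-2]))  */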
static rtx
emit_delay_sequence (rtx insn, rtx list, int length)
{
  int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && BARRIER_P (NEXT_INSN (insn)))
    {
      delete_related_insns (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (NEXT_INSN (seq_insn))
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (PREV_INSN (seq_insn))
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  INSN_LOCATOR (seq_insn) = INSN_LOCATOR (delay_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note, next;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* SPARC assembler, for instance, emits a warning when debug info is
	 output into the delay slot.  */
      if (INSN_LOCATOR (tem) && !INSN_LOCATOR (seq_insn))
	INSN_LOCATOR (seq_insn) = INSN_LOCATOR (tem);
      INSN_LOCATOR (tem) = 0;

      for (note = REG_NOTES (tem); note; note = next)
	{
	  next = XEXP (note, 1);
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_DEAD:
	      /* Remove any REG_DEAD notes because we can't rely on them now
		 that the insn has been moved.  */
	      remove_note (tem, note);
	      break;

	    case REG_LABEL_OPERAND:
	    case REG_LABEL_TARGET:
	      /* Keep the label reference count up to date.  */
	      if (LABEL_P (XEXP (note, 0)))
		LABEL_NUSES (XEXP (note, 0)) ++;
	      break;

	    default:
	      break;
	    }
	}
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && NONJUMP_INSN_P (PREV_INSN (seq_insn))
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && NONJUMP_INSN_P (NEXT_INSN (seq_insn))
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  gcc_assert (i == length + 1);

  return seq_insn;
}
/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */
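
/* E.g. (a sketch): after adding I1 and then I2, the list is the nested
   INSN_LIST (insn_list I1 (insn_list I2 nil)), with I1 executed first.  */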
static rtx
add_to_delay_list (rtx insn, rtx delay_list)
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (rtx insn)
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;
  int had_barrier = 0;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
    had_barrier = 1;

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_related_insns (seq_insn);
  add_insn_after (trial, prev, NULL);

  /* If there was a barrier after the old SEQUENCE, re-emit it.  */
  if (had_barrier)
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else if (JUMP_P (trial))
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (rtx insn)
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (NOTE_P (trial))
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, NULL_RTX))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_related_insns (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_related_insns (insn);
}
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (int slots_filled, int index)
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       into the delay slot.

	  (orig)		(skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2		insn2
      L1':		    L1':

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform the similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

	Bcc.n L1	Bcc',a L2
	insn		insn
      L1:	      L1:
	Bra L2		Bra L2

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */
static rtx
optimize_skip (rtx insn)
{
  rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || !NONJUMP_INSN_P (trial)
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags))
      || can_throw_internal (trial))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn))
       && ! (next_trial == 0 && crtl->epilogue_delay_list != 0))
      || (next_trial != 0
	  && simplejump_or_return_p (next_trial)
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn), 1))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_related_insns (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && simplejump_or_return_p (next_trial))
	{
	  rtx target_label = JUMP_LABEL (next_trial);
	  if (ANY_RETURN_P (target_label))
	    target_label = find_end_label (target_label);

	  if (target_label)
	    {
	      /* Recompute the flags based on TARGET_LABEL since threading
		 the jump to TARGET_LABEL may change the direction of the
		 jump (which may change the circumstances in which the
		 delay slot is nullified).  */
	      flags = get_jump_flags (insn, target_label);
	      if (eligible_for_annul_true (insn, 0, trial, flags))
		reorg_redirect_jump (insn, target_label);
	    }
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Non conditional branches return no direction information and
   are predicted as very likely taken.  */
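
/* For example (a sketch of the encoding): a conditional branch to a
   label later in the function that mostly_true_jump rates a 1 yields
   ATTR_FLAG_forward | ATTR_FLAG_likely, while any non-conditional
   branch gets ATTR_FLAG_very_likely | ATTR_FLAG_likely and no
   direction bits.  */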
static int
get_jump_flags (rtx insn, rtx label)
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is a return, then there is no way to determine the branch
     direction.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && !ANY_RETURN_P (label)
      && INSN_UID (insn) <= max_uid
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If INSN is a conditional branch, call mostly_true_jump to determine
     the branch prediction.

     Non conditional branches are predicted as very likely taken.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	case 2:
	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  break;
	case 1:
	  flags |= ATTR_FLAG_likely;
	  break;
	case 0:
	  flags |= ATTR_FLAG_unlikely;
	  break;
	case -1:
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}
/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if DEST will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (rtx insn)
{
  int jump_count = 0;
  rtx next;

  for (; insn && !ANY_RETURN_P (insn); insn = next)
    {
      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  return 0;

	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;

	case JUMP_INSN:
	  if (ANY_RETURN_P (PATTERN (insn)))
	    return 1;
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;
	  break;

	default:
	  break;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}
/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if nonzero, is the condition that JUMP_INSN is testing.  */
static int
mostly_true_jump (rtx jump_insn, rtx condition)
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx note;
  int rare_dest, rare_fallthrough;

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  note = find_reg_note (jump_insn, REG_BR_PROB, 0);
  if (note)
    {
      int prob = INTVAL (XEXP (note, 0));

      if (prob >= REG_BR_PROB_BASE * 9 / 10)
	return 2;
      else if (prob >= REG_BR_PROB_BASE / 2)
	return 1;
      else if (prob >= REG_BR_PROB_BASE / 10)
	return 0;
      else
	return -1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */
  rare_dest = rare_destination (target_label);
  rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* Predict that backward branches are usually taken and forward branches
     usually are not.  If we don't know whether this is forward or backward,
     assume the branch will be taken, since most are.  */
  return (ANY_RETURN_P (target_label) || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}
/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (rtx insn, rtx target)
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (ANY_RETURN_P (pat))
    return pat == target ? const_true_rtx : 0;

  if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && XEXP (src, 2) == pc_rtx
	   && GET_CODE (XEXP (src, 1)) == LABEL_REF
	   && XEXP (XEXP (src, 1), 0) == target)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && XEXP (src, 1) == pc_rtx
	   && GET_CODE (XEXP (src, 2)) == LABEL_REF
	   && XEXP (XEXP (src, 2), 0) == target)
    {
      enum rtx_code rev;
      rev = reversed_comparison_code (XEXP (src, 0), insn);
      if (rev != UNKNOWN)
	return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
			       XEXP (XEXP (src, 0), 0),
			       XEXP (XEXP (src, 0), 1));
    }

  return 0;
}
/* Return nonzero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (rtx condition, rtx insn)
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (rtx jump, rtx newlabel, rtx seq)
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}
/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (rtx jump, rtx newlabel, rtx delay_list)
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}
/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (int annul_true_p, rtx delay_list)
{
  rtx temp;

  if (delay_list)
    {
      for (temp = delay_list; temp; temp = XEXP (temp, 1))
	{
	  rtx trial = XEXP (temp, 0);

	  if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	      || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
	    return 0;
	}
    }

  return 1;
}
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a nonzero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value nonzero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */
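
/* Schematically (a sketch): if INSN branches to a SEQUENCE whose slot
   insns it may also execute, those insns are copied into INSN's own
   delay list and *PNEW_THREAD is pointed at the stolen branch's target:

	INSN: Bcc L1			INSN: Bcc,slots={I1,I2} ...
      L1: B L2, slots={I1,I2}	==>	(*PNEW_THREAD = first active
					 insn at L2; INSN is later
					 redirected there)  */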
static rtx
steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
			      rtx delay_list, struct resources *sets,
			      struct resources *needed,
			      struct resources *other_needed,
			      int slots_to_fill, int *pslots_filled,
			      int *pannul_p, rtx *pnew_thread)
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We can not steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We can not do this because we can
     not change the direction of the jump because the condition codes
     will effect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
      if (insn_references_resource_p (XVECEXP (seq , 0, 0), &cc_set, false))
	return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

#ifdef MD_CAN_REDIRECT_BRANCH
  /* On some targets, branches with delay slots can have a limited
     displacement.  Give the back end a chance to tell us we can't do
     this.  */
  if (! MD_CAN_REDIRECT_BRANCH (insn, XVECEXP (seq, 0, 0)))
    return delay_list;
#endif

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, false)
	  || insn_sets_resource_p (trial, needed, false)
	  || insn_sets_resource_p (trial, sets, false)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay list.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, false)
		   && ! may_trap_or_fault_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))
	     && (must_annul = 1,
		 check_annul_list_true_false (0, delay_list)
		 && check_annul_list_true_false (0, new_delay_list)
		 && eligible_for_annul_false (insn, total_slots_filled,
					      trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = first_active_target_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */
static rtx
steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx seq,
				   rtx delay_list, struct resources *sets,
				   struct resources *needed,
				   struct resources *other_needed,
				   int slots_to_fill, int *pslots_filled,
				   int *pannul_p)
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_or_return_p (XVECEXP (seq, 0, 0)))
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, false)
	  || insn_sets_resource_p (trial, needed, false)
	  || insn_sets_resource_p (trial, sets, false)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )

	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, false)
		   && ! may_trap_or_fault_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (must_annul || delay_list == NULL) && (must_annul = 1,
	     check_annul_list_true_false (1, delay_list)
	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */
static void
try_merge_delay_insns (rtx insn, rtx thread)
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = JUMP_P (delay_insn) && INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1 ; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
	mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed,
				   true);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (NONJUMP_INSN_P (trial)
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, true)
	  && ! insn_sets_resource_p (trial, &set, true)
	  && ! insn_sets_resource_p (trial, &needed, true)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{

	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_related_insns (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	}

      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (trial, &needed, true);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && NONJUMP_INSN_P (trial)
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && !(JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
	   && INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0))))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (filled_insn, &needed, true);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, true)
	      && ! insn_sets_resource_p (dtrial, &set, true)
	      && ! insn_sets_resource_p (dtrial, &needed, true)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  rtx new_rtx;

		  update_block (dtrial, thread);
		  new_rtx = delete_from_delay_slot (dtrial);
		  if (INSN_DELETED_P (thread))
		    thread = new_rtx;
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
						  merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	  else
	    {
	      /* Keep track of the set/referenced resources for the delay
		 slots of any trial insns we encounter.  */
	      mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
	      mark_referenced_resources (dtrial, &needed, true);
	    }
	}
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not any more.  In that case delete all the
     merged insns.  Also clear the INSN_FROM_TARGET_P bit of each insn in
     the delay list so that we know that it isn't only being used at the
     target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      rtx new_rtx;

	      update_block (XEXP (merged_insns, 0), thread);
	      new_rtx = delete_from_delay_slot (XEXP (merged_insns, 0));
	      if (INSN_DELETED_P (thread))
		thread = new_rtx;
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_related_insns (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
static rtx
redundant_insn (rtx insn, rtx target, rtx delay_list)
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;
  unsigned insns_to_search;

  /* If INSN has any REG_UNUSED notes, it can't match anything since we
     are allowed to not actually assign to such a register.  */
  if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
    return 0;

  /* Scan backwards looking for a match.  */
  for (trial = PREV_INSN (target),
	 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && insns_to_search > 0;
       trial = PREV_INSN (trial))
    {
      if (LABEL_P (trial))
	return 0;

      if (!NONDEBUG_INSN_P (trial))
	continue;
      --insns_to_search;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  /* Stop for a CALL and its delay slots because it is difficult to
	     track its resource needs correctly.  */
	  if (CALL_P (XVECEXP (pat, 0, 0)))
	    return 0;

	  /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
	     slots because it is difficult to track its resource needs
	     correctly.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
		&& rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
		&& ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
	      break;

	  /* If found a match, exit this loop early.  */
	  if (i > 0)
	    break;
	}

      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
	       && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
	break;
    }

  /* If we didn't find an insn that matches, return 0.  */
  if (trial == 0)
    return 0;

  /* See what resources this insn sets and needs.  If they overlap, or
     if this insn references CC0, it can't be redundant.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);
  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
  mark_referenced_resources (insn, &needed, true);

  /* If TARGET is a SEQUENCE, get the main insn.  */
  if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
    target_main = XVECEXP (PATTERN (target), 0, 0);

  if (resource_conflicts_p (&needed, &set)
#ifdef HAVE_cc0
      || reg_mentioned_p (cc0_rtx, ipat)
#endif
      /* The insn requiring the delay may not set anything needed or set by
	 INSN.  */
      || insn_sets_resource_p (target_main, &needed, true)
      || insn_sets_resource_p (target_main, &set, true))
    return 0;

  /* Insns we pass may not set either NEEDED or SET, so merge them for
     simpler tests.  */
  needed.memory |= set.memory;
  needed.unch_memory |= set.unch_memory;
  IOR_HARD_REG_SET (needed.regs, set.regs);

  /* This insn isn't redundant if it conflicts with an insn that either is
     or will be in a delay slot of TARGET.  */

  while (delay_list)
    {
      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, true))
	return 0;
      delay_list = XEXP (delay_list, 1);
    }

  if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed,
				true))
	return 0;

  /* Scan backwards until we reach a label or an insn that uses something
     INSN sets or sets something insn uses or sets.  */

  for (trial = PREV_INSN (target),
	 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && !LABEL_P (trial) && insns_to_search > 0;
       trial = PREV_INSN (trial))
    {
      if (!NONDEBUG_INSN_P (trial))
	continue;
      --insns_to_search;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  bool annul_p = false;
	  rtx control = XVECEXP (pat, 0, 0);

	  /* If this is a CALL_INSN and its delay slots, it is hard to track
	     the resource needs properly, so give up.  */
	  if (CALL_P (control))
	    return 0;

	  /* If this is an INSN or JUMP_INSN with delayed effects, it
	     is hard to track the resource needs properly, so give up.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (control))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (control))
	    return 0;
#endif

	  if (JUMP_P (control))
	    annul_p = INSN_ANNULLED_BRANCH_P (control);

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    {
	      rtx candidate = XVECEXP (pat, 0, i);

	      /* If an insn will be annulled if the branch is false, it isn't
		 considered as a possible duplicate insn.  */
	      if (rtx_equal_p (PATTERN (candidate), ipat)
		  && ! (annul_p && INSN_FROM_TARGET_P (candidate)))
		{
		  /* Show that this insn will be used in the sequel.  */
		  INSN_FROM_TARGET_P (candidate) = 0;
		  return candidate;
		}

	      /* Unless this is an annulled insn from the target of a branch,
		 we must stop if it sets anything needed or set by INSN.  */
	      if ((!annul_p || !INSN_FROM_TARGET_P (candidate))
		  && insn_sets_resource_p (candidate, &needed, true))
		return 0;
	    }

	  /* If the insn requiring the delay slot conflicts with INSN, we
	     must stop.  */
	  if (insn_sets_resource_p (control, &needed, true))
	    return 0;
	}
      else
	{
	  /* See if TRIAL is the same as INSN.  */
	  pat = PATTERN (trial);
	  if (rtx_equal_p (pat, ipat))
	    return trial;

	  /* Can't go any further if TRIAL conflicts with INSN.  */
	  if (insn_sets_resource_p (trial, &needed, true))
	    return 0;
	}
    }

  return 0;
}
/* Return 1 if THREAD can only be executed in one way.  If LABEL is nonzero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is nonzero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.  */
static int
own_thread_p (rtx thread, rtx label, int allow_fallthrough)
{
  rtx active_insn;
  rtx insn;

  /* We don't own the function end.  */
  if (thread == 0 || ANY_RETURN_P (thread))
    return 0;

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn)
	&& (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || !BARRIER_P (insn);
       insn = prev_nonnote_insn (insn))
    if (insn == 0
	|| LABEL_P (insn)
	|| (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) != USE
	    && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}
/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */

static void
update_block (rtx insn, rtx where)
{
  /* Ignore if this was in a delay slot and it came from the target of
     a branch.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  incr_ticks_for_insn (insn);
}
/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

static int
reorg_redirect_jump (rtx jump, rtx nlabel)
{
  incr_ticks_for_insn (jump);
  return redirect_jump (jump, nlabel, 1);
}
1925 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1926 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1927 that reference values used in INSN. If we find one, then we move the
1928 REG_DEAD note to INSN.
1930 This is needed to handle the case where a later insn (after INSN) has a
1931 REG_DEAD note for a register used by INSN, and this later insn subsequently
1932 gets moved before a CODE_LABEL because it is a redundant insn. In this
1933 case, mark_target_live_regs may be confused into thinking the register
1934 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
1937 update_reg_dead_notes (rtx insn, rtx delayed_insn)
1941 for (p = next_nonnote_insn (insn); p != delayed_insn;
1942 p = next_nonnote_insn (p))
1943 for (link = REG_NOTES (p); link; link = next)
1945 next = XEXP (link, 1);
1947 if (REG_NOTE_KIND (link) != REG_DEAD
1948 || !REG_P (XEXP (link, 0)))
1949 continue;
1951 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1953 /* Move the REG_DEAD note from P to INSN. */
1954 remove_note (p, link);
1955 XEXP (link, 1) = REG_NOTES (insn);
1956 REG_NOTES (insn) = link;
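/* Illustration (editorial, not from the original sources): suppose INSN
   uses r4 and an insn P between INSN and DELAYED_INSN carries
   (REG_DEAD r4).  If P is later deleted as redundant and re-emitted
   before a CODE_LABEL, the stale note could make mark_target_live_regs
   believe r4 is dead at that label.  Moving the note onto INSN, as
   above, keeps the death information attached to the insn that still
   uses the register.  */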
1961 /* Called when an insn redundant with start_insn is deleted. If there
1962 is a REG_DEAD note for the target of start_insn between start_insn
1963 and stop_insn, then the REG_DEAD note needs to be deleted since the
1964 value no longer dies there.
1966 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1967 confused into thinking the register is dead. */
1970 fix_reg_dead_note (rtx start_insn, rtx stop_insn)
1974 for (p = next_nonnote_insn (start_insn); p != stop_insn;
1975 p = next_nonnote_insn (p))
1976 for (link = REG_NOTES (p); link; link = next)
1978 next = XEXP (link, 1);
1980 if (REG_NOTE_KIND (link) != REG_DEAD
1981 || !REG_P (XEXP (link, 0)))
1982 continue;
1984 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
1986 remove_note (p, link);
1987 return;
1992 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1994 This handles the case of udivmodXi4 instructions which optimize their
1995 output depending on whether any REG_UNUSED notes are present.
1996 We must make sure that INSN calculates as many results as REDUNDANT_INSN
1997 does. */
2000 update_reg_unused_notes (rtx insn, rtx redundant_insn)
2004 for (link = REG_NOTES (insn); link; link = next)
2006 next = XEXP (link, 1);
2008 if (REG_NOTE_KIND (link) != REG_UNUSED
2009 || !REG_P (XEXP (link, 0)))
2010 continue;
2012 if (! find_regno_note (redundant_insn, REG_UNUSED,
2013 REGNO (XEXP (link, 0))))
2014 remove_note (insn, link);
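/* Editorial example: if INSN is a udivmodsi4 whose remainder output
   carries (REG_UNUSED r5) while REDUNDANT_INSN carries no such note,
   the loop above removes the note from INSN, so INSN is still treated
   as computing every result that REDUNDANT_INSN computes.  */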
2018 /* Return the label before INSN, or put a new label there. */
2021 get_label_before (rtx insn)
2025 /* Find an existing label at this point
2026 or make a new one if there is none. */
2027 label = prev_nonnote_insn (insn);
2029 if (label == 0 || !LABEL_P (label))
2031 rtx prev = PREV_INSN (insn);
2033 label = gen_label_rtx ();
2034 emit_label_after (label, prev);
2035 LABEL_NUSES (label) = 0;
2037 return label;
2040 /* Scan a function looking for insns that need a delay slot and find insns to
2041 put into the delay slot.
2043 NON_JUMPS_P is nonzero if we are to only try to fill non-jump insns (such
2044 as calls). We do these first since we don't want jump insns (that are
2045 easier to fill) to get the only insns that could be used for non-jump insns.
2046 When it is zero, only try to fill JUMP_INSNs.
2048 When slots are filled in this manner, the insns (including the
2049 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2050 it is possible to tell whether a delay slot has really been filled
2051 or not. `final' knows how to deal with this, by communicating
2052 through FINAL_SEQUENCE. */
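/* Editorial sketch, not part of this file's interface: how a consumer
   such as `final' can recognize a filled slot.  A filled delay insn
   travels together with its slot insns as one SEQUENCE pattern; the
   helper name below is hypothetical.  */

static int
sketch_filled_slot_count (rtx insn)
{
  rtx pat = PATTERN (insn);

  /* Element 0 of a SEQUENCE is the insn needing delay slots; the
     remaining elements are the insns that were placed in them.  */
  if (NONJUMP_INSN_P (insn) && GET_CODE (pat) == SEQUENCE)
    return XVECLEN (pat, 0) - 1;

  /* No SEQUENCE wrapper: the slots were never filled.  */
  return 0;
}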
2055 fill_simple_delay_slots (int non_jumps_p)
2057 rtx insn, pat, trial, next_trial;
2059 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2060 struct resources needed, set;
2061 int slots_to_fill, slots_filled;
2064 for (i = 0; i < num_unfilled_slots; i++)
2067 /* Get the next insn to fill. If it has already had any slots assigned,
2068 we can't do anything with it. Maybe we'll improve this later. */
2070 insn = unfilled_slots_base[i];
2071 if (insn == 0
2072 || INSN_DELETED_P (insn)
2073 || (NONJUMP_INSN_P (insn)
2074 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2075 || (JUMP_P (insn) && non_jumps_p)
2076 || (!JUMP_P (insn) && ! non_jumps_p))
2077 continue;
2079 /* It may have been that this insn used to need delay slots, but
2080 now doesn't; ignore in that case. This can happen, for example,
2081 on the HP PA RISC, where the number of delay slots depends on
2082 what insns are nearby. */
2083 slots_to_fill = num_delay_slots (insn);
2085 /* Some machine descriptions have defined instructions to have
2086 delay slots only in certain circumstances which may depend on
2087 nearby insns (which change due to reorg's actions).
2089 For example, the PA port normally has delay slots for unconditional
2090 jumps.
2092 However, the PA port claims such jumps do not have a delay slot
2093 if they are immediate successors of certain CALL_INSNs. This
2094 allows the port to favor filling the delay slot of the call with
2095 the unconditional jump. */
2096 if (slots_to_fill == 0)
2097 continue;
2099 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2100 says how many. After initialization, first try optimizing
2102 call _foo		call _foo
2103 nop			add %o7,.-L1,%o7
2104 b,a L1
2105 nop
2107 If this case applies, the delay slot of the call is filled with
2108 the unconditional jump. This is done first to avoid having the
2109 delay slot of the call filled in the backward scan. Also, since
2110 the unconditional jump is likely to also have a delay slot, that
2111 insn must exist when it is subsequently scanned.
2113 This is tried on each insn with delay slots as some machines
2114 have insns which perform calls, but are not represented as
2115 CALL_INSNs. */
2120 if (JUMP_P (insn))
2121 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2122 else
2123 flags = get_jump_flags (insn, NULL_RTX);
2125 if ((trial = next_active_insn (insn))
2126 && JUMP_P (trial)
2127 && simplejump_p (trial)
2128 && eligible_for_delay (insn, slots_filled, trial, flags)
2129 && no_labels_between_p (insn, trial)
2130 && ! can_throw_internal (trial))
2133 slots_filled++;
2134 delay_list = add_to_delay_list (trial, delay_list);
2136 /* TRIAL may have had its delay slot filled, then unfilled. When
2137 the delay slot is unfilled, TRIAL is placed back on the unfilled
2138 slots obstack. Unfortunately, it is placed on the end of the
2139 obstack, not in its original location. Therefore, we must search
2140 from entry i + 1 to the end of the unfilled slots obstack to
2141 try and find TRIAL. */
2142 tmp = &unfilled_slots_base[i + 1];
2143 while (*tmp != trial && tmp != unfilled_slots_next)
2144 tmp++;
2146 /* Remove the unconditional jump from consideration for delay slot
2147 filling and unthread it. */
2148 if (*tmp == trial)
2149 *tmp = 0;
2151 rtx next = NEXT_INSN (trial);
2152 rtx prev = PREV_INSN (trial);
2153 if (prev)
2154 NEXT_INSN (prev) = next;
2155 if (next)
2156 PREV_INSN (next) = prev;
2160 /* Now, scan backwards from the insn to search for a potential
2161 delay-slot candidate. Stop searching when a label or jump is hit.
2163 For each candidate, if it is to go into the delay slot (moved
2164 forward in execution sequence), it must not need or set any resources
2165 that were set by later insns and must not set any resources that
2166 are needed for those insns.
2168 The delay slot insn itself sets resources unless it is a call
2169 (in which case the called routine, not the insn itself, is doing
2170 the setting). */
2172 if (slots_filled < slots_to_fill)
2174 CLEAR_RESOURCE (&needed);
2175 CLEAR_RESOURCE (&set);
2176 mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
2177 mark_referenced_resources (insn, &needed, false);
2179 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2180 trial = next_trial)
2182 next_trial = prev_nonnote_insn (trial);
2184 /* This must be an INSN or CALL_INSN. */
2185 pat = PATTERN (trial);
2187 /* Stand-alone USE and CLOBBER are just for flow. */
2188 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2189 continue;
2191 /* Check for resource conflict first, to avoid unnecessary
2192 splitting. */
2193 if (! insn_references_resource_p (trial, &set, true)
2194 && ! insn_sets_resource_p (trial, &set, true)
2195 && ! insn_sets_resource_p (trial, &needed, true)
2196 #ifdef HAVE_cc0
2197 /* Can't separate set of cc0 from its use. */
2198 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2199 #endif
2200 && ! can_throw_internal (trial))
2202 trial = try_split (pat, trial, 1);
2203 next_trial = prev_nonnote_insn (trial);
2204 if (eligible_for_delay (insn, slots_filled, trial, flags))
2206 /* In this case, we are searching backward, so if we
2207 find insns to put on the delay list, we want
2208 to put them at the head, rather than the
2209 tail, of the list. */
2211 update_reg_dead_notes (trial, insn);
2212 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2213 trial, delay_list);
2214 update_block (trial, trial);
2215 delete_related_insns (trial);
2216 if (slots_to_fill == ++slots_filled)
2217 break;
2222 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2223 mark_referenced_resources (trial, &needed, true);
2227 /* If all needed slots haven't been filled, we come here. */
2229 /* Try to optimize case of jumping around a single insn. */
2230 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2231 if (slots_filled != slots_to_fill
2232 && delay_list == 0
2233 && JUMP_P (insn)
2234 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2236 delay_list = optimize_skip (insn);
2238 #endif
2242 /* Try to get insns from beyond the insn needing the delay slot.
2243 These insns can neither set nor reference resources set in insns being
2244 skipped, cannot set resources in the insn being skipped, and, if this
2245 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2246 call might not return).
2248 There used to be code which continued past the target label if
2249 we saw all uses of the target label. This code did not work,
2250 because it failed to account for some instructions which were
2251 both annulled and marked as from the target. This can happen as a
2252 result of optimize_skip. Since this code was redundant with
2253 fill_eager_delay_slots anyways, it was just deleted. */
2255 if (slots_filled != slots_to_fill
2256 /* If this instruction could throw an exception which is
2257 caught in the same function, then it's not safe to fill
2258 the delay slot with an instruction from beyond this
2259 point. For example, consider:
2261 int i = 2;
2263 try { f(); i = 3; } catch (...) {}
2265 return i;
2270 Even though `i' is a local variable, we must be sure not
2271 to put `i = 3' in the delay slot if `f' might throw an
2272 exception.
2274 Presumably, we should also check to see if we could get
2275 back to this function via `setjmp'. */
2276 && ! can_throw_internal (insn)
2277 && (!JUMP_P (insn)
2278 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2279 && ! simplejump_p (insn)
2280 && !ANY_RETURN_P (JUMP_LABEL (insn)))))
2282 /* Invariant: If insn is a JUMP_INSN, the insn's jump
2283 label. Otherwise, zero. */
2284 rtx target = 0;
2285 int maybe_never = 0;
2286 rtx pat, trial_delay;
2288 CLEAR_RESOURCE (&needed);
2289 CLEAR_RESOURCE (&set);
2291 if (CALL_P (insn))
2293 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2294 mark_referenced_resources (insn, &needed, true);
2295 maybe_never = 1;
2297 else
2299 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2300 mark_referenced_resources (insn, &needed, true);
2301 if (JUMP_P (insn))
2302 target = JUMP_LABEL (insn);
2305 if (target == 0 || ANY_RETURN_P (target))
2306 for (trial = next_nonnote_insn (insn); !stop_search_p (trial, 1);
2307 trial = next_trial)
2309 next_trial = next_nonnote_insn (trial);
2311 /* This must be an INSN or CALL_INSN. */
2312 pat = PATTERN (trial);
2314 /* Stand-alone USE and CLOBBER are just for flow. */
2315 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2316 continue;
2318 /* If this already has filled delay slots, get the insn needing
2319 the delay slots. */
2320 if (GET_CODE (pat) == SEQUENCE)
2321 trial_delay = XVECEXP (pat, 0, 0);
2322 else
2323 trial_delay = trial;
2325 /* Stop our search when seeing a jump. */
2326 if (JUMP_P (trial_delay))
2327 break;
2329 /* See if we have a resource problem before we try to
2330 split. */
2331 if (GET_CODE (pat) != SEQUENCE
2332 && ! insn_references_resource_p (trial, &set, true)
2333 && ! insn_sets_resource_p (trial, &set, true)
2334 && ! insn_sets_resource_p (trial, &needed, true)
2335 #ifdef HAVE_cc0
2336 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2337 #endif
2338 && ! (maybe_never && may_trap_or_fault_p (pat))
2339 && (trial = try_split (pat, trial, 0))
2340 && eligible_for_delay (insn, slots_filled, trial, flags)
2341 && ! can_throw_internal(trial))
2343 next_trial = next_nonnote_insn (trial);
2344 delay_list = add_to_delay_list (trial, delay_list);
2346 #ifdef HAVE_cc0
2347 if (reg_mentioned_p (cc0_rtx, pat))
2348 link_cc0_insns (trial);
2349 #endif
2351 delete_related_insns (trial);
2352 if (slots_to_fill == ++slots_filled)
2353 break;
2357 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2358 mark_referenced_resources (trial, &needed, true);
2360 /* Ensure we don't put insns between the setting of cc and the
2361 comparison by moving a setting of cc into an earlier delay
2362 slot since these insns could clobber the condition code. */
2363 set.cc = 1;
2365 /* If this is a call or jump, we might not get here. */
2366 if (CALL_P (trial_delay)
2367 || JUMP_P (trial_delay))
2368 maybe_never = 1;
2371 /* If there are slots left to fill and our search was stopped by an
2372 unconditional branch, try the insn at the branch target. We can
2373 redirect the branch if it works.
2375 Don't do this if the insn at the branch target is a branch. */
2376 if (slots_to_fill != slots_filled
2377 && trial
2378 && jump_to_label_p (trial)
2379 && simplejump_p (trial)
2380 && (target == 0 || JUMP_LABEL (trial) == target)
2381 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2382 && ! (NONJUMP_INSN_P (next_trial)
2383 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2384 && !JUMP_P (next_trial)
2385 && ! insn_references_resource_p (next_trial, &set, true)
2386 && ! insn_sets_resource_p (next_trial, &set, true)
2387 && ! insn_sets_resource_p (next_trial, &needed, true)
2388 #ifdef HAVE_cc0
2389 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2390 #endif
2391 && ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
2392 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2393 && eligible_for_delay (insn, slots_filled, next_trial, flags)
2394 && ! can_throw_internal (trial))
2396 /* See comment in relax_delay_slots about necessity of using
2397 next_real_insn here. */
2398 rtx new_label = next_real_insn (next_trial);
2400 if (new_label != 0)
2401 new_label = get_label_before (new_label);
2402 else
2403 new_label = find_end_label (simple_return_rtx);
2407 delay_list
2408 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2410 reorg_redirect_jump (trial, new_label);
2412 /* If we merged because we both jumped to the same place,
2413 redirect the original insn also. */
2414 if (target)
2415 reorg_redirect_jump (insn, new_label);
2420 /* If this is an unconditional jump, then try to get insns from the
2421 target of the jump. */
2422 if (JUMP_P (insn)
2423 && simplejump_p (insn)
2424 && slots_filled != slots_to_fill)
2425 delay_list
2426 = fill_slots_from_thread (insn, const_true_rtx,
2427 next_active_insn (JUMP_LABEL (insn)),
2428 NULL, 1, 1,
2429 own_thread_p (JUMP_LABEL (insn),
2430 JUMP_LABEL (insn), 0),
2431 slots_to_fill, &slots_filled,
2432 delay_list);
2434 if (delay_list)
2435 unfilled_slots_base[i]
2436 = emit_delay_sequence (insn, delay_list, slots_filled);
2438 if (slots_to_fill == slots_filled)
2439 unfilled_slots_base[i] = 0;
2441 note_delay_statistics (slots_filled, 0);
2444 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2445 /* See if the epilogue needs any delay slots. Try to fill them if so.
2446 The only thing we can do is scan backwards from the end of the
2447 function. If we did this in a previous pass, it is incorrect to do it
2448 again. */
2449 if (crtl->epilogue_delay_list)
2450 return;
2452 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2453 if (slots_to_fill == 0)
2454 return;
2457 CLEAR_RESOURCE (&set);
2459 /* The frame pointer and stack pointer are needed at the beginning of
2460 the epilogue, so instructions setting them can not be put in the
2461 epilogue delay slot. However, everything else needed at function
2462 end is safe, so we don't want to use end_of_function_needs here. */
2463 CLEAR_RESOURCE (&needed);
2464 if (frame_pointer_needed)
2466 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2467 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2468 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2469 #endif
2470 if (! EXIT_IGNORE_STACK
2471 || current_function_sp_is_unchanging)
2472 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2474 else
2475 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2477 #ifdef EPILOGUE_USES
2478 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2480 if (EPILOGUE_USES (i))
2481 SET_HARD_REG_BIT (needed.regs, i);
2483 #endif
2485 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2486 trial = PREV_INSN (trial))
2490 pat = PATTERN (trial);
2491 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2492 continue;
2494 if (! insn_references_resource_p (trial, &set, true)
2495 && ! insn_sets_resource_p (trial, &needed, true)
2496 && ! insn_sets_resource_p (trial, &set, true)
2497 #ifdef HAVE_cc0
2498 /* Don't want to mess with cc0 here. */
2499 && ! reg_mentioned_p (cc0_rtx, pat)
2500 #endif
2501 && ! can_throw_internal (trial))
2503 trial = try_split (pat, trial, 1);
2504 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2506 /* Here as well we are searching backward, so put the
2507 insns we find on the head of the list. */
2509 crtl->epilogue_delay_list
2510 = gen_rtx_INSN_LIST (VOIDmode, trial,
2511 crtl->epilogue_delay_list);
2512 mark_end_of_function_resources (trial, true);
2513 update_block (trial, trial);
2514 delete_related_insns (trial);
2516 /* Clear deleted bit so final.c will output the insn. */
2517 INSN_DELETED_P (trial) = 0;
2519 if (slots_to_fill == ++slots_filled)
2520 break;
2525 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2526 mark_referenced_resources (trial, &needed, true);
2529 note_delay_statistics (slots_filled, 0);
2533 /* Follow any unconditional jump at LABEL;
2534 return the ultimate label reached by any such chain of jumps.
2535 Return a suitable return rtx if the chain ultimately leads to a
2536 return instruction.
2537 If LABEL is not followed by a jump, return LABEL.
2538 If the chain loops or we can't find end, return LABEL,
2539 since that tells caller to avoid changing the insn. */
2542 follow_jumps (rtx label)
2549 if (ANY_RETURN_P (label))
2550 return label;
2553 && (insn = next_active_insn (value)) != 0
2555 && JUMP_LABEL (insn) != NULL_RTX
2556 && ((any_uncondjump_p (insn) && onlyjump_p (insn))
2557 || ANY_RETURN_P (PATTERN (insn)))
2558 && (next = NEXT_INSN (insn))
2559 && BARRIER_P (next));
2562 rtx this_label = JUMP_LABEL (insn);
2565 /* If we have found a cycle, make the insn jump to itself. */
2566 if (this_label == label)
2567 return label;
2568 if (ANY_RETURN_P (this_label))
2569 return this_label;
2570 tem = next_active_insn (this_label);
2571 if (tem
2572 && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2573 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2574 break;
2583 /* Try to find insns to place in delay slots.
2585 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2586 or is an unconditional branch if CONDITION is const_true_rtx.
2587 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2589 THREAD is a flow-of-control, either the insns to be executed if the
2590 branch is true or if the branch is false, THREAD_IF_TRUE says which.
2592 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2593 to see if any potential delay slot insns set things needed there.
2595 LIKELY is nonzero if it is extremely likely that the branch will be
2596 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2597 end of a loop back up to the top.
2599 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
2600 thread. I.e., it is the fallthrough code of our jump or the target of the
2601 jump when we are the only jump going there.
2603 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2604 case, we can only take insns from the head of the thread for our delay
2605 slot. We then adjust the jump to point after the insns we have taken. */
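/* Editorial picture of the arguments above for a conditional branch,
   not from the original sources:

	insn:	beq	L1	; tests CONDITION
		insn_f		; fall-through thread
		...
   L1:	insn_t			; target thread

   Filling from the target means THREAD = insn_t, OPPOSITE_THREAD =
   insn_f and THREAD_IF_TRUE = 1; filling from the fall-through swaps
   the two threads and clears THREAD_IF_TRUE.  */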
2608 fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
2609 rtx opposite_thread, int likely, int thread_if_true,
2610 int own_thread, int slots_to_fill,
2611 int *pslots_filled, rtx delay_list)
2614 struct resources opposite_needed, set, needed;
2620 /* Validate our arguments. */
2621 gcc_assert(condition != const_true_rtx || thread_if_true);
2622 gcc_assert(own_thread || thread_if_true);
2624 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2626 /* If our thread is the end of subroutine, we can't get any delay
2627 insns from that. */
2628 if (thread == NULL_RTX || ANY_RETURN_P (thread))
2629 return delay_list;
2631 /* If this is an unconditional branch, nothing is needed at the
2632 opposite thread. Otherwise, compute what is needed there. */
2633 if (condition == const_true_rtx)
2634 CLEAR_RESOURCE (&opposite_needed);
2636 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2638 /* If the insn at THREAD can be split, do it here to avoid having to
2639 update THREAD and NEW_THREAD if it is done in the loop below. Also
2640 initialize NEW_THREAD. */
2642 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2644 /* Scan insns at THREAD. We are looking for an insn that can be removed
2645 from THREAD (it neither sets nor references resources that were set
2646 ahead of it and it doesn't set anything needed by the insns ahead of
2647 it) and that either can be placed in an annulling insn or isn't
2648 needed at OPPOSITE_THREAD. */
2650 CLEAR_RESOURCE (&needed);
2651 CLEAR_RESOURCE (&set);
2653 /* If we do not own this thread, we must stop as soon as we find
2654 something that we can't put in a delay slot, since all we can do
2655 is branch into THREAD at a later point. Therefore, labels stop
2656 the search if this is not the `true' thread. */
2658 for (trial = thread;
2659 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2660 trial = next_nonnote_insn (trial))
2664 /* If we have passed a label, we no longer own this thread. */
2665 if (LABEL_P (trial))
2667 own_thread = 0;
2668 continue;
2671 pat = PATTERN (trial);
2672 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2675 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2676 don't separate or copy insns that set and use CC0. */
2677 if (! insn_references_resource_p (trial, &set, true)
2678 && ! insn_sets_resource_p (trial, &set, true)
2679 && ! insn_sets_resource_p (trial, &needed, true)
2680 #ifdef HAVE_cc0
2681 && ! (reg_mentioned_p (cc0_rtx, pat)
2682 && (! own_thread || ! sets_cc0_p (pat)))
2683 #endif
2684 && ! can_throw_internal (trial))
2688 /* If TRIAL is redundant with some insn before INSN, we don't
2689 actually need to add it to the delay list; we can merely pretend
2690 we did.
2691 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2693 fix_reg_dead_note (prior_insn, insn);
2695 if (own_thread)
2696 update_block (trial, thread);
2697 if (trial == thread)
2699 thread = next_active_insn (thread);
2700 if (new_thread == trial)
2701 new_thread = thread;
2704 delete_related_insns (trial);
2707 else
2708 update_reg_unused_notes (prior_insn, trial);
2709 new_thread = next_active_insn (trial);
2711 continue;
2715 /* There are two ways we can win: If TRIAL doesn't set anything
2716 needed at the opposite thread and can't trap, or if it can
2717 go into an annulled delay slot. */
2718 if (!must_annul
2719 && (condition == const_true_rtx
2720 || (! insn_sets_resource_p (trial, &opposite_needed, true)
2721 && ! may_trap_or_fault_p (pat))))
2723 old_trial = trial;
2724 trial = try_split (pat, trial, 0);
2725 if (new_thread == old_trial)
2726 new_thread = trial;
2727 if (thread == old_trial)
2728 thread = trial;
2729 pat = PATTERN (trial);
2730 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2731 goto winner;
2734 #ifdef ANNUL_IFTRUE_SLOTS
2737 #ifdef ANNUL_IFFALSE_SLOTS
2742 old_trial = trial;
2743 trial = try_split (pat, trial, 0);
2744 if (new_thread == old_trial)
2745 new_thread = trial;
2746 if (thread == old_trial)
2747 thread = trial;
2748 pat = PATTERN (trial);
2749 if ((must_annul || delay_list == NULL) && (thread_if_true
2750 ? check_annul_list_true_false (0, delay_list)
2751 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2752 : check_annul_list_true_false (1, delay_list)
2753 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2755 must_annul = 1;
2756 winner:
2760 #ifdef HAVE_cc0
2761 if (reg_mentioned_p (cc0_rtx, pat))
2762 link_cc0_insns (trial);
2763 #endif
2765 /* If we own this thread, delete the insn. If this is the
2766 destination of a branch, show that a basic block status
2767 may have been updated. In any case, mark the new
2768 starting point of this thread. */
2770 if (own_thread)
2773 update_block (trial, thread);
2774 if (trial == thread)
2776 thread = next_active_insn (thread);
2777 if (new_thread == trial)
2778 new_thread = thread;
2781 /* We are moving this insn, not deleting it. We must
2782 temporarily increment the use count on any referenced
2783 label lest it be deleted by delete_related_insns. */
2784 for (note = REG_NOTES (trial);
2785 note;
2786 note = XEXP (note, 1))
2787 if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
2788 || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
2790 /* REG_LABEL_OPERAND could be
2791 NOTE_INSN_DELETED_LABEL too. */
2792 if (LABEL_P (XEXP (note, 0)))
2793 LABEL_NUSES (XEXP (note, 0))++;
2794 else
2795 gcc_assert (REG_NOTE_KIND (note)
2796 == REG_LABEL_OPERAND);
2798 if (jump_to_label_p (trial))
2799 LABEL_NUSES (JUMP_LABEL (trial))++;
2801 delete_related_insns (trial);
2803 for (note = REG_NOTES (trial);
2804 note;
2805 note = XEXP (note, 1))
2806 if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
2807 || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
2809 /* REG_LABEL_OPERAND could be
2810 NOTE_INSN_DELETED_LABEL too. */
2811 if (LABEL_P (XEXP (note, 0)))
2812 LABEL_NUSES (XEXP (note, 0))--;
2813 else
2814 gcc_assert (REG_NOTE_KIND (note)
2815 == REG_LABEL_OPERAND);
2817 if (jump_to_label_p (trial))
2818 LABEL_NUSES (JUMP_LABEL (trial))--;
2820 else
2821 new_thread = next_active_insn (trial);
2823 temp = own_thread ? trial : copy_rtx (trial);
2824 if (thread_if_true)
2825 INSN_FROM_TARGET_P (temp) = 1;
2827 delay_list = add_to_delay_list (temp, delay_list);
2829 if (slots_to_fill == ++(*pslots_filled))
2831 /* Even though we have filled all the slots, we
2832 may be branching to a location that has a
2833 redundant insn. Skip any if so. */
2834 while (new_thread && ! own_thread
2835 && ! insn_sets_resource_p (new_thread, &set, true)
2836 && ! insn_sets_resource_p (new_thread, &needed,
2837 true)
2838 && ! insn_references_resource_p (new_thread,
2839 &set, true)
2840 && (prior_insn
2841 = redundant_insn (new_thread, insn,
2842 delay_list)))
2844 /* We know we do not own the thread, so no need
2845 to call update_block and delete_insn. */
2846 fix_reg_dead_note (prior_insn, insn);
2847 update_reg_unused_notes (prior_insn, new_thread);
2848 new_thread = next_active_insn (new_thread);
2858 /* This insn can't go into a delay slot. */
2859 lose = 1;
2860 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2861 mark_referenced_resources (trial, &needed, true);
2863 /* Ensure we don't put insns between the setting of cc and the comparison
2864 by moving a setting of cc into an earlier delay slot since these insns
2865 could clobber the condition code. */
2866 set.cc = 1;
2868 /* If this insn is a register-register copy and the next insn has
2869 a use of our destination, change it to use our source. That way,
2870 it will become a candidate for our delay slot the next time
2871 through this loop. This case occurs commonly in loops that
2872 branch back to the top.
2874 We could check for more complex cases than those tested below,
2875 but it doesn't seem worth it. It might also be a good idea to try
2876 to swap the two insns. That might do better.
2878 We can't do this if the next insn modifies our destination, because
2879 that would make the replacement into the insn invalid. We also can't
2880 do this if it modifies our source, because it might be an earlyclobber
2881 operand. This latter test also prevents updating the contents of
2882 a PRE_INC. We also can't do this if there's overlap of source and
2883 destination. Overlap may happen for larger-than-register-size modes. */
2885 if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
2886 && REG_P (SET_SRC (pat))
2887 && REG_P (SET_DEST (pat))
2888 && !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
2890 rtx next = next_nonnote_insn (trial);
2892 if (next && NONJUMP_INSN_P (next)
2893 && GET_CODE (PATTERN (next)) != USE
2894 && ! reg_set_p (SET_DEST (pat), next)
2895 && ! reg_set_p (SET_SRC (pat), next)
2896 && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2897 && ! modified_in_p (SET_DEST (pat), next))
2898 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
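/* Editorial example of the replacement above: given

     (set (reg 2) (reg 1))				; the copy, TRIAL
     (set (reg 3) (plus (reg 2) (const_int 4)))		; the next insn

   rewriting the use as (plus (reg 1) (const_int 4)) breaks the next
   insn's dependence on the copy, so the copy itself can become a
   delay-slot candidate on the next trip through this loop.  */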
2902 /* If we stopped on a branch insn that has delay slots, see if we can
2903 steal some of the insns in those slots. */
2904 if (trial && NONJUMP_INSN_P (trial)
2905 && GET_CODE (PATTERN (trial)) == SEQUENCE
2906 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
2908 /* If this is the `true' thread, we will want to follow the jump,
2909 so we can only do this if we have taken everything up to here. */
2910 if (thread_if_true && trial == new_thread)
2912 delay_list
2913 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2914 delay_list, &set, &needed,
2915 &opposite_needed, slots_to_fill,
2916 pslots_filled, &must_annul,
2917 &new_thread);
2918 /* If we owned the thread and are told that it branched
2919 elsewhere, make sure we own the thread at the new location. */
2920 if (own_thread && trial != new_thread)
2921 own_thread = own_thread_p (new_thread, new_thread, 0);
2923 else if (! thread_if_true)
2924 delay_list
2925 = steal_delay_list_from_fallthrough (insn, condition,
2926 PATTERN (trial),
2927 delay_list, &set, &needed,
2928 &opposite_needed, slots_to_fill,
2929 pslots_filled, &must_annul);
2932 /* If we haven't found anything for this delay slot and it is very
2933 likely that the branch will be taken, see if the insn at our target
2934 increments or decrements a register with an increment that does not
2935 depend on the destination register. If so, try to place the opposite
2936 arithmetic insn after the jump insn and put the arithmetic insn in the
2937 delay slot. If we can't do this, return. */
2938 if (delay_list == 0 && likely
2939 && new_thread && !ANY_RETURN_P (new_thread)
2940 && NONJUMP_INSN_P (new_thread)
2941 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2942 && asm_noperands (PATTERN (new_thread)) < 0)
2944 rtx pat = PATTERN (new_thread);
2949 pat = PATTERN (trial);
2951 if (!NONJUMP_INSN_P (trial)
2952 || GET_CODE (pat) != SET
2953 || ! eligible_for_delay (insn, 0, trial, flags)
2954 || can_throw_internal (trial))
2955 return 0;
2957 dest = SET_DEST (pat), src = SET_SRC (pat);
2958 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2959 && rtx_equal_p (XEXP (src, 0), dest)
2960 && (!FLOAT_MODE_P (GET_MODE (src))
2961 || flag_unsafe_math_optimizations)
2962 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
2963 && ! side_effects_p (pat))
2965 rtx other = XEXP (src, 1);
2969 /* If this is a constant adjustment, use the same code with
2970 the negated constant. Otherwise, reverse the sense of the
2971 arithmetic. */
2972 if (CONST_INT_P (other))
2973 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2974 negate_rtx (GET_MODE (src), other));
2976 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2977 GET_MODE (src), dest, other);
2979 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2980 insn);
2982 if (recog_memoized (ninsn) < 0
2983 || (extract_insn (ninsn), ! constrain_operands (1)))
2985 delete_related_insns (ninsn);
2986 return 0;
2990 if (own_thread)
2991 update_block (trial, thread);
2992 if (trial == thread)
2994 thread = next_active_insn (thread);
2995 if (new_thread == trial)
2996 new_thread = thread;
2998 delete_related_insns (trial);
3000 else
3001 new_thread = next_active_insn (trial);
3003 ninsn = own_thread ? trial : copy_rtx (trial);
3004 if (thread_if_true)
3005 INSN_FROM_TARGET_P (ninsn) = 1;
3007 delay_list = add_to_delay_list (ninsn, NULL_RTX);
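/* Editorial example of the transformation above, assuming the branch
   is likely taken and its target starts with an increment:

	before:				after:
	   beq	L1			   beq	L1'	; slot: r4 = r4 + 1
	   ...				   r4 = r4 - 1
     L1:   r4 = r4 + 1			   ...
	   ...			  L1':	   ...

   The increment executes from the delay slot on every path; the
   decrement emitted after the branch undoes it on the (unlikely)
   fall-through path, and the branch is redirected past the original
   increment.  */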
3012 if (delay_list && must_annul)
3013 INSN_ANNULLED_BRANCH_P (insn) = 1;
3015 /* If we are to branch into the middle of this thread, find an appropriate
3016 label or make a new one if none, and redirect INSN to it. If we hit the
3017 end of the function, use the end-of-function label. */
3018 if (new_thread != thread)
3022 gcc_assert (thread_if_true);
3024 if (new_thread && simplejump_or_return_p (new_thread)
3025 && redirect_with_delay_list_safe_p (insn,
3026 JUMP_LABEL (new_thread),
3027 delay_list))
3028 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3030 if (ANY_RETURN_P (new_thread))
3031 label = find_end_label (new_thread);
3032 else if (LABEL_P (new_thread))
3033 label = new_thread;
3034 else
3035 label = get_label_before (new_thread);
3037 if (label)
3038 reorg_redirect_jump (insn, label);
3041 return delay_list;
3044 /* Make another attempt to find insns to place in delay slots.
3046 We previously looked for insns located in front of the delay insn
3047 and, for non-jump delay insns, located behind the delay insn.
3049 Here only try to schedule jump insns and try to move insns from either
3050 the target or the following insns into the delay slot. If annulling is
3051 supported, we will be likely to do this. Otherwise, we can do this only
3052 if safe. */
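/* Editorial note on the ordering below, not from the original text:
   for each conditional jump, mostly_true_jump is consulted; when the
   branch is predicted taken, the target thread is mined for delay-slot
   insns first and the fall-through thread second, and conversely when
   the branch is predicted not taken.  */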
3055 fill_eager_delay_slots (void)
3059 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3061 for (i = 0; i < num_unfilled_slots; i++)
3064 rtx target_label, insn_at_target, fallthrough_insn;
3067 int own_fallthrough;
3068 int prediction, slots_to_fill, slots_filled;
3070 insn = unfilled_slots_base[i];
3071 if (insn == 0
3072 || INSN_DELETED_P (insn)
3073 || !JUMP_P (insn)
3074 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3075 continue;
3077 slots_to_fill = num_delay_slots (insn);
3078 /* Some machine descriptions have defined instructions to have
3079 delay slots only in certain circumstances which may depend on
3080 nearby insns (which change due to reorg's actions).
3082 For example, the PA port normally has delay slots for unconditional
3083 jumps.
3085 However, the PA port claims such jumps do not have a delay slot
3086 if they are immediate successors of certain CALL_INSNs. This
3087 allows the port to favor filling the delay slot of the call with
3088 the unconditional jump. */
3089 if (slots_to_fill == 0)
3090 continue;
3093 target_label = JUMP_LABEL (insn);
3094 condition = get_branch_condition (insn, target_label);
3096 if (condition == 0)
3097 continue;
3099 /* Get the next active fallthrough and target insns and see if we own
3100 them. Then see whether the branch is likely true. We don't need
3101 to do a lot of this for unconditional branches. */
3103 insn_at_target = first_active_target_insn (target_label);
3104 own_target = own_thread_p (target_label, target_label, 0);
3106 if (condition == const_true_rtx)
3108 own_fallthrough = 0;
3109 fallthrough_insn = 0;
3110 prediction = 2;
3112 else
3114 fallthrough_insn = next_active_insn (insn);
3115 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3116 prediction = mostly_true_jump (insn, condition);
3119 /* If this insn is expected to branch, first try to get insns from our
3120 target, then our fallthrough insns. If it is not expected to branch,
3121 try the other order. */
3123 if (prediction > 0)
3125 delay_list
3126 = fill_slots_from_thread (insn, condition, insn_at_target,
3127 fallthrough_insn, prediction == 2, 1,
3128 own_target,
3129 slots_to_fill, &slots_filled, delay_list);
3131 if (delay_list == 0 && own_fallthrough)
3133 /* Even though we didn't find anything for delay slots,
3134 we might have found a redundant insn which we deleted
3135 from the thread that was filled. So we have to recompute
3136 the next insn at the target. */
3137 target_label = JUMP_LABEL (insn);
3138 insn_at_target = first_active_target_insn (target_label);
3140 delay_list
3141 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3142 insn_at_target, 0, 0,
3143 own_fallthrough,
3144 slots_to_fill, &slots_filled,
3145 delay_list);
3148 else
3150 if (own_fallthrough)
3151 delay_list
3152 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3153 insn_at_target, 0, 0,
3154 own_fallthrough,
3155 slots_to_fill, &slots_filled,
3156 delay_list);
3158 if (delay_list == 0)
3159 delay_list
3160 = fill_slots_from_thread (insn, condition, insn_at_target,
3161 next_active_insn (insn), 0, 1,
3162 own_target,
3163 slots_to_fill, &slots_filled,
3164 delay_list);
3167 if (delay_list)
3168 unfilled_slots_base[i]
3169 = emit_delay_sequence (insn, delay_list, slots_filled);
3171 if (slots_to_fill == slots_filled)
3172 unfilled_slots_base[i] = 0;
3174 note_delay_statistics (slots_filled, 1);
3178 static void delete_computation (rtx insn);
3180 /* Recursively delete prior insns that compute the value (used only by INSN
3181 which the caller is deleting) stored in the register mentioned by NOTE
3182 which is a REG_DEAD note associated with INSN. */
3185 delete_prior_computation (rtx note, rtx insn)
3188 rtx reg = XEXP (note, 0);
3190 for (our_prev = prev_nonnote_insn (insn);
3191 our_prev && (NONJUMP_INSN_P (our_prev)
3192 || CALL_P (our_prev));
3193 our_prev = prev_nonnote_insn (our_prev))
3195 rtx pat = PATTERN (our_prev);
3197 /* If we reach a CALL which is not calling a const function
3198 or the callee pops the arguments, then give up. */
3199 if (CALL_P (our_prev)
3200 && (! RTL_CONST_CALL_P (our_prev)
3201 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
3202 break;
3204 /* If we reach a SEQUENCE, it is too complex to try to
3205 do anything with it, so give up. We can be run during
3206 and after reorg, so SEQUENCE rtl can legitimately show
3207 up. */
3208 if (GET_CODE (pat) == SEQUENCE)
3209 break;
3211 if (GET_CODE (pat) == USE
3212 && NONJUMP_INSN_P (XEXP (pat, 0)))
3213 /* reorg creates USEs that look like this. We leave them
3214 alone because reorg needs them for its own purposes. */
3215 continue;
3217 if (reg_set_p (reg, pat))
3219 if (side_effects_p (pat) && !CALL_P (our_prev))
3220 break;
3222 if (GET_CODE (pat) == PARALLEL)
3224 /* If we find a SET of something else, we can't
3225 delete the insn. */
3229 for (i = 0; i < XVECLEN (pat, 0); i++)
3231 rtx part = XVECEXP (pat, 0, i);
3233 if (GET_CODE (part) == SET
3234 && SET_DEST (part) != reg)
3235 break;
3238 if (i == XVECLEN (pat, 0))
3239 delete_computation (our_prev);
3241 else if (GET_CODE (pat) == SET
3242 && REG_P (SET_DEST (pat)))
3244 int dest_regno = REGNO (SET_DEST (pat));
3245 int dest_endregno = END_REGNO (SET_DEST (pat));
3246 int regno = REGNO (reg);
3247 int endregno = END_REGNO (reg);
3249 if (dest_regno >= regno
3250 && dest_endregno <= endregno)
3251 delete_computation (our_prev);
3253 /* We may have a multi-word hard register and some, but not
3254 all, of the words of the register are needed in subsequent
3255 insns. Write REG_UNUSED notes for those parts that were not
3256 needed. */
3257 else if (dest_regno <= regno
3258 && dest_endregno >= endregno)
3262 add_reg_note (our_prev, REG_UNUSED, reg);
3264 for (i = dest_regno; i < dest_endregno; i++)
3265 if (! find_regno_note (our_prev, REG_UNUSED, i))
3266 break;
3268 if (i == dest_endregno)
3269 delete_computation (our_prev);
3276 /* If PAT references the register that dies here, it is an
3277 additional use. Hence any prior SET isn't dead. However, this
3278 insn becomes the new place for the REG_DEAD note. */
3279 if (reg_overlap_mentioned_p (reg, pat))
3281 XEXP (note, 1) = REG_NOTES (our_prev);
3282 REG_NOTES (our_prev) = note;
3283 break;
3288 /* Delete INSN and recursively delete insns that compute values used only
3289 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3291 Look at all our REG_DEAD notes. If a previous insn does nothing other
3292 than set a register that dies in this insn, we can delete that insn
3293 as well.
3295 On machines with CC0, if CC0 is used in this insn, we may be able to
3296 delete the insn that set it. */
3299 delete_computation (rtx insn)
3303 #ifdef HAVE_cc0
3304 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3306 rtx prev = prev_nonnote_insn (insn);
3307 /* We assume that at this stage
3308 CC's are always set explicitly
3309 and always immediately before the jump that
3310 will use them. So if the previous insn
3311 exists to set the CC's, delete it
3312 (unless it performs auto-increments, etc.). */
3313 if (prev && NONJUMP_INSN_P (prev)
3314 && sets_cc0_p (PATTERN (prev)))
3316 if (sets_cc0_p (PATTERN (prev)) > 0
3317 && ! side_effects_p (PATTERN (prev)))
3318 delete_computation (prev);
3319 else
3320 /* Otherwise, show that cc0 won't be used. */
3321 add_reg_note (prev, REG_UNUSED, cc0_rtx);
3323 #endif
3326 for (note = REG_NOTES (insn); note; note = next)
3328 next = XEXP (note, 1);
3330 if (REG_NOTE_KIND (note) != REG_DEAD
3331 /* Verify that the REG_NOTE is legitimate. */
3332 || !REG_P (XEXP (note, 0)))
3333 continue;
3335 delete_prior_computation (note, insn);
3338 delete_related_insns (insn);
3341 /* If all INSN does is set the pc, delete it,
3342 and delete the insn that set the condition codes for it
3343 if that's what the previous thing was. */
3346 delete_jump (rtx insn)
3348 rtx set = single_set (insn);
3350 if (set && GET_CODE (SET_DEST (set)) == PC)
3351 delete_computation (insn);
3355 /* Once we have tried two ways to fill a delay slot, make a pass over the
3356 code to try to improve the results and to do such things as more jump
3357 threading. */
3360 relax_delay_slots (rtx first)
3362 rtx insn, next, pat;
3363 rtx trial, delay_insn, target_label;
3365 /* Look at every JUMP_INSN and see if we can improve it. */
3366 for (insn = first; insn; insn = next)
3370 next = next_active_insn (insn);
3372 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3373 the next insn, or jumps to a label that is not the last of a
3374 group of consecutive labels. */
3375 if (JUMP_P (insn)
3376 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3377 && !ANY_RETURN_P (target_label = JUMP_LABEL (insn)))
3379 target_label = skip_consecutive_labels (follow_jumps (target_label));
3380 if (ANY_RETURN_P (target_label))
3381 target_label = find_end_label (target_label);
3383 if (target_label && next_active_insn (target_label) == next
3384 && ! condjump_in_parallel_p (insn))
3386 delete_jump (insn);
3387 continue;
3390 if (target_label && target_label != JUMP_LABEL (insn))
3391 reorg_redirect_jump (insn, target_label);
3393 /* See if this jump conditionally branches around an unconditional
3394 jump. If so, invert this jump and point it to the target of the
3395 second jump. */
3396 if (next && simplejump_or_return_p (next)
3397 && any_condjump_p (insn)
3398 && target_label
3399 && next_active_insn (target_label) == next_active_insn (next)
3400 && no_labels_between_p (insn, next))
3402 rtx label = JUMP_LABEL (next);
3404 /* Be careful how we do this to avoid deleting code or
3405 labels that are momentarily dead. See similar optimization
3406 in jump.c.
3408 We also need to ensure we properly handle the case when
3409 invert_jump fails. */
3411 ++LABEL_NUSES (target_label);
3412 if (!ANY_RETURN_P (label))
3413 ++LABEL_NUSES (label);
3415 if (invert_jump (insn, label, 1))
3417 delete_related_insns (next);
3421 if (!ANY_RETURN_P (label))
3422 --LABEL_NUSES (label);
3424 if (--LABEL_NUSES (target_label) == 0)
3425 delete_related_insns (target_label);
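/* Editorial example of the inversion above:

	before:				after:
	   beq	L1			   bne	L2
	   b	L2			   ...
     L1:   ...			L1:	   ...

   The inverted branch reaches L2 directly, the unconditional jump is
   deleted, and L1 is deleted as well once its use count drops to
   zero.  */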
3431 /* If this is an unconditional jump and the previous insn is a
3432 conditional jump, try reversing the condition of the previous
3433 insn and swapping our targets. The next pass might be able to
3434 fill the slots.
3436 Don't do this if we expect the conditional branch to be true, because
3437 we would then be making the more common case longer. */
3439 if (simplejump_or_return_p (insn)
3440 && (other = prev_active_insn (insn)) != 0
3441 && any_condjump_p (other)
3442 && no_labels_between_p (other, insn)
3443 && 0 > mostly_true_jump (other,
3444 get_branch_condition (other,
3445 JUMP_LABEL (other))))
3447 rtx other_target = JUMP_LABEL (other);
3448 target_label = JUMP_LABEL (insn);
3450 if (invert_jump (other, target_label, 0))
3451 reorg_redirect_jump (insn, other_target);
3454 /* Now look only at cases where we have filled a delay slot. */
3455 if (!NONJUMP_INSN_P (insn)
3456 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3457 continue;
3459 pat = PATTERN (insn);
3460 delay_insn = XVECEXP (pat, 0, 0);
3462 /* See if the first insn in the delay slot is redundant with some
3463 previous insn. Remove it from the delay slot if so; then set up
3464 to reprocess this insn. */
3465 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3467 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3468 next = prev_active_insn (next);
3469 continue;
3472 /* See if we have a RETURN insn with a filled delay slot followed
3473 by a RETURN insn with an unfilled delay slot. If so, we can delete
3474 the first RETURN (but not its delay insn). This gives the same
3475 effect in fewer instructions.
3477 Only do so if optimizing for size since this results in slower, but
3478 smaller code. */
3479 if (optimize_function_for_size_p (cfun)
3480 && ANY_RETURN_P (PATTERN (delay_insn))
3481 && next
3482 && JUMP_P (next)
3483 && PATTERN (next) == PATTERN (delay_insn))
3488 /* Delete the RETURN and just execute the delay list insns.
3490 We do this by deleting the INSN containing the SEQUENCE, then
3491 re-emitting the insns separately, and then deleting the RETURN.
3492 This allows the count of the jump target to be properly
3493 decremented.
3495 Note that we need to change the INSN_UID of the re-emitted insns
3496 since it is used to hash the insns for mark_target_live_regs and
3497 the re-emitted insns will no longer be wrapped up in a SEQUENCE.
3499 Clear the from target bit, since these insns are no longer
3500 in delay slots. */
3501 for (i = 0; i < XVECLEN (pat, 0); i++)
3502 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3504 trial = PREV_INSN (insn);
3505 delete_related_insns (insn);
3506 gcc_assert (GET_CODE (pat) == SEQUENCE);
3507 add_insn_after (delay_insn, trial, NULL);
3508 after = delay_insn;
3509 for (i = 1; i < XVECLEN (pat, 0); i++)
3510 after = emit_copy_of_insn_after (XVECEXP (pat, 0, i), after);
3511 delete_scheduled_jump (delay_insn);
3512 continue;
3515 /* Now look only at the cases where we have a filled JUMP_INSN. */
3516 if (!JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
3517 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3518 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3519 continue;
3521 target_label = JUMP_LABEL (delay_insn);
3522 if (target_label && ANY_RETURN_P (target_label))
3525 /* If this jump goes to another unconditional jump, thread it, but
3526 don't convert a jump into a RETURN here. */
3527 trial = skip_consecutive_labels (follow_jumps (target_label));
3528 if (ANY_RETURN_P (trial))
3529 trial = find_end_label (trial);
3531 if (trial && trial != target_label
3532 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3534 reorg_redirect_jump (delay_insn, trial);
3535 target_label = trial;
3538 /* If the first insn at TARGET_LABEL is redundant with a previous
3539 insn, redirect the jump to the following insn and process again.
3540 We use next_real_insn instead of next_active_insn so we
3541 don't skip USE-markers, or we'll end up with incorrect
3542 liveness info. */
3543 trial = next_real_insn (target_label);
3544 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3545 && redundant_insn (trial, insn, 0)
3546 && ! can_throw_internal (trial))
3548 /* Figure out where to emit the special USE insn so we don't
3549 later incorrectly compute register live/death info. */
3550 rtx tmp = next_active_insn (trial);
3551 if (tmp == 0)
3552 tmp = find_end_label (simple_return_rtx);
3554 if (tmp)
3556 /* Insert the special USE insn and update dataflow info. */
3557 update_block (trial, tmp);
3559 /* Now emit a label before the special USE insn, and
3560 redirect our jump to the new label. */
3561 target_label = get_label_before (PREV_INSN (tmp));
3562 reorg_redirect_jump (delay_insn, target_label);
3564 next = insn;
3565 continue;
3568 /* Similarly, if it is an unconditional jump with one insn in its
3569 delay list and that insn is redundant, thread the jump. */
3570 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3571 && XVECLEN (PATTERN (trial), 0) == 2
3572 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
3573 && simplejump_or_return_p (XVECEXP (PATTERN (trial), 0, 0))
3574 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3576 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3577 if (ANY_RETURN_P (target_label))
3578 target_label = find_end_label (target_label);
3580 if (target_label
3581 && redirect_with_delay_slots_safe_p (delay_insn, target_label,
3582 insn))
3584 reorg_redirect_jump (delay_insn, target_label);
3585 next = insn;
3586 continue;
3590 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3591 && prev_active_insn (target_label) == insn
3592 && ! condjump_in_parallel_p (delay_insn)
3593 #ifdef HAVE_cc0
3594 /* If the last insn in the delay slot sets CC0 for some insn,
3595 various code assumes that it is in a delay slot. We could
3596 put it back where it belonged and delete the register notes,
3597 but it doesn't seem worthwhile in this uncommon case. */
3598 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3599 REG_CC_USER, NULL_RTX)
3600 #endif
3601 )
3606 /* All this insn does is execute its delay list and jump to the
3607 following insn. So delete the jump and just execute the delay
3610 We do this by deleting the INSN containing the SEQUENCE, then
3611 re-emitting the insns separately, and then deleting the jump.
3612 This allows the count of the jump target to be properly
3613 decremented.
3615 Note that we need to change the INSN_UID of the re-emitted insns
3616 since it is used to hash the insns for mark_target_live_regs and
3617 the re-emitted insns will no longer be wrapped up in a SEQUENCE.
3619 Clear the from target bit, since these insns are no longer
3620 in delay slots. */
3621 for (i = 0; i < XVECLEN (pat, 0); i++)
3622 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3624 trial = PREV_INSN (insn);
3625 delete_related_insns (insn);
3626 gcc_assert (GET_CODE (pat) == SEQUENCE);
3627 add_insn_after (delay_insn, trial, NULL);
3628 after = delay_insn;
3629 for (i = 1; i < XVECLEN (pat, 0); i++)
3630 after = emit_copy_of_insn_after (XVECEXP (pat, 0, i), after);
3631 delete_scheduled_jump (delay_insn);
3632 continue;
3635 /* See if this is an unconditional jump around a single insn which is
3636 identical to the one in its delay slot. In this case, we can just
3637 delete the branch and the insn in its delay slot. */
3638 if (next && NONJUMP_INSN_P (next)
3639 && prev_label (next_active_insn (next)) == target_label
3640 && simplejump_p (insn)
3641 && XVECLEN (pat, 0) == 2
3642 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3644 delete_related_insns (insn);
3645 continue;
3648 /* See if this jump (with its delay slots) conditionally branches
3649 around an unconditional jump (without delay slots). If so, invert
3650 this jump and point it to the target of the second jump. We cannot
3651 do this for annulled jumps, though. Again, don't convert a jump to
3653 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3654 && any_condjump_p (delay_insn)
3655 && next && simplejump_or_return_p (next)
3656 && next_active_insn (target_label) == next_active_insn (next)
3657 && no_labels_between_p (insn, next))
3659 rtx label = JUMP_LABEL (next);
3660 rtx old_label = JUMP_LABEL (delay_insn);
3662 if (ANY_RETURN_P (label))
3663 label = find_end_label (label);
3665 /* find_end_label can generate a new label. Check this first. */
3666 if (label
3667 && no_labels_between_p (insn, next)
3668 && redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3670 /* Be careful how we do this to avoid deleting code or labels
3671 that are momentarily dead. See similar optimization in
3674 ++LABEL_NUSES (old_label);
3676 if (invert_jump (delay_insn, label, 1))
3680 /* Must update the INSN_FROM_TARGET_P bits now that
3681 the branch is reversed, so that mark_target_live_regs
3682 will handle the delay slot insn correctly. */
3683 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
3685 rtx slot = XVECEXP (PATTERN (insn), 0, i);
3686 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
3689 delete_related_insns (next);
3693 if (old_label && --LABEL_NUSES (old_label) == 0)
3694 delete_related_insns (old_label);
3699 /* If we own the thread opposite the way this insn branches, see if we
3700 can merge its delay slots with following insns. */
3701 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3702 && own_thread_p (NEXT_INSN (insn), 0, 1))
3703 try_merge_delay_insns (insn, next);
3704 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3705 && own_thread_p (target_label, target_label, 0))
3706 try_merge_delay_insns (insn, next_active_insn (target_label));
3708 /* If we get here, we haven't deleted INSN. But we may have deleted
3709 NEXT, so recompute it. */
3710 next = next_active_insn (insn);
3716 /* Look for filled jumps to the end of function label. We can try to convert
3717 them into RETURN insns if the insns in the delay slot are valid for the
3718 RETURN as well.
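/* Editorial example, not from the original text: a filled jump to the
   end-of-function label,

	(sequence [(jump_insn ... L_ret) (slot insn)])
     L_ret:	return

   can be rewritten as a RETURN insn carrying the same slot insn,
   provided the slot insn is also valid in a RETURN's delay slot.  */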
3721 make_return_insns (rtx first)
3723 rtx insn, jump_insn, pat;
3724 rtx real_return_label = function_return_label;
3725 rtx real_simple_return_label = function_simple_return_label;
3728 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3729 /* If a previous pass filled delay slots in the epilogue, things get a
3730 bit more complicated, as those filler insns would generally (without
3731 data flow analysis) have to be executed after any existing branch
3732 delay slot filler insns. It is also unknown whether such a
3733 transformation would actually be profitable. Note that the existing
3734 code only cares for branches with (some) filled delay slots. */
3735 if (crtl->epilogue_delay_list != NULL)
3736 return;
3737 #endif
3739 /* See if there is a RETURN insn in the function other than the one we
3740 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3741 into a RETURN to jump to it. */
3742 for (insn = first; insn; insn = NEXT_INSN (insn))
3743 if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
3745 rtx t = get_label_before (insn);
3746 if (PATTERN (insn) == ret_rtx)
3747 real_return_label = t;
3748 else
3749 real_simple_return_label = t;
3753 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3754 was equal to END_OF_FUNCTION_LABEL. */
3755 if (real_return_label)
3756 LABEL_NUSES (real_return_label)++;
3757 if (real_simple_return_label)
3758 LABEL_NUSES (real_simple_return_label)++;
3760 /* Clear the list of insns to fill so we can use it. */
3761 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3763 for (insn = first; insn; insn = NEXT_INSN (insn))
3766 rtx kind, real_label;
3768 /* Only look at filled JUMP_INSNs that go to the end of function
3769 label. */
3770 if (!NONJUMP_INSN_P (insn)
3771 || GET_CODE (PATTERN (insn)) != SEQUENCE
3772 || !jump_to_label_p (XVECEXP (PATTERN (insn), 0, 0)))
3773 continue;
3775 if (JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) == function_return_label)
3777 kind = ret_rtx;
3778 real_label = real_return_label;
3780 else if (JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0))
3781 == function_simple_return_label)
3783 kind = simple_return_rtx;
3784 real_label = real_simple_return_label;
3786 else
3787 continue;
3789 pat = PATTERN (insn);
3790 jump_insn = XVECEXP (pat, 0, 0);
3792 /* If we can't make the jump into a RETURN, try to redirect it to the best
3793 RETURN and go on to the next insn. */
3794 if (!reorg_redirect_jump (jump_insn, kind))
3796 /* Make sure redirecting the jump will not invalidate the delay
3797 slots. */
3798 if (redirect_with_delay_slots_safe_p (jump_insn, real_label, insn))
3799 reorg_redirect_jump (jump_insn, real_label);
3800 continue;
3803 /* See if this RETURN can accept the insns currently in its delay slot.
3804 It can if it has more or an equal number of slots and the contents
3805 of each is valid. */
3807 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3808 slots = num_delay_slots (jump_insn);
3809 if (slots >= XVECLEN (pat, 0) - 1)
3811 for (i = 1; i < XVECLEN (pat, 0); i++)
3812 if (! (
3813 #ifdef ANNUL_IFFALSE_SLOTS
3814 (INSN_ANNULLED_BRANCH_P (jump_insn)
3815 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3816 ? eligible_for_annul_false (jump_insn, i - 1,
3817 XVECEXP (pat, 0, i), flags) :
3818 #endif
3819 #ifdef ANNUL_IFTRUE_SLOTS
3820 (INSN_ANNULLED_BRANCH_P (jump_insn)
3821 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3822 ? eligible_for_annul_true (jump_insn, i - 1,
3823 XVECEXP (pat, 0, i), flags) :
3824 #endif
3825 eligible_for_delay (jump_insn, i - 1,
3826 XVECEXP (pat, 0, i), flags)))
3827 break;
3829 else
3830 i = 0;
3832 if (i == XVECLEN (pat, 0))
3833 continue;
3835 /* We have to do something with this insn. If it is an unconditional
3836 RETURN, delete the SEQUENCE and output the individual insns,
3837 followed by the RETURN. Then set things up so we try to find
3838 insns for its delay slots, if it needs some. */
3839 if (ANY_RETURN_P (PATTERN (jump_insn)))
3841 rtx prev = PREV_INSN (insn);
3843 delete_related_insns (insn);
3844 for (i = 1; i < XVECLEN (pat, 0); i++)
3845 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3847 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3848 emit_barrier_after (insn);
3850 if (slots)
3851 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3853 else
3854 /* It is probably more efficient to keep this with its current
3855 delay slot as a branch to a RETURN. */
3856 reorg_redirect_jump (jump_insn, real_label);
3859 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3860 new delay slots we have created. */
3861 if (real_return_label != NULL_RTX && --LABEL_NUSES (real_return_label) == 0)
3862 delete_related_insns (real_return_label);
3863 if (real_simple_return_label != NULL_RTX
3864 && --LABEL_NUSES (real_simple_return_label) == 0)
3865 delete_related_insns (real_simple_return_label);
3867 fill_simple_delay_slots (1);
3868 fill_simple_delay_slots (0);
3872 /* Try to find insns to place in delay slots. */
3875 dbr_schedule (rtx first)
3877 rtx insn, next, epilogue_insn = 0;
3879 bool need_return_insns;
3881 /* If the current function has no insns other than the prologue and
3882 epilogue, then do not try to fill any delay slots. */
3883 if (n_basic_blocks == NUM_FIXED_BLOCKS)
3884 return;
3886 /* Find the highest INSN_UID and allocate and initialize our map from
3887 INSN_UID's to position in code. */
3888 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3890 if (INSN_UID (insn) > max_uid)
3891 max_uid = INSN_UID (insn);
3892 if (NOTE_P (insn)
3893 && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
3894 epilogue_insn = insn;
3897 uid_to_ruid = XNEWVEC (int, max_uid + 1);
3898 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3899 uid_to_ruid[INSN_UID (insn)] = i;
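/* Editorial note: uid_to_ruid maps each INSN_UID to the insn's ordinal
   position in the insn chain, so later queries can compare the order of
   two insns in constant time instead of walking NEXT_INSN links.  */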
3901 /* Initialize the list of insns that need filling. */
3902 if (unfilled_firstobj == 0)
3904 gcc_obstack_init (&unfilled_slots_obstack);
3905 unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
3908 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3913 INSN_ANNULLED_BRANCH_P (insn) = 0;
3914 INSN_FROM_TARGET_P (insn) = 0;
3916 /* Skip vector tables. We can't get attributes for them. */
3917 if (JUMP_TABLE_DATA_P (insn))
3918 continue;
3920 if (num_delay_slots (insn) > 0)
3921 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3923 /* Ensure all jumps go to the last of a set of consecutive labels. */
3924 if (JUMP_P (insn)
3925 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3926 && !ANY_RETURN_P (JUMP_LABEL (insn))
3927 && ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
3928 != JUMP_LABEL (insn)))
3929 redirect_jump (insn, target, 1);
  init_resource_info (epilogue_insn);

  /* Show we haven't computed an end-of-function label yet.  */
  function_return_label = function_simple_return_label = NULL_RTX;

  /* Initialize the statistics for this function.  */
  memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
  memset (num_filled_delays, 0, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */
  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (1);
      fill_simple_delay_slots (0);
      fill_eager_delay_slots ();
      relax_delay_slots (first);
    }
  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (function_return_label && --LABEL_NUSES (function_return_label) == 0)
    delete_related_insns (function_return_label);
  if (function_simple_return_label
      && --LABEL_NUSES (function_simple_return_label) == 0)
    delete_related_insns (function_simple_return_label);
  need_return_insns = false;
#ifdef HAVE_return
  need_return_insns |= HAVE_return && function_return_label != 0;
#endif
#ifdef HAVE_simple_return
  need_return_insns |= HAVE_simple_return && function_simple_return_label != 0;
#endif
  if (need_return_insns)
    make_return_insns (first);
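  /* (HAVE_return / HAVE_simple_return are macros generated from the
     target's machine description: they are nonzero only when a "return"
     or "simple_return" insn pattern exists, so make_return_insns runs
     only for targets that can emit a real return insn.)  */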
  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && INSN_P (XEXP (PATTERN (insn), 0)))
	next = delete_related_insns (insn);
    }
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
  if (dump_file)
    {
      int i, j, need_comma;
      int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
      int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (dump_file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (dump_file, ";; Reorg function #%d\n", i);

	      fprintf (dump_file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (dump_file, ", ");
		    need_comma = 1;
		    fprintf (dump_file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (dump_file, "\n");
	    }
	}
      memset (total_delay_slots, 0, sizeof total_delay_slots);
      memset (total_annul_slots, 0, sizeof total_annul_slots);
      for (insn = first; insn; insn = NEXT_INSN (insn))
	{
	  if (! INSN_DELETED_P (insn)
	      && NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_CODE (PATTERN (insn)) != CLOBBER)
	    {
	      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
		{
		  rtx control;

		  j = XVECLEN (PATTERN (insn), 0) - 1;
		  if (j > MAX_DELAY_HISTOGRAM)
		    j = MAX_DELAY_HISTOGRAM;
		  control = XVECEXP (PATTERN (insn), 0, 0);
		  if (JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control))
		    total_annul_slots[j]++;
		  else
		    total_delay_slots[j]++;
		}
	      else if (num_delay_slots (insn) > 0)
		total_delay_slots[0]++;
	    }
	}
      fprintf (dump_file, ";; Reorg totals: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
	if (total_delay_slots[j])
	  {
	    if (need_comma)
	      fprintf (dump_file, ", ");
	    need_comma = 1;
	    fprintf (dump_file, "%d got %d delays", total_delay_slots[j], j);
	  }
      fprintf (dump_file, "\n");
#if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
      fprintf (dump_file, ";; Reorg annuls: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
	if (total_annul_slots[j])
	  {
	    if (need_comma)
	      fprintf (dump_file, ", ");
	    need_comma = 1;
	    fprintf (dump_file, "%d got %d delays", total_annul_slots[j], j);
	  }
      fprintf (dump_file, "\n");
#endif
      fprintf (dump_file, "\n");
    }
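  /* A typical dump therefore contains lines such as

	 ;; Reorg totals: 12 got 0 delays, 30 got 1 delays

     meaning (with made-up numbers) that 12 candidate insns ended up with
     no slots filled while 30 insns each had one delay slot filled.  */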
  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now, as up until this point the destinations of
     JUMPS can be moved around and changed, but past right here that cannot
     happen.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (NONJUMP_INSN_P (insn))
	{
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (pat) == SEQUENCE)
	    insn = XVECEXP (pat, 0, 0);
	}
      if (!JUMP_P (insn))
	continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      add_reg_note (insn, REG_BR_PRED, GEN_INT (pred_flags));
    }
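  /* (The REG_BR_PRED note carries the flag bits computed by
     get_jump_flags, e.g. whether the jump goes forward or backward in
     the insn stream; a target that honors prediction bits reads the
     note when the insn is output.)  */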
  free_resource_info ();
  free (uid_to_ruid);
#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* The SPARC assembler, for instance, emits a warning when debug info is
     output into the delay slot.  */
  {
    rtx link;

    for (link = crtl->epilogue_delay_list;
	 link;
	 link = XEXP (link, 1))
      INSN_LOCATOR (XEXP (link, 0)) = 0;
  }
#endif

  crtl->dbr_scheduled_p = true;
}
#endif /* DELAY_SLOTS */
static bool
gate_handle_delay_slots (void)
{
#ifdef DELAY_SLOTS
  /* At -O0 dataflow info isn't updated after RA.  */
  return optimize > 0 && flag_delayed_branch && !crtl->dbr_scheduled_p;
#else
  return 0;
#endif
}

/* Run delay slot optimization.  */

static unsigned int
rest_of_handle_delay_slots (void)
{
#ifdef DELAY_SLOTS
  dbr_schedule (get_insns ());
#endif
  return 0;
}
struct rtl_opt_pass pass_delay_slots =
{
 {
  RTL_PASS,
  "dbr",                                /* name */
  gate_handle_delay_slots,              /* gate */
  rest_of_handle_delay_slots,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DBR_SCHED,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
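/* (Note: this pass and pass_machine_reorg below are hooked into the
   compilation pipeline by the pass manager in passes.c; the "dbr" dump
   requested with -fdump-rtl-dbr shows the ";; Reorg" statistics printed
   by dbr_schedule above.)  */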
/* Machine dependent reorg pass.  */

static bool
gate_handle_machine_reorg (void)
{
  return targetm.machine_dependent_reorg != 0;
}

static unsigned int
rest_of_handle_machine_reorg (void)
{
  targetm.machine_dependent_reorg ();
  return 0;
}
struct rtl_opt_pass pass_machine_reorg =
{
 {
  RTL_PASS,
  "mach",                               /* name */
  gate_handle_machine_reorg,            /* gate */
  rest_of_handle_machine_reorg,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_MACH_DEP,                          /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};