1 /* Perform instruction reorganizations for delay slot filling.
2 Copyright (C) 1992, 1993, 1994 Free Software Foundation, Inc.
3 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
4 Hacked by Michael Tiemann (tiemann@cygnus.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
22 /* Instruction reorganization pass.
24 This pass runs after register allocation and final jump
25 optimization. It should be the last pass to run before peephole.
26 It serves primarily to fill delay slots of insns, typically branch
27 and call insns. Other insns typically involve more complicated
28 interactions of data dependencies and resource constraints, and
29 are better handled by scheduling before register allocation (by the
30 function `schedule_insns').
32 The Branch Penalty is the number of extra cycles that are needed to
33 execute a branch insn. On an ideal machine, branches take a single
34 cycle, and the Branch Penalty is 0. Several RISC machines approach
35 branch delays differently:
37 The MIPS and AMD 29000 have a single branch delay slot. Most insns
38 (except other branches) can be used to fill this slot. When the
39 slot is filled, two insns execute in two cycles, reducing the
40 branch penalty to zero.
42 The Motorola 88000 conditionally exposes its branch delay slot,
43 so code is shorter when it is turned off, but will run faster
44 when useful insns are scheduled there.
46 The IBM ROMP has two forms of branch and call insns, both with and
47 without a delay slot. Much like the 88k, insns not using the delay
48 slot can be shorted (2 bytes vs. 4 bytes), but will run slowed.
50 The SPARC always has a branch delay slot, but its effects can be
51 annulled when the branch is not taken. This means that failing to
52 find other sources of insns, we can hoist an insn from the branch
53 target that would only be safe to execute knowing that the branch
56 The HP-PA always has a branch delay slot. For unconditional branches
57 its effects can be annulled when the branch is taken. The effects
58 of the delay slot in a conditional branch can be nullified for forward
59 taken branches, or for untaken backward branches. This means
60 we can hoist insns from the fall-through path for forward branches or
61 steal insns from the target of backward branches.
63 Three techniques for filling delay slots have been implemented so far:
65 (1) `fill_simple_delay_slots' is the simplest, most efficient way
66 to fill delay slots. This pass first looks for insns which come
67 from before the branch and which are safe to execute after the
68 branch. Then it searches after the insn requiring delay slots or,
69 in the case of a branch, for insns that are after the point at
70 which the branch merges into the fallthrough code, if such a point
71 exists. When such insns are found, the branch penalty decreases
72 and no code expansion takes place.
74 (2) `fill_eager_delay_slots' is more complicated: it is used for
75 scheduling conditional jumps, or for scheduling jumps which cannot
76 be filled using (1). A machine need not have annulled jumps to use
77 this strategy, but it helps (by keeping more options open).
78 `fill_eager_delay_slots' tries to guess the direction the branch
79 will go; if it guesses right 100% of the time, it can reduce the
80 branch penalty as much as `fill_simple_delay_slots' does. If it
81 guesses wrong 100% of the time, it might as well schedule nops (or
82 on the m88k, unexpose the branch slot). When
83 `fill_eager_delay_slots' takes insns from the fall-through path of
84 the jump, usually there is no code expansion; when it takes insns
85 from the branch target, there is code expansion if it is not the
86 only way to reach that target.
88 (3) `relax_delay_slots' uses a set of rules to simplify code that
89 has been reorganized by (1) and (2). It finds cases where
90 conditional test can be eliminated, jumps can be threaded, extra
91 insns can be eliminated, etc. It is the job of (1) and (2) to do a
92 good job of scheduling locally; `relax_delay_slots' takes care of
93 making the various individual schedules work well together. It is
94 especially tuned to handle the control flow interactions of branch
95 insns. It does nothing for insns with delay slots that do not
98 On machines that use CC0, we are very conservative. We will not make
99 a copy of an insn involving CC0 since we want to maintain a 1-1
100 correspondence between the insn that sets and uses CC0. The insns are
101 allowed to be separated by placing an insn that sets CC0 (but not an insn
102 that uses CC0; we could do this, but it doesn't seem worthwhile) in a
103 delay slot. In that case, we point each insn at the other with REG_CC_USER
104 and REG_CC_SETTER notes. Note that these restrictions affect very few
105 machines because most RISC machines with delay slots will not use CC0
106 (the RT is the only known exception at this point).
110 The Acorn Risc Machine can conditionally execute most insns, so
111 it is profitable to move single insns into a position to execute
112 based on the condition code of the previous insn.
114 The HP-PA can conditionally nullify insns, providing a similar
115 effect to the ARM, differing mostly in which insn is "in charge". */
120 #include "insn-config.h"
121 #include "conditions.h"
122 #include "hard-reg-set.h"
123 #include "basic-block.h"
125 #include "insn-flags.h"
130 #include "insn-attr.h"
134 #define obstack_chunk_alloc xmalloc
135 #define obstack_chunk_free free
137 #ifndef ANNUL_IFTRUE_SLOTS
138 #define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
140 #ifndef ANNUL_IFFALSE_SLOTS
141 #define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
144 /* Insns which have delay slots that have not yet been filled. */
146 static struct obstack unfilled_slots_obstack;
147 static rtx *unfilled_firstobj;
149 /* Define macros to refer to the first and last slot containing unfilled
150 insns. These are used because the list may move and its address
151 should be recomputed at each use. */
153 #define unfilled_slots_base \
154 ((rtx *) obstack_base (&unfilled_slots_obstack))
156 #define unfilled_slots_next \
157 ((rtx *) obstack_next_free (&unfilled_slots_obstack))
159 /* This structure is used to indicate which hardware resources are set or
160 needed by insns so far. */
164 char memory; /* Insn sets or needs a memory location. */
165 char volatil; /* Insn sets or needs a volatile memory loc. */
166 char cc; /* Insn sets or needs the condition codes. */
167 HARD_REG_SET regs; /* Which registers are set or needed. */
170 /* Macro to clear all resources. */
171 #define CLEAR_RESOURCE(RES) \
172 do { (RES)->memory = (RES)->volatil = (RES)->cc = 0; \
173 CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
175 /* Indicates what resources are required at the beginning of the epilogue. */
176 static struct resources start_of_epilogue_needs;
178 /* Indicates what resources are required at function end. */
179 static struct resources end_of_function_needs;
181 /* Points to the label before the end of the function. */
182 static rtx end_of_function_label;
184 /* This structure is used to record liveness information at the targets or
185 fallthrough insns of branches. We will most likely need the information
186 at targets again, so save them in a hash table rather than recomputing them
191 int uid; /* INSN_UID of target. */
192 struct target_info *next; /* Next info for same hash bucket. */
193 HARD_REG_SET live_regs; /* Registers live at target. */
194 int block; /* Basic block number containing target. */
195 int bb_tick; /* Generation count of basic block info. */
198 #define TARGET_HASH_PRIME 257
200 /* Define the hash table itself. */
201 static struct target_info **target_hash_table;
203 /* For each basic block, we maintain a generation number of its basic
204 block info, which is updated each time we move an insn from the
205 target of a jump. This is the generation number indexed by block
208 static int *bb_ticks;
210 /* Mapping between INSN_UID's and position in the code since INSN_UID's do
211 not always monotonically increase. */
212 static int *uid_to_ruid;
214 /* Highest valid index in `uid_to_ruid'. */
217 static void mark_referenced_resources PROTO((rtx, struct resources *, int));
218 static void mark_set_resources PROTO((rtx, struct resources *, int, int));
219 static int stop_search_p PROTO((rtx, int));
220 static int resource_conflicts_p PROTO((struct resources *,
221 struct resources *));
222 static int insn_references_resource_p PROTO((rtx, struct resources *, int));
223 static int insn_sets_resources_p PROTO((rtx, struct resources *, int));
224 static rtx find_end_label PROTO((void));
225 static rtx emit_delay_sequence PROTO((rtx, rtx, int, int));
226 static rtx add_to_delay_list PROTO((rtx, rtx));
227 static void delete_from_delay_slot PROTO((rtx));
228 static void delete_scheduled_jump PROTO((rtx));
229 static void note_delay_statistics PROTO((int, int));
230 static rtx optimize_skip PROTO((rtx));
231 static int get_jump_flags PROTO((rtx, rtx));
232 static int rare_destination PROTO((rtx));
233 static int mostly_true_jump PROTO((rtx, rtx));
234 static rtx get_branch_condition PROTO((rtx, rtx));
235 static int condition_dominates_p PROTO((rtx, rtx));
236 static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
240 int, int *, int *, rtx *));
241 static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
246 static void try_merge_delay_insns PROTO((rtx, rtx));
247 static int redundant_insn_p PROTO((rtx, rtx, rtx));
248 static int own_thread_p PROTO((rtx, rtx, int));
249 static int find_basic_block PROTO((rtx));
250 static void update_block PROTO((rtx, rtx));
251 static int reorg_redirect_jump PROTO((rtx, rtx));
252 static void update_reg_dead_notes PROTO((rtx, rtx));
253 static void update_live_status PROTO((rtx, rtx));
254 static rtx next_insn_no_annul PROTO((rtx));
255 static void mark_target_live_regs PROTO((rtx, struct resources *));
256 static void fill_simple_delay_slots PROTO((rtx, int));
257 static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
258 int, int, int, int *));
259 static void fill_eager_delay_slots PROTO((rtx));
260 static void relax_delay_slots PROTO((rtx));
261 static void make_return_insns PROTO((rtx));
262 static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
263 static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
265 /* Given X, some rtl, and RES, a pointer to a `struct resource', mark
266 which resources are references by the insn. If INCLUDE_CALLED_ROUTINE
267 is TRUE, resources used by the called routine will be included for
271 mark_referenced_resources (x, res, include_delayed_effects)
273 register struct resources *res;
274 register int include_delayed_effects;
276 register enum rtx_code code = GET_CODE (x);
278 register char *format_ptr;
280 /* Handle leaf items for which we set resource flags. Also, special-case
281 CALL, SET and CLOBBER operators. */
293 if (GET_CODE (SUBREG_REG (x)) != REG)
294 mark_referenced_resources (SUBREG_REG (x), res, 0);
297 int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
298 int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
299 for (i = regno; i < last_regno; i++)
300 SET_HARD_REG_BIT (res->regs, i);
305 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
306 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
310 /* If this memory shouldn't change, it really isn't referencing
312 if (! RTX_UNCHANGING_P (x))
314 res->volatil = MEM_VOLATILE_P (x);
316 /* Mark registers used to access memory. */
317 mark_referenced_resources (XEXP (x, 0), res, 0);
324 case UNSPEC_VOLATILE:
326 /* Traditional asm's are always volatile. */
331 res->volatil = MEM_VOLATILE_P (x);
333 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
334 We can not just fall through here since then we would be confused
335 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
336 traditional asms unlike their normal usage. */
338 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
339 mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
343 /* The first operand will be a (MEM (xxx)) but doesn't really reference
344 memory. The second operand may be referenced, though. */
345 mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
346 mark_referenced_resources (XEXP (x, 1), res, 0);
350 /* Usually, the first operand of SET is set, not referenced. But
351 registers used to access memory are referenced. SET_DEST is
352 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
354 mark_referenced_resources (SET_SRC (x), res, 0);
357 if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
358 mark_referenced_resources (x, res, 0);
359 else if (GET_CODE (x) == SUBREG)
361 if (GET_CODE (x) == MEM)
362 mark_referenced_resources (XEXP (x, 0), res, 0);
369 if (include_delayed_effects)
371 /* A CALL references memory, the frame pointer if it exists, the
372 stack pointer, any global registers and any registers given in
373 USE insns immediately in front of the CALL.
375 However, we may have moved some of the parameter loading insns
376 into the delay slot of this CALL. If so, the USE's for them
377 don't count and should be skipped. */
378 rtx insn = PREV_INSN (x);
383 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
384 if (NEXT_INSN (insn) != x)
386 sequence = PATTERN (NEXT_INSN (insn));
387 seq_size = XVECLEN (sequence, 0);
388 if (GET_CODE (sequence) != SEQUENCE)
393 SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
394 if (frame_pointer_needed)
396 SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
397 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
398 SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
402 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
404 SET_HARD_REG_BIT (res->regs, i);
409 for (link = CALL_INSN_FUNCTION_USAGE (x);
411 link = XEXP (link, 1))
412 if (GET_CODE (XEXP (link, 0)) == USE)
414 for (i = 1; i < seq_size; i++)
416 rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
417 if (GET_CODE (slot_pat) == SET
418 && rtx_equal_p (SET_DEST (slot_pat),
419 SET_DEST (XEXP (link, 0))))
423 mark_referenced_resources (SET_DEST (XEXP (link, 0)),
429 /* ... fall through to other INSN processing ... */
434 #ifdef INSN_REFERENCES_ARE_DELAYED
435 if (! include_delayed_effects
436 && INSN_REFERENCES_ARE_DELAYED (x))
440 /* No special processing, just speed up. */
441 mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
445 /* Process each sub-expression and flag what it needs. */
446 format_ptr = GET_RTX_FORMAT (code);
447 for (i = 0; i < GET_RTX_LENGTH (code); i++)
448 switch (*format_ptr++)
451 mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
455 for (j = 0; j < XVECLEN (x, i); j++)
456 mark_referenced_resources (XVECEXP (x, i, j), res,
457 include_delayed_effects);
462 /* Given X, a part of an insn, and a pointer to a `struct resource', RES,
463 indicate which resources are modified by the insn. If INCLUDE_CALLED_ROUTINE
464 is nonzero, also mark resources potentially set by the called routine.
466 If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
467 objects are being referenced instead of set.
469 We never mark the insn as modifying the condition code unless it explicitly
470 SETs CC0 even though this is not totally correct. The reason for this is
471 that we require a SET of CC0 to immediately precede the reference to CC0.
472 So if some other insn sets CC0 as a side-effect, we know it cannot affect
473 our computation and thus may be placed in a delay slot. */
476 mark_set_resources (x, res, in_dest, include_delayed_effects)
478 register struct resources *res;
480 int include_delayed_effects;
482 register enum rtx_code code;
484 register char *format_ptr;
502 /* These don't set any resources. */
511 /* Called routine modifies the condition code, memory, any registers
512 that aren't saved across calls, global registers and anything
513 explicitly CLOBBERed immediately after the CALL_INSN. */
515 if (include_delayed_effects)
517 rtx next = NEXT_INSN (x);
518 rtx prev = PREV_INSN (x);
521 res->cc = res->memory = 1;
522 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
523 if (call_used_regs[i] || global_regs[i])
524 SET_HARD_REG_BIT (res->regs, i);
526 /* If X is part of a delay slot sequence, then NEXT should be
527 the first insn after the sequence. */
528 if (NEXT_INSN (prev) != x)
529 next = NEXT_INSN (NEXT_INSN (prev));
531 for (link = CALL_INSN_FUNCTION_USAGE (x);
532 link; link = XEXP (link, 1))
533 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
534 mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);
536 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
537 assume that this call can clobber any register. */
538 if (next && GET_CODE (next) == NOTE
539 && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
540 SET_HARD_REG_SET (res->regs);
543 /* ... and also what it's RTL says it modifies, if anything. */
548 /* An insn consisting of just a CLOBBER (or USE) is just for flow
549 and doesn't actually do anything, so we ignore it. */
551 #ifdef INSN_SETS_ARE_DELAYED
552 if (! include_delayed_effects
553 && INSN_SETS_ARE_DELAYED (x))
558 if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
563 /* If the source of a SET is a CALL, this is actually done by
564 the called routine. So only include it if we are to include the
565 effects of the calling routine. */
567 mark_set_resources (SET_DEST (x), res,
568 (include_delayed_effects
569 || GET_CODE (SET_SRC (x)) != CALL),
572 mark_set_resources (SET_SRC (x), res, 0, 0);
576 mark_set_resources (XEXP (x, 0), res, 1, 0);
580 for (i = 0; i < XVECLEN (x, 0); i++)
581 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
582 && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
583 mark_set_resources (XVECEXP (x, 0, i), res, 0,
584 include_delayed_effects);
591 mark_set_resources (XEXP (x, 0), res, 1, 0);
595 mark_set_resources (XEXP (x, 0), res, in_dest, 0);
596 mark_set_resources (XEXP (x, 1), res, 0, 0);
597 mark_set_resources (XEXP (x, 2), res, 0, 0);
604 res->volatil = MEM_VOLATILE_P (x);
607 mark_set_resources (XEXP (x, 0), res, 0, 0);
612 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
613 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
617 /* Process each sub-expression and flag what it needs. */
618 format_ptr = GET_RTX_FORMAT (code);
619 for (i = 0; i < GET_RTX_LENGTH (code); i++)
620 switch (*format_ptr++)
623 mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
627 for (j = 0; j < XVECLEN (x, i); j++)
628 mark_set_resources (XVECEXP (x, i, j), res, in_dest,
629 include_delayed_effects);
634 /* Return TRUE if this insn should stop the search for insn to fill delay
635 slots. LABELS_P indicates that labels should terminate the search.
636 In all cases, jumps terminate the search. */
639 stop_search_p (insn, labels_p)
646 switch (GET_CODE (insn))
660 /* OK unless it contains a delay slot or is an `asm' insn of some type.
661 We don't know anything about these. */
662 return (GET_CODE (PATTERN (insn)) == SEQUENCE
663 || GET_CODE (PATTERN (insn)) == ASM_INPUT
664 || asm_noperands (PATTERN (insn)) >= 0);
671 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
672 resource set contains a volatile memory reference. Otherwise, return FALSE. */
675 resource_conflicts_p (res1, res2)
676 struct resources *res1, *res2;
678 if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
679 || res1->volatil || res2->volatil)
683 return (res1->regs & res2->regs) != HARD_CONST (0);
688 for (i = 0; i < HARD_REG_SET_LONGS; i++)
689 if ((res1->regs[i] & res2->regs[i]) != 0)
696 /* Return TRUE if any resource marked in RES, a `struct resources', is
697 referenced by INSN. If INCLUDE_CALLED_ROUTINE is set, return if the called
698 routine is using those resources.
700 We compute this by computing all the resources referenced by INSN and
701 seeing if this conflicts with RES. It might be faster to directly check
702 ourselves, and this is the way it used to work, but it means duplicating
703 a large block of complex code. */
706 insn_references_resource_p (insn, res, include_delayed_effects)
708 register struct resources *res;
709 int include_delayed_effects;
711 struct resources insn_res;
713 CLEAR_RESOURCE (&insn_res);
714 mark_referenced_resources (insn, &insn_res, include_delayed_effects);
715 return resource_conflicts_p (&insn_res, res);
718 /* Return TRUE if INSN modifies resources that are marked in RES.
719 INCLUDE_CALLED_ROUTINE is set if the actions of that routine should be
720 included. CC0 is only modified if it is explicitly set; see comments
721 in front of mark_set_resources for details. */
724 insn_sets_resource_p (insn, res, include_delayed_effects)
726 register struct resources *res;
727 int include_delayed_effects;
729 struct resources insn_sets;
731 CLEAR_RESOURCE (&insn_sets);
732 mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
733 return resource_conflicts_p (&insn_sets, res);
736 /* Find a label at the end of the function or before a RETURN. If there is
744 /* If we found one previously, return it. */
745 if (end_of_function_label)
746 return end_of_function_label;
748 /* Otherwise, see if there is a label at the end of the function. If there
749 is, it must be that RETURN insns aren't needed, so that is our return
750 label and we don't have to do anything else. */
752 insn = get_last_insn ();
753 while (GET_CODE (insn) == NOTE
754 || (GET_CODE (insn) == INSN
755 && (GET_CODE (PATTERN (insn)) == USE
756 || GET_CODE (PATTERN (insn)) == CLOBBER)))
757 insn = PREV_INSN (insn);
759 /* When a target threads its epilogue we might already have a
760 suitable return insn. If so put a label before it for the
761 end_of_function_label. */
762 if (GET_CODE (insn) == BARRIER
763 && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
764 && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
766 rtx temp = PREV_INSN (PREV_INSN (insn));
767 end_of_function_label = gen_label_rtx ();
768 LABEL_NUSES (end_of_function_label) = 0;
770 /* Put the label before an USE insns that may proceed the RETURN insn. */
771 while (GET_CODE (temp) == USE)
772 temp = PREV_INSN (temp);
774 emit_label_after (end_of_function_label, temp);
777 else if (GET_CODE (insn) == CODE_LABEL)
778 end_of_function_label = insn;
781 /* Otherwise, make a new label and emit a RETURN and BARRIER,
783 end_of_function_label = gen_label_rtx ();
784 LABEL_NUSES (end_of_function_label) = 0;
785 emit_label (end_of_function_label);
789 /* The return we make may have delay slots too. */
790 rtx insn = gen_return ();
791 insn = emit_jump_insn (insn);
793 if (num_delay_slots (insn) > 0)
794 obstack_ptr_grow (&unfilled_slots_obstack, insn);
799 /* Show one additional use for this label so it won't go away until
801 ++LABEL_NUSES (end_of_function_label);
803 return end_of_function_label;
806 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
807 the pattern of INSN with the SEQUENCE.
809 Chain the insns so that NEXT_INSN of each insn in the sequence points to
810 the next and NEXT_INSN of the last insn in the sequence points to
811 the first insn after the sequence. Similarly for PREV_INSN. This makes
812 it easier to scan all insns.
814 Returns the SEQUENCE that replaces INSN. */
817 emit_delay_sequence (insn, list, length, avail)
827 /* Allocate the the rtvec to hold the insns and the SEQUENCE. */
828 rtvec seqv = rtvec_alloc (length + 1);
829 rtx seq = gen_rtx (SEQUENCE, VOIDmode, seqv);
830 rtx seq_insn = make_insn_raw (seq);
831 rtx first = get_insns ();
832 rtx last = get_last_insn ();
834 /* Make a copy of the insn having delay slots. */
835 rtx delay_insn = copy_rtx (insn);
837 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
838 confuse further processing. Update LAST in case it was the last insn.
839 We will put the BARRIER back in later. */
840 if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
842 delete_insn (NEXT_INSN (insn));
843 last = get_last_insn ();
847 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
848 NEXT_INSN (seq_insn) = NEXT_INSN (insn);
849 PREV_INSN (seq_insn) = PREV_INSN (insn);
852 set_new_first_and_last_insn (first, seq_insn);
854 PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;
857 set_new_first_and_last_insn (seq_insn, last);
859 NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;
861 /* Build our SEQUENCE and rebuild the insn chain. */
862 XVECEXP (seq, 0, 0) = delay_insn;
863 INSN_DELETED_P (delay_insn) = 0;
864 PREV_INSN (delay_insn) = PREV_INSN (seq_insn);
866 for (li = list; li; li = XEXP (li, 1), i++)
868 rtx tem = XEXP (li, 0);
871 /* Show that this copy of the insn isn't deleted. */
872 INSN_DELETED_P (tem) = 0;
874 XVECEXP (seq, 0, i) = tem;
875 PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
876 NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;
878 /* Remove any REG_DEAD notes because we can't rely on them now
879 that the insn has been moved. */
880 for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
881 if (REG_NOTE_KIND (note) == REG_DEAD)
882 XEXP (note, 0) = const0_rtx;
885 NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);
887 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
888 last insn in that SEQUENCE to point to us. Similarly for the first
889 insn in the following insn if it is a SEQUENCE. */
891 if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
892 && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
893 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
894 XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
897 if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
898 && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
899 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
901 /* If there used to be a BARRIER, put it back. */
903 emit_barrier_after (seq_insn);
911 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
912 be in the order in which the insns are to be executed. */
915 add_to_delay_list (insn, delay_list)
919 /* If we have an empty list, just make a new list element. If
920 INSN has it's block number recorded, clear it since we may
921 be moving the insn to a new block. */
925 struct target_info *tinfo;
927 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
928 tinfo; tinfo = tinfo->next)
929 if (tinfo->uid == INSN_UID (insn))
935 return gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
938 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
940 XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));
945 /* Delete INSN from the the delay slot of the insn that it is in. This may
946 produce an insn without anything in its delay slots. */
949 delete_from_delay_slot (insn)
952 rtx trial, seq_insn, seq, prev;
956 /* We first must find the insn containing the SEQUENCE with INSN in its
957 delay slot. Do this by finding an insn, TRIAL, where
958 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
961 PREV_INSN (NEXT_INSN (trial)) == trial;
962 trial = NEXT_INSN (trial))
965 seq_insn = PREV_INSN (NEXT_INSN (trial));
966 seq = PATTERN (seq_insn);
968 /* Create a delay list consisting of all the insns other than the one
969 we are deleting (unless we were the only one). */
970 if (XVECLEN (seq, 0) > 2)
971 for (i = 1; i < XVECLEN (seq, 0); i++)
972 if (XVECEXP (seq, 0, i) != insn)
973 delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);
975 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
976 list, and rebuild the delay list if non-empty. */
977 prev = PREV_INSN (seq_insn);
978 trial = XVECEXP (seq, 0, 0);
979 delete_insn (seq_insn);
980 add_insn_after (trial, prev);
982 if (GET_CODE (trial) == JUMP_INSN
983 && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
984 emit_barrier_after (trial);
986 /* If there are any delay insns, remit them. Otherwise clear the
989 trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2, 0);
991 INSN_ANNULLED_BRANCH_P (trial) = 0;
993 INSN_FROM_TARGET_P (insn) = 0;
995 /* Show we need to fill this insn again. */
996 obstack_ptr_grow (&unfilled_slots_obstack, trial);
999 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
1000 the insn that sets CC0 for it and delete it too. */
1003 delete_scheduled_jump (insn)
1006 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
1007 delete the insn that sets the condition code, but it is hard to find it.
1008 Since this case is rare anyway, don't bother trying; there would likely
1009 be other insns that became dead anyway, which we wouldn't know to
1013 if (reg_mentioned_p (cc0_rtx, insn))
1015 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1017 /* If a reg-note was found, it points to an insn to set CC0. This
1018 insn is in the delay list of some other insn. So delete it from
1019 the delay list it was in. */
1022 if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
1023 && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
1024 delete_from_delay_slot (XEXP (note, 0));
1028 /* The insn setting CC0 is our previous insn, but it may be in
1029 a delay slot. It will be the last insn in the delay slot, if
1031 rtx trial = previous_insn (insn);
1032 if (GET_CODE (trial) == NOTE)
1033 trial = prev_nonnote_insn (trial);
1034 if (sets_cc0_p (PATTERN (trial)) != 1
1035 || FIND_REG_INC_NOTE (trial, 0))
1037 if (PREV_INSN (NEXT_INSN (trial)) == trial)
1038 delete_insn (trial);
1040 delete_from_delay_slot (trial);
1048 /* Counters for delay-slot filling. */
1050 #define NUM_REORG_FUNCTIONS 2
1051 #define MAX_DELAY_HISTOGRAM 3
1052 #define MAX_REORG_PASSES 2
1054 static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];
1056 static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];
1058 static int reorg_pass_number;
1061 note_delay_statistics (slots_filled, index)
1062 int slots_filled, index;
1064 num_insns_needing_delays[index][reorg_pass_number]++;
1065 if (slots_filled > MAX_DELAY_HISTOGRAM)
1066 slots_filled = MAX_DELAY_HISTOGRAM;
1067 num_filled_delays[index][slots_filled][reorg_pass_number]++;
1070 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
1072 /* Optimize the following cases:
1074 1. When a conditional branch skips over only one instruction,
1075 use an annulling branch and put that insn in the delay slot.
1076 Use either a branch that annuls when the condition if true or
1077 invert the test with a branch that annuls when the condition is
1078 false. This saves insns, since otherwise we must copy an insn
1081 (orig) (skip) (otherwise)
1082 Bcc.n L1 Bcc',a L1 Bcc,a L1'
1089 2. When a conditional branch skips over only one instruction,
1090 and after that, it unconditionally branches somewhere else,
1091 perform the similar optimization. This saves executing the
1092 second branch in the case where the inverted condition is true.
1099 INSN is a JUMP_INSN.
1101 This should be expanded to skip over N insns, where N is the number
1102 of delay slots required. */
1105 optimize_skip (insn)
1108 register rtx trial = next_nonnote_insn (insn);
1109 rtx next_trial = next_active_insn (trial);
1114 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1117 || GET_CODE (trial) != INSN
1118 || GET_CODE (PATTERN (trial)) == SEQUENCE
1119 || recog_memoized (trial) < 0
1120 || (! eligible_for_annul_false (insn, 0, trial, flags)
1121 && ! eligible_for_annul_true (insn, 0, trial, flags)))
1124 /* There are two cases where we are just executing one insn (we assume
1125 here that a branch requires only one insn; this should be generalized
1126 at some point): Where the branch goes around a single insn or where
1127 we have one insn followed by a branch to the same label we branch to.
1128 In both of these cases, inverting the jump and annulling the delay
1129 slot give the same effect in fewer insns. */
1130 if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
1132 && GET_CODE (next_trial) == JUMP_INSN
1133 && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
1134 && (simplejump_p (next_trial)
1135 || GET_CODE (PATTERN (next_trial)) == RETURN)))
1137 if (eligible_for_annul_false (insn, 0, trial, flags))
1139 if (invert_jump (insn, JUMP_LABEL (insn)))
1140 INSN_FROM_TARGET_P (trial) = 1;
1141 else if (! eligible_for_annul_true (insn, 0, trial, flags))
1145 delay_list = add_to_delay_list (trial, NULL_RTX);
1146 next_trial = next_active_insn (trial);
1147 update_block (trial, trial);
1148 delete_insn (trial);
1150 /* Also, if we are targeting an unconditional
1151 branch, thread our jump to the target of that branch. Don't
1152 change this into a RETURN here, because it may not accept what
1153 we have in the delay slot. We'll fix this up later. */
1154 if (next_trial && GET_CODE (next_trial) == JUMP_INSN
1155 && (simplejump_p (next_trial)
1156 || GET_CODE (PATTERN (next_trial)) == RETURN))
1158 target_label = JUMP_LABEL (next_trial);
1159 if (target_label == 0)
1160 target_label = find_end_label ();
1162 /* Recompute the flags based on TARGET_LABEL since threading
1163 the jump to TARGET_LABEL may change the direction of the
1164 jump (which may change the circumstances in which the
1165 delay slot is nullified). */
1166 flags = get_jump_flags (insn, target_label);
1167 if (eligible_for_annul_true (insn, 0, trial, flags))
1168 reorg_redirect_jump (insn, target_label);
1171 INSN_ANNULLED_BRANCH_P (insn) = 1;
1179 /* Encode and return branch direction and prediction information for
1180 INSN assuming it will jump to LABEL.
1182 Non conditional branches return no direction information and
1183 are predicted as very likely taken. */
1185 get_jump_flags (insn, label)
1190 /* get_jump_flags can be passed any insn with delay slots, these may
1191 be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
1192 direction information, and only if they are conditional jumps.
1194 If LABEL is zero, then there is no way to determine the branch
1196 if (GET_CODE (insn) == JUMP_INSN
1197 && (condjump_p (insn) || condjump_in_parallel_p (insn))
1198 && INSN_UID (insn) <= max_uid
1200 && INSN_UID (label) <= max_uid)
1202 = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
1203 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
1204 /* No valid direction information. */
1208 /* If insn is a conditional branch call mostly_true_jump to get
1209 determine the branch prediction.
1211 Non conditional branches are predicted as very likely taken. */
1212 if (GET_CODE (insn) == JUMP_INSN
1213 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
1217 prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
1221 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
1224 flags |= ATTR_FLAG_likely;
1227 flags |= ATTR_FLAG_unlikely;
1230 flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
1238 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
1243 /* Return 1 if INSN is a destination that will be branched to rarely (the
1244 return point of a function); return 2 if DEST will be branched to very
1245 rarely (a call to a function that doesn't return). Otherwise,
1249 rare_destination (insn)
1255 for (; insn; insn = next)
1257 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
1258 insn = XVECEXP (PATTERN (insn), 0, 0);
1260 next = NEXT_INSN (insn);
1262 switch (GET_CODE (insn))
1267 /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN. We
1268 don't scan past JUMP_INSNs, so any barrier we find here must
1269 have been after a CALL_INSN and hence mean the call doesn't
1273 if (GET_CODE (PATTERN (insn)) == RETURN)
1275 else if (simplejump_p (insn)
1276 && jump_count++ < 10)
1277 next = JUMP_LABEL (insn);
1283 /* If we got here it means we hit the end of the function. So this
1284 is an unlikely destination. */
1289 /* Return truth value of the statement that this branch
1290 is mostly taken. If we think that the branch is extremely likely
1291 to be taken, we return 2. If the branch is slightly more likely to be
1292 taken, return 1. If the branch is slightly less likely to be taken,
1293 return 0 and if the branch is highly unlikely to be taken, return -1.
1295 CONDITION, if non-zero, is the condition that JUMP_INSN is testing. */
1298 mostly_true_jump (jump_insn, condition)
1299 rtx jump_insn, condition;
1301 rtx target_label = JUMP_LABEL (jump_insn);
1303 int rare_dest = rare_destination (target_label);
1304 int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
1306 /* If this is a branch outside a loop, it is highly unlikely. */
1307 if (GET_CODE (PATTERN (jump_insn)) == SET
1308 && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
1309 && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
1310 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
1311 || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
1312 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
1317 /* If this is the test of a loop, it is very likely true. We scan
1318 backwards from the target label. If we find a NOTE_INSN_LOOP_BEG
1319 before the next real insn, we assume the branch is to the top of
1321 for (insn = PREV_INSN (target_label);
1322 insn && GET_CODE (insn) == NOTE;
1323 insn = PREV_INSN (insn))
1324 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1327 /* If this is a jump to the test of a loop, it is likely true. We scan
1328 forwards from the target label. If we find a NOTE_INSN_LOOP_VTOP
1329 before the next real insn, we assume the branch is to the loop branch
1331 for (insn = NEXT_INSN (target_label);
1332 insn && GET_CODE (insn) == NOTE;
1333 insn = PREV_INSN (insn))
1334 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
1338 /* Look at the relative rarities of the fallthough and destination. If
1339 they differ, we can predict the branch that way. */
1341 switch (rare_fallthrough - rare_dest)
1355 /* If we couldn't figure out what this jump was, assume it won't be
1356 taken. This should be rare. */
1360 /* EQ tests are usually false and NE tests are usually true. Also,
1361 most quantities are positive, so we can make the appropriate guesses
1362 about signed comparisons against zero. */
1363 switch (GET_CODE (condition))
1366 /* Unconditional branch. */
1374 if (XEXP (condition, 1) == const0_rtx)
1379 if (XEXP (condition, 1) == const0_rtx)
1384 /* Predict backward branches usually take, forward branches usually not. If
1385 we don't know whether this is forward or backward, assume the branch
1386 will be taken, since most are. */
1387 return (target_label == 0 || INSN_UID (jump_insn) > max_uid
1388 || INSN_UID (target_label) > max_uid
1389 || (uid_to_ruid[INSN_UID (jump_insn)]
1390 > uid_to_ruid[INSN_UID (target_label)]));;
1393 /* Return the condition under which INSN will branch to TARGET. If TARGET
1394 is zero, return the condition under which INSN will return. If INSN is
1395 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1396 type of jump, or it doesn't go to TARGET, return 0. */
1399 get_branch_condition (insn, target)
1403 rtx pat = PATTERN (insn);
1406 if (condjump_in_parallel_p (insn))
1407 pat = XVECEXP (pat, 0, 0);
1409 if (GET_CODE (pat) == RETURN)
1410 return target == 0 ? const_true_rtx : 0;
1412 else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
1415 src = SET_SRC (pat);
1416 if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
1417 return const_true_rtx;
1419 else if (GET_CODE (src) == IF_THEN_ELSE
1420 && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
1421 || (GET_CODE (XEXP (src, 1)) == LABEL_REF
1422 && XEXP (XEXP (src, 1), 0) == target))
1423 && XEXP (src, 2) == pc_rtx)
1424 return XEXP (src, 0);
1426 else if (GET_CODE (src) == IF_THEN_ELSE
1427 && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
1428 || (GET_CODE (XEXP (src, 2)) == LABEL_REF
1429 && XEXP (XEXP (src, 2), 0) == target))
1430 && XEXP (src, 1) == pc_rtx)
1431 return gen_rtx (reverse_condition (GET_CODE (XEXP (src, 0))),
1432 GET_MODE (XEXP (src, 0)),
1433 XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));
1438 /* Return non-zero if CONDITION is more strict than the condition of
1439 INSN, i.e., if INSN will always branch if CONDITION is true. */
1442 condition_dominates_p (condition, insn)
1446 rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
1447 enum rtx_code code = GET_CODE (condition);
1448 enum rtx_code other_code;
1450 if (rtx_equal_p (condition, other_condition)
1451 || other_condition == const_true_rtx)
1454 else if (condition == const_true_rtx || other_condition == 0)
1457 other_code = GET_CODE (other_condition);
1458 if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
1459 || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
1460 || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
1463 return comparison_dominates_p (code, other_code);
1466 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1467 any insns already in the delay slot of JUMP. */
1470 redirect_with_delay_slots_safe_p (jump, newlabel, seq)
1471 rtx jump, newlabel, seq;
1473 int flags, slots, i;
1474 rtx pat = PATTERN (seq);
1476 /* Make sure all the delay slots of this jump would still
1477 be valid after threading the jump. If they are still
1478 valid, then return non-zero. */
1480 flags = get_jump_flags (jump, newlabel);
1481 for (i = 1; i < XVECLEN (pat, 0); i++)
1483 #ifdef ANNUL_IFFALSE_SLOTS
1484 (INSN_ANNULLED_BRANCH_P (jump)
1485 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1486 ? eligible_for_annul_false (jump, i - 1,
1487 XVECEXP (pat, 0, i), flags) :
1489 #ifdef ANNUL_IFTRUE_SLOTS
1490 (INSN_ANNULLED_BRANCH_P (jump)
1491 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1492 ? eligible_for_annul_true (jump, i - 1,
1493 XVECEXP (pat, 0, i), flags) :
1495 eligible_for_delay (jump, i -1, XVECEXP (pat, 0, i), flags)))
1498 return (i == XVECLEN (pat, 0));
1501 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1502 any insns we wish to place in the delay slot of JUMP. */
1505 redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
1506 rtx jump, newlabel, delay_list;
1511 /* Make sure all the insns in DELAY_LIST would still be
1512 valid after threading the jump. If they are still
1513 valid, then return non-zero. */
1515 flags = get_jump_flags (jump, newlabel);
1516 for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
1518 #ifdef ANNUL_IFFALSE_SLOTS
1519 (INSN_ANNULLED_BRANCH_P (jump)
1520 && INSN_FROM_TARGET_P (XEXP (li, 0)))
1521 ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
1523 #ifdef ANNUL_IFTRUE_SLOTS
1524 (INSN_ANNULLED_BRANCH_P (jump)
1525 && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
1526 ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
1528 eligible_for_delay (jump, i, XEXP (li, 0), flags)))
1531 return (li == NULL);
1535 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1536 the condition tested by INSN is CONDITION and the resources shown in
1537 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1538 from SEQ's delay list, in addition to whatever insns it may execute
1539 (in DELAY_LIST). SETS and NEEDED are denote resources already set and
1540 needed while searching for delay slot insns. Return the concatenated
1541 delay list if possible, otherwise, return 0.
1543 SLOTS_TO_FILL is the total number of slots required by INSN, and
1544 PSLOTS_FILLED points to the number filled so far (also the number of
1545 insns in DELAY_LIST). It is updated with the number that have been
1546 filled from the SEQUENCE, if any.
1548 PANNUL_P points to a non-zero value if we already know that we need
1549 to annul INSN. If this routine determines that annulling is needed,
1550 it may set that value non-zero.
1552 PNEW_THREAD points to a location that is to receive the place at which
1553 execution should continue. */
1556 steal_delay_list_from_target (insn, condition, seq, delay_list,
1557 sets, needed, other_needed,
1558 slots_to_fill, pslots_filled, pannul_p,
1560 rtx insn, condition;
1563 struct resources *sets, *needed, *other_needed;
1570 int slots_remaining = slots_to_fill - *pslots_filled;
1571 int total_slots_filled = *pslots_filled;
1572 rtx new_delay_list = 0;
1573 int must_annul = *pannul_p;
1576 /* We can't do anything if there are more delay slots in SEQ than we
1577 can handle, or if we don't know that it will be a taken branch.
1579 We know that it will be a taken branch if it is either an unconditional
1580 branch or a conditional branch with a stricter branch condition. */
1582 if (XVECLEN (seq, 0) - 1 > slots_remaining
1583 || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0)))
1586 for (i = 1; i < XVECLEN (seq, 0); i++)
1588 rtx trial = XVECEXP (seq, 0, i);
1591 if (insn_references_resource_p (trial, sets, 0)
1592 || insn_sets_resource_p (trial, needed, 0)
1593 || insn_sets_resource_p (trial, sets, 0)
1595 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1597 || find_reg_note (trial, REG_CC_USER, NULL_RTX)
1599 /* If TRIAL is from the fallthrough code of an annulled branch insn
1600 in SEQ, we cannot use it. */
1601 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
1602 && ! INSN_FROM_TARGET_P (trial)))
1605 /* If this insn was already done (usually in a previous delay slot),
1606 pretend we put it in our delay slot. */
1607 if (redundant_insn_p (trial, insn, new_delay_list))
1610 /* We will end up re-vectoring this branch, so compute flags
1611 based on jumping to the new label. */
1612 flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));
1615 && ((condition == const_true_rtx
1616 || (! insn_sets_resource_p (trial, other_needed, 0)
1617 && ! may_trap_p (PATTERN (trial)))))
1618 ? eligible_for_delay (insn, total_slots_filled, trial, flags)
1620 eligible_for_annul_false (insn, total_slots_filled, trial, flags)))
1622 temp = copy_rtx (trial);
1623 INSN_FROM_TARGET_P (temp) = 1;
1624 new_delay_list = add_to_delay_list (temp, new_delay_list);
1625 total_slots_filled++;
1627 if (--slots_remaining == 0)
1634 /* Show the place to which we will be branching. */
1635 *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
1637 /* Add any new insns to the delay list and update the count of the
1638 number of slots filled. */
1639 *pslots_filled = total_slots_filled;
1640 *pannul_p = must_annul;
1642 if (delay_list == 0)
1643 return new_delay_list;
1645 for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
1646 delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);
1651 /* Similar to steal_delay_list_from_target except that SEQ is on the
1652 fallthrough path of INSN. Here we only do something if the delay insn
1653 of SEQ is an unconditional branch. In that case we steal its delay slot
1654 for INSN since unconditional branches are much easier to fill. */
1657 steal_delay_list_from_fallthrough (insn, condition, seq,
1658 delay_list, sets, needed, other_needed,
1659 slots_to_fill, pslots_filled, pannul_p)
1660 rtx insn, condition;
1663 struct resources *sets, *needed, *other_needed;
1671 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1673 /* We can't do anything if SEQ's delay insn isn't an
1674 unconditional branch. */
1676 if (! simplejump_p (XVECEXP (seq, 0, 0))
1677 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
1680 for (i = 1; i < XVECLEN (seq, 0); i++)
1682 rtx trial = XVECEXP (seq, 0, i);
1684 /* If TRIAL sets CC0, stealing it will move it too far from the use
1686 if (insn_references_resource_p (trial, sets, 0)
1687 || insn_sets_resource_p (trial, needed, 0)
1688 || insn_sets_resource_p (trial, sets, 0)
1690 || sets_cc0_p (PATTERN (trial))
1696 /* If this insn was already done, we don't need it. */
1697 if (redundant_insn_p (trial, insn, delay_list))
1699 delete_from_delay_slot (trial);
1704 && ((condition == const_true_rtx
1705 || (! insn_sets_resource_p (trial, other_needed, 0)
1706 && ! may_trap_p (PATTERN (trial)))))
1707 ? eligible_for_delay (insn, *pslots_filled, trial, flags)
1709 eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
1711 delete_from_delay_slot (trial);
1712 delay_list = add_to_delay_list (trial, delay_list);
1714 if (++(*pslots_filled) == slots_to_fill)
1724 /* Try merging insns starting at THREAD which match exactly the insns in
1727 If all insns were matched and the insn was previously annulling, the
1728 annul bit will be cleared.
1730 For each insn that is merged, if the branch is or will be non-annulling,
1731 we delete the merged insn. */
1734 try_merge_delay_insns (insn, thread)
1737 rtx trial, next_trial;
1738 rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
1739 int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
1740 int slot_number = 1;
1741 int num_slots = XVECLEN (PATTERN (insn), 0);
1742 rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1743 struct resources set, needed;
1744 rtx merged_insns = 0;
1748 flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
1750 CLEAR_RESOURCE (&needed);
1751 CLEAR_RESOURCE (&set);
1753 /* If this is not an annulling branch, take into account anything needed in
1754 NEXT_TO_MATCH. This prevents two increments from being incorrectly
1755 folded into one. If we are annulling, this would be the correct
1756 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH
1757 will essentially disable this optimization. This method is somewhat of
1758 a kludge, but I don't see a better way.) */
1760 mark_referenced_resources (next_to_match, &needed, 1);
1762 for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
1764 rtx pat = PATTERN (trial);
1766 next_trial = next_nonnote_insn (trial);
1768 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1769 if (GET_CODE (trial) == INSN
1770 && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
1773 if (GET_CODE (next_to_match) == GET_CODE (trial)
1775 /* We can't share an insn that sets cc0. */
1776 && ! sets_cc0_p (pat)
1778 && ! insn_references_resource_p (trial, &set, 1)
1779 && ! insn_sets_resource_p (trial, &set, 1)
1780 && ! insn_sets_resource_p (trial, &needed, 1)
1781 && (trial = try_split (pat, trial, 0)) != 0
1782 /* Update next_trial, in case try_split succeeded. */
1783 && (next_trial = next_nonnote_insn (trial))
1784 && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
1785 /* Have to test this condition if annul condition is different
1786 from (and less restrictive than) non-annulling one. */
1787 && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
1792 update_block (trial, thread);
1793 if (trial == thread)
1794 thread = next_active_insn (thread);
1796 delete_insn (trial);
1797 INSN_FROM_TARGET_P (next_to_match) = 0;
1800 merged_insns = gen_rtx (INSN_LIST, VOIDmode, trial, merged_insns);
1802 if (++slot_number == num_slots)
1805 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1807 mark_referenced_resources (next_to_match, &needed, 1);
1810 mark_set_resources (trial, &set, 0, 1);
1811 mark_referenced_resources (trial, &needed, 1);
1814 /* See if we stopped on a filled insn. If we did, try to see if its
1815 delay slots match. */
1816 if (slot_number != num_slots
1817 && trial && GET_CODE (trial) == INSN
1818 && GET_CODE (PATTERN (trial)) == SEQUENCE
1819 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
1821 rtx pat = PATTERN (trial);
1822 rtx filled_insn = XVECEXP (pat, 0, 0);
1824 /* Account for resources set/needed by the filled insn. */
1825 mark_set_resources (filled_insn, &set, 0, 1);
1826 mark_referenced_resources (filled_insn, &needed, 1);
1828 for (i = 1; i < XVECLEN (pat, 0); i++)
1830 rtx dtrial = XVECEXP (pat, 0, i);
1832 if (! insn_references_resource_p (dtrial, &set, 1)
1833 && ! insn_sets_resource_p (dtrial, &set, 1)
1834 && ! insn_sets_resource_p (dtrial, &needed, 1)
1836 && ! sets_cc0_p (PATTERN (dtrial))
1838 && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
1839 && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
1843 update_block (dtrial, thread);
1844 delete_from_delay_slot (dtrial);
1845 INSN_FROM_TARGET_P (next_to_match) = 0;
1848 merged_insns = gen_rtx (INSN_LIST, SImode, dtrial,
1851 if (++slot_number == num_slots)
1854 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1859 /* If all insns in the delay slot have been matched and we were previously
1860 annulling the branch, we need not any more. In that case delete all the
1861 merged insns. Also clear the INSN_FROM_TARGET_P bit of each insn the
1862 the delay list so that we know that it isn't only being used at the
1864 if (slot_number == num_slots && annul_p)
1866 for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
1868 if (GET_MODE (merged_insns) == SImode)
1870 update_block (XEXP (merged_insns, 0), thread);
1871 delete_from_delay_slot (XEXP (merged_insns, 0));
1875 update_block (XEXP (merged_insns, 0), thread);
1876 delete_insn (XEXP (merged_insns, 0));
1880 INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
1882 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1883 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
1887 /* See if INSN is redundant with an insn in front of TARGET. Often this
1888 is called when INSN is a candidate for a delay slot of TARGET.
1889 DELAY_LIST are insns that will be placed in delay slots of TARGET in front
1890 of INSN. Often INSN will be redundant with an insn in a delay slot of
1891 some previous insn. This happens when we have a series of branches to the
1892 same label; in that case the first insn at the target might want to go
1893 into each of the delay slots.
1895 If we are not careful, this routine can take up a significant fraction
1896 of the total compilation time (4%), but only wins rarely. Hence we
1897 speed this routine up by making two passes. The first pass goes back
1898 until it hits a label and sees if it find an insn with an identical
1899 pattern. Only in this (relatively rare) event does it check for
1902 We do not split insns we encounter. This could cause us not to find a
1903 redundant insn, but the cost of splitting seems greater than the possible
1904 gain in rare cases. */
1907 redundant_insn_p (insn, target, delay_list)
1912 rtx target_main = target;
1913 rtx ipat = PATTERN (insn);
1915 struct resources needed, set;
1918 /* Scan backwards looking for a match. */
1919 for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
1921 if (GET_CODE (trial) == CODE_LABEL)
1924 if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
1927 pat = PATTERN (trial);
1928 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1931 if (GET_CODE (pat) == SEQUENCE)
1933 /* Stop for a CALL and its delay slots because it is difficult to
1934 track its resource needs correctly. */
1935 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1938 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1939 slots because it is difficult to track its resource needs
1942 #ifdef INSN_SETS_ARE_DELAYED
1943 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1947 #ifdef INSN_REFERENCES_ARE_DELAYED
1948 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1952 /* See if any of the insns in the delay slot match, updating
1953 resource requirements as we go. */
1954 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1955 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
1956 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat))
1959 /* If found a match, exit this loop early. */
1964 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat))
1968 /* If we didn't find an insn that matches, return 0. */
1972 /* See what resources this insn sets and needs. If they overlap, or
1973 if this insn references CC0, it can't be redundant. */
1975 CLEAR_RESOURCE (&needed);
1976 CLEAR_RESOURCE (&set);
1977 mark_set_resources (insn, &set, 0, 1);
1978 mark_referenced_resources (insn, &needed, 1);
1980 /* If TARGET is a SEQUENCE, get the main insn. */
1981 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1982 target_main = XVECEXP (PATTERN (target), 0, 0);
1984 if (resource_conflicts_p (&needed, &set)
1986 || reg_mentioned_p (cc0_rtx, ipat)
1988 /* The insn requiring the delay may not set anything needed or set by
1990 || insn_sets_resource_p (target_main, &needed, 1)
1991 || insn_sets_resource_p (target_main, &set, 1))
1994 /* Insns we pass may not set either NEEDED or SET, so merge them for
1996 needed.memory |= set.memory;
1997 IOR_HARD_REG_SET (needed.regs, set.regs);
1999 /* This insn isn't redundant if it conflicts with an insn that either is
2000 or will be in a delay slot of TARGET. */
2004 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
2006 delay_list = XEXP (delay_list, 1);
2009 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2010 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
2011 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
2014 /* Scan backwards until we reach a label or an insn that uses something
2015 INSN sets or sets something insn uses or sets. */
2017 for (trial = PREV_INSN (target);
2018 trial && GET_CODE (trial) != CODE_LABEL;
2019 trial = PREV_INSN (trial))
2021 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
2022 && GET_CODE (trial) != JUMP_INSN)
2025 pat = PATTERN (trial);
2026 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2029 if (GET_CODE (pat) == SEQUENCE)
2031 /* If this is a CALL_INSN and its delay slots, it is hard to track
2032 the resource needs properly, so give up. */
2033 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
2036 /* If this this is an INSN or JUMP_INSN with delayed effects, it
2037 is hard to track the resource needs properly, so give up. */
2039 #ifdef INSN_SETS_ARE_DELAYED
2040 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2044 #ifdef INSN_REFERENCES_ARE_DELAYED
2045 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2049 /* See if any of the insns in the delay slot match, updating
2050 resource requirements as we go. */
2051 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2053 rtx candidate = XVECEXP (pat, 0, i);
2055 /* If an insn will be annulled if the branch is false, it isn't
2056 considered as a possible duplicate insn. */
2057 if (rtx_equal_p (PATTERN (candidate), ipat)
2058 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2059 && INSN_FROM_TARGET_P (candidate)))
2061 /* Show that this insn will be used in the sequel. */
2062 INSN_FROM_TARGET_P (candidate) = 0;
2066 /* Unless this is an annulled insn from the target of a branch,
2067 we must stop if it sets anything needed or set by INSN. */
2068 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2069 || ! INSN_FROM_TARGET_P (candidate))
2070 && insn_sets_resource_p (candidate, &needed, 1))
2075 /* If the insn requiring the delay slot conflicts with INSN, we
2077 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
2082 /* See if TRIAL is the same as INSN. */
2083 pat = PATTERN (trial);
2084 if (rtx_equal_p (pat, ipat))
2087 /* Can't go any further if TRIAL conflicts with INSN. */
2088 if (insn_sets_resource_p (trial, &needed, 1))
2096 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
2097 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
2098 is non-zero, we are allowed to fall into this thread; otherwise, we are
2101 If LABEL is used more than one or we pass a label other than LABEL before
2102 finding an active insn, we do not own this thread. */
2105 own_thread_p (thread, label, allow_fallthrough)
2108 int allow_fallthrough;
2113 /* We don't own the function end. */
2117 /* Get the first active insn, or THREAD, if it is an active insn. */
2118 active_insn = next_active_insn (PREV_INSN (thread));
2120 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
2121 if (GET_CODE (insn) == CODE_LABEL
2122 && (insn != label || LABEL_NUSES (insn) != 1))
2125 if (allow_fallthrough)
2128 /* Ensure that we reach a BARRIER before any insn or label. */
2129 for (insn = prev_nonnote_insn (thread);
2130 insn == 0 || GET_CODE (insn) != BARRIER;
2131 insn = prev_nonnote_insn (insn))
2133 || GET_CODE (insn) == CODE_LABEL
2134 || (GET_CODE (insn) == INSN
2135 && GET_CODE (PATTERN (insn)) != USE
2136 && GET_CODE (PATTERN (insn)) != CLOBBER))
2142 /* Find the number of the basic block that starts closest to INSN. Return -1
2143 if we couldn't find such a basic block. */
2146 find_basic_block (insn)
2151 /* Scan backwards to the previous BARRIER. Then see if we can find a
2152 label that starts a basic block. Return the basic block number. */
2154 for (insn = prev_nonnote_insn (insn);
2155 insn && GET_CODE (insn) != BARRIER;
2156 insn = prev_nonnote_insn (insn))
2159 /* The start of the function is basic block zero. */
2163 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
2164 anything other than a CODE_LABEL or note, we can't find this code. */
2165 for (insn = next_nonnote_insn (insn);
2166 insn && GET_CODE (insn) == CODE_LABEL;
2167 insn = next_nonnote_insn (insn))
2169 for (i = 0; i < n_basic_blocks; i++)
2170 if (insn == basic_block_head[i])
2177 /* Called when INSN is being moved from a location near the target of a jump.
2178 We leave a marker of the form (use (INSN)) immediately in front
2179 of WHERE for mark_target_live_regs. These markers will be deleted when
2182 We used to try to update the live status of registers if WHERE is at
2183 the start of a basic block, but that can't work since we may remove a
2184 BARRIER in relax_delay_slots. */
2187 update_block (insn, where)
2193 /* Ignore if this was in a delay slot and it came from the target of
2195 if (INSN_FROM_TARGET_P (insn))
2198 emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);
2200 /* INSN might be making a value live in a block where it didn't use to
2201 be. So recompute liveness information for this block. */
2203 b = find_basic_block (insn);
2208 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
2209 the basic block containing the jump. */
2212 reorg_redirect_jump (jump, nlabel)
2216 int b = find_basic_block (jump);
2221 return redirect_jump (jump, nlabel);
2224 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
2225 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
2226 that reference values used in INSN. If we find one, then we move the
2227 REG_DEAD note to INSN.
2229 This is needed to handle the case where an later insn (after INSN) has a
2230 REG_DEAD note for a register used by INSN, and this later insn subsequently
2231 gets moved before a CODE_LABEL because it is a redundant insn. In this
2232 case, mark_target_live_regs may be confused into thinking the register
2233 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
2236 update_reg_dead_notes (insn, delayed_insn)
2237 rtx insn, delayed_insn;
2241 for (p = next_nonnote_insn (insn); p != delayed_insn;
2242 p = next_nonnote_insn (p))
2243 for (link = REG_NOTES (p); link; link = next)
2245 next = XEXP (link, 1);
2247 if (REG_NOTE_KIND (link) != REG_DEAD
2248 || GET_CODE (XEXP (link, 0)) != REG)
2251 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
2253 /* Move the REG_DEAD note from P to INSN. */
2254 remove_note (p, link);
2255 XEXP (link, 1) = REG_NOTES (insn);
2256 REG_NOTES (insn) = link;
2261 /* Marks registers possibly live at the current place being scanned by
2262 mark_target_live_regs. Used only by next two function. */
2264 static HARD_REG_SET current_live_regs;
2266 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
2267 Also only used by the next two functions. */
2269 static HARD_REG_SET pending_dead_regs;
2271 /* Utility function called from mark_target_live_regs via note_stores.
2272 It deadens any CLOBBERed registers and livens any SET registers. */
2275 update_live_status (dest, x)
2279 int first_regno, last_regno;
2282 if (GET_CODE (dest) != REG
2283 && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
2286 if (GET_CODE (dest) == SUBREG)
2287 first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2289 first_regno = REGNO (dest);
2291 last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
2293 if (GET_CODE (x) == CLOBBER)
2294 for (i = first_regno; i < last_regno; i++)
2295 CLEAR_HARD_REG_BIT (current_live_regs, i);
2297 for (i = first_regno; i < last_regno; i++)
2299 SET_HARD_REG_BIT (current_live_regs, i);
2300 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
2304 /* Similar to next_insn, but ignores insns in the delay slots of
2305 an annulled branch. */
2308 next_insn_no_annul (insn)
2313 /* If INSN is an annulled branch, skip any insns from the target
2315 if (INSN_ANNULLED_BRANCH_P (insn)
2316 && NEXT_INSN (PREV_INSN (insn)) != insn)
2317 while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
2318 insn = NEXT_INSN (insn);
2320 insn = NEXT_INSN (insn);
2321 if (insn && GET_CODE (insn) == INSN
2322 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2323 insn = XVECEXP (PATTERN (insn), 0, 0);
2329 /* Set the resources that are live at TARGET.
2331 If TARGET is zero, we refer to the end of the current function and can
2332 return our precomputed value.
2334 Otherwise, we try to find out what is live by consulting the basic block
2335 information. This is tricky, because we must consider the actions of
2336 reload and jump optimization, which occur after the basic block information
2339 Accordingly, we proceed as follows::
2341 We find the previous BARRIER and look at all immediately following labels
2342 (with no intervening active insns) to see if any of them start a basic
2343 block. If we hit the start of the function first, we use block 0.
2345 Once we have found a basic block and a corresponding first insns, we can
2346 accurately compute the live status from basic_block_live_regs and
2347 reg_renumber. (By starting at a label following a BARRIER, we are immune
2348 to actions taken by reload and jump.) Then we scan all insns between
2349 that point and our target. For each CLOBBER (or for call-clobbered regs
2350 when we pass a CALL_INSN), mark the appropriate registers are dead. For
2351 a SET, mark them as live.
2353 We have to be careful when using REG_DEAD notes because they are not
2354 updated by such things as find_equiv_reg. So keep track of registers
2355 marked as dead that haven't been assigned to, and mark them dead at the
2356 next CODE_LABEL since reload and jump won't propagate values across labels.
2358 If we cannot find the start of a basic block (should be a very rare
2359 case, if it can happen at all), mark everything as potentially live.
2361 Next, scan forward from TARGET looking for things set or clobbered
2362 before they are used. These are not live.
2364 Because we can be called many times on the same target, save our results
2365 in a hash table indexed by INSN_UID. */
2368 mark_target_live_regs (target, res)
2370 struct resources *res;
2374 struct target_info *tinfo;
2378 HARD_REG_SET scratch;
2379 struct resources set, needed;
2382 /* Handle end of function. */
2385 *res = end_of_function_needs;
2389 /* We have to assume memory is needed, but the CC isn't. */
2394 /* See if we have computed this value already. */
2395 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2396 tinfo; tinfo = tinfo->next)
2397 if (tinfo->uid == INSN_UID (target))
2400 /* Start by getting the basic block number. If we have saved information,
2401 we can get it from there unless the insn at the start of the basic block
2402 has been deleted. */
2403 if (tinfo && tinfo->block != -1
2404 && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
2408 b = find_basic_block (target);
2412 /* If the information is up-to-date, use it. Otherwise, we will
2414 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
2416 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
2422 /* Allocate a place to put our results and chain it into the
2424 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
2425 tinfo->uid = INSN_UID (target);
2427 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2428 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
2431 CLEAR_HARD_REG_SET (pending_dead_regs);
2433 /* If we found a basic block, get the live registers from it and update
2434 them with anything set or killed between its start and the insn before
2435 TARGET. Otherwise, we must assume everything is live. */
2438 regset regs_live = basic_block_live_at_start[b];
2440 REGSET_ELT_TYPE bit;
2442 rtx start_insn, stop_insn;
2444 /* Compute hard regs live at start of block -- this is the real hard regs
2445 marked live, plus live pseudo regs that have been renumbered to
2449 current_live_regs = *regs_live;
2451 COPY_HARD_REG_SET (current_live_regs, regs_live);
2454 for (offset = 0, i = 0; offset < regset_size; offset++)
2456 if (regs_live[offset] == 0)
2457 i += REGSET_ELT_BITS;
2459 for (bit = 1; bit && i < max_regno; bit <<= 1, i++)
2460 if ((regs_live[offset] & bit)
2461 && (regno = reg_renumber[i]) >= 0)
2463 j < regno + HARD_REGNO_NREGS (regno,
2464 PSEUDO_REGNO_MODE (i));
2466 SET_HARD_REG_BIT (current_live_regs, j);
2469 /* Get starting and ending insn, handling the case where each might
2471 start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
2474 if (GET_CODE (start_insn) == INSN
2475 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
2476 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
2478 if (GET_CODE (stop_insn) == INSN
2479 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
2480 stop_insn = next_insn (PREV_INSN (stop_insn));
2482 for (insn = start_insn; insn != stop_insn;
2483 insn = next_insn_no_annul (insn))
2486 rtx real_insn = insn;
2488 /* If this insn is from the target of a branch, it isn't going to
2489 be used in the sequel. If it is used in both cases, this
2490 test will not be true. */
2491 if (INSN_FROM_TARGET_P (insn))
2494 /* If this insn is a USE made by update_block, we care about the
2496 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
2497 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2498 real_insn = XEXP (PATTERN (insn), 0);
2500 if (GET_CODE (real_insn) == CALL_INSN)
2502 /* CALL clobbers all call-used regs that aren't fixed except
2503 sp, ap, and fp. Do this before setting the result of the
2505 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2506 if (call_used_regs[i]
2507 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
2508 && i != ARG_POINTER_REGNUM
2509 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2510 && i != HARD_FRAME_POINTER_REGNUM
2512 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2513 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
2515 #ifdef PIC_OFFSET_TABLE_REGNUM
2516 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2519 CLEAR_HARD_REG_BIT (current_live_regs, i);
2521 /* A CALL_INSN sets any global register live, since it may
2522 have been modified by the call. */
2523 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2525 SET_HARD_REG_BIT (current_live_regs, i);
2528 /* Mark anything killed in an insn to be deadened at the next
2529 label. Ignore USE insns; the only REG_DEAD notes will be for
2530 parameters. But they might be early. A CALL_INSN will usually
2531 clobber registers used for parameters. It isn't worth bothering
2532 with the unlikely case when it won't. */
2533 if ((GET_CODE (real_insn) == INSN
2534 && GET_CODE (PATTERN (real_insn)) != USE
2535 && GET_CODE (PATTERN (real_insn)) != CLOBBER)
2536 || GET_CODE (real_insn) == JUMP_INSN
2537 || GET_CODE (real_insn) == CALL_INSN)
2539 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2540 if (REG_NOTE_KIND (link) == REG_DEAD
2541 && GET_CODE (XEXP (link, 0)) == REG
2542 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2544 int first_regno = REGNO (XEXP (link, 0));
2547 + HARD_REGNO_NREGS (first_regno,
2548 GET_MODE (XEXP (link, 0))));
2550 for (i = first_regno; i < last_regno; i++)
2551 SET_HARD_REG_BIT (pending_dead_regs, i);
2554 note_stores (PATTERN (real_insn), update_live_status);
2556 /* If any registers were unused after this insn, kill them.
2557 These notes will always be accurate. */
2558 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2559 if (REG_NOTE_KIND (link) == REG_UNUSED
2560 && GET_CODE (XEXP (link, 0)) == REG
2561 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2563 int first_regno = REGNO (XEXP (link, 0));
2566 + HARD_REGNO_NREGS (first_regno,
2567 GET_MODE (XEXP (link, 0))));
2569 for (i = first_regno; i < last_regno; i++)
2570 CLEAR_HARD_REG_BIT (current_live_regs, i);
2574 else if (GET_CODE (real_insn) == CODE_LABEL)
2576 /* A label clobbers the pending dead registers since neither
2577 reload nor jump will propagate a value across a label. */
2578 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
2579 CLEAR_HARD_REG_SET (pending_dead_regs);
2582 /* The beginning of the epilogue corresponds to the end of the
2583 RTL chain when there are no epilogue insns. Certain resources
2584 are implicitly required at that point. */
2585 else if (GET_CODE (real_insn) == NOTE
2586 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
2587 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
2590 COPY_HARD_REG_SET (res->regs, current_live_regs);
2592 tinfo->bb_tick = bb_ticks[b];
2595 /* We didn't find the start of a basic block. Assume everything
2596 in use. This should happen only extremely rarely. */
2597 SET_HARD_REG_SET (res->regs);
2599 /* Now step forward from TARGET looking for registers that are set before
2600 they are used. These are dead. If we pass a label, any pending dead
2601 registers that weren't yet used can be made dead. Stop when we pass a
2602 conditional JUMP_INSN; follow the first few unconditional branches. */
2604 CLEAR_RESOURCE (&set);
2605 CLEAR_RESOURCE (&needed);
2607 for (insn = target; insn; insn = next)
2609 rtx this_jump_insn = insn;
2611 next = NEXT_INSN (insn);
2612 switch (GET_CODE (insn))
2615 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
2616 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
2617 CLEAR_HARD_REG_SET (pending_dead_regs);
2625 if (GET_CODE (PATTERN (insn)) == USE)
2627 /* If INSN is a USE made by update_block, we care about the
2628 underlying insn. Any registers set by the underlying insn
2629 are live since the insn is being done somewhere else. */
2630 if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2631 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
2633 /* All other USE insns are to be ignored. */
2636 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
2638 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
2640 /* An unconditional jump can be used to fill the delay slot
2641 of a call, so search for a JUMP_INSN in any position. */
2642 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
2644 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
2645 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2651 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2653 if (jump_count++ < 10
2654 && (simplejump_p (this_jump_insn)
2655 || GET_CODE (PATTERN (this_jump_insn)) == RETURN))
2657 next = next_active_insn (JUMP_LABEL (this_jump_insn));
2661 jump_target = JUMP_LABEL (this_jump_insn);
2668 mark_referenced_resources (insn, &needed, 1);
2669 mark_set_resources (insn, &set, 0, 1);
2671 COPY_HARD_REG_SET (scratch, set.regs);
2672 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2673 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2676 /* If we hit an unconditional branch, we have another way of finding out
2677 what is live: we can see what is live at the branch target and include
2678 anything used but not set before the branch. The only things that are
2679 live are those that are live using the above test and the test below.
2681 Don't try this if we expired our jump count above, since that would
2682 mean there may be an infinite loop in the function being compiled. */
2684 if (jump_insn && jump_count < 10)
2686 struct resources new_resources;
2687 rtx stop_insn = next_active_insn (jump_insn);
2689 mark_target_live_regs (next_active_insn (jump_target), &new_resources);
2690 CLEAR_RESOURCE (&set);
2691 CLEAR_RESOURCE (&needed);
2693 /* Include JUMP_INSN in the needed registers. */
2694 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
2696 mark_referenced_resources (insn, &needed, 1);
2698 COPY_HARD_REG_SET (scratch, needed.regs);
2699 AND_COMPL_HARD_REG_SET (scratch, set.regs);
2700 IOR_HARD_REG_SET (new_resources.regs, scratch);
2702 mark_set_resources (insn, &set, 0, 1);
2705 AND_HARD_REG_SET (res->regs, new_resources.regs);
2708 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
2711 /* Scan a function looking for insns that need a delay slot and find insns to
2712 put into the delay slot.
2714 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
2715 as calls). We do these first since we don't want jump insns (that are
2716 easier to fill) to get the only insns that could be used for non-jump insns.
2717 When it is zero, only try to fill JUMP_INSNs.
2719 When slots are filled in this manner, the insns (including the
2720 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2721 it is possible to tell whether a delay slot has really been filled
2722 or not. `final' knows how to deal with this, by communicating
2723 through FINAL_SEQUENCE. */
2726 fill_simple_delay_slots (first, non_jumps_p)
2730 register rtx insn, pat, trial, next_trial;
2732 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2733 struct resources needed, set;
2734 register int slots_to_fill, slots_filled;
2737 for (i = 0; i < num_unfilled_slots; i++)
2740 /* Get the next insn to fill. If it has already had any slots assigned,
2741 we can't do anything with it. Maybe we'll improve this later. */
2743 insn = unfilled_slots_base[i];
2745 || INSN_DELETED_P (insn)
2746 || (GET_CODE (insn) == INSN
2747 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2748 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2749 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2752 if (GET_CODE (insn) == JUMP_INSN)
2753 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2755 flags = get_jump_flags (insn, NULL_RTX);
2756 slots_to_fill = num_delay_slots (insn);
2757 if (slots_to_fill == 0)
2760 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2761 says how many. After initialization, first try optimizing
2764 nop add %o7,.-L1,%o7
2768 If this case applies, the delay slot of the call is filled with
2769 the unconditional jump. This is done first to avoid having the
2770 delay slot of the call filled in the backward scan. Also, since
2771 the unconditional jump is likely to also have a delay slot, that
2772 insn must exist when it is subsequently scanned.
2774 This is tried on each insn with delay slots as some machines
2775 have insns which perform calls, but are not represented as
2781 if ((trial = next_active_insn (insn))
2782 && GET_CODE (trial) == JUMP_INSN
2783 && simplejump_p (trial)
2784 && eligible_for_delay (insn, slots_filled, trial, flags)
2785 && no_labels_between_p (insn, trial))
2788 delay_list = add_to_delay_list (trial, delay_list);
2789 /* Remove the unconditional jump from consideration for delay slot
2790 filling and unthread it. */
2791 if (unfilled_slots_base[i + 1] == trial)
2792 unfilled_slots_base[i + 1] = 0;
2794 rtx next = NEXT_INSN (trial);
2795 rtx prev = PREV_INSN (trial);
2797 NEXT_INSN (prev) = next;
2799 PREV_INSN (next) = prev;
2803 /* Now, scan backwards from the insn to search for a potential
2804 delay-slot candidate. Stop searching when a label or jump is hit.
2806 For each candidate, if it is to go into the delay slot (moved
2807 forward in execution sequence), it must not need or set any resources
2808 that were set by later insns and must not set any resources that
2809 are needed for those insns.
2811 The delay slot insn itself sets resources unless it is a call
2812 (in which case the called routine, not the insn itself, is doing
2815 if (slots_filled < slots_to_fill)
2817 CLEAR_RESOURCE (&needed);
2818 CLEAR_RESOURCE (&set);
2819 mark_set_resources (insn, &set, 0, 0);
2820 mark_referenced_resources (insn, &needed, 0);
2822 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2825 next_trial = prev_nonnote_insn (trial);
2827 /* This must be an INSN or CALL_INSN. */
2828 pat = PATTERN (trial);
2830 /* USE and CLOBBER at this level was just for flow; ignore it. */
2831 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2834 /* Check for resource conflict first, to avoid unnecessary
2836 if (! insn_references_resource_p (trial, &set, 1)
2837 && ! insn_sets_resource_p (trial, &set, 1)
2838 && ! insn_sets_resource_p (trial, &needed, 1)
2840 /* Can't separate set of cc0 from its use. */
2841 && ! (reg_mentioned_p (cc0_rtx, pat)
2842 && ! sets_cc0_p (cc0_rtx, pat))
2846 trial = try_split (pat, trial, 1);
2847 next_trial = prev_nonnote_insn (trial);
2848 if (eligible_for_delay (insn, slots_filled, trial, flags))
2850 /* In this case, we are searching backward, so if we
2851 find insns to put on the delay list, we want
2852 to put them at the head, rather than the
2853 tail, of the list. */
2855 update_reg_dead_notes (trial, insn);
2856 delay_list = gen_rtx (INSN_LIST, VOIDmode,
2858 update_block (trial, trial);
2859 delete_insn (trial);
2860 if (slots_to_fill == ++slots_filled)
2866 mark_set_resources (trial, &set, 0, 1);
2867 mark_referenced_resources (trial, &needed, 1);
2871 /* If all needed slots haven't been filled, we come here. */
2873 /* Try to optimize case of jumping around a single insn. */
2874 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2875 if (slots_filled != slots_to_fill
2877 && GET_CODE (insn) == JUMP_INSN
2878 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2880 delay_list = optimize_skip (insn);
2886 /* Try to get insns from beyond the insn needing the delay slot.
2887 These insns can neither set or reference resources set in insns being
2888 skipped, cannot set resources in the insn being skipped, and, if this
2889 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2890 call might not return).
2892 If this is a conditional jump, see if it merges back to us early
2893 enough for us to pick up insns from the merge point. Don't do
2894 this if there is another branch to our label unless we pass all of
2897 Another similar merge is if we jump to the same place that a
2898 later unconditional jump branches to. In that case, we don't
2899 care about the number of uses of our label. */
2901 if (slots_filled != slots_to_fill
2902 && (GET_CODE (insn) != JUMP_INSN
2903 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2904 && ! simplejump_p (insn)
2905 && JUMP_LABEL (insn) != 0)))
2908 int maybe_never = 0;
2909 int passed_label = 0;
2911 struct resources needed_at_jump;
2913 CLEAR_RESOURCE (&needed);
2914 CLEAR_RESOURCE (&set);
2916 if (GET_CODE (insn) == CALL_INSN)
2918 mark_set_resources (insn, &set, 0, 1);
2919 mark_referenced_resources (insn, &needed, 1);
2924 mark_set_resources (insn, &set, 0, 1);
2925 mark_referenced_resources (insn, &needed, 1);
2926 if (GET_CODE (insn) == JUMP_INSN)
2928 /* Get our target and show how many more uses we want to
2929 see before we hit the label. */
2930 target = JUMP_LABEL (insn);
2931 target_uses = LABEL_NUSES (target) - 1;
2936 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2938 rtx pat, trial_delay;
2940 next_trial = next_nonnote_insn (trial);
2942 if (GET_CODE (trial) == CODE_LABEL)
2946 /* If this is our target, see if we have seen all its uses.
2947 If so, indicate we have passed our target and ignore it.
2948 All other labels cause us to stop our search. */
2949 if (trial == target && target_uses == 0)
2957 else if (GET_CODE (trial) == BARRIER)
2960 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2961 pat = PATTERN (trial);
2963 /* Stand-alone USE and CLOBBER are just for flow. */
2964 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2967 /* If this already has filled delay slots, get the insn needing
2969 if (GET_CODE (pat) == SEQUENCE)
2970 trial_delay = XVECEXP (pat, 0, 0);
2972 trial_delay = trial;
2974 /* If this is a jump insn to our target, indicate that we have
2975 seen another jump to it. If we aren't handling a conditional
2976 jump, stop our search. Otherwise, compute the needs at its
2977 target and add them to NEEDED. */
2978 if (GET_CODE (trial_delay) == JUMP_INSN)
2982 else if (JUMP_LABEL (trial_delay) == target)
2986 mark_target_live_regs
2987 (next_active_insn (JUMP_LABEL (trial_delay)),
2989 needed.memory |= needed_at_jump.memory;
2990 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
2994 /* See if we have a resource problem before we try to
2997 && GET_CODE (pat) != SEQUENCE
2998 && ! insn_references_resource_p (trial, &set, 1)
2999 && ! insn_sets_resource_p (trial, &set, 1)
3000 && ! insn_sets_resource_p (trial, &needed, 1)
3002 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
3004 && ! (maybe_never && may_trap_p (pat))
3005 && (trial = try_split (pat, trial, 0))
3006 && eligible_for_delay (insn, slots_filled, trial, flags))
3008 next_trial = next_nonnote_insn (trial);
3009 delay_list = add_to_delay_list (trial, delay_list);
3012 if (reg_mentioned_p (cc0_rtx, pat))
3013 link_cc0_insns (trial);
3017 update_block (trial, trial);
3018 delete_insn (trial);
3019 if (slots_to_fill == ++slots_filled)
3024 mark_set_resources (trial, &set, 0, 1);
3025 mark_referenced_resources (trial, &needed, 1);
3027 /* Ensure we don't put insns between the setting of cc and the
3028 comparison by moving a setting of cc into an earlier delay
3029 slot since these insns could clobber the condition code. */
3032 /* If this is a call or jump, we might not get here. */
3033 if (GET_CODE (trial) == CALL_INSN
3034 || GET_CODE (trial) == JUMP_INSN)
3038 /* If there are slots left to fill and our search was stopped by an
3039 unconditional branch, try the insn at the branch target. We can
3040 redirect the branch if it works. */
3041 if (slots_to_fill != slots_filled
3043 && GET_CODE (trial) == JUMP_INSN
3044 && simplejump_p (trial)
3045 && (target == 0 || JUMP_LABEL (trial) == target)
3046 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
3047 && ! (GET_CODE (next_trial) == INSN
3048 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
3049 && ! insn_references_resource_p (next_trial, &set, 1)
3050 && ! insn_sets_resource_p (next_trial, &set, 1)
3051 && ! insn_sets_resource_p (next_trial, &needed, 1)
3053 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
3055 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
3056 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
3057 && eligible_for_delay (insn, slots_filled, next_trial, flags))
3059 rtx new_label = next_active_insn (next_trial);
3062 new_label = get_label_before (new_label);
3064 new_label = find_end_label ();
3067 = add_to_delay_list (copy_rtx (next_trial), delay_list);
3069 reorg_redirect_jump (trial, new_label);
3071 /* If we merged because we both jumped to the same place,
3072 redirect the original insn also. */
3074 reorg_redirect_jump (insn, new_label);
3079 unfilled_slots_base[i]
3080 = emit_delay_sequence (insn, delay_list,
3081 slots_filled, slots_to_fill);
3083 if (slots_to_fill == slots_filled)
3084 unfilled_slots_base[i] = 0;
3086 note_delay_statistics (slots_filled, 0);
3089 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3090 /* See if the epilogue needs any delay slots. Try to fill them if so.
3091 The only thing we can do is scan backwards from the end of the
3092 function. If we did this in a previous pass, it is incorrect to do it
3094 if (current_function_epilogue_delay_list)
3097 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
3098 if (slots_to_fill == 0)
3102 needed = end_of_function_needs;
3103 CLEAR_RESOURCE (&set);
3105 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
3106 trial = PREV_INSN (trial))
3108 if (GET_CODE (trial) == NOTE)
3110 pat = PATTERN (trial);
3111 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3114 if (! insn_references_resource_p (trial, &set, 1)
3115 && ! insn_sets_resource_p (trial, &needed, 1)
3117 /* Don't want to mess with cc0 here. */
3118 && ! reg_mentioned_p (cc0_rtx, pat)
3122 trial = try_split (pat, trial, 1);
3123 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
3125 /* Here as well we are searching backward, so put the
3126 insns we find on the head of the list. */
3128 current_function_epilogue_delay_list
3129 = gen_rtx (INSN_LIST, VOIDmode, trial,
3130 current_function_epilogue_delay_list);
3131 mark_referenced_resources (trial, &end_of_function_needs, 1);
3132 update_block (trial, trial);
3133 delete_insn (trial);
3135 /* Clear deleted bit so final.c will output the insn. */
3136 INSN_DELETED_P (trial) = 0;
3138 if (slots_to_fill == ++slots_filled)
3144 mark_set_resources (trial, &set, 0, 1);
3145 mark_referenced_resources (trial, &needed, 1);
3148 note_delay_statistics (slots_filled, 0);
3152 /* Try to find insns to place in delay slots.
3154 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
3155 or is an unconditional branch if CONDITION is const_true_rtx.
3156 *PSLOTS_FILLED is updated with the number of slots that we have filled.
3158 THREAD is a flow-of-control, either the insns to be executed if the
3159 branch is true or if the branch is false, THREAD_IF_TRUE says which.
3161 OPPOSITE_THREAD is the thread in the opposite direction. It is used
3162 to see if any potential delay slot insns set things needed there.
3164 LIKELY is non-zero if it is extremely likely that the branch will be
3165 taken and THREAD_IF_TRUE is set. This is used for the branch at the
3166 end of a loop back up to the top.
3168 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
3169 thread. I.e., it is the fallthrough code of our jump or the target of the
3170 jump when we are the only jump going there.
3172 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
3173 case, we can only take insns from the head of the thread for our delay
3174 slot. We then adjust the jump to point after the insns we have taken. */
3177 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
3178 thread_if_true, own_thread, own_opposite_thread,
3179 slots_to_fill, pslots_filled)
3182 rtx thread, opposite_thread;
3185 int own_thread, own_opposite_thread;
3186 int slots_to_fill, *pslots_filled;
3190 struct resources opposite_needed, set, needed;
3196 /* Validate our arguments. */
3197 if ((condition == const_true_rtx && ! thread_if_true)
3198 || (! own_thread && ! thread_if_true))
3201 flags = get_jump_flags (insn, JUMP_LABEL (insn));
3203 /* If our thread is the end of subroutine, we can't get any delay
3208 /* If this is an unconditional branch, nothing is needed at the
3209 opposite thread. Otherwise, compute what is needed there. */
3210 if (condition == const_true_rtx)
3211 CLEAR_RESOURCE (&opposite_needed);
3213 mark_target_live_regs (opposite_thread, &opposite_needed);
3215 /* If the insn at THREAD can be split, do it here to avoid having to
3216 update THREAD and NEW_THREAD if it is done in the loop below. Also
3217 initialize NEW_THREAD. */
3219 new_thread = thread = try_split (PATTERN (thread), thread, 0);
3221 /* Scan insns at THREAD. We are looking for an insn that can be removed
3222 from THREAD (it neither sets nor references resources that were set
3223 ahead of it and it doesn't set anything needs by the insns ahead of
3224 it) and that either can be placed in an annulling insn or aren't
3225 needed at OPPOSITE_THREAD. */
3227 CLEAR_RESOURCE (&needed);
3228 CLEAR_RESOURCE (&set);
3230 /* If we do not own this thread, we must stop as soon as we find
3231 something that we can't put in a delay slot, since all we can do
3232 is branch into THREAD at a later point. Therefore, labels stop
3233 the search if this is not the `true' thread. */
3235 for (trial = thread;
3236 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
3237 trial = next_nonnote_insn (trial))
3241 /* If we have passed a label, we no longer own this thread. */
3242 if (GET_CODE (trial) == CODE_LABEL)
3248 pat = PATTERN (trial);
3249 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3252 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
3253 don't separate or copy insns that set and use CC0. */
3254 if (! insn_references_resource_p (trial, &set, 1)
3255 && ! insn_sets_resource_p (trial, &set, 1)
3256 && ! insn_sets_resource_p (trial, &needed, 1)
3258 && ! (reg_mentioned_p (cc0_rtx, pat)
3259 && (! own_thread || ! sets_cc0_p (pat)))
3263 /* If TRIAL is redundant with some insn before INSN, we don't
3264 actually need to add it to the delay list; we can merely pretend
3266 if (redundant_insn_p (trial, insn, delay_list))
3270 update_block (trial, thread);
3271 if (trial == thread)
3273 thread = next_active_insn (thread);
3274 if (new_thread == trial)
3275 new_thread = thread;
3278 delete_insn (trial);
3281 new_thread = next_active_insn (trial);
3286 /* There are two ways we can win: If TRIAL doesn't set anything
3287 needed at the opposite thread and can't trap, or if it can
3288 go into an annulled delay slot. */
3289 if (condition == const_true_rtx
3290 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
3291 && ! may_trap_p (pat)))
3294 trial = try_split (pat, trial, 0);
3295 if (new_thread == old_trial)
3297 pat = PATTERN (trial);
3298 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
3302 #ifdef ANNUL_IFTRUE_SLOTS
3305 #ifdef ANNUL_IFFALSE_SLOTS
3311 trial = try_split (pat, trial, 0);
3312 if (new_thread == old_trial)
3314 pat = PATTERN (trial);
3316 ? eligible_for_annul_false (insn, *pslots_filled, trial, flags)
3317 : eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
3325 if (reg_mentioned_p (cc0_rtx, pat))
3326 link_cc0_insns (trial);
3329 /* If we own this thread, delete the insn. If this is the
3330 destination of a branch, show that a basic block status
3331 may have been updated. In any case, mark the new
3332 starting point of this thread. */
3335 update_block (trial, thread);
3336 delete_insn (trial);
3339 new_thread = next_active_insn (trial);
3341 temp = own_thread ? trial : copy_rtx (trial);
3343 INSN_FROM_TARGET_P (temp) = 1;
3345 delay_list = add_to_delay_list (temp, delay_list);
3347 if (slots_to_fill == ++(*pslots_filled))
3349 /* Even though we have filled all the slots, we
3350 may be branching to a location that has a
3351 redundant insn. Skip any if so. */
3352 while (new_thread && ! own_thread
3353 && ! insn_sets_resource_p (new_thread, &set, 1)
3354 && ! insn_sets_resource_p (new_thread, &needed, 1)
3355 && ! insn_references_resource_p (new_thread,
3357 && redundant_insn_p (new_thread, insn,
3359 new_thread = next_active_insn (new_thread);
3368 /* This insn can't go into a delay slot. */
3370 mark_set_resources (trial, &set, 0, 1);
3371 mark_referenced_resources (trial, &needed, 1);
3373 /* Ensure we don't put insns between the setting of cc and the comparison
3374 by moving a setting of cc into an earlier delay slot since these insns
3375 could clobber the condition code. */
3378 /* If this insn is a register-register copy and the next insn has
3379 a use of our destination, change it to use our source. That way,
3380 it will become a candidate for our delay slot the next time
3381 through this loop. This case occurs commonly in loops that
3384 We could check for more complex cases than those tested below,
3385 but it doesn't seem worth it. It might also be a good idea to try
3386 to swap the two insns. That might do better.
3388 We can't do this if the next insn modifies our destination, because
3389 that would make the replacement into the insn invalid. We also can't
3390 do this if it modifies our source, because it might be an earlyclobber
3391 operand. This latter test also prevents updating the contents of
3394 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
3395 && GET_CODE (SET_SRC (pat)) == REG
3396 && GET_CODE (SET_DEST (pat)) == REG)
3398 rtx next = next_nonnote_insn (trial);
3400 if (next && GET_CODE (next) == INSN
3401 && GET_CODE (PATTERN (next)) != USE
3402 && ! reg_set_p (SET_DEST (pat), next)
3403 && ! reg_set_p (SET_SRC (pat), next)
3404 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
3405 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
3409 /* If we stopped on a branch insn that has delay slots, see if we can
3410 steal some of the insns in those slots. */
3411 if (trial && GET_CODE (trial) == INSN
3412 && GET_CODE (PATTERN (trial)) == SEQUENCE
3413 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
3415 /* If this is the `true' thread, we will want to follow the jump,
3416 so we can only do this if we have taken everything up to here. */
3417 if (thread_if_true && trial == new_thread)
3419 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
3420 delay_list, &set, &needed,
3421 &opposite_needed, slots_to_fill,
3422 pslots_filled, &must_annul,
3424 else if (! thread_if_true)
3426 = steal_delay_list_from_fallthrough (insn, condition,
3428 delay_list, &set, &needed,
3429 &opposite_needed, slots_to_fill,
3430 pslots_filled, &must_annul);
3433 /* If we haven't found anything for this delay slot and it is very
3434 likely that the branch will be taken, see if the insn at our target
3435 increments or decrements a register with an increment that does not
3436 depend on the destination register. If so, try to place the opposite
3437 arithmetic insn after the jump insn and put the arithmetic insn in the
3438 delay slot. If we can't do this, return. */
3439 if (delay_list == 0 && likely && new_thread && GET_CODE (new_thread) == INSN)
3441 rtx pat = PATTERN (new_thread);
3446 pat = PATTERN (trial);
3448 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
3449 || ! eligible_for_delay (insn, 0, trial, flags))
3452 dest = SET_DEST (pat), src = SET_SRC (pat);
3453 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
3454 && rtx_equal_p (XEXP (src, 0), dest)
3455 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
3457 rtx other = XEXP (src, 1);
3461 /* If this is a constant adjustment, use the same code with
3462 the negated constant. Otherwise, reverse the sense of the
3464 if (GET_CODE (other) == CONST_INT)
3465 new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
3466 negate_rtx (GET_MODE (src), other));
3468 new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
3469 GET_MODE (src), dest, other);
3471 ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
3474 if (recog_memoized (ninsn) < 0
3475 || (insn_extract (ninsn),
3476 ! constrain_operands (INSN_CODE (ninsn), 1)))
3478 delete_insn (ninsn);
3484 update_block (trial, thread);
3485 delete_insn (trial);
3488 new_thread = next_active_insn (trial);
3490 ninsn = own_thread ? trial : copy_rtx (trial);
3492 INSN_FROM_TARGET_P (ninsn) = 1;
3494 delay_list = add_to_delay_list (ninsn, NULL_RTX);
3499 if (delay_list && must_annul)
3500 INSN_ANNULLED_BRANCH_P (insn) = 1;
3502 /* If we are to branch into the middle of this thread, find an appropriate
3503 label or make a new one if none, and redirect INSN to it. If we hit the
3504 end of the function, use the end-of-function label. */
3505 if (new_thread != thread)
3509 if (! thread_if_true)
3512 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
3513 && (simplejump_p (new_thread)
3514 || GET_CODE (PATTERN (new_thread)) == RETURN)
3515 && redirect_with_delay_list_safe_p (insn,
3516 JUMP_LABEL (new_thread),
3518 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3520 if (new_thread == 0)
3521 label = find_end_label ();
3522 else if (GET_CODE (new_thread) == CODE_LABEL)
3525 label = get_label_before (new_thread);
3527 reorg_redirect_jump (insn, label);
3533 /* Make another attempt to find insns to place in delay slots.
3535 We previously looked for insns located in front of the delay insn
3536 and, for non-jump delay insns, located behind the delay insn.
3538 Here only try to schedule jump insns and try to move insns from either
3539 the target or the following insns into the delay slot. If annulling is
3540 supported, we will be likely to do this. Otherwise, we can do this only
3544 fill_eager_delay_slots (first)
3549 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3551 for (i = 0; i < num_unfilled_slots; i++)
3554 rtx target_label, insn_at_target, fallthrough_insn;
3557 int own_fallthrough;
3558 int prediction, slots_to_fill, slots_filled;
3560 insn = unfilled_slots_base[i];
3562 || INSN_DELETED_P (insn)
3563 || GET_CODE (insn) != JUMP_INSN
3564 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3567 slots_to_fill = num_delay_slots (insn);
3568 if (slots_to_fill == 0)
3572 target_label = JUMP_LABEL (insn);
3573 condition = get_branch_condition (insn, target_label);
3578 /* Get the next active fallthough and target insns and see if we own
3579 them. Then see whether the branch is likely true. We don't need
3580 to do a lot of this for unconditional branches. */
3582 insn_at_target = next_active_insn (target_label);
3583 own_target = own_thread_p (target_label, target_label, 0);
3585 if (condition == const_true_rtx)
3587 own_fallthrough = 0;
3588 fallthrough_insn = 0;
3593 fallthrough_insn = next_active_insn (insn);
3594 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3595 prediction = mostly_true_jump (insn, condition);
3598 /* If this insn is expected to branch, first try to get insns from our
3599 target, then our fallthrough insns. If it is not, expected to branch,
3600 try the other order. */
3605 = fill_slots_from_thread (insn, condition, insn_at_target,
3606 fallthrough_insn, prediction == 2, 1,
3607 own_target, own_fallthrough,
3608 slots_to_fill, &slots_filled);
3610 if (delay_list == 0 && own_fallthrough)
3612 /* Even though we didn't find anything for delay slots,
3613 we might have found a redundant insn which we deleted
3614 from the thread that was filled. So we have to recompute
3615 the next insn at the target. */
3616 target_label = JUMP_LABEL (insn);
3617 insn_at_target = next_active_insn (target_label);
3620 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3621 insn_at_target, 0, 0,
3622 own_fallthrough, own_target,
3623 slots_to_fill, &slots_filled);
3628 if (own_fallthrough)
3630 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3631 insn_at_target, 0, 0,
3632 own_fallthrough, own_target,
3633 slots_to_fill, &slots_filled);
3635 if (delay_list == 0)
3637 = fill_slots_from_thread (insn, condition, insn_at_target,
3638 next_active_insn (insn), 0, 1,
3639 own_target, own_fallthrough,
3640 slots_to_fill, &slots_filled);
3644 unfilled_slots_base[i]
3645 = emit_delay_sequence (insn, delay_list,
3646 slots_filled, slots_to_fill);
3648 if (slots_to_fill == slots_filled)
3649 unfilled_slots_base[i] = 0;
3651 note_delay_statistics (slots_filled, 1);
3655 /* Once we have tried two ways to fill a delay slot, make a pass over the
3656 code to try to improve the results and to do such things as more jump
3660 relax_delay_slots (first)
3663 register rtx insn, next, pat;
3664 register rtx trial, delay_insn, target_label;
3666 /* Look at every JUMP_INSN and see if we can improve it. */
3667 for (insn = first; insn; insn = next)
3671 next = next_active_insn (insn);
3673 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3674 the next insn, or jumps to a label that is not the last of a
3675 group of consecutive labels. */
3676 if (GET_CODE (insn) == JUMP_INSN
3677 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3678 && (target_label = JUMP_LABEL (insn)) != 0)
3680 target_label = follow_jumps (target_label);
3681 target_label = prev_label (next_active_insn (target_label));
3683 if (target_label == 0)
3684 target_label = find_end_label ();
3686 if (next_active_insn (target_label) == next
3687 && ! condjump_in_parallel_p (insn))
3693 if (target_label != JUMP_LABEL (insn))
3694 reorg_redirect_jump (insn, target_label);
3696 /* See if this jump branches around a unconditional jump.
3697 If so, invert this jump and point it to the target of the
3699 if (next && GET_CODE (next) == JUMP_INSN
3700 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3701 && next_active_insn (target_label) == next_active_insn (next)
3702 && no_labels_between_p (insn, next))
3704 rtx label = JUMP_LABEL (next);
3706 /* Be careful how we do this to avoid deleting code or
3707 labels that are momentarily dead. See similar optimization
3710 We also need to ensure we properly handle the case when
3711 invert_jump fails. */
3713 ++LABEL_NUSES (target_label);
3715 ++LABEL_NUSES (label);
3717 if (invert_jump (insn, label))
3724 --LABEL_NUSES (label);
3726 if (--LABEL_NUSES (target_label) == 0)
3727 delete_insn (target_label);
3733 /* If this is an unconditional jump and the previous insn is a
3734 conditional jump, try reversing the condition of the previous
3735 insn and swapping our targets. The next pass might be able to
3738 Don't do this if we expect the conditional branch to be true, because
3739 we would then be making the more common case longer. */
3741 if (GET_CODE (insn) == JUMP_INSN
3742 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3743 && (other = prev_active_insn (insn)) != 0
3744 && (condjump_p (other) || condjump_in_parallel_p (other))
3745 && no_labels_between_p (other, insn)
3746 && 0 < mostly_true_jump (other,
3747 get_branch_condition (other,
3748 JUMP_LABEL (other))))
3750 rtx other_target = JUMP_LABEL (other);
3751 target_label = JUMP_LABEL (insn);
3753 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
3754 as we move the label. */
3756 ++LABEL_NUSES (other_target);
3758 if (invert_jump (other, target_label))
3759 reorg_redirect_jump (insn, other_target);
3762 --LABEL_NUSES (other_target);
3765 /* Now look only at cases where we have filled a delay slot. */
3766 if (GET_CODE (insn) != INSN
3767 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3770 pat = PATTERN (insn);
3771 delay_insn = XVECEXP (pat, 0, 0);
3773 /* See if the first insn in the delay slot is redundant with some
3774 previous insn. Remove it from the delay slot if so; then set up
3775 to reprocess this insn. */
3776 if (redundant_insn_p (XVECEXP (pat, 0, 1), delay_insn, 0))
3778 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3779 next = prev_active_insn (next);
3783 /* Now look only at the cases where we have a filled JUMP_INSN. */
3784 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3785 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3786 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3789 target_label = JUMP_LABEL (delay_insn);
3793 /* If this jump goes to another unconditional jump, thread it, but
3794 don't convert a jump into a RETURN here. */
3795 trial = follow_jumps (target_label);
3796 trial = prev_label (next_active_insn (trial));
3797 if (trial == 0 && target_label != 0)
3798 trial = find_end_label ();
3800 if (trial != target_label
3801 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3803 reorg_redirect_jump (delay_insn, trial);
3804 target_label = trial;
3807 /* If the first insn at TARGET_LABEL is redundant with a previous
3808 insn, redirect the jump to the following insn process again. */
3809 trial = next_active_insn (target_label);
3810 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3811 && redundant_insn_p (trial, insn, 0))
3813 trial = next_active_insn (trial);
3815 target_label = find_end_label ();
3817 target_label = get_label_before (trial);
3818 reorg_redirect_jump (delay_insn, target_label);
3823 /* Similarly, if it is an unconditional jump with one insn in its
3824 delay list and that insn is redundant, thread the jump. */
3825 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3826 && XVECLEN (PATTERN (trial), 0) == 2
3827 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3828 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3829 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3830 && redundant_insn_p (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3832 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3833 if (target_label == 0)
3834 target_label = find_end_label ();
3836 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
3839 reorg_redirect_jump (delay_insn, target_label);
3846 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3847 && prev_active_insn (target_label) == insn
3848 && ! condjump_in_parallel_p (delay_insn)
3850 /* If the last insn in the delay slot sets CC0 for some insn,
3851 various code assumes that it is in a delay slot. We could
3852 put it back where it belonged and delete the register notes,
3853 but it doesn't seem worthwhile in this uncommon case. */
3854 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3855 REG_CC_USER, NULL_RTX)
3861 /* All this insn does is execute its delay list and jump to the
3862 following insn. So delete the jump and just execute the delay
3865 We do this by deleting the INSN containing the SEQUENCE, then
3866 re-emitting the insns separately, and then deleting the jump.
3867 This allows the count of the jump target to be properly
3870 /* Clear the from target bit, since these insns are no longer
3872 for (i = 0; i < XVECLEN (pat, 0); i++)
3873 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3875 trial = PREV_INSN (insn);
3877 emit_insn_after (pat, trial);
3878 delete_scheduled_jump (delay_insn);
3882 /* See if this is an unconditional jump around a single insn which is
3883 identical to the one in its delay slot. In this case, we can just
3884 delete the branch and the insn in its delay slot. */
3885 if (next && GET_CODE (next) == INSN
3886 && prev_label (next_active_insn (next)) == target_label
3887 && simplejump_p (insn)
3888 && XVECLEN (pat, 0) == 2
3889 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3895 /* See if this jump (with its delay slots) branches around another
3896 jump (without delay slots). If so, invert this jump and point
3897 it to the target of the second jump. We cannot do this for
3898 annulled jumps, though. Again, don't convert a jump to a RETURN
3900 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3901 && next && GET_CODE (next) == JUMP_INSN
3902 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3903 && next_active_insn (target_label) == next_active_insn (next)
3904 && no_labels_between_p (insn, next))
3906 rtx label = JUMP_LABEL (next);
3907 rtx old_label = JUMP_LABEL (delay_insn);
3910 label = find_end_label ();
3912 if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3914 /* Be careful how we do this to avoid deleting code or labels
3915 that are momentarily dead. See similar optimization in
3918 ++LABEL_NUSES (old_label);
3920 if (invert_jump (delay_insn, label))
3926 if (old_label && --LABEL_NUSES (old_label) == 0)
3927 delete_insn (old_label);
3932 /* If we own the thread opposite the way this insn branches, see if we
3933 can merge its delay slots with following insns. */
3934 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3935 && own_thread_p (NEXT_INSN (insn), 0, 1))
3936 try_merge_delay_insns (insn, next);
3937 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3938 && own_thread_p (target_label, target_label, 0))
3939 try_merge_delay_insns (insn, next_active_insn (target_label));
3941 /* If we get here, we haven't deleted INSN. But we may have deleted
3942 NEXT, so recompute it. */
3943 next = next_active_insn (insn);
3949 /* Look for filled jumps to the end of function label. We can try to convert
3950 them into RETURN insns if the insns in the delay slot are valid for the
3954 make_return_insns (first)
3957 rtx insn, jump_insn, pat;
3958 rtx real_return_label = end_of_function_label;
3961 /* See if there is a RETURN insn in the function other than the one we
3962 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3963 into a RETURN to jump to it. */
3964 for (insn = first; insn; insn = NEXT_INSN (insn))
3965 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
3967 real_return_label = get_label_before (insn);
3971 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3972 was equal to END_OF_FUNCTION_LABEL. */
3973 LABEL_NUSES (real_return_label)++;
3975 /* Clear the list of insns to fill so we can use it. */
3976 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3978 for (insn = first; insn; insn = NEXT_INSN (insn))
3982 /* Only look at filled JUMP_INSNs that go to the end of function
3984 if (GET_CODE (insn) != INSN
3985 || GET_CODE (PATTERN (insn)) != SEQUENCE
3986 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3987 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3990 pat = PATTERN (insn);
3991 jump_insn = XVECEXP (pat, 0, 0);
3993 /* If we can't make the jump into a RETURN, try to redirect it to the best
3994 RETURN and go on to the next insn. */
3995 if (! reorg_redirect_jump (jump_insn, NULL_RTX))
3997 /* Make sure redirecting the jump will not invalidate the delay
3999 if (redirect_with_delay_slots_safe_p (jump_insn,
4002 reorg_redirect_jump (jump_insn, real_return_label);
4006 /* See if this RETURN can accept the insns current in its delay slot.
4007 It can if it has more or an equal number of slots and the contents
4008 of each is valid. */
4010 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
4011 slots = num_delay_slots (jump_insn);
4012 if (slots >= XVECLEN (pat, 0) - 1)
4014 for (i = 1; i < XVECLEN (pat, 0); i++)
4016 #ifdef ANNUL_IFFALSE_SLOTS
4017 (INSN_ANNULLED_BRANCH_P (jump_insn)
4018 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
4019 ? eligible_for_annul_false (jump_insn, i - 1,
4020 XVECEXP (pat, 0, i), flags) :
4022 #ifdef ANNUL_IFTRUE_SLOTS
4023 (INSN_ANNULLED_BRANCH_P (jump_insn)
4024 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
4025 ? eligible_for_annul_true (jump_insn, i - 1,
4026 XVECEXP (pat, 0, i), flags) :
4028 eligible_for_delay (jump_insn, i -1, XVECEXP (pat, 0, i), flags)))
4034 if (i == XVECLEN (pat, 0))
4037 /* We have to do something with this insn. If it is an unconditional
4038 RETURN, delete the SEQUENCE and output the individual insns,
4039 followed by the RETURN. Then set things up so we try to find
4040 insns for its delay slots, if it needs some. */
4041 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
4043 rtx prev = PREV_INSN (insn);
4046 for (i = 1; i < XVECLEN (pat, 0); i++)
4047 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
4049 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
4050 emit_barrier_after (insn);
4053 obstack_ptr_grow (&unfilled_slots_obstack, insn);
4056 /* It is probably more efficient to keep this with its current
4057 delay slot as a branch to a RETURN. */
4058 reorg_redirect_jump (jump_insn, real_return_label);
4061 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
4062 new delay slots we have created. */
4063 if (--LABEL_NUSES (real_return_label) == 0)
4064 delete_insn (real_return_label);
4066 fill_simple_delay_slots (first, 1);
4067 fill_simple_delay_slots (first, 0);
4071 /* Try to find insns to place in delay slots. */
4074 dbr_schedule (first, file)
4078 rtx insn, next, epilogue_insn = 0;
4081 int old_flag_no_peephole = flag_no_peephole;
4083 /* Execute `final' once in prescan mode to delete any insns that won't be
4084 used. Don't let final try to do any peephole optimization--it will
4085 ruin dataflow information for this pass. */
4087 flag_no_peephole = 1;
4088 final (first, 0, NO_DEBUG, 1, 1);
4089 flag_no_peephole = old_flag_no_peephole;
4092 /* If the current function has no insns other than the prologue and
4093 epilogue, then do not try to fill any delay slots. */
4094 if (n_basic_blocks == 0)
4097 /* Find the highest INSN_UID and allocate and initialize our map from
4098 INSN_UID's to position in code. */
4099 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
4101 if (INSN_UID (insn) > max_uid)
4102 max_uid = INSN_UID (insn);
4103 if (GET_CODE (insn) == NOTE
4104 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4105 epilogue_insn = insn;
4108 uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int *));
4109 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
4110 uid_to_ruid[INSN_UID (insn)] = i;
4112 /* Initialize the list of insns that need filling. */
4113 if (unfilled_firstobj == 0)
4115 gcc_obstack_init (&unfilled_slots_obstack);
4116 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
4119 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
4123 INSN_ANNULLED_BRANCH_P (insn) = 0;
4124 INSN_FROM_TARGET_P (insn) = 0;
4126 /* Skip vector tables. We can't get attributes for them. */
4127 if (GET_CODE (insn) == JUMP_INSN
4128 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4129 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4132 if (num_delay_slots (insn) > 0)
4133 obstack_ptr_grow (&unfilled_slots_obstack, insn);
4135 /* Ensure all jumps go to the last of a set of consecutive labels. */
4136 if (GET_CODE (insn) == JUMP_INSN
4137 && (condjump_p (insn) || condjump_in_parallel_p (insn))
4138 && JUMP_LABEL (insn) != 0
4139 && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
4140 != JUMP_LABEL (insn)))
4141 redirect_jump (insn, target);
4144 /* Indicate what resources are required to be valid at the end of the current
4145 function. The condition code never is and memory always is. If the
4146 frame pointer is needed, it is and so is the stack pointer unless
4147 EXIT_IGNORE_STACK is non-zero. If the frame pointer is not needed, the
4148 stack pointer is. Registers used to return the function value are
4149 needed. Registers holding global variables are needed. */
4151 end_of_function_needs.cc = 0;
4152 end_of_function_needs.memory = 1;
4153 CLEAR_HARD_REG_SET (end_of_function_needs.regs);
4155 if (frame_pointer_needed)
4157 SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
4158 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4159 SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
4161 #ifdef EXIT_IGNORE_STACK
4162 if (! EXIT_IGNORE_STACK)
4164 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
4167 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
4169 if (current_function_return_rtx != 0
4170 && GET_CODE (current_function_return_rtx) == REG)
4171 mark_referenced_resources (current_function_return_rtx,
4172 &end_of_function_needs, 1);
4174 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4176 SET_HARD_REG_BIT (end_of_function_needs.regs, i);
4178 /* The registers required to be live at the end of the function are
4179 represented in the flow information as being dead just prior to
4180 reaching the end of the function. For example, the return of a value
4181 might be represented by a USE of the return register immediately
4182 followed by an unconditional jump to the return label where the
4183 return label is the end of the RTL chain. The end of the RTL chain
4184 is then taken to mean that the return register is live.
4186 This sequence is no longer maintained when epilogue instructions are
4187 added to the RTL chain. To reconstruct the original meaning, the
4188 start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
4189 point where these registers become live (start_of_epilogue_needs).
4190 If epilogue instructions are present, the registers set by those
4191 instructions won't have been processed by flow. Thus, those
4192 registers are additionally required at the end of the RTL chain
4193 (end_of_function_needs). */
4195 start_of_epilogue_needs = end_of_function_needs;
4197 while (epilogue_insn = next_nonnote_insn (epilogue_insn))
4198 mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);
4200 /* Show we haven't computed an end-of-function label yet. */
4201 end_of_function_label = 0;
4203 /* Allocate and initialize the tables used by mark_target_live_regs. */
4205 = (struct target_info **) alloca ((TARGET_HASH_PRIME
4206 * sizeof (struct target_info *)));
4207 bzero (target_hash_table, TARGET_HASH_PRIME * sizeof (struct target_info *));
4209 bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
4210 bzero (bb_ticks, n_basic_blocks * sizeof (int));
4212 /* Initialize the statistics for this function. */
4213 bzero (num_insns_needing_delays, sizeof num_insns_needing_delays);
4214 bzero (num_filled_delays, sizeof num_filled_delays);
4216 /* Now do the delay slot filling. Try everything twice in case earlier
4217 changes make more slots fillable. */
4219 for (reorg_pass_number = 0;
4220 reorg_pass_number < MAX_REORG_PASSES;
4221 reorg_pass_number++)
4223 fill_simple_delay_slots (first, 1);
4224 fill_simple_delay_slots (first, 0);
4225 fill_eager_delay_slots (first);
4226 relax_delay_slots (first);
4229 /* Delete any USE insns made by update_block; subsequent passes don't need
4230 them or know how to deal with them. */
4231 for (insn = first; insn; insn = next)
4233 next = NEXT_INSN (insn);
4235 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
4236 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
4237 next = delete_insn (insn);
4240 /* If we made an end of function label, indicate that it is now
4241 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
4242 If it is now unused, delete it. */
4243 if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
4244 delete_insn (end_of_function_label);
4247 if (HAVE_return && end_of_function_label != 0)
4248 make_return_insns (first);
4251 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
4253 /* It is not clear why the line below is needed, but it does seem to be. */
4254 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
4256 /* Reposition the prologue and epilogue notes in case we moved the
4257 prologue/epilogue insns. */
4258 reposition_prologue_and_epilogue_notes (first);
4262 register int i, j, need_comma;
4264 for (reorg_pass_number = 0;
4265 reorg_pass_number < MAX_REORG_PASSES;
4266 reorg_pass_number++)
4268 fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
4269 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
4272 fprintf (file, ";; Reorg function #%d\n", i);
4274 fprintf (file, ";; %d insns needing delay slots\n;; ",
4275 num_insns_needing_delays[i][reorg_pass_number]);
4277 for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
4278 if (num_filled_delays[i][j][reorg_pass_number])
4281 fprintf (file, ", ");
4283 fprintf (file, "%d got %d delays",
4284 num_filled_delays[i][j][reorg_pass_number], j);
4286 fprintf (file, "\n");
4291 #endif /* DELAY_SLOTS */