1 /* Perform instruction reorganizations for delay slot filling.
2 Copyright (C) 1992 Free Software Foundation, Inc.
3 Contributed by Richard Kenner (kenner@nyu.edu).
4 Hacked by Michael Tiemann (tiemann@cygnus.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
22 /* Instruction reorganization pass.
24 This pass runs after register allocation and final jump
25 optimization. It should be the last pass to run before peephole.
26 It serves primarily to fill delay slots of insns, typically branch
27 and call insns. Other insns typically involve more complicated
28 interactions of data dependencies and resource constraints, and
29 are better handled by scheduling before register allocation (by the
30 function `schedule_insns').
32 The Branch Penalty is the number of extra cycles that are needed to
33 execute a branch insn. On an ideal machine, branches take a single
34 cycle, and the Branch Penalty is 0. Several RISC machines approach
35 branch delays differently:
37 The MIPS and AMD 29000 have a single branch delay slot. Most insns
38 (except other branches) can be used to fill this slot. When the
39 slot is filled, two insns execute in two cycles, reducing the
40 branch penalty to zero.
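   For example, with one delay slot:

      insn1                          branch L1
      branch L1          becomes       insn1   (in the delay slot)
      nop  (slot unfilled)

   The two useful insns now issue in two cycles instead of three.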
42 The Motorola 88000 conditionally exposes its branch delay slot,
43 so code is shorter when it is turned off, but will run faster
44 when useful insns are scheduled there.
46 The IBM ROMP has two forms of branch and call insns, both with and
47 without a delay slot. Much like the 88k, insns not using the delay
48 slot can be shortened (2 bytes vs. 4 bytes), but will run slower.
50 The SPARC always has a branch delay slot, but its effects can be
51 annulled when the branch is not taken. This means that failing to
52 find other sources of insns, we can hoist an insn from the branch
53 target that would only be safe to execute knowing that the branch
54 is taken.
56 Three techniques for filling delay slots have been implemented so far:
58 (1) `fill_simple_delay_slots' is the simplest, most efficient way
59 to fill delay slots. This pass first looks for insns which come
60 from before the branch and which are safe to execute after the
61 branch. Then it searches after the insn requiring delay slots or,
62 in the case of a branch, for insns that are after the point at
63 which the branch merges into the fallthrough code, if such a point
64 exists. When such insns are found, the branch penalty decreases
65 and no code expansion takes place.
67 (2) `fill_eager_delay_slots' is more complicated: it is used for
68 scheduling conditional jumps, or for scheduling jumps which cannot
69 be filled using (1). A machine need not have annulled jumps to use
70 this strategy, but it helps (by keeping more options open).
71 `fill_eager_delay_slots' tries to guess the direction the branch
72 will go; if it guesses right 100% of the time, it can reduce the
73 branch penalty as much as `fill_simple_delay_slots' does. If it
74 guesses wrong 100% of the time, it might as well schedule nops (or
75 on the m88k, unexpose the branch slot). When
76 `fill_eager_delay_slots' takes insns from the fall-through path of
77 the jump, usually there is no code expansion; when it takes insns
78 from the branch target, there is code expansion if it is not the
79 only way to reach that target. (A worked example follows this list.)
81 (3) `relax_delay_slots' uses a set of rules to simplify code that
82 has been reorganized by (1) and (2). It finds cases where
83 a conditional test can be eliminated, jumps can be threaded, extra
84 insns can be eliminated, etc. It is the job of (1) and (2) to do a
85 good job of scheduling locally; `relax_delay_slots' takes care of
86 making the various individual schedules work well together. It is
87 especially tuned to handle the control flow interactions of branch
88 insns. It does nothing for insns with delay slots that do not
89 branch.
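   As a worked example for technique (2): on a machine with one delay
   slot, suppose the branch is predicted taken with probability p and the
   slot is filled from the branch target with annulment on the fall-through
   path. Taken executions pay no penalty; fall-through executions annul
   the slot and still pay the one-cycle penalty. The expected penalty is
   thus 0*p + 1*(1-p) = 1-p: a perfect guess (p = 1) removes the penalty
   entirely, while a guess that is always wrong (p = 0) is no better than
   scheduling a nop, exactly as noted above.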
91 On machines that use CC0, we are very conservative. We will not make
92 a copy of an insn involving CC0 since we want to maintain a 1-1
93 correspondence between the insn that sets CC0 and the insn that uses it. The insns are
94 allowed to be separated by placing an insn that sets CC0 (but not an insn
95 that uses CC0; we could do this, but it doesn't seem worthwhile) in a
96 delay slot. In that case, we point each insn at the other with REG_CC_USER
97 and REG_CC_SETTER notes. Note that these restrictions affect very few
98 machines because most RISC machines with delay slots will not use CC0
99 (the RT is the only known exception at this point).
103 The Acorn RISC Machine can conditionally execute most insns, so
104 it is profitable to move single insns into a position to execute
105 based on the condition code of the previous insn.
107 The HP-PA can conditionally nullify insns, providing a similar
108 effect to the ARM, differing mostly in which insn is "in charge". */
113 #include "insn-config.h"
114 #include "conditions.h"
115 #include "hard-reg-set.h"
116 #include "basic-block.h"
118 #include "insn-flags.h"
123 #include "insn-attr.h"
127 #define obstack_chunk_alloc xmalloc
128 #define obstack_chunk_free free
130 #ifndef ANNUL_IFTRUE_SLOTS
131 #define eligible_for_annul_true(INSN, SLOTS, TRIAL) 0
132 #endif
133 #ifndef ANNUL_IFFALSE_SLOTS
134 #define eligible_for_annul_false(INSN, SLOTS, TRIAL) 0
135 #endif
137 /* Insns which have delay slots that have not yet been filled. */
139 static struct obstack unfilled_slots_obstack;
140 static rtx *unfilled_firstobj;
142 /* Define macros to refer to the first and last slot containing unfilled
143 insns. These are used because the list may move and its address
144 should be recomputed at each use. */
146 #define unfilled_slots_base \
147 ((rtx *) obstack_base (&unfilled_slots_obstack))
149 #define unfilled_slots_next \
150 ((rtx *) obstack_next_free (&unfilled_slots_obstack))
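/* A minimal illustrative sketch, guarded out of compilation: the usual
   pattern for the unfilled-slots list above.  Insns are appended with
   obstack_ptr_grow, and the walk re-evaluates the macros at each use
   because growing the obstack may move its base.  The `demo_' name is
   ours, not part of the pass.  */
#if 0
static void
demo_record_and_walk (insn)
     rtx insn;
{
  rtx *p;

  /* Record one more insn whose delay slots need filling.  */
  obstack_ptr_grow (&unfilled_slots_obstack, insn);

  /* Visit every recorded insn, recomputing the bounds at each use.  */
  for (p = unfilled_slots_base; p < unfilled_slots_next; p++)
    if (*p != 0)
      ;				/* ... fill the slots of *p here ... */
}
#endif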
152 /* This structure is used to indicate which hardware resources are set or
153 needed by insns so far. */
155 struct resources
156 {
157 char memory;		/* Insn sets or needs a memory location. */
158 char volatil; /* Insn sets or needs a volatile memory loc. */
159 char cc; /* Insn sets or needs the condition codes. */
160 HARD_REG_SET regs;	/* Which registers are set or needed. */
161 };
163 /* Macro to clear all resources. */
164 #define CLEAR_RESOURCE(RES) \
165 do { (RES)->memory = (RES)->volatil = (RES)->cc = 0; \
166 CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
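/* A minimal illustrative sketch, guarded out of compilation: how the
   resource sets above are typically used.  We clear a set, accumulate
   what TRIAL references, and test that against resources already set
   (SETS); the marking and conflict routines are declared below.  The
   `demo_' name is ours, not part of the pass.  */
#if 0
static int
demo_trial_conflicts_p (trial, sets)
     rtx trial;
     struct resources *sets;
{
  struct resources needed;

  CLEAR_RESOURCE (&needed);
  mark_referenced_resources (trial, &needed, 1);
  return resource_conflicts_p (&needed, sets);
}
#endif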
168 /* Indicates what resources are required at the beginning of the epilogue. */
169 static struct resources start_of_epilogue_needs;
171 /* Indicates what resources are required at function end. */
172 static struct resources end_of_function_needs;
174 /* Points to the label before the end of the function. */
175 static rtx end_of_function_label;
177 /* This structure is used to record liveness information at the targets or
178 fallthrough insns of branches. We will most likely need the information
179 at targets again, so save them in a hash table rather than recomputing them
180 each time. */

182 struct target_info
183 {
184 int uid; /* INSN_UID of target. */
185 struct target_info *next; /* Next info for same hash bucket. */
186 HARD_REG_SET live_regs; /* Registers live at target. */
187 int block; /* Basic block number containing target. */
188 int bb_tick;		/* Generation count of basic block info. */
189 };
191 #define TARGET_HASH_PRIME 257
193 /* Define the hash table itself. */
194 static struct target_info **target_hash_table;
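/* A minimal illustrative sketch, guarded out of compilation: looking up
   the saved liveness information for TARGET.  The bucket index is
   INSN_UID modulo TARGET_HASH_PRIME and collisions chain through `next',
   just as in mark_target_live_regs below.  The `demo_' name is ours.  */
#if 0
static struct target_info *
demo_find_target_info (target)
     rtx target;
{
  struct target_info *tinfo;

  for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
       tinfo; tinfo = tinfo->next)
    if (tinfo->uid == INSN_UID (target))
      return tinfo;
  return 0;
}
#endif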
196 /* For each basic block, we maintain a generation number of its basic
197 block info, which is updated each time we move an insn from the
198 target of a jump. This is the generation number indexed by block
199 number. */
201 static int *bb_ticks;
203 /* Mapping between INSN_UID's and position in the code since INSN_UID's do
204 not always monotonically increase. */
205 static int *uid_to_ruid;
207 /* Highest valid index in `uid_to_ruid'. */
208 static int max_uid;
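/* A minimal illustrative sketch, guarded out of compilation: how
   `uid_to_ruid' and `max_uid' can be initialized by numbering insns in
   order of appearance (the real initialization lives in this pass's
   entry point; xmalloc here is our simplification).  */
#if 0
static void
demo_number_insns (first)
     rtx first;
{
  rtx insn;
  int i;

  /* Find the highest INSN_UID in the function.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > max_uid)
      max_uid = INSN_UID (insn);

  /* Number the insns in the order they appear in the code.  */
  uid_to_ruid = (int *) xmalloc ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
}
#endif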
210 static void mark_referenced_resources PROTO((rtx, struct resources *, int));
211 static void mark_set_resources PROTO((rtx, struct resources *, int, int));
212 static int stop_search_p PROTO((rtx, int));
213 static int resource_conflicts_p PROTO((struct resources *,
214 struct resources *));
215 static int insn_references_resource_p PROTO((rtx, struct resources *, int));
216 static int insn_sets_resource_p PROTO((rtx, struct resources *, int));
217 static rtx find_end_label PROTO((void));
218 static rtx emit_delay_sequence PROTO((rtx, rtx, int, int));
219 static rtx add_to_delay_list PROTO((rtx, rtx));
220 static void delete_from_delay_slot PROTO((rtx));
221 static void delete_scheduled_jump PROTO((rtx));
222 static void note_delay_statistics PROTO((int, int));
223 static rtx optimize_skip PROTO((rtx));
224 static int mostly_true_jump PROTO((rtx, rtx));
225 static rtx get_branch_condition PROTO((rtx, rtx));
226 static int condition_dominates_p PROTO((rtx, rtx));
227 static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
228 struct resources *,
229 struct resources *,
230 struct resources *,
231 int, int *, int *, rtx *));
232 static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
233 struct resources *,
234 struct resources *,
235 struct resources *,
236 int, int *, int *));
237 static void try_merge_delay_insns PROTO((rtx, rtx));
238 static int redundant_insn_p PROTO((rtx, rtx, rtx));
239 static int own_thread_p PROTO((rtx, rtx, int));
240 static int find_basic_block PROTO((rtx));
241 static void update_block PROTO((rtx, rtx));
242 static void update_reg_dead_notes PROTO((rtx, rtx));
243 static void update_live_status PROTO((rtx, rtx));
244 static rtx next_insn_no_annul PROTO((rtx));
245 static void mark_target_live_regs PROTO((rtx, struct resources *));
246 static void fill_simple_delay_slots PROTO((rtx, int));
247 static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
248 int, int, int, int *));
249 static void fill_eager_delay_slots PROTO((rtx));
250 static void relax_delay_slots PROTO((rtx));
251 static void make_return_insns PROTO((rtx));
253 /* Given X, some rtl, and RES, a pointer to a `struct resource', mark
254 which resources are referenced by the insn. If INCLUDE_CALLED_ROUTINE
255 is TRUE, resources used by the called routine will be included for
256 CALL_INSNs. */
259 mark_referenced_resources (x, res, include_delayed_effects)
261 register struct resources *res;
262 register int include_delayed_effects;
264 register enum rtx_code code = GET_CODE (x);
266 register char *format_ptr;
268 /* Handle leaf items for which we set resource flags. Also, special-case
269 CALL, SET and CLOBBER operators. */
281 if (GET_CODE (SUBREG_REG (x)) != REG)
282 mark_referenced_resources (SUBREG_REG (x), res, 0);
285 int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
286 int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
287 for (i = regno; i < last_regno; i++)
288 SET_HARD_REG_BIT (res->regs, i);
293 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
294 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
298 /* If this memory shouldn't change, it really isn't referencing
299 memory. */
300 if (! RTX_UNCHANGING_P (x))
301 res->memory = 1;
302 res->volatil = MEM_VOLATILE_P (x);
304 /* Mark registers used to access memory. */
305 mark_referenced_resources (XEXP (x, 0), res, 0);
312 case UNSPEC_VOLATILE:
313 case ASM_INPUT:
314 /* Traditional asm's are always volatile. */
315 res->volatil = 1;
316 return;

318 case ASM_OPERANDS:
319 res->volatil = MEM_VOLATILE_P (x);
321 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
322 We can not just fall through here since then we would be confused
323 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
324 traditional asms unlike their normal usage. */
326 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
327 mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
330 case CALL:
331 /* The first operand will be a (MEM (xxx)) but doesn't really reference
332 memory. The second operand may be referenced, though. */
333 mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
334 mark_referenced_resources (XEXP (x, 1), res, 0);
335 return;

337 case SET:
338 /* Usually, the first operand of SET is set, not referenced. But
339 registers used to access memory are referenced. SET_DEST is
340 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
342 mark_referenced_resources (SET_SRC (x), res, 0);
344 x = SET_DEST (x);
345 if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
346 mark_referenced_resources (x, res, 0);
347 else if (GET_CODE (x) == SUBREG)
348 x = SUBREG_REG (x);
349 if (GET_CODE (x) == MEM)
350 mark_referenced_resources (XEXP (x, 0), res, 0);
356 case CALL_INSN:
357 if (include_delayed_effects)
358 {
359 /* A CALL references memory, the frame pointer if it exists, the
360 stack pointer, any global registers and any registers given in
361 USE insns immediately in front of the CALL.
363 However, we may have moved some of the parameter loading insns
364 into the delay slot of this CALL. If so, the USE's for them
365 don't count and should be skipped. */
366 rtx insn = PREV_INSN (x);
367 rtx sequence = 0;
368 int seq_size = 0;
371 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
372 if (NEXT_INSN (insn) != x)
374 sequence = PATTERN (NEXT_INSN (insn));
375 seq_size = XVECLEN (sequence, 0);
376 if (GET_CODE (sequence) != SEQUENCE)
377 abort ();
378 }
380 res->memory = 1;
381 SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
382 if (frame_pointer_needed)
383 SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
385 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
386 if (global_regs[i])
387 SET_HARD_REG_BIT (res->regs, i);
389 /* Skip any labels between the CALL_INSN and possible USE insns. */
390 while (GET_CODE (insn) == CODE_LABEL)
391 insn = PREV_INSN (insn);
393 for ( ; (insn && GET_CODE (insn) == INSN
394 && GET_CODE (PATTERN (insn)) == USE);
395 insn = PREV_INSN (insn))
397 for (i = 1; i < seq_size; i++)
399 rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
400 if (GET_CODE (slot_pat) == SET
401 && rtx_equal_p (SET_DEST (slot_pat),
402 XEXP (PATTERN (insn), 0)))
403 break;
404 }
405 if (i >= seq_size)
406 mark_referenced_resources (XEXP (PATTERN (insn), 0), res, 0);
410 /* ... fall through to other INSN processing ... */
415 #ifdef INSN_REFERENCES_ARE_DELAYED
416 if (! include_delayed_effects
417 && INSN_REFERENCES_ARE_DELAYED (x))
421 /* No special processing, just speed up. */
422 mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
426 /* Process each sub-expression and flag what it needs. */
427 format_ptr = GET_RTX_FORMAT (code);
428 for (i = 0; i < GET_RTX_LENGTH (code); i++)
429 switch (*format_ptr++)
432 mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
436 for (j = 0; j < XVECLEN (x, i); j++)
437 mark_referenced_resources (XVECEXP (x, i, j), res,
438 include_delayed_effects);
443 /* Given X, a part of an insn, and a pointer to a `struct resource', RES,
444 indicate which resources are modified by the insn. If INCLUDE_CALLED_ROUTINE
445 is nonzero, also mark resources potentially set by the called routine.
447 If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
448 objects are being referenced instead of set.
450 We never mark the insn as modifying the condition code unless it explicitly
451 SETs CC0 even though this is not totally correct. The reason for this is
452 that we require a SET of CC0 to immediately precede the reference to CC0.
453 So if some other insn sets CC0 as a side-effect, we know it cannot affect
454 our computation and thus may be placed in a delay slot. */
457 mark_set_resources (x, res, in_dest, include_delayed_effects)
459 register struct resources *res;
461 int include_delayed_effects;
463 register enum rtx_code code;
465 register char *format_ptr;
483 /* These don't set any resources. */
492 /* Called routine modifies the condition code, memory, any registers
493 that aren't saved across calls, global registers and anything
494 explicitly CLOBBERed immediately after the CALL_INSN. */
496 if (include_delayed_effects)
498 rtx next = NEXT_INSN (x);
500 res->cc = res->memory = 1;
501 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
502 if (call_used_regs[i] || global_regs[i])
503 SET_HARD_REG_BIT (res->regs, i);
505 /* Skip any possible labels between the CALL_INSN and CLOBBERs. */
506 while (GET_CODE (next) == CODE_LABEL)
507 next = NEXT_INSN (next);
509 for (; (next && GET_CODE (next) == INSN
510 && GET_CODE (PATTERN (next)) == CLOBBER);
511 next = NEXT_INSN (next))
512 mark_set_resources (XEXP (PATTERN (next), 0), res, 1, 0);
515 /* ... and also what its RTL says it modifies, if anything. */
520 /* An insn consisting of just a CLOBBER (or USE) is just for flow
521 and doesn't actually do anything, so we ignore it. */
523 #ifdef INSN_SETS_ARE_DELAYED
524 if (! include_delayed_effects
525 && INSN_SETS_ARE_DELAYED (x))
530 if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
535 /* If the source of a SET is a CALL, this is actually done by
536 the called routine. So only include it if we are to include the
537 effects of the calling routine. */
539 mark_set_resources (SET_DEST (x), res,
540 (include_delayed_effects
541 || GET_CODE (SET_SRC (x)) != CALL),
542 0);

544 mark_set_resources (SET_SRC (x), res, 0, 0);
545 return;
548 mark_set_resources (XEXP (x, 0), res, 1, 0);
552 for (i = 0; i < XVECLEN (x, 0); i++)
553 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
554 && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
555 mark_set_resources (XVECEXP (x, 0, i), res, 0,
556 include_delayed_effects);
563 mark_set_resources (XEXP (x, 0), res, 1, 0);
567 mark_set_resources (XEXP (x, 0), res, in_dest, 0);
568 mark_set_resources (XEXP (x, 1), res, 0, 0);
569 mark_set_resources (XEXP (x, 2), res, 0, 0);
576 res->volatil = MEM_VOLATILE_P (x);
579 mark_set_resources (XEXP (x, 0), res, 0, 0);
584 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
585 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
589 /* Process each sub-expression and flag what it needs. */
590 format_ptr = GET_RTX_FORMAT (code);
591 for (i = 0; i < GET_RTX_LENGTH (code); i++)
592 switch (*format_ptr++)
595 mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
599 for (j = 0; j < XVECLEN (x, i); j++)
600 mark_set_resources (XVECEXP (x, i, j), res, in_dest,
601 include_delayed_effects);
606 /* Return TRUE if this insn should stop the search for insn to fill delay
607 slots. LABELS_P indicates that labels should terminate the search.
608 In all cases, jumps terminate the search. */
611 stop_search_p (insn, labels_p)
618 switch (GET_CODE (insn))
632 /* OK unless it contains a delay slot or is an `asm' insn of some type.
633 We don't know anything about these. */
634 return (GET_CODE (PATTERN (insn)) == SEQUENCE
635 || GET_CODE (PATTERN (insn)) == ASM_INPUT
636 || asm_noperands (PATTERN (insn)) >= 0);
643 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
644 resource set contains a volatile memory reference. Otherwise, return FALSE. */
647 resource_conflicts_p (res1, res2)
648 struct resources *res1, *res2;
650 if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
651 || res1->volatil || res2->volatil)
652 return 1;

654 #ifdef HARD_REG_SET
655 return (res1->regs & res2->regs) != HARD_CONST (0);
656 #else
657 {
658 int i;
660 for (i = 0; i < HARD_REG_SET_LONGS; i++)
661 if ((res1->regs[i] & res2->regs[i]) != 0)
662 return 1;
663 return 0;
664 }
665 #endif
666 }
668 /* Return TRUE if any resource marked in RES, a `struct resources', is
669 referenced by INSN. If INCLUDE_CALLED_ROUTINE is set, return if the called
670 routine is using those resources.
672 We compute this by computing all the resources referenced by INSN and
673 seeing if this conflicts with RES. It might be faster to directly check
674 ourselves, and this is the way it used to work, but it means duplicating
675 a large block of complex code. */
678 insn_references_resource_p (insn, res, include_delayed_effects)
680 register struct resources *res;
681 int include_delayed_effects;
683 struct resources insn_res;
685 CLEAR_RESOURCE (&insn_res);
686 mark_referenced_resources (insn, &insn_res, include_delayed_effects);
687 return resource_conflicts_p (&insn_res, res);
690 /* Return TRUE if INSN modifies resources that are marked in RES.
691 INCLUDE_CALLED_ROUTINE is set if the actions of that routine should be
692 included. CC0 is only modified if it is explicitly set; see comments
693 in front of mark_set_resources for details. */
696 insn_sets_resource_p (insn, res, include_delayed_effects)
698 register struct resources *res;
699 int include_delayed_effects;
701 struct resources insn_sets;
703 CLEAR_RESOURCE (&insn_sets);
704 mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
705 return resource_conflicts_p (&insn_sets, res);
708 /* Find a label at the end of the function or before a RETURN. If there is
709 none, make one. */
716 /* If we found one previously, return it. */
717 if (end_of_function_label)
718 return end_of_function_label;
720 /* Otherwise, see if there is a label at the end of the function. If there
721 is, it must be that RETURN insns aren't needed, so that is our return
722 label and we don't have to do anything else. */
724 insn = get_last_insn ();
725 while (GET_CODE (insn) == NOTE
726 || (GET_CODE (insn) == INSN
727 && (GET_CODE (PATTERN (insn)) == USE
728 || GET_CODE (PATTERN (insn)) == CLOBBER)))
729 insn = PREV_INSN (insn);
731 if (GET_CODE (insn) == CODE_LABEL)
732 end_of_function_label = insn;
733 else
734 {
735 /* Otherwise, make a new label and emit a RETURN and BARRIER,
736 if needed. */
737 end_of_function_label = gen_label_rtx ();
738 LABEL_NUSES (end_of_function_label) = 0;
739 emit_label (end_of_function_label);
743 /* The return we make may have delay slots too. */
744 rtx insn = gen_return();
745 emit_jump_insn (insn);
747 if (num_delay_slots (insn) > 0)
748 obstack_ptr_grow (&unfilled_slots_obstack, insn);
753 /* Show one additional use for this label so it won't go away until
754 we are done. */
755 ++LABEL_NUSES (end_of_function_label);
757 return end_of_function_label;
760 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
761 the pattern of INSN with the SEQUENCE.
763 Chain the insns so that NEXT_INSN of each insn in the sequence points to
764 the next and NEXT_INSN of the last insn in the sequence points to
765 the first insn after the sequence. Similarly for PREV_INSN. This makes
766 it easier to scan all insns.
768 Returns the SEQUENCE that replaces INSN. */
771 emit_delay_sequence (insn, list, length, avail)
781 /* Allocate the rtvec to hold the insns and the SEQUENCE. */
782 rtvec seqv = rtvec_alloc (length + 1);
783 rtx seq = gen_rtx (SEQUENCE, VOIDmode, seqv);
784 rtx seq_insn = make_insn_raw (seq);
785 rtx first = get_insns ();
786 rtx last = get_last_insn ();
788 /* Make a copy of the insn having delay slots. */
789 rtx delay_insn = copy_rtx (insn);
791 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
792 confuse further processing. Update LAST in case it was the last insn.
793 We will put the BARRIER back in later. */
794 if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
796 delete_insn (NEXT_INSN (insn));
797 last = get_last_insn ();
798 had_barrier = 1;
801 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
802 NEXT_INSN (seq_insn) = NEXT_INSN (insn);
803 PREV_INSN (seq_insn) = PREV_INSN (insn);
805 if (insn == last)
806 set_new_first_and_last_insn (first, seq_insn);
807 else
808 PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

810 if (insn == first)
811 set_new_first_and_last_insn (seq_insn, last);
812 else
813 NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;
815 /* Build our SEQUENCE and rebuild the insn chain. */
816 XVECEXP (seq, 0, 0) = delay_insn;
817 INSN_DELETED_P (delay_insn) = 0;
818 PREV_INSN (delay_insn) = PREV_INSN (seq_insn);
820 for (li = list; li; li = XEXP (li, 1), i++)
822 rtx tem = XEXP (li, 0);
825 /* Show that this copy of the insn isn't deleted. */
826 INSN_DELETED_P (tem) = 0;
828 XVECEXP (seq, 0, i) = tem;
829 PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
830 NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;
832 /* Remove any REG_DEAD notes because we can't rely on them now
833 that the insn has been moved. */
834 for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
835 if (REG_NOTE_KIND (note) == REG_DEAD)
836 XEXP (note, 0) = const0_rtx;
839 NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);
841 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
842 last insn in that SEQUENCE to point to us. Similarly for the first
843 insn in the following insn if it is a SEQUENCE. */
845 if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
846 && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
847 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
848 XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
851 if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
852 && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
853 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
855 /* If there used to be a BARRIER, put it back. */
856 if (had_barrier)
857 emit_barrier_after (seq_insn);

859 return seq_insn;
860 }
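/* A minimal illustrative sketch, guarded out of compilation: the chain
   property the code above establishes.  An ordinary insn X satisfies
   PREV_INSN (NEXT_INSN (X)) == X; the final insn inside a delay-slot
   SEQUENCE does not, because its successor's PREV_INSN points at the
   SEQUENCE insn itself.  delete_from_delay_slot below scans forward for
   exactly this asymmetry.  The `demo_' name is ours.  */
#if 0
static int
demo_last_slot_insn_p (insn)
     rtx insn;
{
  return NEXT_INSN (insn) != 0
	 && PREV_INSN (NEXT_INSN (insn)) != insn;
}
#endif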
865 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
866 be in the order in which the insns are to be executed. */
869 add_to_delay_list (insn, delay_list)
873 /* If we have an empty list, just make a new list element. */
874 if (delay_list == 0)
875 return gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
877 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
878 list. */
879 XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

881 return delay_list;
882 }
884 /* Delete INSN from the delay slot of the insn that it is in. This may
885 produce an insn without anything in its delay slots. */
888 delete_from_delay_slot (insn)
891 rtx trial, seq_insn, seq, prev;
895 /* We first must find the insn containing the SEQUENCE with INSN in its
896 delay slot. Do this by finding an insn, TRIAL, where
897 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
899 for (trial = insn;
900 PREV_INSN (NEXT_INSN (trial)) == trial;
901 trial = NEXT_INSN (trial))
902 ;
904 seq_insn = PREV_INSN (NEXT_INSN (trial));
905 seq = PATTERN (seq_insn);
907 /* Create a delay list consisting of all the insns other than the one
908 we are deleting (unless we were the only one). */
909 if (XVECLEN (seq, 0) > 2)
910 for (i = 1; i < XVECLEN (seq, 0); i++)
911 if (XVECEXP (seq, 0, i) != insn)
912 delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);
914 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
915 list, and rebuild the delay list if non-empty. */
916 prev = PREV_INSN (seq_insn);
917 trial = XVECEXP (seq, 0, 0);
918 delete_insn (seq_insn);
919 add_insn_after (trial, prev);
921 if (GET_CODE (trial) == JUMP_INSN
922 && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
923 emit_barrier_after (trial);
925 /* If there are any delay insns, re-emit them. Otherwise clear the
926 annul flag. */
927 if (delay_list)
928 trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2, 0);
929 else
930 INSN_ANNULLED_BRANCH_P (trial) = 0;
932 INSN_FROM_TARGET_P (insn) = 0;
934 /* Show we need to fill this insn again. */
935 obstack_ptr_grow (&unfilled_slots_obstack, trial);
938 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
939 the insn that sets CC0 for it and delete it too. */
942 delete_scheduled_jump (insn)
945 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
946 delete the insn that sets the condition code, but it is hard to find it.
947 Since this case is rare anyway, don't bother trying; there would likely
948 be other insns that became dead anyway, which we wouldn't know to
952 if (reg_mentioned_p (cc0_rtx, insn))
954 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
956 /* If a reg-note was found, it points to an insn to set CC0. This
957 insn is in the delay list of some other insn. So delete it from
958 the delay list it was in. */
961 if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
962 && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
963 delete_from_delay_slot (XEXP (note, 0));
967 /* The insn setting CC0 is our previous insn, but it may be in
968 a delay slot. It will be the last insn in the delay slot, if
969 it is. */
970 rtx trial = previous_insn (insn);
971 if (GET_CODE (trial) == NOTE)
972 trial = prev_nonnote_insn (trial);
973 if (sets_cc0_p (PATTERN (trial)) != 1
974 || FIND_REG_INC_NOTE (trial, 0))
975 return;
976 if (PREV_INSN (NEXT_INSN (trial)) == trial)
977 delete_insn (trial);
978 else
979 delete_from_delay_slot (trial);
987 /* Counters for delay-slot filling. */
989 #define NUM_REORG_FUNCTIONS 2
990 #define MAX_DELAY_HISTOGRAM 3
991 #define MAX_REORG_PASSES 2
993 static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];
995 static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];
997 static int reorg_pass_number;
1000 note_delay_statistics (slots_filled, index)
1001 int slots_filled, index;
1003 num_insns_needing_delays[index][reorg_pass_number]++;
1004 if (slots_filled > MAX_DELAY_HISTOGRAM)
1005 slots_filled = MAX_DELAY_HISTOGRAM;
1006 num_filled_delays[index][slots_filled][reorg_pass_number]++;
1007 }
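/* A minimal illustrative sketch, guarded out of compilation: one way the
   histogram above could be dumped, assuming a stdio stream is available.
   The `demo_' name is ours, not part of the pass.  */
#if 0
static void
demo_dump_delay_statistics (file)
     FILE *file;
{
  int pass, fn, slots;

  for (pass = 0; pass < MAX_REORG_PASSES; pass++)
    for (fn = 0; fn < NUM_REORG_FUNCTIONS; fn++)
      {
	fprintf (file, ";; pass %d, strategy %d: %d insns needing delay slots\n",
		 pass + 1, fn, num_insns_needing_delays[fn][pass]);
	for (slots = 0; slots <= MAX_DELAY_HISTOGRAM; slots++)
	  fprintf (file, ";;   filled with %d: %d\n",
		   slots, num_filled_delays[fn][slots][pass]);
      }
}
#endif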
1009 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
1011 /* Optimize the following cases:
1013 1. When a conditional branch skips over only one instruction,
1014 use an annulling branch and put that insn in the delay slot.
1015 Use either a branch that annuls when the condition is true or
1016 invert the test with a branch that annuls when the condition is
1017 false. This saves insns, since otherwise we must copy an insn
1018 from the branch target.

1020    (orig)           (skip)          (otherwise)
1021    Bcc.n L1         Bcc',a L1       Bcc,a L1'
1022    insn             insn            insn2
1023  L1:              L1:             L1:
1024    insn2            insn2           insn2
1025    insn3            insn3         L1':
1026                                     insn3
1028 2. When a conditional branch skips over only one instruction,
1029 and after that, it unconditionally branches somewhere else,
1030 perform the similar optimization. This saves executing the
1031 second branch in the case where the inverted condition is true.
1038 INSN is a JUMP_INSN.
1040 This should be expanded to skip over N insns, where N is the number
1041 of delay slots required. */
1044 optimize_skip (insn)
1047 register rtx trial = next_nonnote_insn (insn);
1048 rtx next_trial = next_active_insn (trial);
1053 || GET_CODE (trial) != INSN
1054 || GET_CODE (PATTERN (trial)) == SEQUENCE
1055 || recog_memoized (trial) < 0
1056 || (! eligible_for_annul_false (insn, 0, trial)
1057 && ! eligible_for_annul_true (insn, 0, trial)))
1060 /* There are two cases where we are just executing one insn (we assume
1061 here that a branch requires only one insn; this should be generalized
1062 at some point): Where the branch goes around a single insn or where
1063 we have one insn followed by a branch to the same label we branch to.
1064 In both of these cases, inverting the jump and annulling the delay
1065 slot give the same effect in fewer insns. */
1066 if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
1068 && GET_CODE (next_trial) == JUMP_INSN
1069 && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
1070 && (simplejump_p (next_trial)
1071 || GET_CODE (PATTERN (next_trial)) == RETURN)))
1073 if (eligible_for_annul_false (insn, 0, trial))
1075 if (invert_jump (insn, JUMP_LABEL (insn)))
1076 INSN_FROM_TARGET_P (trial) = 1;
1077 else if (! eligible_for_annul_true (insn, 0, trial))
1081 delay_list = add_to_delay_list (trial, NULL_RTX);
1082 next_trial = next_active_insn (trial);
1083 update_block (trial, trial);
1084 delete_insn (trial);
1086 /* Also, if we are targeting an unconditional
1087 branch, thread our jump to the target of that branch. Don't
1088 change this into a RETURN here, because it may not accept what
1089 we have in the delay slot. We'll fix this up later. */
1090 if (next_trial && GET_CODE (next_trial) == JUMP_INSN
1091 && (simplejump_p (next_trial)
1092 || GET_CODE (PATTERN (next_trial)) == RETURN))
1094 target_label = JUMP_LABEL (next_trial);
1095 if (target_label == 0)
1096 target_label = find_end_label ();
1097 redirect_jump (insn, target_label);
1100 INSN_ANNULLED_BRANCH_P (insn) = 1;
1107 /* Return truth value of the statement that this branch
1108 is mostly taken. If we think that the branch is extremely likely
1109 to be taken, we return 2. If the branch is slightly more likely to be
1110 taken, return 1. Otherwise, return 0.
1112 CONDITION, if non-zero, is the condition that JUMP_INSN is testing. */
1115 mostly_true_jump (jump_insn, condition)
1116 rtx jump_insn, condition;
1118 rtx target_label = JUMP_LABEL (jump_insn);
1121 /* If this is a conditional return insn, assume it won't return. */
1122 if (target_label == 0)
1123 return 0;
1125 /* If TARGET_LABEL has no jumps between it and the end of the function,
1126 this is essentially a conditional return, so predict it as false. */
1127 for (insn = NEXT_INSN (target_label); insn; insn = NEXT_INSN (insn))
1129 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
1130 insn = XVECEXP (PATTERN (insn), 0, 0);
1131 if (GET_CODE (insn) == JUMP_INSN)
1132 break;
1133 }

1135 if (insn == 0)
1136 return 0;
1138 /* If this is the test of a loop, it is very likely true. We scan backwards
1139 from the target label. If we find a NOTE_INSN_LOOP_BEG before the next
1140 real insn, we assume the branch is to the top of the loop. */
1141 for (insn = PREV_INSN (target_label);
1142 insn && GET_CODE (insn) == NOTE;
1143 insn = PREV_INSN (insn))
1144 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1145 return 2;
1147 /* If this is a jump to the test of a loop, it is likely true. We scan
1148 forwards from the target label. If we find a NOTE_INSN_LOOP_VTOP
1149 before the next real insn, we assume the branch is to the loop branch
1150 test. */
1151 for (insn = NEXT_INSN (target_label);
1152 insn && GET_CODE (insn) == NOTE;
1153 insn = NEXT_INSN (insn))
1154 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
1155 return 1;
1157 /* If we couldn't figure out what this jump was, assume it won't be
1158 taken. This should be rare. */
1159 if (condition == 0)
1160 return 0;
1162 /* EQ tests are usually false and NE tests are usually true. Also,
1163 most quantities are positive, so we can make the appropriate guesses
1164 about signed comparisons against zero. */
1165 switch (GET_CODE (condition))
1166 {
1167 case CONST_INT:
1168 /* Unconditional branch. */
1169 return 1;
1170 case EQ:
1171 return 0;
1172 case NE:
1173 return 1;
1174 case LE:
1175 case LT:
1176 if (XEXP (condition, 1) == const0_rtx)
1177 return 0;
1178 break;
1179 case GE:
1180 case GT:
1181 if (XEXP (condition, 1) == const0_rtx)
1182 return 1;
1183 break;
1184 }
1186 /* Predict backward branches usually take, forward branches usually not. If
1187 we don't know whether this is forward or backward, assume the branch
1188 will be taken, since most are. */
1189 return (INSN_UID (jump_insn) > max_uid || INSN_UID (target_label) > max_uid
1190 || (uid_to_ruid[INSN_UID (jump_insn)]
1191 > uid_to_ruid[INSN_UID (target_label)]));
1192 }
1194 /* Return the condition under which INSN will branch to TARGET. If TARGET
1195 is zero, return the condition under which INSN will return. If INSN is
1196 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1197 type of jump, or it doesn't go to TARGET, return 0. */
1200 get_branch_condition (insn, target)
1204 rtx pat = PATTERN (insn);
1205 rtx src;
1207 if (GET_CODE (pat) == RETURN)
1208 return target == 0 ? const_true_rtx : 0;
1210 else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
1211 return 0;
1213 src = SET_SRC (pat);
1214 if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
1215 return const_true_rtx;
1217 else if (GET_CODE (src) == IF_THEN_ELSE
1218 && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
1219 || (GET_CODE (XEXP (src, 1)) == LABEL_REF
1220 && XEXP (XEXP (src, 1), 0) == target))
1221 && XEXP (src, 2) == pc_rtx)
1222 return XEXP (src, 0);
1224 else if (GET_CODE (src) == IF_THEN_ELSE
1225 && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
1226 || (GET_CODE (XEXP (src, 2)) == LABEL_REF
1227 && XEXP (XEXP (src, 2), 0) == target))
1228 && XEXP (src, 1) == pc_rtx)
1229 return gen_rtx (reverse_condition (GET_CODE (XEXP (src, 0))),
1230 GET_MODE (XEXP (src, 0)),
1231 XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));
1236 /* Return non-zero if CONDITION is more strict than the condition of
1237 INSN, i.e., if INSN will always branch if CONDITION is true. For
1238 example, (EQ x 0) dominates a branch on (LE x 0). */
1240 condition_dominates_p (condition, insn)
1244 rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
1245 enum rtx_code code = GET_CODE (condition);
1246 enum rtx_code other_code;
1248 if (rtx_equal_p (condition, other_condition)
1249 || other_condition == const_true_rtx)
1250 return 1;

1252 else if (condition == const_true_rtx || other_condition == 0)
1253 return 0;
1255 other_code = GET_CODE (other_condition);
1256 if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
1257 || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
1258 || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
1259 return 0;
1261 return comparison_dominates_p (code, other_code);
1264 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1265 the condition tested by INSN is CONDITION and the resources shown in
1266 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1267 from SEQ's delay list, in addition to whatever insns it may execute
1268 (in DELAY_LIST). SETS and NEEDED denote resources already set and
1269 needed while searching for delay slot insns. Return the concatenated
1270 delay list if possible, otherwise, return 0.
1272 SLOTS_TO_FILL is the total number of slots required by INSN, and
1273 PSLOTS_FILLED points to the number filled so far (also the number of
1274 insns in DELAY_LIST). It is updated with the number that have been
1275 filled from the SEQUENCE, if any.
1277 PANNUL_P points to a non-zero value if we already know that we need
1278 to annul INSN. If this routine determines that annulling is needed,
1279 it may set that value non-zero.
1281 PNEW_THREAD points to a location that is to receive the place at which
1282 execution should continue. */
1285 steal_delay_list_from_target (insn, condition, seq, delay_list,
1286 sets, needed, other_needed,
1287 slots_to_fill, pslots_filled, pannul_p,
1289 rtx insn, condition;
1292 struct resources *sets, *needed, *other_needed;
1299 int slots_remaining = slots_to_fill - *pslots_filled;
1300 int total_slots_filled = *pslots_filled;
1301 rtx new_delay_list = 0;
1302 int must_annul = *pannul_p;
1305 /* We can't do anything if there are more delay slots in SEQ than we
1306 can handle, or if we don't know that it will be a taken branch.
1308 We know that it will be a taken branch if it is either an unconditional
1309 branch or a conditional branch with a stricter branch condition. */
1311 if (XVECLEN (seq, 0) - 1 > slots_remaining
1312 || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0)))
1313 return delay_list;
1315 for (i = 1; i < XVECLEN (seq, 0); i++)
1317 rtx trial = XVECEXP (seq, 0, i);
1319 if (insn_references_resource_p (trial, sets, 0)
1320 || insn_sets_resource_p (trial, needed, 0)
1321 || insn_sets_resource_p (trial, sets, 0)
1322 #ifdef HAVE_cc0
1323 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1324 delay slot. */
1325 || find_reg_note (trial, REG_CC_USER, NULL_RTX)
1326 #endif
1327 /* If TRIAL is from the fallthrough code of an annulled branch insn
1328 in SEQ, we cannot use it. */
1329 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
1330 && ! INSN_FROM_TARGET_P (trial)))
1333 /* If this insn was already done (usually in a previous delay slot),
1334 pretend we put it in our delay slot. */
1335 if (redundant_insn_p (trial, insn, new_delay_list))
1336 continue;

1338 if (! must_annul
1339 && ((condition == const_true_rtx
1340 || (! insn_sets_resource_p (trial, other_needed, 0)
1341 && ! may_trap_p (PATTERN (trial)))))
1342 ? eligible_for_delay (insn, total_slots_filled, trial)
1344 eligible_for_annul_false (insn, total_slots_filled, trial)))
1346 temp = copy_rtx (trial);
1347 INSN_FROM_TARGET_P (temp) = 1;
1348 new_delay_list = add_to_delay_list (temp, new_delay_list);
1349 total_slots_filled++;
1351 if (--slots_remaining == 0)
1358 /* Show the place to which we will be branching. */
1359 *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
1361 /* Add any new insns to the delay list and update the count of the
1362 number of slots filled. */
1363 *pslots_filled = total_slots_filled;
1364 *pannul_p = must_annul;
1366 if (delay_list == 0)
1367 return new_delay_list;
1369 for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
1370 delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

1372 return delay_list;
1373 }
1375 /* Similar to steal_delay_list_from_target except that SEQ is on the
1376 fallthrough path of INSN. Here we only do something if the delay insn
1377 of SEQ is an unconditional branch. In that case we steal its delay slot
1378 for INSN since unconditional branches are much easier to fill. */
1381 steal_delay_list_from_fallthrough (insn, condition, seq,
1382 delay_list, sets, needed, other_needed,
1383 slots_to_fill, pslots_filled, pannul_p)
1384 rtx insn, condition;
1387 struct resources *sets, *needed, *other_needed;
1394 /* We can't do anything if SEQ's delay insn isn't an
1395 unconditional branch. */
1397 if (! simplejump_p (XVECEXP (seq, 0, 0))
1398 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
1399 return delay_list;
1401 for (i = 1; i < XVECLEN (seq, 0); i++)
1403 rtx trial = XVECEXP (seq, 0, i);
1405 /* If TRIAL sets CC0, stealing it will move it too far from the use
1406 of CC0. */
1407 if (insn_references_resource_p (trial, sets, 0)
1408 || insn_sets_resource_p (trial, needed, 0)
1409 || insn_sets_resource_p (trial, sets, 0)
1410 #ifdef HAVE_cc0
1411 || sets_cc0_p (PATTERN (trial))
1412 #endif
1413 )
1414 break;
1417 /* If this insn was already done, we don't need it. */
1418 if (redundant_insn_p (trial, insn, delay_list))
1419 {
1420 delete_from_delay_slot (trial);
1421 continue;
1422 }
1425 && ((condition == const_true_rtx
1426 || (! insn_sets_resource_p (trial, other_needed, 0)
1427 && ! may_trap_p (PATTERN (trial)))))
1428 ? eligible_for_delay (insn, *pslots_filled, trial)
1430 eligible_for_annul_true (insn, *pslots_filled, trial)))
1432 delete_from_delay_slot (trial);
1433 delay_list = add_to_delay_list (trial, delay_list);
1435 if (++(*pslots_filled) == slots_to_fill)
1445 /* Try merging insns starting at THREAD which match exactly the insns in
1446 INSN's delay list.
1448 If all insns were matched and the insn was previously annulling, the
1449 annul bit will be cleared.
1451 For each insn that is merged, if the branch is or will be non-annulling,
1452 we delete the merged insn. */
1455 try_merge_delay_insns (insn, thread)
1458 rtx trial, next_trial;
1459 rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
1460 int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
1461 int slot_number = 1;
1462 int num_slots = XVECLEN (PATTERN (insn), 0);
1463 rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1464 struct resources set, needed;
1465 rtx merged_insns = 0;
1468 CLEAR_RESOURCE (&needed);
1469 CLEAR_RESOURCE (&set);
1471 /* If this is not an annulling branch, take into account anything needed in
1472 NEXT_TO_MATCH. This prevents two increments from being incorrectly
1473 folded into one. If we are annulling, this would be the correct
1474 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH
1475 will essentially disable this optimization. This method is somewhat of
1476 a kludge, but I don't see a better way.) */
1477 if (! annul_p)
1478 mark_referenced_resources (next_to_match, &needed, 1);
1480 for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
1482 rtx pat = PATTERN (trial);
1484 next_trial = next_nonnote_insn (trial);
1486 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1487 if (GET_CODE (trial) == INSN
1488 && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
1489 continue;
1491 if (GET_CODE (next_to_match) == GET_CODE (trial)
1492 #ifdef HAVE_cc0
1493 /* We can't share an insn that sets cc0. */
1494 && ! sets_cc0_p (pat)
1495 #endif
1496 && ! insn_references_resource_p (trial, &set, 1)
1497 && ! insn_sets_resource_p (trial, &set, 1)
1498 && ! insn_sets_resource_p (trial, &needed, 1)
1499 && (trial = try_split (pat, trial, 0)) != 0
1500 && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
1501 /* Have to test this condition if annul condition is different
1502 from (and less restrictive than) non-annulling one. */
1503 && eligible_for_delay (delay_insn, slot_number - 1, trial))
1505 next_trial = next_nonnote_insn (trial);
1509 update_block (trial, thread);
1510 delete_insn (trial);
1511 INSN_FROM_TARGET_P (next_to_match) = 0;
1514 merged_insns = gen_rtx (INSN_LIST, VOIDmode, trial, merged_insns);
1516 if (++slot_number == num_slots)
1519 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1521 mark_referenced_resources (next_to_match, &needed, 1);
1524 mark_set_resources (trial, &set, 0, 1);
1525 mark_referenced_resources (trial, &needed, 1);
1528 /* See if we stopped on a filled insn. If we did, try to see if its
1529 delay slots match. */
1530 if (slot_number != num_slots
1531 && trial && GET_CODE (trial) == INSN
1532 && GET_CODE (PATTERN (trial)) == SEQUENCE
1533 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
1535 rtx pat = PATTERN (trial);
1537 for (i = 1; i < XVECLEN (pat, 0); i++)
1539 rtx dtrial = XVECEXP (pat, 0, i);
1541 if (! insn_references_resource_p (dtrial, &set, 1)
1542 && ! insn_sets_resource_p (dtrial, &set, 1)
1543 && ! insn_sets_resource_p (dtrial, &needed, 1)
1544 #ifdef HAVE_cc0
1545 && ! sets_cc0_p (PATTERN (dtrial))
1546 #endif
1547 && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
1548 && eligible_for_delay (delay_insn, slot_number - 1, dtrial))
1552 update_block (dtrial, thread);
1553 delete_from_delay_slot (dtrial);
1554 INSN_FROM_TARGET_P (next_to_match) = 0;
1557 merged_insns = gen_rtx (INSN_LIST, SImode, dtrial,
1560 if (++slot_number == num_slots)
1563 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1568 /* If all insns in the delay slot have been matched and we were previously
1569 annulling the branch, we need not do so any more. In that case delete
1570 all the merged insns. Also clear the INSN_FROM_TARGET_P bit of each
1571 insn in the delay list so that we know that it isn't only being used
1572 at the target. */
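/* Entries on MERGED_INSNS are tagged by the mode of the INSN_LIST node:
   VOIDmode means the insn was matched in the instruction stream and is
   deleted directly, while SImode means it was matched inside another
   insn's delay slot and must be removed with delete_from_delay_slot.  */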
1573 if (next_to_match == 0 && annul_p)
1575 for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
1577 if (GET_MODE (merged_insns) == SImode)
1579 update_block (XEXP (merged_insns, 0), thread);
1580 delete_from_delay_slot (XEXP (merged_insns, 0));
1584 update_block (XEXP (merged_insns, 0), thread);
1585 delete_insn (XEXP (merged_insns, 0));
1589 INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
1591 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1592 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
1596 /* See if INSN is redundant with an insn in front of TARGET. Often this
1597 is called when INSN is a candidate for a delay slot of TARGET.
1598 DELAY_LIST are insns that will be placed in delay slots of TARGET in front
1599 of INSN. Often INSN will be redundant with an insn in a delay slot of
1600 some previous insn. This happens when we have a series of branches to the
1601 same label; in that case the first insn at the target might want to go
1602 into each of the delay slots.
1604 If we are not careful, this routine can take up a significant fraction
1605 of the total compilation time (4%), but only wins rarely. Hence we
1606 speed this routine up by making two passes. The first pass goes back
1607 until it hits a label and sees if it can find an insn with an identical
1608 pattern. Only in this (relatively rare) event does it check for
1609 data conflicts.
1611 We do not split insns we encounter. This could cause us not to find a
1612 redundant insn, but the cost of splitting seems greater than the possible
1613 gain in rare cases. */
1616 redundant_insn_p (insn, target, delay_list)
1621 rtx target_main = target;
1622 rtx ipat = PATTERN (insn);
1624 struct resources needed, set;
1627 /* Scan backwards looking for a match. */
1628 for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
1630 if (GET_CODE (trial) == CODE_LABEL)
1631 break;
1633 if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
1634 continue;
1636 pat = PATTERN (trial);
1637 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1638 continue;
1640 if (GET_CODE (pat) == SEQUENCE)
1642 /* Stop for a CALL and its delay slots because it is difficult to
1643 track its resource needs correctly. */
1644 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1645 return 0;
1647 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1648 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
1649 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat))
1650 break;

1652 /* If found a match, exit this loop early. */
1653 if (i > 0)
1654 break;
1655 }
1657 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat))
1658 break;
1659 }
1661 /* If we didn't find an insn that matches, return 0. */
1662 if (trial == 0)
1663 return 0;
1665 /* See what resources this insn sets and needs. If they overlap, or
1666 if this insn references CC0, it can't be redundant. */
1668 CLEAR_RESOURCE (&needed);
1669 CLEAR_RESOURCE (&set);
1670 mark_set_resources (insn, &set, 0, 1);
1671 mark_referenced_resources (insn, &needed, 1);
1673 /* If TARGET is a SEQUENCE, get the main insn. */
1674 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1675 target_main = XVECEXP (PATTERN (target), 0, 0);
1677 if (resource_conflicts_p (&needed, &set)
1679 || reg_mentioned_p (cc0_rtx, ipat)
1681 /* The insn requiring the delay may not set anything needed or set by
1682 INSN. */
1683 || insn_sets_resource_p (target_main, &needed, 1)
1684 || insn_sets_resource_p (target_main, &set, 1))
1685 return 0;
1687 /* Insns we pass may not set either NEEDED or SET, so merge them for
1688 the tests below. */
1689 needed.memory |= set.memory;
1690 IOR_HARD_REG_SET (needed.regs, set.regs);
1692 /* This insn isn't redundant if it conflicts with an insn that either is
1693 or will be in a delay slot of TARGET. */
1697 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
1699 delay_list = XEXP (delay_list, 1);
1702 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1703 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
1704 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
1707 /* Scan backwards until we reach a label or an insn that uses something
1708 INSN sets, or sets something INSN uses or sets. */
1710 for (trial = PREV_INSN (target);
1711 trial && GET_CODE (trial) != CODE_LABEL;
1712 trial = PREV_INSN (trial))
1714 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
1715 && GET_CODE (trial) != JUMP_INSN)
1716 continue;
1718 pat = PATTERN (trial);
1719 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1720 continue;
1722 if (GET_CODE (pat) == SEQUENCE)
1724 /* If this is a CALL_INSN and its delay slots, it is hard to track
1725 the resource needs properly, so give up. */
1726 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1727 return 0;
1729 /* See if any of the insns in the delay slot match, updating
1730 resource requirements as we go. */
1731 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1733 rtx candidate = XVECEXP (pat, 0, i);
1735 /* If an insn will be annulled if the branch is false, it isn't
1736 considered as a possible duplicate insn. */
1737 if (rtx_equal_p (PATTERN (candidate), ipat)
1738 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1739 && INSN_FROM_TARGET_P (candidate)))
1741 /* Show that this insn will be used in the sequel. */
1742 INSN_FROM_TARGET_P (candidate) = 0;
1743 return 1;
1744 }
1746 /* Unless this is an annulled insn from the target of a branch,
1747 we must stop if it sets anything needed or set by INSN. */
1748 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1749 || ! INSN_FROM_TARGET_P (candidate))
1750 && insn_sets_resource_p (candidate, &needed, 1))
1751 return 0;
1752 }
1755 /* If the insn requiring the delay slot conflicts with INSN, we
1757 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
1762 /* See if TRIAL is the same as INSN. */
1763 pat = PATTERN (trial);
1764 if (rtx_equal_p (pat, ipat))
1765 return 1;
1767 /* Can't go any further if TRIAL conflicts with INSN. */
1768 if (insn_sets_resource_p (trial, &needed, 1))
1769 return 0;
1776 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
1777 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
1778 is non-zero, we are allowed to fall into this thread; otherwise, we are
1779 not.

1781 If LABEL is used more than once or we pass a label other than LABEL before
1782 finding an active insn, we do not own this thread. */
1785 own_thread_p (thread, label, allow_fallthrough)
1788 int allow_fallthrough;
1793 /* We don't own the function end. */
1794 if (thread == 0)
1795 return 0;
1797 /* Get the first active insn, or THREAD, if it is an active insn. */
1798 active_insn = next_active_insn (PREV_INSN (thread));
1800 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
1801 if (GET_CODE (insn) == CODE_LABEL
1802 && (insn != label || LABEL_NUSES (insn) != 1))
1803 return 0;
1805 if (allow_fallthrough)
1806 return 1;
1808 /* Ensure that we reach a BARRIER before any insn or label. */
1809 for (insn = prev_nonnote_insn (thread);
1810 insn == 0 || GET_CODE (insn) != BARRIER;
1811 insn = prev_nonnote_insn (insn))
1813 || GET_CODE (insn) == CODE_LABEL
1814 || (GET_CODE (insn) == INSN
1815 && GET_CODE (PATTERN (insn)) != USE
1816 && GET_CODE (PATTERN (insn)) != CLOBBER))
1817 return 0;

1819 return 1;
1820 }
1822 /* Find the number of the basic block that starts closest to INSN. Return -1
1823 if we couldn't find such a basic block. */
1826 find_basic_block (insn)
1831 /* Scan backwards to the previous BARRIER. Then see if we can find a
1832 label that starts a basic block. Return the basic block number. */
1834 for (insn = prev_nonnote_insn (insn);
1835 insn && GET_CODE (insn) != BARRIER;
1836 insn = prev_nonnote_insn (insn))
1839 /* The start of the function is basic block zero. */
1840 if (insn == 0)
1841 return 0;
1843 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
1844 anything other than a CODE_LABEL or note, we can't find this code. */
1845 for (insn = next_nonnote_insn (insn);
1846 insn && GET_CODE (insn) == CODE_LABEL;
1847 insn = next_nonnote_insn (insn))
1849 for (i = 0; i < n_basic_blocks; i++)
1850 if (insn == basic_block_head[i])
1851 return i;

1853 return -1;
1854 }
1857 /* Called when INSN is being moved from a location near the target of a jump.
1858 We leave a marker of the form (use (INSN)) immediately in front
1859 of WHERE for mark_target_live_regs. These markers will be deleted when
1862 We used to try to update the live status of registers if WHERE is at
1863 the start of a basic block, but that can't work since we may remove a
1864 BARRIER in relax_delay_slots. */
1867 update_block (insn, where)
1873 /* Ignore if this was in a delay slot and it came from the target of
1874 a branch. */
1875 if (INSN_FROM_TARGET_P (insn))
1876 return;
1878 emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);
1880 /* INSN might be making a value live in a block where it didn't use to
1881 be. So recompute liveness information for this block. */
1883 b = find_basic_block (insn);
1884 if (b != -1)
1885 bb_ticks[b]++;
1886 }
1888 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1889 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1890 that reference values used in INSN. If we find one, then we move the
1891 REG_DEAD note to INSN.
1893 This is needed to handle the case where a later insn (after INSN) has a
1894 REG_DEAD note for a register used by INSN, and this later insn subsequently
1895 gets moved before a CODE_LABEL because it is a redundant insn. In this
1896 case, mark_target_live_regs may be confused into thinking the register
1897 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
1900 update_reg_dead_notes (insn, delayed_insn)
1901 rtx insn, delayed_insn;
1905 for (p = next_nonnote_insn (insn); p != delayed_insn;
1906 p = next_nonnote_insn (p))
1907 for (link = REG_NOTES (p); link; link = next)
1909 next = XEXP (link, 1);
1911 if (REG_NOTE_KIND (link) != REG_DEAD
1912 || GET_CODE (XEXP (link, 0)) != REG)
1913 continue;
1915 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1917 /* Move the REG_DEAD note from P to INSN. */
1918 remove_note (p, link);
1919 XEXP (link, 1) = REG_NOTES (insn);
1920 REG_NOTES (insn) = link;
1925 /* Marks registers possibly live at the current place being scanned by
1926 mark_target_live_regs. Used only by the next two functions. */
1928 static HARD_REG_SET current_live_regs;
1930 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
1931 Also only used by the next two functions. */
1933 static HARD_REG_SET pending_dead_regs;
1935 /* Utility function called from mark_target_live_regs via note_stores.
1936 It deadens any CLOBBERed registers and livens any SET registers. */
1939 update_live_status (dest, x)
1943 int first_regno, last_regno;
1946 if (GET_CODE (dest) != REG
1947 && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
1948 return;
1950 if (GET_CODE (dest) == SUBREG)
1951 first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
1952 else
1953 first_regno = REGNO (dest);
1955 last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
1957 if (GET_CODE (x) == CLOBBER)
1958 for (i = first_regno; i < last_regno; i++)
1959 CLEAR_HARD_REG_BIT (current_live_regs, i);
1961 for (i = first_regno; i < last_regno; i++)
1963 SET_HARD_REG_BIT (current_live_regs, i);
1964 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
1968 /* Similar to next_insn, but ignores insns in the delay slots of
1969 an annulled branch. */
1972 next_insn_no_annul (insn)
1977 /* If INSN is an annulled branch, skip any insns from the target
1979 if (INSN_ANNULLED_BRANCH_P (insn)
1980 && NEXT_INSN (PREV_INSN (insn)) != insn)
1981 while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
1982 insn = NEXT_INSN (insn);
1984 insn = NEXT_INSN (insn);
1985 if (insn && GET_CODE (insn) == INSN
1986 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1987 insn = XVECEXP (PATTERN (insn), 0, 0);
1993 /* Set the resources that are live at TARGET.
1995 If TARGET is zero, we refer to the end of the current function and can
1996 return our precomputed value.
1998 Otherwise, we try to find out what is live by consulting the basic block
1999 information. This is tricky, because we must consider the actions of
2000 reload and jump optimization, which occur after the basic block information is computed.

2003 Accordingly, we proceed as follows:
2005 We find the previous BARRIER and look at all immediately following labels
2006 (with no intervening active insns) to see if any of them start a basic
2007 block. If we hit the start of the function first, we use block 0.
2009 Once we have found a basic block and a corresponding first insn, we can
2010 accurately compute the live status from basic_block_live_regs and
2011 reg_renumber. (By starting at a label following a BARRIER, we are immune
2012 to actions taken by reload and jump.) Then we scan all insns between
2013 that point and our target. For each CLOBBER (or for call-clobbered regs
2014 when we pass a CALL_INSN), mark the appropriate registers as dead. For
2015 a SET, mark them as live.
2017 We have to be careful when using REG_DEAD notes because they are not
2018 updated by such things as find_equiv_reg. So keep track of registers
2019 marked as dead that haven't been assigned to, and mark them dead at the
2020 next CODE_LABEL since reload and jump won't propagate values across labels.
2022 If we cannot find the start of a basic block (should be a very rare
2023 case, if it can happen at all), mark everything as potentially live.
2025 Next, scan forward from TARGET looking for things set or clobbered
2026 before they are used. These are not live.
2028 Because we can be called many times on the same target, save our results
2029 in a hash table indexed by INSN_UID. */
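/* A worked example (illustrative only; the labels and registers are
   hypothetical): given

       (barrier)
     L7:                        ;; starts basic block B
       (set (reg:SI 2) ...)
       (clobber (reg:SI 3))
     TARGET:

   we would copy basic_block_live_at_start[B], mark reg 2 live and reg 3
   dead while scanning down to TARGET, and then let the forward scan from
   TARGET remove anything that is set there before it is used.  */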
2032 mark_target_live_regs (target, res)
2034 struct resources *res;
2038 struct target_info *tinfo;
2042 HARD_REG_SET scratch;
2043 struct resources set, needed;
2046 /* Handle end of function. */
2049 *res = end_of_function_needs;
2053 /* We have to assume memory is needed, but the CC isn't. */
2058 /* See if we have computed this value already. */
2059 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2060 tinfo; tinfo = tinfo->next)
2061 if (tinfo->uid == INSN_UID (target))
2064 /* Start by getting the basic block number. If we have saved information,
2065 we can get it from there unless the insn at the start of the basic block
2066 has been deleted. */
2067 if (tinfo && tinfo->block != -1
2068 && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
2072 b = find_basic_block (target);
2076 /* If the information is up-to-date, use it. Otherwise, we will update it below. */
2078 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
2080 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
2086 /* Allocate a place to put our results and chain it into the hash table. */
2088 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
2089 tinfo->uid = INSN_UID (target);
2091 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2092 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
2095 CLEAR_HARD_REG_SET (pending_dead_regs);
2097 /* If we found a basic block, get the live registers from it and update
2098 them with anything set or killed between its start and the insn before
2099 TARGET. Otherwise, we must assume everything is live. */
2102 regset regs_live = basic_block_live_at_start[b];
2104 REGSET_ELT_TYPE bit;
2106 rtx start_insn, stop_insn;
2108 /* Compute hard regs live at start of block -- this is the real hard regs
2109 marked live, plus live pseudo regs that have been renumbered to hard regs. */
#ifdef HARD_REG_SET
2113 current_live_regs = *regs_live;
#else
2115 COPY_HARD_REG_SET (current_live_regs, regs_live);
#endif
2118 for (offset = 0, i = 0; offset < regset_size; offset++)
{
2120 if (regs_live[offset] == 0)
2121 i += REGSET_ELT_BITS;
else
2123 for (bit = 1; bit && i < max_regno; bit <<= 1, i++)
2124 if ((regs_live[offset] & bit)
2125 && (regno = reg_renumber[i]) >= 0)
for (j = regno;
2127 j < regno + HARD_REGNO_NREGS (regno,
2128 PSEUDO_REGNO_MODE (i));
j++)
2130 SET_HARD_REG_BIT (current_live_regs, j);
}
2133 /* Get starting and ending insn, handling the case where each might be a SEQUENCE. */
2135 start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
stop_insn = target;
2138 if (GET_CODE (start_insn) == INSN
2139 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
2140 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
2142 if (GET_CODE (stop_insn) == INSN
2143 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
2144 stop_insn = next_insn (PREV_INSN (stop_insn));
2146 for (insn = start_insn; insn != stop_insn;
2147 insn = next_insn_no_annul (insn))
2150 rtx real_insn = insn;
2152 /* If this insn is from the target of a branch, it isn't going to
2153 be used in the sequel. If it is used in both cases, this
2154 test will not be true. */
2155 if (INSN_FROM_TARGET_P (insn))
continue;
2158 /* If this insn is a USE made by update_block, we care about the underlying insn. */
2160 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
2161 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2162 real_insn = XEXP (PATTERN (insn), 0);
2164 if (GET_CODE (real_insn) == CALL_INSN)
2166 /* CALL clobbers all call-used regs that aren't fixed except
2167 sp, ap, and fp. Do this before setting the result of the call live. */
2169 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2170 if (call_used_regs[i]
2171 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
2172 && i != ARG_POINTER_REGNUM
2173 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2174 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
#endif
2176 #ifdef PIC_OFFSET_TABLE_REGNUM
2177 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
)
2180 CLEAR_HARD_REG_BIT (current_live_regs, i);
2182 /* A CALL_INSN sets any global register live, since it may
2183 have been modified by the call. */
2184 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i])
2186 SET_HARD_REG_BIT (current_live_regs, i);
2189 /* Mark anything killed in an insn to be deadened at the next
2190 label. Ignore USE insns; the only REG_DEAD notes will be for
2191 parameters. But they might be early. A CALL_INSN will usually
2192 clobber registers used for parameters. It isn't worth bothering
2193 with the unlikely case when it won't. */
2194 if ((GET_CODE (real_insn) == INSN
2195 && GET_CODE (PATTERN (real_insn)) != USE)
2196 || GET_CODE (real_insn) == JUMP_INSN
2197 || GET_CODE (real_insn) == CALL_INSN)
2199 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2200 if (REG_NOTE_KIND (link) == REG_DEAD
2201 && GET_CODE (XEXP (link, 0)) == REG
2202 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2204 int first_regno = REGNO (XEXP (link, 0));
int last_regno
= (first_regno
2207 + HARD_REGNO_NREGS (first_regno,
2208 GET_MODE (XEXP (link, 0))));
2210 for (i = first_regno; i < last_regno; i++)
2211 SET_HARD_REG_BIT (pending_dead_regs, i);
2214 note_stores (PATTERN (real_insn), update_live_status);
2216 /* If any registers were unused after this insn, kill them.
2217 These notes will always be accurate. */
2218 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2219 if (REG_NOTE_KIND (link) == REG_UNUSED
2220 && GET_CODE (XEXP (link, 0)) == REG
2221 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2223 int first_regno = REGNO (XEXP (link, 0));
int last_regno
= (first_regno
2226 + HARD_REGNO_NREGS (first_regno,
2227 GET_MODE (XEXP (link, 0))));
2229 for (i = first_regno; i < last_regno; i++)
2230 CLEAR_HARD_REG_BIT (current_live_regs, i);
2234 else if (GET_CODE (real_insn) == CODE_LABEL)
2236 /* A label clobbers the pending dead registers since neither
2237 reload nor jump will propagate a value across a label. */
2238 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
2239 CLEAR_HARD_REG_SET (pending_dead_regs);
2242 /* The beginning of the epilogue corresponds to the end of the
2243 RTL chain when there are no epilogue insns. Certain resources
2244 are implicitly required at that point. */
2245 else if (GET_CODE (real_insn) == NOTE
2246 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
2247 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
2250 COPY_HARD_REG_SET (res->regs, current_live_regs);
2252 tinfo->bb_tick = bb_ticks[b];
2255 /* We didn't find the start of a basic block. Assume everything
2256 in use. This should happen only extremely rarely. */
2257 SET_HARD_REG_SET (res->regs);
2259 /* Now step forward from TARGET looking for registers that are set before
2260 they are used. These are dead. If we pass a label, any pending dead
2261 registers that weren't yet used can be made dead. Stop when we pass a
2262 conditional JUMP_INSN; follow the first few unconditional branches. */
2264 CLEAR_RESOURCE (&set);
2265 CLEAR_RESOURCE (&needed);
2267 for (insn = target; insn; insn = next)
2269 rtx this_jump_insn = insn;
2271 next = NEXT_INSN (insn);
2272 switch (GET_CODE (insn))
{
case CODE_LABEL:
2275 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
2276 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
2277 CLEAR_HARD_REG_SET (pending_dead_regs);
2285 if (GET_CODE (PATTERN (insn)) == USE)
2287 /* If INSN is a USE made by update_block, we care about the
2288 underlying insn. Any registers set by the underlying insn
2289 are live since the insn is being done somewhere else. */
2290 if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2291 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
2293 /* All other USE insns are to be ignored. */
continue;
2296 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
continue;
2298 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
2300 /* An unconditional jump can be used to fill the delay slot
2301 of a call, so search for a JUMP_INSN in any position. */
2302 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
2304 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
2305 if (GET_CODE (this_jump_insn) == JUMP_INSN)
break;
2311 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2313 if (jump_count++ < 10
2314 && (simplejump_p (this_jump_insn)
2315 || GET_CODE (PATTERN (this_jump_insn)) == RETURN))
2317 next = next_active_insn (JUMP_LABEL (this_jump_insn));
2321 jump_target = JUMP_LABEL (this_jump_insn);
2328 mark_referenced_resources (insn, &needed, 1);
2329 mark_set_resources (insn, &set, 0, 1);
2331 COPY_HARD_REG_SET (scratch, set.regs);
2332 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2333 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2336 /* If we hit an unconditional branch, we have another way of finding out
2337 what is live: we can see what is live at the branch target and include
2338 anything used but not set before the branch. The only things that are
2339 live are those that are live using the above test and the test below.
2341 Don't try this if we expired our jump count above, since that would
2342 mean there may be an infinite loop in the function being compiled. */
2344 if (jump_insn && jump_count < 10)
2346 struct resources new_resources;
2347 rtx stop_insn = next_active_insn (jump_insn);
2349 mark_target_live_regs (next_active_insn (jump_target), &new_resources);
2350 CLEAR_RESOURCE (&set);
2351 CLEAR_RESOURCE (&needed);
2353 /* Include JUMP_INSN in the needed registers. */
2354 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
2356 mark_referenced_resources (insn, &needed, 1);
2358 COPY_HARD_REG_SET (scratch, needed.regs);
2359 AND_COMPL_HARD_REG_SET (scratch, set.regs);
2360 IOR_HARD_REG_SET (new_resources.regs, scratch);
2362 mark_set_resources (insn, &set, 0, 1);
2365 AND_HARD_REG_SET (res->regs, new_resources.regs);
2368 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
2371 /* Scan a function looking for insns that need a delay slot and find insns to
2372 put into the delay slot.
2374 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
2375 as calls). We do these first since we don't want jump insns (that are
2376 easier to fill) to get the only insns that could be used for non-jump insns.
2377 When it is zero, only try to fill JUMP_INSNs.
2379 When slots are filled in this manner, the insns (including the
2380 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2381 it is possible to tell whether a delay slot has really been filled
2382 or not. `final' knows how to deal with this, by communicating
2383 through FINAL_SEQUENCE. */
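/* For illustration (a sketch of the representation, not verbatim compiler
   output): a call with one filled slot becomes a single insn whose pattern
   is a SEQUENCE,

       (insn (sequence [(call_insn ...)
                        (insn ...)]))   ;; the delay insn

   so a filled slot is easy to distinguish from an unfilled one.  */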
2386 fill_simple_delay_slots (first, non_jumps_p)
2390 register rtx insn, pat, trial, next_trial;
2392 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2393 struct resources needed, set;
2394 register int slots_to_fill, slots_filled;
2397 for (i = 0; i < num_unfilled_slots; i++)
2399 /* Get the next insn to fill. If it has already had any slots assigned,
2400 we can't do anything with it. Maybe we'll improve this later. */
2402 insn = unfilled_slots_base[i];
if (insn == 0
2404 || INSN_DELETED_P (insn)
2405 || (GET_CODE (insn) == INSN
2406 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2407 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2408 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2411 slots_to_fill = num_delay_slots (insn);
2412 if (slots_to_fill == 0)
2415 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2416 says how many. After initialization, first try optimizing

	call _foo		call _foo
	nop			add %o7,.-L1,%o7
	b,a L1
	nop

2423 If this case applies, the delay slot of the call is filled with
2424 the unconditional jump. This is done first to avoid having the
2425 delay slot of the call filled in the backward scan. Also, since
2426 the unconditional jump is likely to also have a delay slot, that
2427 insn must exist when it is subsequently scanned. */
2432 if (GET_CODE (insn) == CALL_INSN
2433 && (trial = next_active_insn (insn))
2434 && GET_CODE (trial) == JUMP_INSN
2435 && simplejump_p (trial)
2436 && eligible_for_delay (insn, slots_filled, trial)
2437 && no_labels_between_p (insn, trial))
{
slots_filled++;
2440 delay_list = add_to_delay_list (trial, delay_list);
2441 /* Remove the unconditional jump from consideration for delay slot
2442 filling and unthread it. */
2443 if (unfilled_slots_base[i + 1] == trial)
2444 unfilled_slots_base[i + 1] = 0;
2446 rtx next = NEXT_INSN (trial);
2447 rtx prev = PREV_INSN (trial);
if (prev)
2449 NEXT_INSN (prev) = next;
if (next)
2451 PREV_INSN (next) = prev;
2455 /* Now, scan backwards from the insn to search for a potential
2456 delay-slot candidate. Stop searching when a label or jump is hit.
2458 For each candidate, if it is to go into the delay slot (moved
2459 forward in execution sequence), it must not need or set any resources
2460 that were set by later insns and must not set any resources that
2461 are needed for those insns.
2463 The delay slot insn itself sets resources unless it is a call
2464 (in which case the called routine, not the insn itself, is doing the set). */
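/* For example (hypothetical code): scanning backward from

       r2 = r3 + r4		;; sets r2, which the next insn needs
       r5 = r2 * 2
       call _foo		;; INSN wanting a delay slot

   the first insn cannot be taken because an intervening insn uses its
   result; an insn with no such conflicts could be moved into the call's
   delay slot instead.  */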
2467 if (slots_filled < slots_to_fill)
2469 CLEAR_RESOURCE (&needed);
2470 CLEAR_RESOURCE (&set);
2471 mark_set_resources (insn, &set, 0, 0);
2472 mark_referenced_resources (insn, &needed, 0);
2474 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
trial = next_trial)
{
2477 next_trial = prev_nonnote_insn (trial);
2479 /* This must be an INSN or CALL_INSN. */
2480 pat = PATTERN (trial);
2482 /* USE and CLOBBER at this level were just for flow; ignore them. */
2483 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
continue;
2486 /* Check for resource conflict first, to avoid unnecessary splitting. */
2488 if (! insn_references_resource_p (trial, &set, 1)
2489 && ! insn_sets_resource_p (trial, &set, 1)
2490 && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
2492 /* Can't separate set of cc0 from its use. */
2493 && ! (reg_mentioned_p (cc0_rtx, pat)
2494 && ! sets_cc0_p (pat))
#endif
)
{
2498 trial = try_split (pat, trial, 1);
2499 next_trial = prev_nonnote_insn (trial);
2500 if (eligible_for_delay (insn, slots_filled, trial))
2502 /* In this case, we are searching backward, so if we
2503 find insns to put on the delay list, we want
2504 to put them at the head, rather than the
2505 tail, of the list. */
2507 update_reg_dead_notes (trial, insn);
2508 delay_list = gen_rtx (INSN_LIST, VOIDmode,
trial, delay_list);
2510 update_block (trial, trial);
2511 delete_insn (trial);
2512 if (slots_to_fill == ++slots_filled)
break;
2518 mark_set_resources (trial, &set, 0, 1);
2519 mark_referenced_resources (trial, &needed, 1);
2523 /* If all needed slots haven't been filled, we come here. */
2525 /* Try to optimize case of jumping around a single insn. */
2526 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2527 if (slots_filled != slots_to_fill
&& delay_list == 0
2529 && GET_CODE (insn) == JUMP_INSN && condjump_p (insn))
2531 delay_list = optimize_skip (insn);
2537 /* Try to get insns from beyond the insn needing the delay slot.
2538 These insns can neither set or reference resources set in insns being
2539 skipped, cannot set resources in the insn being skipped, and, if this
2540 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2541 call might not return).
2543 If this is a conditional jump, see if it merges back to us early
2544 enough for us to pick up insns from the merge point. Don't do
2545 this if there is another branch to our label unless we pass all of them.
2548 Another similar merge is if we jump to the same place that a
2549 later unconditional jump branches to. In that case, we don't
2550 care about the number of uses of our label. */
2552 if (slots_filled != slots_to_fill
2553 && (GET_CODE (insn) != JUMP_INSN
2554 || (condjump_p (insn) && ! simplejump_p (insn)
2555 && JUMP_LABEL (insn) != 0)))
2558 int maybe_never = 0;
2559 int passed_label = 0;
2561 struct resources needed_at_jump;
2563 CLEAR_RESOURCE (&needed);
2564 CLEAR_RESOURCE (&set);
2566 if (GET_CODE (insn) == CALL_INSN)
{
2568 mark_set_resources (insn, &set, 0, 1);
2569 mark_referenced_resources (insn, &needed, 1);
maybe_never = 1;
}
else
{
2574 mark_set_resources (insn, &set, 0, 1);
2575 mark_referenced_resources (insn, &needed, 1);
2576 if (GET_CODE (insn) == JUMP_INSN)
2578 /* Get our target and show how many more uses we want to
2579 see before we hit the label. */
2580 target = JUMP_LABEL (insn);
2581 target_uses = LABEL_NUSES (target) - 1;
2586 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2588 rtx pat, trial_delay;
2590 next_trial = next_nonnote_insn (trial);
2592 if (GET_CODE (trial) == CODE_LABEL)
2596 /* If this is our target, see if we have seen all its uses.
2597 If so, indicate we have passed our target and ignore it.
2598 All other labels cause us to stop our search. */
2599 if (trial == target && target_uses == 0)
2607 else if (GET_CODE (trial) == BARRIER)
break;
2610 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2611 pat = PATTERN (trial);
2613 /* Stand-alone USE and CLOBBER are just for flow. */
2614 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2617 /* If this already has filled delay slots, get the insn needing the delay slots. */
2619 if (GET_CODE (pat) == SEQUENCE)
2620 trial_delay = XVECEXP (pat, 0, 0);
2622 trial_delay = trial;
2624 /* If this is a jump insn to our target, indicate that we have
2625 seen another jump to it. If we aren't handling a conditional
2626 jump, stop our search. Otherwise, compute the needs at its
2627 target and add them to NEEDED. */
2628 if (GET_CODE (trial_delay) == JUMP_INSN)
2632 else if (JUMP_LABEL (trial_delay) == target)
2636 mark_target_live_regs
2637 (next_active_insn (JUMP_LABEL (trial_delay)),
2639 needed.memory |= needed_at_jump.memory;
2640 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
2644 /* See if we have a resource problem before we try to split. */
2647 && GET_CODE (pat) != SEQUENCE
2648 && ! insn_references_resource_p (trial, &set, 1)
2649 && ! insn_sets_resource_p (trial, &set, 1)
2650 && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
2652 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
#endif
2654 && ! (maybe_never && may_trap_p (pat))
2655 && (trial = try_split (pat, trial, 0))
2656 && eligible_for_delay (insn, slots_filled, trial))
2658 next_trial = next_nonnote_insn (trial);
2659 delay_list = add_to_delay_list (trial, delay_list);
#ifdef HAVE_cc0
2662 if (reg_mentioned_p (cc0_rtx, pat))
2663 link_cc0_insns (trial);
#endif
2667 update_block (trial, trial);
2668 delete_insn (trial);
2669 if (slots_to_fill == ++slots_filled)
break;
2674 mark_set_resources (trial, &set, 0, 1);
2675 mark_referenced_resources (trial, &needed, 1);
2677 /* Ensure we don't put insns between the setting of cc and the
2678 comparison by moving a setting of cc into an earlier delay
2679 slot since these insns could clobber the condition code. */
set.cc = 1;
2682 /* If this is a call or jump, we might not get here. */
2683 if (GET_CODE (trial) == CALL_INSN
2684 || GET_CODE (trial) == JUMP_INSN)
maybe_never = 1;
2688 /* If there are slots left to fill and our search was stopped by an
2689 unconditional branch, try the insn at the branch target. We can
2690 redirect the branch if it works. */
2691 if (slots_to_fill != slots_filled
2693 && GET_CODE (trial) == JUMP_INSN
2694 && simplejump_p (trial)
2695 && (target == 0 || JUMP_LABEL (trial) == target)
2696 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2697 && ! (GET_CODE (next_trial) == INSN
2698 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2699 && ! insn_references_resource_p (next_trial, &set, 1)
2700 && ! insn_sets_resource_p (next_trial, &set, 1)
2701 && ! insn_sets_resource_p (next_trial, &needed, 1)
#ifdef HAVE_cc0
2703 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
#endif
2705 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
2706 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2707 && eligible_for_delay (insn, slots_filled, next_trial))
2709 rtx new_label = next_active_insn (next_trial);
if (new_label != 0)
2712 new_label = get_label_before (new_label);

delay_list
2715 = add_to_delay_list (copy_rtx (next_trial), delay_list);
slots_filled++;
2717 redirect_jump (trial, new_label);
2719 /* If we merged because we both jumped to the same place,
2720 redirect the original insn also. */
if (target)
2722 redirect_jump (insn, new_label);
if (delay_list)
2727 unfilled_slots_base[i]
2728 = emit_delay_sequence (insn, delay_list,
2729 slots_filled, slots_to_fill);
2731 if (slots_to_fill == slots_filled)
2732 unfilled_slots_base[i] = 0;
2734 note_delay_statistics (slots_filled, 0);
2737 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2738 /* See if the epilogue needs any delay slots. Try to fill them if so.
2739 The only thing we can do is scan backwards from the end of the
2740 function. If we did this in a previous pass, it is incorrect to do it again. */
2742 if (current_function_epilogue_delay_list)
return;
2745 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2746 if (slots_to_fill == 0)
return;

slots_filled = 0;
2750 CLEAR_RESOURCE (&needed);
2751 CLEAR_RESOURCE (&set);
2753 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2754 trial = PREV_INSN (trial))
2756 if (GET_CODE (trial) == NOTE)
continue;
2758 pat = PATTERN (trial);
2759 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
continue;
2762 if (! insn_references_resource_p (trial, &set, 1)
2763 && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
2765 /* Don't want to mess with cc0 here. */
2766 && ! reg_mentioned_p (cc0_rtx, pat)
#endif
2770 trial = try_split (pat, trial, 1);
2771 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2773 /* Here as well we are searching backward, so put the
2774 insns we find on the head of the list. */
2776 current_function_epilogue_delay_list
2777 = gen_rtx (INSN_LIST, VOIDmode, trial,
2778 current_function_epilogue_delay_list);
2779 mark_referenced_resources (trial, &end_of_function_needs, 1);
2780 update_block (trial, trial);
2781 delete_insn (trial);
2783 /* Clear deleted bit so final.c will output the insn. */
2784 INSN_DELETED_P (trial) = 0;
2786 if (slots_to_fill == ++slots_filled)
break;
2792 mark_set_resources (trial, &set, 0, 1);
2793 mark_referenced_resources (trial, &needed, 1);
2796 note_delay_statistics (slots_filled, 0);
2800 /* Try to find insns to place in delay slots.
2802 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2803 or is an unconditional branch if CONDITION is const_true_rtx.
2804 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2806 THREAD is a flow-of-control thread: the insns to be executed if the
2807 branch is true or those executed if it is false; THREAD_IF_TRUE says which.
2809 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2810 to see if any potential delay slot insns set things needed there.
2812 LIKELY is non-zero if it is extremely likely that the branch will be
2813 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2814 end of a loop back up to the top.
2816 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
2817 thread. I.e., it is the fallthrough code of our jump or the target of the
2818 jump when we are the only jump going there.
2820 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2821 case, we can only take insns from the head of the thread for our delay
2822 slot. We then adjust the jump to point after the insns we have taken. */
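/* For illustration (hypothetical code):

       beq L2			;; INSN
       i1			;; the fallthrough insns
       ...
     L2:
       i2			;; the insns at the branch target

   the insns at L2 form the thread used when THREAD_IF_TRUE is set and the
   insns at i1 the opposite thread; OWN_THREAD is true only when no other
   jump can reach the thread's insns.  */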
2825 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
2826 thread_if_true, own_thread, own_opposite_thread,
2827 slots_to_fill, pslots_filled)
2830 rtx thread, opposite_thread;
2833 int own_thread, own_opposite_thread;
2834 int slots_to_fill, *pslots_filled;
2838 struct resources opposite_needed, set, needed;
2843 /* Validate our arguments. */
2844 if ((condition == const_true_rtx && ! thread_if_true)
2845 || (! own_thread && ! thread_if_true))
2848 /* If our thread is the end of subroutine, we can't get any delay insns from that. */
if (thread == 0)
return 0;
2853 /* If this is an unconditional branch, nothing is needed at the
2854 opposite thread. Otherwise, compute what is needed there. */
2855 if (condition == const_true_rtx)
2856 CLEAR_RESOURCE (&opposite_needed);
else
2858 mark_target_live_regs (opposite_thread, &opposite_needed);
2860 /* If the insn at THREAD can be split, do it here to avoid having to
2861 update THREAD and NEW_THREAD if it is done in the loop below. Also
2862 initialize NEW_THREAD. */
2864 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2866 /* Scan insns at THREAD. We are looking for an insn that can be removed
2867 from THREAD (it neither sets nor references resources that were set
2868 ahead of it and it doesn't set anything needed by the insns ahead of
2869 it) and that either can be placed in an annulling insn or isn't
2870 needed at OPPOSITE_THREAD. */
2872 CLEAR_RESOURCE (&needed);
2873 CLEAR_RESOURCE (&set);
2875 /* If we do not own this thread, we must stop as soon as we find
2876 something that we can't put in a delay slot, since all we can do
2877 is branch into THREAD at a later point. Therefore, labels stop
2878 the search if this is not the `true' thread. */
2880 for (trial = thread;
2881 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2882 trial = next_nonnote_insn (trial))
2886 /* If we have passed a label, we no longer own this thread. */
2887 if (GET_CODE (trial) == CODE_LABEL)
{
own_thread = 0;
continue;
}
2893 pat = PATTERN (trial);
2894 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
continue;
2897 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2898 don't separate or copy insns that set and use CC0. */
2899 if (! insn_references_resource_p (trial, &set, 1)
2900 && ! insn_sets_resource_p (trial, &set, 1)
2901 && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
2903 && ! (reg_mentioned_p (cc0_rtx, pat)
2904 && (! own_thread || ! sets_cc0_p (pat)))
#endif
)
{
2908 /* If TRIAL is redundant with some insn before INSN, we don't
2909 actually need to add it to the delay list; we can merely pretend we did. */
2911 if (redundant_insn_p (trial, insn, delay_list))
{
if (own_thread)
{
2915 update_block (trial, thread);
2916 delete_insn (trial);
}
else
2919 new_thread = next_active_insn (trial);
2924 /* There are two ways we can win: If TRIAL doesn't set anything
2925 needed at the opposite thread and can't trap, or if it can
2926 go into an annulled delay slot. */
2927 if (condition == const_true_rtx
2928 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2929 && ! may_trap_p (pat)))
2931 trial = try_split (pat, trial, 0);
2932 pat = PATTERN (trial);
2933 if (eligible_for_delay (insn, *pslots_filled, trial))
2937 #ifdef ANNUL_IFTRUE_SLOTS
|| ! thread_if_true
#endif
2940 #ifdef ANNUL_IFFALSE_SLOTS
|| thread_if_true
#endif
)
{
2945 trial = try_split (pat, trial, 0);
2946 pat = PATTERN (trial);
if ((thread_if_true
2948 ? eligible_for_annul_false (insn, *pslots_filled, trial)
2949 : eligible_for_annul_true (insn, *pslots_filled, trial)))
#ifdef HAVE_cc0
2957 if (reg_mentioned_p (cc0_rtx, pat))
2958 link_cc0_insns (trial);
#endif
2961 /* If we own this thread, delete the insn. If this is the
2962 destination of a branch, show that a basic block status
2963 may have been updated. In any case, mark the new
2964 starting point of this thread. */
if (own_thread)
{
2967 update_block (trial, thread);
2968 delete_insn (trial);
}
else
2971 new_thread = next_active_insn (trial);
2973 temp = own_thread ? trial : copy_rtx (trial);
if (thread_if_true)
2975 INSN_FROM_TARGET_P (temp) = 1;

2977 delay_list = add_to_delay_list (temp, delay_list);
2979 if (slots_to_fill == ++(*pslots_filled))
2981 /* Even though we have filled all the slots, we
2982 may be branching to a location that has a
2983 redundant insn. Skip any if so. */
2984 while (new_thread && ! own_thread
2985 && ! insn_sets_resource_p (new_thread, &set, 1)
2986 && ! insn_sets_resource_p (new_thread, &needed, 1)
2987 && ! insn_references_resource_p (new_thread,
2989 && redundant_insn_p (new_thread, insn,
2991 new_thread = next_active_insn (new_thread);
3000 /* This insn can't go into a delay slot. */
3002 mark_set_resources (trial, &set, 0, 1);
3003 mark_referenced_resources (trial, &needed, 1);
3005 /* Ensure we don't put insns between the setting of cc and the comparison
3006 by moving a setting of cc into an earlier delay slot since these insns
3007 could clobber the condition code. */
set.cc = 1;
3010 /* If this insn is a register-register copy and the next insn has
3011 a use of our destination, change it to use our source. That way,
3012 it will become a candidate for our delay slot the next time
3013 through this loop. This case occurs commonly in loops that scan a list.
3016 We could check for more complex cases than those tested below,
3017 but it doesn't seem worth it. It might also be a good idea to try
3018 to swap the two insns. That might do better.
3020 We can't do this if the next insn modifies our source, because that
3021 would make the replacement into the insn invalid. This also
3022 prevents updating the contents of a PRE_INC. */
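/* For example (illustrative): given

       r2 = r1			;; TRIAL, a register-register copy
       r3 = r2 + 4		;; the next insn uses r2

   we rewrite the second insn as r3 = r1 + 4; the copy then no longer
   feeds it and may itself become a delay-slot candidate on the next
   iteration of this loop.  */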
3024 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
3025 && GET_CODE (SET_SRC (pat)) == REG
3026 && GET_CODE (SET_DEST (pat)) == REG)
3028 rtx next = next_nonnote_insn (trial);
3030 if (next && GET_CODE (next) == INSN
3031 && GET_CODE (PATTERN (next)) != USE
3032 && ! reg_set_p (SET_DEST (pat), next)
3033 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
3034 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
3038 /* If we stopped on a branch insn that has delay slots, see if we can
3039 steal some of the insns in those slots. */
3040 if (trial && GET_CODE (trial) == INSN
3041 && GET_CODE (PATTERN (trial)) == SEQUENCE
3042 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
3044 /* If this is the `true' thread, we will want to follow the jump,
3045 so we can only do this if we have taken everything up to here. */
3046 if (thread_if_true && trial == new_thread)
3048 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
3049 delay_list, &set, &needed,
3050 &opposite_needed, slots_to_fill,
3051 pslots_filled, &must_annul,
3053 else if (! thread_if_true)
3055 = steal_delay_list_from_fallthrough (insn, condition,
3057 delay_list, &set, &needed,
3058 &opposite_needed, slots_to_fill,
3059 pslots_filled, &must_annul);
3062 /* If we haven't found anything for this delay slot and it is very
3063 likely that the branch will be taken, see if the insn at our target
3064 increments or decrements a register with an increment that does not
3065 depend on the destination register. If so, try to place the opposite
3066 arithmetic insn after the jump insn and put the arithmetic insn in the
3067 delay slot. If we can't do this, return. */
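/* For example (an illustrative sketch): if the likely-taken branch
   targets

     L:  r2 = r2 + 1

   we can put the increment in the delay slot, redirect the branch past
   it, and emit the opposite insn, r2 = r2 - 1, after the jump so the
   rare fall-through path is left unchanged.  */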
3068 if (delay_list == 0 && likely && new_thread && GET_CODE (new_thread) == INSN)
3070 rtx pat = PATTERN (new_thread);
rtx dest;
rtx src;

trial = new_thread;
3075 pat = PATTERN (trial);
3077 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
3078 || ! eligible_for_delay (insn, 0, trial))
3081 dest = SET_DEST (pat), src = SET_SRC (pat);
3082 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
3083 && rtx_equal_p (XEXP (src, 0), dest)
3084 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
3086 rtx other = XEXP (src, 1);
3090 /* If this is a constant adjustment, use the same code with
3091 the negated constant. Otherwise, reverse the sense of the arithmetic. */
3093 if (GET_CODE (other) == CONST_INT)
3094 new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
3095 negate_rtx (GET_MODE (src), other));
3097 new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
3098 GET_MODE (src), dest, other);
3100 ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
insn);
3103 if (recog_memoized (ninsn) < 0
3104 || (insn_extract (ninsn),
3105 ! constrain_operands (INSN_CODE (ninsn), 1)))
3107 delete_insn (ninsn);
if (own_thread)
{
3113 update_block (trial, thread);
3114 delete_insn (trial);
}
else
3117 new_thread = next_active_insn (trial);
3119 ninsn = own_thread ? trial : copy_rtx (trial);
if (thread_if_true)
3121 INSN_FROM_TARGET_P (ninsn) = 1;

3123 delay_list = add_to_delay_list (ninsn, NULL_RTX);
3128 if (delay_list && must_annul)
3129 INSN_ANNULLED_BRANCH_P (insn) = 1;
3131 /* If we are to branch into the middle of this thread, find an appropriate
3132 label or make a new one if none, and redirect INSN to it. If we hit the
3133 end of the function, use the end-of-function label. */
3134 if (new_thread != thread)
3138 if (! thread_if_true)
abort ();
3141 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
3142 && (simplejump_p (new_thread)
3143 || GET_CODE (PATTERN (new_thread)) == RETURN))
3144 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3146 if (new_thread == 0)
3147 label = find_end_label ();
3148 else if (GET_CODE (new_thread) == CODE_LABEL)
label = new_thread;
else
3151 label = get_label_before (new_thread);
3153 redirect_jump (insn, label);
3159 /* Make another attempt to find insns to place in delay slots.
3161 We previously looked for insns located in front of the delay insn
3162 and, for non-jump delay insns, located behind the delay insn.
3164 Here only try to schedule jump insns and try to move insns from either
3165 the target or the following insns into the delay slot. If annulling is
3166 supported, we will be likely to do this. Otherwise, we can do this only if safe. */
3170 fill_eager_delay_slots (first)
3175 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3177 for (i = 0; i < num_unfilled_slots; i++)
3180 rtx target_label, insn_at_target, fallthrough_insn;
3183 int own_fallthrough;
3184 int prediction, slots_to_fill, slots_filled;
3186 insn = unfilled_slots_base[i];
if (insn == 0
3188 || INSN_DELETED_P (insn)
3189 || GET_CODE (insn) != JUMP_INSN
3190 || ! condjump_p (insn))
3193 slots_to_fill = num_delay_slots (insn);
3194 if (slots_to_fill == 0)
3198 target_label = JUMP_LABEL (insn);
3199 condition = get_branch_condition (insn, target_label);

if (condition == 0)
continue;
3204 /* Get the next active fallthrough and target insns and see if we own
3205 them. Then see whether the branch is likely true. We don't need
3206 to do a lot of this for unconditional branches. */
3208 insn_at_target = next_active_insn (target_label);
3209 own_target = own_thread_p (target_label, target_label, 0);
3211 if (condition == const_true_rtx)
3213 own_fallthrough = 0;
3214 fallthrough_insn = 0;
3219 fallthrough_insn = next_active_insn (insn);
3220 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3221 prediction = mostly_true_jump (insn, condition);
3224 /* If this insn is expected to branch, first try to get insns from our
3225 target, then our fallthrough insns. If it is not expected to branch,
3226 try the other order. */
if (prediction > 0)
{
delay_list
3231 = fill_slots_from_thread (insn, condition, insn_at_target,
3232 fallthrough_insn, prediction == 2, 1,
3233 own_target, own_fallthrough,
3234 slots_to_fill, &slots_filled);
3236 if (delay_list == 0 && own_fallthrough)
{
3238 /* Even though we didn't find anything for delay slots,
3239 we might have found a redundant insn which we deleted
3240 from the thread that was filled. So we have to recompute
3241 the next insn at the target. */
3242 target_label = JUMP_LABEL (insn);
3243 insn_at_target = next_active_insn (target_label);
delay_list
3246 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3247 insn_at_target, 0, 0,
3248 own_fallthrough, own_target,
3249 slots_to_fill, &slots_filled);
}
}
else
{
3254 if (own_fallthrough)
delay_list
3256 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3257 insn_at_target, 0, 0,
3258 own_fallthrough, own_target,
3259 slots_to_fill, &slots_filled);
3261 if (delay_list == 0)
delay_list
3263 = fill_slots_from_thread (insn, condition, insn_at_target,
3264 next_active_insn (insn), 0, 1,
3265 own_target, own_fallthrough,
3266 slots_to_fill, &slots_filled);
}

if (delay_list)
3270 unfilled_slots_base[i]
3271 = emit_delay_sequence (insn, delay_list,
3272 slots_filled, slots_to_fill);
3274 if (slots_to_fill == slots_filled)
3275 unfilled_slots_base[i] = 0;
3277 note_delay_statistics (slots_filled, 1);
3281 /* Once we have tried two ways to fill a delay slot, make a pass over the
3282 code to try to improve the results and to do such things as more jump
3286 relax_delay_slots (first)
3289 register rtx insn, next, pat;
3290 register rtx trial, delay_insn, target_label;
3292 /* Look at every JUMP_INSN and see if we can improve it. */
3293 for (insn = first; insn; insn = next)
3297 next = next_active_insn (insn);
3299 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3300 the next insn, or jumps to a label that is not the last of a
3301 group of consecutive labels. */
3302 if (GET_CODE (insn) == JUMP_INSN
3303 && condjump_p (insn)
3304 && (target_label = JUMP_LABEL (insn)) != 0)
3306 target_label = follow_jumps (target_label);
3307 target_label = prev_label (next_active_insn (target_label));
3309 if (target_label == 0)
3310 target_label = find_end_label ();
3312 if (next_active_insn (target_label) == next)
{
delete_jump (insn);
continue;
}
3318 if (target_label != JUMP_LABEL (insn))
3319 redirect_jump (insn, target_label);
3321 /* See if this jump branches around an unconditional jump.
3322 If so, invert this jump and point it to the target of the second jump. */
3324 if (next && GET_CODE (next) == JUMP_INSN
3325 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3326 && next_active_insn (target_label) == next_active_insn (next)
3327 && no_labels_between_p (insn, next))
3329 rtx label = JUMP_LABEL (next);
3331 /* Be careful how we do this to avoid deleting code or
3332 labels that are momentarily dead. See similar optimization in jump.c.
3335 We also need to ensure we properly handle the case when
3336 invert_jump fails. */
3338 ++LABEL_NUSES (target_label);
3340 ++LABEL_NUSES (label);
3342 if (invert_jump (insn, label))
3349 --LABEL_NUSES (label);
3351 if (--LABEL_NUSES (target_label) == 0)
3352 delete_insn (target_label);
3358 /* If this is an unconditional jump and the previous insn is a
3359 conditional jump, try reversing the condition of the previous
3360 insn and swapping our targets. The next pass might be able to
3363 Don't do this if we expect the conditional branch to be true, because
3364 we would then be making the more common case longer. */
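/* For example (illustrative): the pair

       beq L1			;; conditional, rarely taken
       b   L2			;; INSN, unconditional

   becomes

       bne L2
       b   L1

   which the next pass may be able to simplify further.  */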
3366 if (GET_CODE (insn) == JUMP_INSN
3367 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3368 && (other = prev_active_insn (insn)) != 0
3369 && condjump_p (other)
3370 && no_labels_between_p (other, insn)
3371 && ! mostly_true_jump (other,
3372 get_branch_condition (other,
3373 JUMP_LABEL (other))))
3375 rtx other_target = JUMP_LABEL (other);
3377 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
3378 as we move the label. */
3380 ++LABEL_NUSES (other_target);
3382 if (invert_jump (other, target_label))
3383 redirect_jump (insn, other_target);
3386 --LABEL_NUSES (other_target);
3389 /* Now look only at cases where we have filled a delay slot. */
3390 if (GET_CODE (insn) != INSN
3391 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3394 pat = PATTERN (insn);
3395 delay_insn = XVECEXP (pat, 0, 0);
3397 /* See if the first insn in the delay slot is redundant with some
3398 previous insn. Remove it from the delay slot if so; then set up
3399 to reprocess this insn. */
3400 if (redundant_insn_p (XVECEXP (pat, 0, 1), delay_insn, 0))
3402 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3403 next = prev_active_insn (next);
3407 /* Now look only at the cases where we have a filled JUMP_INSN. */
3408 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3409 || ! condjump_p (XVECEXP (PATTERN (insn), 0, 0)))
3412 target_label = JUMP_LABEL (delay_insn);
3416 /* If this jump goes to another unconditional jump, thread it, but
3417 don't convert a jump into a RETURN here. */
3418 trial = follow_jumps (target_label);
3419 trial = prev_label (next_active_insn (trial));
3420 if (trial == 0 && target_label != 0)
3421 trial = find_end_label ();
3423 if (trial != target_label)
3425 redirect_jump (delay_insn, trial);
3426 target_label = trial;
3429 /* If the first insn at TARGET_LABEL is redundant with a previous
3430 insn, redirect the jump to the following insn and process again.
3431 trial = next_active_insn (target_label);
3432 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3433 && redundant_insn_p (trial, insn, 0))
3435 trial = next_active_insn (trial);
if (trial == 0)
3437 target_label = find_end_label ();
else
3439 target_label = get_label_before (trial);
3440 redirect_jump (delay_insn, target_label);
3445 /* Similarly, if it is an unconditional jump with one insn in its
3446 delay list and that insn is redundant, thread the jump. */
3447 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3448 && XVECLEN (PATTERN (trial), 0) == 2
3449 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3450 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3451 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3452 && redundant_insn_p (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3454 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3455 if (target_label == 0)
3456 target_label = find_end_label ();
3457 redirect_jump (delay_insn, target_label);
3463 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3464 && prev_active_insn (target_label) == insn
#ifdef HAVE_cc0
3466 /* If the last insn in the delay slot sets CC0 for some insn,
3467 various code assumes that it is in a delay slot. We could
3468 put it back where it belonged and delete the register notes,
3469 but it doesn't seem worthwhile in this uncommon case. */
3470 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3471 REG_CC_USER, NULL_RTX)
#endif
)
{
3477 /* All this insn does is execute its delay list and jump to the
3478 following insn. So delete the jump and just execute the delay
3481 We do this by deleting the INSN containing the SEQUENCE, then
3482 re-emitting the insns separately, and then deleting the jump.
3483 This allows the count of the jump target to be properly decremented. */
3486 /* Clear the from target bit, since these insns are no longer in delay slots. */
3488 for (i = 0; i < XVECLEN (pat, 0); i++)
3489 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3491 trial = PREV_INSN (insn);
delete_insn (insn);
3493 emit_insn_after (pat, trial);
3494 delete_scheduled_jump (delay_insn);
continue;
3498 /* See if this is an unconditional jump around a single insn which is
3499 identical to the one in its delay slot. In this case, we can just
3500 delete the branch and the insn in its delay slot. */
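/* For example (illustrative): in

       b L			;; jump with insn X in its delay slot
       X			;; NEXT, identical to the slot insn
     L:

   taking the branch executes X and lands at L, exactly what falling
   through does, so the jump and its copy of X can both be deleted.  */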
3501 if (next && GET_CODE (next) == INSN
3502 && prev_label (next_active_insn (next)) == target_label
3503 && simplejump_p (insn)
3504 && XVECLEN (pat, 0) == 2
3505 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
{
delete_insn (insn);
continue;
}
3511 /* See if this jump (with its delay slots) branches around another
3512 jump (without delay slots). If so, invert this jump and point
3513 it to the target of the second jump. We cannot do this for
3514 annulled jumps, though. Again, don't convert a jump to a RETURN here. */
3516 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3517 && next && GET_CODE (next) == JUMP_INSN
3518 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3519 && next_active_insn (target_label) == next_active_insn (next)
3520 && no_labels_between_p (insn, next))
3522 rtx label = JUMP_LABEL (next);
3523 rtx old_label = JUMP_LABEL (delay_insn);
if (label == 0)
3526 label = find_end_label ();
3528 /* Be careful how we do this to avoid deleting code or labels
3529 that are momentarily dead. See similar optimization in jump.c */
3531 ++LABEL_NUSES (old_label);
3533 if (invert_jump (delay_insn, label))
3539 if (old_label && --LABEL_NUSES (old_label) == 0)
3540 delete_insn (old_label);
3544 /* If we own the thread opposite the way this insn branches, see if we
3545 can merge its delay slots with following insns. */
3546 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3547 && own_thread_p (NEXT_INSN (insn), 0, 1))
3548 try_merge_delay_insns (insn, next);
3549 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3550 && own_thread_p (target_label, target_label, 0))
3551 try_merge_delay_insns (insn, next_active_insn (target_label));
3553 /* If we get here, we haven't deleted INSN. But we may have deleted
3554 NEXT, so recompute it. */
3555 next = next_active_insn (insn);
3561 /* Look for filled jumps to the end of function label. We can try to convert
3562 them into RETURN insns if the insns in the delay slot are valid for the RETURN as well. */
3566 make_return_insns (first)
3569 rtx insn, jump_insn, pat;
3570 rtx real_return_label = end_of_function_label;
3573 /* See if there is a RETURN insn in the function other than the one we
3574 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3575 into a RETURN to jump to it. */
3576 for (insn = first; insn; insn = NEXT_INSN (insn))
3577 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
{
3579 real_return_label = get_label_before (insn);
break;
}
3583 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3584 was equal to END_OF_FUNCTION_LABEL. */
3585 LABEL_NUSES (real_return_label)++;
3587 /* Clear the list of insns to fill so we can use it. */
3588 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3590 for (insn = first; insn; insn = NEXT_INSN (insn))
3592 /* Only look at filled JUMP_INSNs that go to the end of function label. */
3594 if (GET_CODE (insn) != INSN
3595 || GET_CODE (PATTERN (insn)) != SEQUENCE
3596 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3597 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
continue;
3600 pat = PATTERN (insn);
3601 jump_insn = XVECEXP (pat, 0, 0);
3603 /* If we can't make the jump into a RETURN, redirect it to the best
3604 RETURN and go on to the next insn. */
3605 if (! redirect_jump (jump_insn, NULL_RTX))
{
3607 redirect_jump (jump_insn, real_return_label);
continue;
}
3611 /* See if this RETURN can accept the insns currently in its delay slot.
3612 It can if it has more or an equal number of slots and the contents
3613 of each is valid. */
3615 slots = num_delay_slots (jump_insn);
3616 if (slots >= XVECLEN (pat, 0) - 1)
3618 for (i = 1; i < XVECLEN (pat, 0); i++)
if (! (
3620 #ifdef ANNUL_IFFALSE_SLOTS
3621 (INSN_ANNULLED_BRANCH_P (jump_insn)
3622 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3623 ? eligible_for_annul_false (jump_insn, i - 1,
3624 XVECEXP (pat, 0, i)) :
#endif
3626 #ifdef ANNUL_IFTRUE_SLOTS
3627 (INSN_ANNULLED_BRANCH_P (jump_insn)
3628 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3629 ? eligible_for_annul_true (jump_insn, i - 1,
3630 XVECEXP (pat, 0, i)) :
#endif
3632 eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i))))
break;
3638 if (i == XVECLEN (pat, 0))
3641 /* We have to do something with this insn. If it is an unconditional
3642 RETURN, delete the SEQUENCE and output the individual insns,
3643 followed by the RETURN. Then set things up so we try to find
3644 insns for its delay slots, if it needs some. */
3645 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
{
3647 rtx prev = PREV_INSN (insn);

delete_insn (insn);
3650 for (i = 1; i < XVECLEN (pat, 0); i++)
3651 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3653 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3654 emit_barrier_after (insn);
3657 obstack_ptr_grow (&unfilled_slots_obstack, insn);
}
else
3660 /* It is probably more efficient to keep this with its current
3661 delay slot as a branch to a RETURN. */
3662 redirect_jump (jump_insn, real_return_label);
3665 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3666 new delay slots we have created. */
3667 if (--LABEL_NUSES (real_return_label) == 0)
3668 delete_insn (real_return_label);
3670 fill_simple_delay_slots (first, 1);
3671 fill_simple_delay_slots (first, 0);
3675 /* Try to find insns to place in delay slots. */
3678 dbr_schedule (first, file)
3682 rtx insn, next, epilogue_insn = 0;
3685 int old_flag_no_peephole = flag_no_peephole;
3687 /* Execute `final' once in prescan mode to delete any insns that won't be
3688 used. Don't let final try to do any peephole optimization--it will
3689 ruin dataflow information for this pass. */
3691 flag_no_peephole = 1;
3692 final (first, 0, NO_DEBUG, 1, 1);
3693 flag_no_peephole = old_flag_no_peephole;
3696 /* Find the highest INSN_UID and allocate and initialize our map from
3697 INSN_UID's to position in code. */
3698 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3700 if (INSN_UID (insn) > max_uid)
3701 max_uid = INSN_UID (insn);
3702 if (GET_CODE (insn) == NOTE
3703 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
3704 epilogue_insn = insn;
3707 uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
3708 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3709 uid_to_ruid[INSN_UID (insn)] = i;
3711 /* Initialize the list of insns that need filling. */
3712 if (unfilled_firstobj == 0)
3714 gcc_obstack_init (&unfilled_slots_obstack);
3715 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
3718 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3722 INSN_ANNULLED_BRANCH_P (insn) = 0;
3723 INSN_FROM_TARGET_P (insn) = 0;
3725 /* Skip vector tables. We can't get attributes for them. */
3726 if (GET_CODE (insn) == JUMP_INSN
3727 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
3728 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
continue;
3731 if (num_delay_slots (insn) > 0)
3732 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3734 /* Ensure all jumps go to the last of a set of consecutive labels. */
3735 if (GET_CODE (insn) == JUMP_INSN && condjump_p (insn)
3736 && JUMP_LABEL (insn) != 0
3737 && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
3738 != JUMP_LABEL (insn)))
3739 redirect_jump (insn, target);
3742 /* Indicate what resources are required to be valid at the end of the current
3743 function. The condition code never is and memory always is. If the
3744 frame pointer is needed, it is and so is the stack pointer unless
3745 EXIT_IGNORE_STACK is non-zero. If the frame pointer is not needed, the
3746 stack pointer is. Registers used to return the function value are
3747 needed. Registers holding global variables are needed. */
3749 end_of_function_needs.cc = 0;
3750 end_of_function_needs.memory = 1;
3751 CLEAR_HARD_REG_SET (end_of_function_needs.regs);
3753 if (frame_pointer_needed)
{
3755 SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
3756 #ifdef EXIT_IGNORE_STACK
3757 if (! EXIT_IGNORE_STACK)
#endif
3759 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
}
else
3762 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
3764 if (current_function_return_rtx != 0
3765 && GET_CODE (current_function_return_rtx) == REG)
3766 mark_referenced_resources (current_function_return_rtx,
3767 &end_of_function_needs, 1);
3769 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i])
3771 SET_HARD_REG_BIT (end_of_function_needs.regs, i);
3773 /* The registers required to be live at the end of the function are
3774 represented in the flow information as being dead just prior to
3775 reaching the end of the function. For example, the return of a value
3776 might be represented by a USE of the return register immediately
3777 followed by an unconditional jump to the return label where the
3778 return label is the end of the RTL chain. The end of the RTL chain
3779 is then taken to mean that the return register is live.
3781 This sequence is no longer maintained when epilogue instructions are
3782 added to the RTL chain. To reconstruct the original meaning, the
3783 start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
3784 point where these registers become live (start_of_epilogue_needs).
3785 If epilogue instructions are present, the registers set by those
3786 instructions won't have been processed by flow. Thus, those
3787 registers are additionally required at the end of the RTL chain
3788 (end_of_function_needs). */
3790 start_of_epilogue_needs = end_of_function_needs;
3792 while (epilogue_insn = next_nonnote_insn (epilogue_insn))
3793 mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);
3795 /* Show we haven't computed an end-of-function label yet. */
3796 end_of_function_label = 0;
3798 /* Allocate and initialize the tables used by mark_target_live_regs. */
target_hash_table
3800 = (struct target_info **) alloca ((TARGET_HASH_PRIME
3801 * sizeof (struct target_info *)));
3802 bzero (target_hash_table, TARGET_HASH_PRIME * sizeof (struct target_info *));
3804 bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
3805 bzero (bb_ticks, n_basic_blocks * sizeof (int));
3807 /* Initialize the statistics for this function. */
3808 bzero (num_insns_needing_delays, sizeof num_insns_needing_delays);
3809 bzero (num_filled_delays, sizeof num_filled_delays);
3811 /* Now do the delay slot filling. Try everything twice in case earlier
3812 changes make more slots fillable. */
3814 for (reorg_pass_number = 0;
3815 reorg_pass_number < MAX_REORG_PASSES;
3816 reorg_pass_number++)
3818 fill_simple_delay_slots (first, 1);
3819 fill_simple_delay_slots (first, 0);
3820 fill_eager_delay_slots (first);
3821 relax_delay_slots (first);
3824 /* Delete any USE insns made by update_block; subsequent passes don't need
3825 them or know how to deal with them. */
3826 for (insn = first; insn; insn = next)
3828 next = NEXT_INSN (insn);
3830 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
3831 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
3832 next = delete_insn (insn);
3835 /* If we made an end of function label, indicate that it is now
3836 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
3837 If it is now unused, delete it. */
3838 if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
3839 delete_insn (end_of_function_label);
#ifdef HAVE_return
3842 if (HAVE_return && end_of_function_label != 0)
3843 make_return_insns (first);
#endif
3846 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3848 /* It is not clear why the line below is needed, but it does seem to be. */
3849 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
3851 /* Reposition the prologue and epilogue notes in case we moved the
3852 prologue/epilogue insns. */
3853 reposition_prologue_and_epilogue_notes (first);
if (file)
{
3857 register int i, j, need_comma;
3859 for (reorg_pass_number = 0;
3860 reorg_pass_number < MAX_REORG_PASSES;
3861 reorg_pass_number++)
3863 fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
3864 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
3867 fprintf (file, ";; Reorg function #%d\n", i);
3869 fprintf (file, ";; %d insns needing delay slots\n;; ",
3870 num_insns_needing_delays[i][reorg_pass_number]);
3872 for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
3873 if (num_filled_delays[i][j][reorg_pass_number])
{
if (need_comma)
3876 fprintf (file, ", ");
need_comma = 1;
3878 fprintf (file, "%d got %d delays",
3879 num_filled_delays[i][j][reorg_pass_number], j);
}
3881 fprintf (file, "\n");
3886 #endif /* DELAY_SLOTS */