/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995, 1996 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.
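
   For example, on such a machine an insn from before the branch can be
   moved into the slot (a sketch with made-up mnemonics for this comment,
   not output of this pass):

	add r4,r4,1		b L2
	b L2		==>	add r4,r4,1	; fills the delay slot
	nop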
   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   has been taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.
   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.
   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.
   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
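
/* In rough outline, the entry point of this pass records each insn that
   still needs delay slots on `unfilled_slots_obstack' and then applies
   the techniques above.  A hedged sketch of the driver's core (the real
   entry point, `dbr_schedule', handles many more details, such as
   statistics and epilogue resources):

     fill_simple_delay_slots (first, 1);
     fill_simple_delay_slots (first, 0);
     fill_eager_delay_slots (first);
     relax_delay_slots (first);
 */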
#include <stdio.h>
#include "config.h"
#include "rtl.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "insn-flags.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
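
/* A hedged sketch of how the obstack is used: an insn with unfilled
   slots is recorded with

     if (num_delay_slots (insn) > 0)
       obstack_ptr_grow (&unfilled_slots_obstack, insn);

   and the filling passes then walk the recorded pointers from
   `unfilled_slots_base' up to `unfilled_slots_next'.  */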
/* This structure is used to indicate which hardware resources are set or
   needed by insns so far.  */

struct resources
{
  char memory;			/* Insn sets or needs a memory location.  */
  char unch_memory;		/* Insn sets or needs an "unchanging" MEM.  */
  char volatil;			/* Insn sets or needs a volatile memory loc.  */
  char cc;			/* Insn sets or needs the condition codes.  */
  HARD_REG_SET regs;		/* Which registers are set or needed.  */
};

/* Macro to clear all resources.  */
#define CLEAR_RESOURCE(RES)	\
 do { (RES)->memory = (RES)->unch_memory = (RES)->volatil = (RES)->cc = 0; \
      CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
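
/* A typical use of this structure, sketched from the helpers below:

     struct resources needed;

     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, 1);

   after which the accumulated set can be tested against another with
   `resource_conflicts_p'.  */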
/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257

/* Define the hash table itself.  */

static struct target_info **target_hash_table;
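
/* Lookups hash on the INSN_UID of the target and walk the bucket chain,
   roughly (a sketch; `mark_target_live_regs' is the main consumer):

     struct target_info *tinfo;

     for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  tinfo; tinfo = tinfo->next)
       if (tinfo->uid == INSN_UID (target))
	 break;
 */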
/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
static void mark_referenced_resources PROTO((rtx, struct resources *, int));
static void mark_set_resources	PROTO((rtx, struct resources *, int, int));
static int stop_search_p	PROTO((rtx, int));
static int resource_conflicts_p	PROTO((struct resources *,
				       struct resources *));
static int insn_references_resource_p PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p PROTO((rtx, struct resources *, int));
static rtx find_end_label	PROTO((void));
static rtx emit_delay_sequence	PROTO((rtx, rtx, int, int));
static rtx add_to_delay_list	PROTO((rtx, rtx));
static void delete_from_delay_slot PROTO((rtx));
static void delete_scheduled_jump PROTO((rtx));
static void note_delay_statistics PROTO((int, int));
static rtx optimize_skip	PROTO((rtx));
static int get_jump_flags	PROTO((rtx, rtx));
static int rare_destination	PROTO((rtx));
static int mostly_true_jump	PROTO((rtx, rtx));
static rtx get_branch_condition	PROTO((rtx, rtx));
static int condition_dominates_p PROTO((rtx, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
					       struct resources *,
					       struct resources *,
					       struct resources *,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
						    struct resources *,
						    struct resources *,
						    struct resources *,
						    int, int *, int *));
static void try_merge_delay_insns PROTO((rtx, rtx));
static rtx redundant_insn	PROTO((rtx, rtx, rtx));
static int own_thread_p		PROTO((rtx, rtx, int));
static int find_basic_block	PROTO((rtx));
static void update_block	PROTO((rtx, rtx));
static int reorg_redirect_jump	PROTO((rtx, rtx));
static void update_reg_dead_notes PROTO((rtx, rtx));
static void update_reg_unused_notes PROTO((rtx, rtx));
static void update_live_status	PROTO((rtx, rtx));
static rtx next_insn_no_annul	PROTO((rtx));
static void mark_target_live_regs PROTO((rtx, struct resources *));
static void fill_simple_delay_slots PROTO((rtx, int));
static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
					 int, int, int, int *));
static void fill_eager_delay_slots PROTO((rtx));
static void relax_delay_slots	PROTO((rtx));
static void make_return_insns	PROTO((rtx));
static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
/* Given X, some rtl, and RES, a pointer to a `struct resources', mark
   which resources are referenced by the insn.  If INCLUDE_CALLED_ROUTINE
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

static void
mark_referenced_resources (x, res, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     register int include_delayed_effects;
{
  register enum rtx_code code = GET_CODE (x);
  register int i, j;
  register char *format_ptr;
  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) != REG)
	mark_referenced_resources (SUBREG_REG (x), res, 0);
      else
	{
	  int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	  int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  for (i = regno; i < last_regno; i++)
	    SET_HARD_REG_BIT (res->regs, i);
	}
      return;

    case REG:
      for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (RTX_UNCHANGING_P (x))
	res->unch_memory = 1;
      else
	res->memory = 1;
      res->volatil = MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case ASM_OPERANDS:
      res->volatil = MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
      mark_referenced_resources (XEXP (x, 1), res, 0);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, 0);

      x = SET_DEST (x);
      if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
	mark_referenced_resources (x, res, 0);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (GET_CODE (x) == MEM)
	mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CLOBBER:
      return;
    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx insn = PREV_INSN (x);
	  rtx sequence = 0;
	  int seq_size = 0;
	  rtx next = NEXT_INSN (x);
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      next = NEXT_INSN (NEXT_INSN (insn));
	      sequence = PATTERN (NEXT_INSN (insn));
	      seq_size = XVECLEN (sequence, 0);
	      if (GET_CODE (sequence) != SEQUENCE)
		abort ();
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns non-zero.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  SET_DEST (XEXP (link, 0))))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (SET_DEST (XEXP (link, 0)),
					       res, 0);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
/* Given X, a part of an insn, and a pointer to a `struct resources', RES,
   indicate which resources are modified by the insn.  If INCLUDE_CALLED_ROUTINE
   is nonzero, also mark resources potentially set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

static void
mark_set_resources (x, res, in_dest, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     int in_dest;
     int include_delayed_effects;
{
  register enum rtx_code code;
  register int i, j;
  register char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;
    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (include_delayed_effects)
	{
	  rtx next = NEXT_INSN (x);
	  rtx prev = PREV_INSN (x);
	  rtx link;

	  res->cc = res->memory = 1;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (call_used_regs[i] || global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* If X is part of a delay slot sequence, then NEXT should be
	     the first insn after the sequence.  */
	  if (NEXT_INSN (prev) != x)
	    next = NEXT_INSN (NEXT_INSN (prev));

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;
    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (include_delayed_effects
			   || GET_CODE (SET_SRC (x)) != CALL),
			  0);

      mark_set_resources (SET_SRC (x), res, 0, 0);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, 0);
      return;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
	       && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
	  mark_set_resources (XVECEXP (x, 0, i), res, 0,
			      include_delayed_effects);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, 0);
      return;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, 0);
      mark_set_resources (XEXP (x, 1), res, 0, 0);
      mark_set_resources (XEXP (x, 2), res, 0, 0);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->unch_memory = RTX_UNCHANGING_P (x);
	  res->volatil = MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, 0);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (GET_CODE (SUBREG_REG (x)) != REG)
	    mark_set_resources (SUBREG_REG (x), res,
				in_dest, include_delayed_effects);
	  else
	    {
	      int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	      int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      for (i = regno; i < last_regno; i++)
		SET_HARD_REG_BIT (res->regs, i);
	    }
	}
      return;

    case REG:
      if (in_dest)
	for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	  SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      return;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest,
			      include_delayed_effects);
	break;
      }
}
/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (insn, labels_p)
     register rtx insn;
     int labels_p;
{
  if (insn == 0)
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      abort ();
    }
}
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

static int
resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}
/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_CALLED_ROUTINE is set, return if the called
   routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}
/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_CALLED_ROUTINE is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}
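
/* These two predicates are typically used together when testing whether
   a candidate TRIAL insn may be moved into a delay slot; a hedged sketch
   of the pattern used later in this file:

     if (insn_references_resource_p (trial, &set, 0)
	 || insn_sets_resource_p (trial, &needed, 0)
	 || insn_sets_resource_p (trial, &set, 0))
       ;  /* TRIAL conflicts and cannot be used.  */
 */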
/* Find a label at the end of the function or before a RETURN.  If there is
   none, make one.  */

static rtx
find_end_label ()
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }
  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;
  else
    {
      /* Otherwise, make a new label and emit a RETURN and BARRIER,
	 if needed.  */
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      emit_label (end_of_function_label);
#ifdef HAVE_return
      if (HAVE_return)
	{
	  /* The return we make may have delay slots too.  */
	  rtx insn = gen_return ();
	  insn = emit_jump_insn (insn);
	  emit_barrier ();
	  if (num_delay_slots (insn) > 0)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
#endif
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */

static rtx
emit_delay_sequence (insn, list, length, avail)
     rtx insn;
     rtx list;
     int length;
     int avail;
{
  register int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx (SEQUENCE, VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
    {
      delete_insn (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);
  else
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);
  else
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* Remove any REG_DEAD notes because we can't rely on them now
	 that the insn has been moved.  */
      for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD)
	  XEXP (note, 0) = const0_rtx;
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  if (i != length + 1)
    abort ();

  return seq_insn;
}
/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (insn, delay_list)
     rtx insn;
     rtx delay_list;
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      struct target_info *tinfo;

      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  {
	    tinfo->block = -1;
	    break;
	  }

      return gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
/* Delete INSN from the delay slot of the insn that it is in.  This may
   produce an insn without anything in its delay slots.  */

static void
delete_from_delay_slot (insn)
     rtx insn;
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2, 0);
  else
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);
}
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (insn)
     rtx insn;
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, 0))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_insn (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_insn (insn);
}
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (slots_filled, index)
     int slots_filled, index;
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the L1 target.

	(orig)		 (skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2		insn2
	insn3		insn3	      L1':
					insn3

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform the similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

	(orig)		 (skip)
	Bcc.n L1	Bcc',a L2
	insn		insn
      L1:	      L1:
	Bra L2		Bra L2

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */
static rtx
optimize_skip (insn)
     register rtx insn;
{
  register rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  rtx target_label;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags)))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
      || (next_trial != 0
	  && GET_CODE (next_trial) == JUMP_INSN
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn)))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_insn (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	{
	  target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* Recompute the flags based on TARGET_LABEL since threading
	     the jump to TARGET_LABEL may change the direction of the
	     jump (which may change the circumstances in which the
	     delay slot is nullified).  */
	  flags = get_jump_flags (insn, target_label);
	  if (eligible_for_annul_true (insn, 0, trial, flags))
	    reorg_redirect_jump (insn, target_label);
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Unconditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (insn, label)
     rtx insn, label;
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
     direction.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && INSN_UID (insn) <= max_uid
      && label != 0
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If insn is a conditional branch, call mostly_true_jump to determine
     the branch prediction.

     Unconditional branches are predicted as very likely taken.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	case 2:
	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  break;
	case 1:
	  flags |= ATTR_FLAG_likely;
	  break;
	case 0:
	  flags |= ATTR_FLAG_unlikely;
	  break;
	case -1:
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	  break;

	default:
	  abort ();
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}
/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if DEST will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (insn)
     rtx insn;
{
  int jump_count = 0;
  rtx next;

  for (; insn; insn = next)
    {
      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  return 0;

	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    return 1;
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}
/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx insn;
  int rare_dest = rare_destination (target_label);
  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  /* If this is a branch outside a loop, it is highly unlikely.  */
  if (GET_CODE (PATTERN (jump_insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
    return -1;

  if (target_label)
    {
      /* If this is the test of a loop, it is very likely true.  We scan
	 backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
	 before the next real insn, we assume the branch is to the top of
	 the loop.  */
      for (insn = PREV_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = PREV_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  return 2;

      /* If this is a jump to the test of a loop, it is likely true.  We scan
	 forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
	 before the next real insn, we assume the branch is to the loop branch
	 test.  */
      for (insn = NEXT_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = NEXT_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
	  return 1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      return 1;
    case EQ:
      return 0;
    case NE:
      return 1;
    case LE:
    case LT:
      if (XEXP (condition, 1) == const0_rtx)
	return 0;
      break;
    case GE:
    case GT:
      if (XEXP (condition, 1) == const0_rtx)
	return 1;
      break;
    }

  /* Predict that backward branches are usually taken, forward branches
     usually not.  If we don't know whether this is forward or backward,
     assume the branch will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}
/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (insn, target)
     rtx insn;
     rtx target;
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    return gen_rtx (reverse_condition (GET_CODE (XEXP (src, 0))),
		    GET_MODE (XEXP (src, 0)),
		    XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));

  return 0;
}
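
/* For reference, the kind of jump pattern matched above looks roughly
   like this (a sketch, not output from any particular machine
   description):

     (set (pc) (if_then_else (eq (reg 1) (const_int 0))
			     (label_ref L42)
			     (pc)))

   for which get_branch_condition returns the (eq ...) expression when
   TARGET is the label L42.  */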
/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (condition, insn)
     rtx condition;
     rtx insn;
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (jump, newlabel, seq)
     rtx jump, newlabel, seq;
{
  int flags, slots, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
     rtx jump, newlabel, delay_list;
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */
static rtx
steal_delay_list_from_target (insn, condition, seq, delay_list,
			      sets, needed, other_needed,
			      slots_to_fill, pslots_filled, pannul_p,
			      pnew_thread)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
     rtx *pnew_thread;
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int i;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.  */

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay list.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul = 1,
	     eligible_for_annul_false (insn, total_slots_filled, trial, flags)))
	{
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  *pannul_p = must_annul;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (insn, condition, seq,
				   delay_list, sets, needed, other_needed,
				   slots_to_fill, pslots_filled, pannul_p)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
{
  int i;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )
	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! *pannul_p
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (*pannul_p = 1,
	     eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  return delay_list;
}
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (insn, thread)
     rtx insn, thread;
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     NEXT_TO_MATCH.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    mark_referenced_resources (next_to_match, &needed, 1);
  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (GET_CODE (trial) == INSN
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_insn (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx (INSN_LIST, VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	  if (! annul_p)
	    mark_referenced_resources (next_to_match, &needed, 1);
	}

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);
    }
  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, 1);
      mark_referenced_resources (filled_insn, &needed, 1);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &needed, 1)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  update_block (dtrial, thread);
		  delete_from_delay_slot (dtrial);
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		merged_insns = gen_rtx (INSN_LIST, SImode, dtrial,
					merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	}
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not any more.  In that case delete all the
     merged insns.  Also clear the INSN_FROM_TARGET_P bit of each insn in
     the delay list so that we know that it isn't only being used at the
     target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_from_delay_slot (XEXP (merged_insns, 0));
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_insn (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */

static rtx
redundant_insn (insn, target, delay_list)
     rtx insn;
     rtx target;
     rtx delay_list;
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;
1964 /* Scan backwards looking for a match. */
1965 for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
1967 if (GET_CODE (trial) == CODE_LABEL)
1970 if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
1973 pat = PATTERN (trial);
1974 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1977 if (GET_CODE (pat) == SEQUENCE)
1979 /* Stop for a CALL and its delay slots because it is difficult to
1980 track its resource needs correctly. */
1981 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1984 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1985 slots because it is difficult to track its resource needs correctly.  */
1988 #ifdef INSN_SETS_ARE_DELAYED
1989 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1993 #ifdef INSN_REFERENCES_ARE_DELAYED
1994 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1998 /* See if any of the insns in the delay slot match, updating
1999 resource requirements as we go. */
2000 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2001 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
2002 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat))
2005 /* If found a match, exit this loop early. */
2010 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat))
2014 /* If we didn't find an insn that matches, return 0. */
2018 /* See what resources this insn sets and needs. If they overlap, or
2019 if this insn references CC0, it can't be redundant. */
2021 CLEAR_RESOURCE (&needed);
2022 CLEAR_RESOURCE (&set);
2023 mark_set_resources (insn, &set, 0, 1);
2024 mark_referenced_resources (insn, &needed, 1);
2026 /* If TARGET is a SEQUENCE, get the main insn. */
2027 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2028 target_main = XVECEXP (PATTERN (target), 0, 0);
2030 if (resource_conflicts_p (&needed, &set)
2032 || reg_mentioned_p (cc0_rtx, ipat)
2034 /* The insn requiring the delay may not set anything needed or set by INSN.  */
2036 || insn_sets_resource_p (target_main, &needed, 1)
2037 || insn_sets_resource_p (target_main, &set, 1))
2040 /* Insns we pass may not set either NEEDED or SET, so merge them for simpler tests.  */
2042 needed.memory |= set.memory;
2043 needed.unch_memory |= set.unch_memory;
2044 IOR_HARD_REG_SET (needed.regs, set.regs);
2046 /* This insn isn't redundant if it conflicts with an insn that either is
2047 or will be in a delay slot of TARGET. */
2051 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
2053 delay_list = XEXP (delay_list, 1);
2056 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2057 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
2058 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
2061 /* Scan backwards until we reach a label or an insn that uses something
2062 INSN sets or sets something INSN uses or sets.  */
2064 for (trial = PREV_INSN (target);
2065 trial && GET_CODE (trial) != CODE_LABEL;
2066 trial = PREV_INSN (trial))
2068 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
2069 && GET_CODE (trial) != JUMP_INSN)
2072 pat = PATTERN (trial);
2073 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2076 if (GET_CODE (pat) == SEQUENCE)
2078 /* If this is a CALL_INSN and its delay slots, it is hard to track
2079 the resource needs properly, so give up. */
2080 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
2083 /* If this is an INSN or JUMP_INSN with delayed effects, it
2084 is hard to track the resource needs properly, so give up.  */
2086 #ifdef INSN_SETS_ARE_DELAYED
2087 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2091 #ifdef INSN_REFERENCES_ARE_DELAYED
2092 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2096 /* See if any of the insns in the delay slot match, updating
2097 resource requirements as we go. */
2098 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2100 rtx candidate = XVECEXP (pat, 0, i);
2102 /* If an insn will be annulled if the branch is false, it isn't
2103 considered as a possible duplicate insn. */
2104 if (rtx_equal_p (PATTERN (candidate), ipat)
2105 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2106 && INSN_FROM_TARGET_P (candidate)))
2108 /* Show that this insn will be used in the sequel. */
2109 INSN_FROM_TARGET_P (candidate) = 0;
2113 /* Unless this is an annulled insn from the target of a branch,
2114 we must stop if it sets anything needed or set by INSN. */
2115 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2116 || ! INSN_FROM_TARGET_P (candidate))
2117 && insn_sets_resource_p (candidate, &needed, 1))
2122 /* If the insn requiring the delay slot conflicts with INSN, we must stop.  */
2124 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
2129 /* See if TRIAL is the same as INSN. */
2130 pat = PATTERN (trial);
2131 if (rtx_equal_p (pat, ipat))
2134 /* Can't go any further if TRIAL conflicts with INSN. */
2135 if (insn_sets_resource_p (trial, &needed, 1))
2143 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
2144 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
2145 is non-zero, we are allowed to fall into this thread; otherwise, we are
2148 If LABEL is used more than once or we pass a label other than LABEL before
2149 finding an active insn, we do not own this thread.  */
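/* For example (sketch): a branch to L1 owns the thread at L1 only if L1
   is used by that branch alone; if other code can also reach L1, removing
   insns from the thread would change those other paths as well.  */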
2152 own_thread_p (thread, label, allow_fallthrough)
2155 int allow_fallthrough;
2160 /* We don't own the function end. */
2164 /* Get the first active insn, or THREAD, if it is an active insn. */
2165 active_insn = next_active_insn (PREV_INSN (thread));
2167 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
2168 if (GET_CODE (insn) == CODE_LABEL
2169 && (insn != label || LABEL_NUSES (insn) != 1))
2172 if (allow_fallthrough)
2175 /* Ensure that we reach a BARRIER before any insn or label. */
2176 for (insn = prev_nonnote_insn (thread);
2177 insn == 0 || GET_CODE (insn) != BARRIER;
2178 insn = prev_nonnote_insn (insn))
2180 || GET_CODE (insn) == CODE_LABEL
2181 || (GET_CODE (insn) == INSN
2182 && GET_CODE (PATTERN (insn)) != USE
2183 && GET_CODE (PATTERN (insn)) != CLOBBER))
2189 /* Find the number of the basic block that starts closest to INSN. Return -1
2190 if we couldn't find such a basic block. */
2193 find_basic_block (insn)
2198 /* Scan backwards to the previous BARRIER. Then see if we can find a
2199 label that starts a basic block. Return the basic block number. */
2201 for (insn = prev_nonnote_insn (insn);
2202 insn && GET_CODE (insn) != BARRIER;
2203 insn = prev_nonnote_insn (insn))
2206 /* The start of the function is basic block zero. */
2210 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
2211 anything other than a CODE_LABEL or note, we can't find this code. */
2212 for (insn = next_nonnote_insn (insn);
2213 insn && GET_CODE (insn) == CODE_LABEL;
2214 insn = next_nonnote_insn (insn))
2216 for (i = 0; i < n_basic_blocks; i++)
2217 if (insn == basic_block_head[i])
2224 /* Called when INSN is being moved from a location near the target of a jump.
2225 We leave a marker of the form (use (INSN)) immediately in front
2226 of WHERE for mark_target_live_regs.  These markers will be deleted when reorg finishes.
2229 We used to try to update the live status of registers if WHERE is at
2230 the start of a basic block, but that can't work since we may remove a
2231 BARRIER in relax_delay_slots. */
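/* The marker emitted below is literally (use INSN), a USE whose operand
   is the moved insn itself; mark_target_live_regs looks inside such
   USEs to find the underlying insn.  */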
2234 update_block (insn, where)
2240 /* Ignore if this was in a delay slot and it came from the target of a branch.  */
2242 if (INSN_FROM_TARGET_P (insn))
2245 emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);
2247 /* INSN might be making a value live in a block where it didn't use to
2248 be. So recompute liveness information for this block. */
2250 b = find_basic_block (insn);
2255 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
2256 the basic block containing the jump. */
2259 reorg_redirect_jump (jump, nlabel)
2263 int b = find_basic_block (jump);
2268 return redirect_jump (jump, nlabel);
2271 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
2272 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
2273 that reference values used in INSN. If we find one, then we move the
2274 REG_DEAD note to INSN.
2276 This is needed to handle the case where a later insn (after INSN) has a
2277 REG_DEAD note for a register used by INSN, and this later insn subsequently
2278 gets moved before a CODE_LABEL because it is a redundant insn. In this
2279 case, mark_target_live_regs may be confused into thinking the register
2280 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
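/* Sketch (hypothetical register): if INSN uses r3 and an insn P between
   INSN and DELAYED_INSN carries (REG_DEAD r3), the note migrates from P
   to INSN, so the death is recorded where the use now happens.  */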
2283 update_reg_dead_notes (insn, delayed_insn)
2284 rtx insn, delayed_insn;
2288 for (p = next_nonnote_insn (insn); p != delayed_insn;
2289 p = next_nonnote_insn (p))
2290 for (link = REG_NOTES (p); link; link = next)
2292 next = XEXP (link, 1);
2294 if (REG_NOTE_KIND (link) != REG_DEAD
2295 || GET_CODE (XEXP (link, 0)) != REG)
2298 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
2300 /* Move the REG_DEAD note from P to INSN. */
2301 remove_note (p, link);
2302 XEXP (link, 1) = REG_NOTES (insn);
2303 REG_NOTES (insn) = link;
2308 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
2310 This handles the case of udivmodXi4 instructions which optimize their
2311 output depending on whether any REG_UNUSED notes are present.
2312 We must make sure that INSN calculates as many results as REDUNDANT_INSN does.  */
2316 update_reg_unused_notes (insn, redundant_insn)
2317 rtx insn, redundant_insn;
2321 for (link = REG_NOTES (insn); link; link = next)
2323 next = XEXP (link, 1);
2325 if (REG_NOTE_KIND (link) != REG_UNUSED
2326 || GET_CODE (XEXP (link, 0)) != REG)
2329 if (! find_regno_note (redundant_insn, REG_UNUSED,
2330 REGNO (XEXP (link, 0))))
2331 remove_note (insn, link);
2335 /* Marks registers possibly live at the current place being scanned by
2336 mark_target_live_regs.  Used only by the next two functions.  */
2338 static HARD_REG_SET current_live_regs;
2340 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
2341 Also only used by the next two functions. */
2343 static HARD_REG_SET pending_dead_regs;
2345 /* Utility function called from mark_target_live_regs via note_stores.
2346 It deadens any CLOBBERed registers and livens any SET registers. */
2349 update_live_status (dest, x)
2353 int first_regno, last_regno;
2356 if (GET_CODE (dest) != REG
2357 && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
2360 if (GET_CODE (dest) == SUBREG)
2361 first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2363 first_regno = REGNO (dest);
2365 last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
2367 if (GET_CODE (x) == CLOBBER)
2368 for (i = first_regno; i < last_regno; i++)
2369 CLEAR_HARD_REG_BIT (current_live_regs, i);
2371 for (i = first_regno; i < last_regno; i++)
2373 SET_HARD_REG_BIT (current_live_regs, i);
2374 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
2378 /* Similar to next_insn, but ignores insns in the delay slots of
2379 an annulled branch. */
2382 next_insn_no_annul (insn)
2387 /* If INSN is an annulled branch, skip any insns from the target of the branch.  */
2389 if (INSN_ANNULLED_BRANCH_P (insn)
2390 && NEXT_INSN (PREV_INSN (insn)) != insn)
2391 while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
2392 insn = NEXT_INSN (insn);
2394 insn = NEXT_INSN (insn);
2395 if (insn && GET_CODE (insn) == INSN
2396 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2397 insn = XVECEXP (PATTERN (insn), 0, 0);
2403 /* Set the resources that are live at TARGET.
2405 If TARGET is zero, we refer to the end of the current function and can
2406 return our precomputed value.
2408 Otherwise, we try to find out what is live by consulting the basic block
2409 information. This is tricky, because we must consider the actions of
2410 reload and jump optimization, which occur after the basic block information
is computed.

2413 Accordingly, we proceed as follows:
2415 We find the previous BARRIER and look at all immediately following labels
2416 (with no intervening active insns) to see if any of them start a basic
2417 block. If we hit the start of the function first, we use block 0.
2419 Once we have found a basic block and a corresponding first insn, we can
2420 accurately compute the live status from basic_block_live_regs and
2421 reg_renumber. (By starting at a label following a BARRIER, we are immune
2422 to actions taken by reload and jump.) Then we scan all insns between
2423 that point and our target. For each CLOBBER (or for call-clobbered regs
2424 when we pass a CALL_INSN), mark the appropriate registers as dead.  For
2425 a SET, mark them as live.
2427 We have to be careful when using REG_DEAD notes because they are not
2428 updated by such things as find_equiv_reg. So keep track of registers
2429 marked as dead that haven't been assigned to, and mark them dead at the
2430 next CODE_LABEL since reload and jump won't propagate values across labels.
2432 If we cannot find the start of a basic block (should be a very rare
2433 case, if it can happen at all), mark everything as potentially live.
2435 Next, scan forward from TARGET looking for things set or clobbered
2436 before they are used. These are not live.
2438 Because we can be called many times on the same target, save our results
2439 in a hash table indexed by INSN_UID. */
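/* Sketch of the cache: results chain through
   target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME], and each
   entry is validated against bb_ticks before reuse, so stale data for
   a modified block is recomputed.  */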
2442 mark_target_live_regs (target, res)
2444 struct resources *res;
2448 struct target_info *tinfo;
2452 HARD_REG_SET scratch;
2453 struct resources set, needed;
2456 /* Handle end of function. */
2459 *res = end_of_function_needs;
2463 /* We have to assume memory is needed, but the CC isn't. */
2465 res->volatil = res->unch_memory = 0;
2468 /* See if we have computed this value already. */
2469 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2470 tinfo; tinfo = tinfo->next)
2471 if (tinfo->uid == INSN_UID (target))
2474 /* Start by getting the basic block number. If we have saved information,
2475 we can get it from there unless the insn at the start of the basic block
2476 has been deleted. */
2477 if (tinfo && tinfo->block != -1
2478 && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
2482 b = find_basic_block (target);
2486 /* If the information is up-to-date, use it.  Otherwise, we will update it below.  */
2488 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
2490 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
2496 /* Allocate a place to put our results and chain it into the hash table.  */
2498 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
2499 tinfo->uid = INSN_UID (target);
2501 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2502 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
2505 CLEAR_HARD_REG_SET (pending_dead_regs);
2507 /* If we found a basic block, get the live registers from it and update
2508 them with anything set or killed between its start and the insn before
2509 TARGET. Otherwise, we must assume everything is live. */
2512 regset regs_live = basic_block_live_at_start[b];
2514 REGSET_ELT_TYPE bit;
2516 rtx start_insn, stop_insn;
2518 /* Compute hard regs live at start of block -- this is the real hard regs
2519 marked live, plus live pseudo regs that have been renumbered to hard regs.  */
2523 current_live_regs = *regs_live;
2525 COPY_HARD_REG_SET (current_live_regs, regs_live);
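/* Walk the regset a word at a time; for each live pseudo that
   reg_renumber mapped to a hard register, mark all of that pseudo's
   hard registers live.  */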
2528 for (offset = 0, i = 0; offset < regset_size; offset++)
2530 if (regs_live[offset] == 0)
2531 i += REGSET_ELT_BITS;
2533 for (bit = 1; bit && i < max_regno; bit <<= 1, i++)
2534 if ((regs_live[offset] & bit)
2535 && (regno = reg_renumber[i]) >= 0)
2537 j < regno + HARD_REGNO_NREGS (regno,
2538 PSEUDO_REGNO_MODE (i));
2540 SET_HARD_REG_BIT (current_live_regs, j);
2543 /* Get starting and ending insn, handling the case where each might be a SEQUENCE.  */
2545 start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
2548 if (GET_CODE (start_insn) == INSN
2549 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
2550 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
2552 if (GET_CODE (stop_insn) == INSN
2553 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
2554 stop_insn = next_insn (PREV_INSN (stop_insn));
2556 for (insn = start_insn; insn != stop_insn;
2557 insn = next_insn_no_annul (insn))
2560 rtx real_insn = insn;
2562 /* If this insn is from the target of a branch, it isn't going to
2563 be used in the sequel. If it is used in both cases, this
2564 test will not be true. */
2565 if (INSN_FROM_TARGET_P (insn))
2568 /* If this insn is a USE made by update_block, we care about the underlying insn.  */
2570 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
2571 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2572 real_insn = XEXP (PATTERN (insn), 0);
2574 if (GET_CODE (real_insn) == CALL_INSN)
2576 /* CALL clobbers all call-used regs that aren't fixed except
2577 sp, ap, and fp.  Do this before setting the result of the call live.  */
2579 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2580 if (call_used_regs[i]
2581 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
2582 && i != ARG_POINTER_REGNUM
2583 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2584 && i != HARD_FRAME_POINTER_REGNUM
2586 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2587 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
2589 #ifdef PIC_OFFSET_TABLE_REGNUM
2590 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2593 CLEAR_HARD_REG_BIT (current_live_regs, i);
2595 /* A CALL_INSN sets any global register live, since it may
2596 have been modified by the call. */
2597 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2599 SET_HARD_REG_BIT (current_live_regs, i);
2602 /* Mark anything killed in an insn to be deadened at the next
2603 label. Ignore USE insns; the only REG_DEAD notes will be for
2604 parameters. But they might be early. A CALL_INSN will usually
2605 clobber registers used for parameters. It isn't worth bothering
2606 with the unlikely case when it won't. */
2607 if ((GET_CODE (real_insn) == INSN
2608 && GET_CODE (PATTERN (real_insn)) != USE
2609 && GET_CODE (PATTERN (real_insn)) != CLOBBER)
2610 || GET_CODE (real_insn) == JUMP_INSN
2611 || GET_CODE (real_insn) == CALL_INSN)
2613 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2614 if (REG_NOTE_KIND (link) == REG_DEAD
2615 && GET_CODE (XEXP (link, 0)) == REG
2616 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2618 int first_regno = REGNO (XEXP (link, 0));
2621 + HARD_REGNO_NREGS (first_regno,
2622 GET_MODE (XEXP (link, 0))));
2624 for (i = first_regno; i < last_regno; i++)
2625 SET_HARD_REG_BIT (pending_dead_regs, i);
2628 note_stores (PATTERN (real_insn), update_live_status);
2630 /* If any registers were unused after this insn, kill them.
2631 These notes will always be accurate. */
2632 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2633 if (REG_NOTE_KIND (link) == REG_UNUSED
2634 && GET_CODE (XEXP (link, 0)) == REG
2635 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2637 int first_regno = REGNO (XEXP (link, 0));
2640 + HARD_REGNO_NREGS (first_regno,
2641 GET_MODE (XEXP (link, 0))));
2643 for (i = first_regno; i < last_regno; i++)
2644 CLEAR_HARD_REG_BIT (current_live_regs, i);
2648 else if (GET_CODE (real_insn) == CODE_LABEL)
2650 /* A label clobbers the pending dead registers since neither
2651 reload nor jump will propagate a value across a label. */
2652 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
2653 CLEAR_HARD_REG_SET (pending_dead_regs);
2656 /* The beginning of the epilogue corresponds to the end of the
2657 RTL chain when there are no epilogue insns. Certain resources
2658 are implicitly required at that point. */
2659 else if (GET_CODE (real_insn) == NOTE
2660 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
2661 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
2664 COPY_HARD_REG_SET (res->regs, current_live_regs);
2666 tinfo->bb_tick = bb_ticks[b];
2669 /* We didn't find the start of a basic block. Assume everything
2670 in use. This should happen only extremely rarely. */
2671 SET_HARD_REG_SET (res->regs);
2673 /* Now step forward from TARGET looking for registers that are set before
2674 they are used. These are dead. If we pass a label, any pending dead
2675 registers that weren't yet used can be made dead. Stop when we pass a
2676 conditional JUMP_INSN; follow the first few unconditional branches. */
2678 CLEAR_RESOURCE (&set);
2679 CLEAR_RESOURCE (&needed);
2681 for (insn = target; insn; insn = next)
2683 rtx this_jump_insn = insn;
2685 next = NEXT_INSN (insn);
2686 switch (GET_CODE (insn))
2689 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
2690 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
2691 CLEAR_HARD_REG_SET (pending_dead_regs);
2699 if (GET_CODE (PATTERN (insn)) == USE)
2701 /* If INSN is a USE made by update_block, we care about the
2702 underlying insn. Any registers set by the underlying insn
2703 are live since the insn is being done somewhere else. */
2704 if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2705 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
2707 /* All other USE insns are to be ignored. */
2710 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
2712 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
2714 /* An unconditional jump can be used to fill the delay slot
2715 of a call, so search for a JUMP_INSN in any position. */
2716 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
2718 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
2719 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2725 if (GET_CODE (this_jump_insn) == JUMP_INSN)
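/* Follow at most ten unconditional jumps or RETURNs; a longer chain
   suggests an infinite loop in the function being compiled, so stop
   following.  */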
2727 if (jump_count++ < 10
2728 && (simplejump_p (this_jump_insn)
2729 || GET_CODE (PATTERN (this_jump_insn)) == RETURN))
2731 next = next_active_insn (JUMP_LABEL (this_jump_insn));
2735 jump_target = JUMP_LABEL (this_jump_insn);
2742 mark_referenced_resources (insn, &needed, 1);
2743 mark_set_resources (insn, &set, 0, 1);
2745 COPY_HARD_REG_SET (scratch, set.regs);
2746 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2747 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2750 /* If we hit an unconditional branch, we have another way of finding out
2751 what is live: we can see what is live at the branch target and include
2752 anything used but not set before the branch. The only things that are
2753 live are those that are live using the above test and the test below.
2755 Don't try this if we expired our jump count above, since that would
2756 mean there may be an infinite loop in the function being compiled. */
2758 if (jump_insn && jump_count < 10)
2760 struct resources new_resources;
2761 rtx stop_insn = next_active_insn (jump_insn);
2763 mark_target_live_regs (next_active_insn (jump_target), &new_resources);
2764 CLEAR_RESOURCE (&set);
2765 CLEAR_RESOURCE (&needed);
2767 /* Include JUMP_INSN in the needed registers. */
2768 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
2770 mark_referenced_resources (insn, &needed, 1);
2772 COPY_HARD_REG_SET (scratch, needed.regs);
2773 AND_COMPL_HARD_REG_SET (scratch, set.regs);
2774 IOR_HARD_REG_SET (new_resources.regs, scratch);
2776 mark_set_resources (insn, &set, 0, 1);
2779 AND_HARD_REG_SET (res->regs, new_resources.regs);
2782 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
2785 /* Scan a function looking for insns that need a delay slot and find insns to
2786 put into the delay slot.
2788 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
2789 as calls). We do these first since we don't want jump insns (that are
2790 easier to fill) to get the only insns that could be used for non-jump insns.
2791 When it is zero, only try to fill JUMP_INSNs.
2793 When slots are filled in this manner, the insns (including the
2794 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2795 it is possible to tell whether a delay slot has really been filled
2796 or not. `final' knows how to deal with this, by communicating
2797 through FINAL_SEQUENCE. */
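/* Sketch of the representation: a filled insn is an INSN whose PATTERN
   is a SEQUENCE; element 0 is the insn needing the slots and the
   remaining elements are the filler insns, roughly

       (insn (sequence [(call_insn ...) (insn ...filler...)]))  */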
2800 fill_simple_delay_slots (first, non_jumps_p)
2804 register rtx insn, pat, trial, next_trial;
2806 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2807 struct resources needed, set;
2808 int slots_to_fill, slots_filled;
2811 for (i = 0; i < num_unfilled_slots; i++)
2814 /* Get the next insn to fill. If it has already had any slots assigned,
2815 we can't do anything with it. Maybe we'll improve this later. */
2817 insn = unfilled_slots_base[i];
2819 || INSN_DELETED_P (insn)
2820 || (GET_CODE (insn) == INSN
2821 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2822 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2823 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2826 if (GET_CODE (insn) == JUMP_INSN)
2827 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2829 flags = get_jump_flags (insn, NULL_RTX);
2830 slots_to_fill = num_delay_slots (insn);
2831 if (slots_to_fill == 0)
2834 /* This insn needs, or can use, some delay slots.  SLOTS_TO_FILL
2835 says how many.  After initialization, first try optimizing

	 call _foo		call _foo
2838	 nop			add %o7,.-L1,%o7
	 b,a L1
	 nop
2842 If this case applies, the delay slot of the call is filled with
2843 the unconditional jump. This is done first to avoid having the
2844 delay slot of the call filled in the backward scan. Also, since
2845 the unconditional jump is likely to also have a delay slot, that
2846 insn must exist when it is subsequently scanned.
2848 This is tried on each insn with delay slots as some machines
2849 have insns which perform calls, but are not represented as CALL_INSNs.  */
2855 if ((trial = next_active_insn (insn))
2856 && GET_CODE (trial) == JUMP_INSN
2857 && simplejump_p (trial)
2858 && eligible_for_delay (insn, slots_filled, trial, flags)
2859 && no_labels_between_p (insn, trial))
2863 delay_list = add_to_delay_list (trial, delay_list);
2865 /* TRIAL may have had its delay slot filled, then unfilled. When
2866 the delay slot is unfilled, TRIAL is placed back on the unfilled
2867 slots obstack. Unfortunately, it is placed on the end of the
2868 obstack, not in its original location. Therefore, we must search
2869 from entry i + 1 to the end of the unfilled slots obstack to
2870 try and find TRIAL. */
2871 tmp = &unfilled_slots_base[i + 1];
2872 while (*tmp != trial && tmp != unfilled_slots_next)
2875 /* Remove the unconditional jump from consideration for delay slot
2876 filling and unthread it. */
2880 rtx next = NEXT_INSN (trial);
2881 rtx prev = PREV_INSN (trial);
2883 NEXT_INSN (prev) = next;
2885 PREV_INSN (next) = prev;
2889 /* Now, scan backwards from the insn to search for a potential
2890 delay-slot candidate. Stop searching when a label or jump is hit.
2892 For each candidate, if it is to go into the delay slot (moved
2893 forward in execution sequence), it must not need or set any resources
2894 that were set by later insns and must not set any resources that
2895 are needed for those insns.
2897 The delay slot insn itself sets resources unless it is a call
2898 (in which case the called routine, not the insn itself, is doing
2901 if (slots_filled < slots_to_fill)
2903 CLEAR_RESOURCE (&needed);
2904 CLEAR_RESOURCE (&set);
2905 mark_set_resources (insn, &set, 0, 0);
2906 mark_referenced_resources (insn, &needed, 0);
2908 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2911 next_trial = prev_nonnote_insn (trial);
2913 /* This must be an INSN or CALL_INSN. */
2914 pat = PATTERN (trial);
2916 /* USE and CLOBBER at this level are just for flow; ignore them.  */
2917 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2920 /* Check for resource conflict first, to avoid unnecessary splitting.  */
2922 if (! insn_references_resource_p (trial, &set, 1)
2923 && ! insn_sets_resource_p (trial, &set, 1)
2924 && ! insn_sets_resource_p (trial, &needed, 1)
2926 /* Can't separate set of cc0 from its use. */
2927 && ! (reg_mentioned_p (cc0_rtx, pat)
2928 && ! sets_cc0_p (pat))
2932 trial = try_split (pat, trial, 1);
2933 next_trial = prev_nonnote_insn (trial);
2934 if (eligible_for_delay (insn, slots_filled, trial, flags))
2936 /* In this case, we are searching backward, so if we
2937 find insns to put on the delay list, we want
2938 to put them at the head, rather than the
2939 tail, of the list. */
2941 update_reg_dead_notes (trial, insn);
2942 delay_list = gen_rtx (INSN_LIST, VOIDmode,
2944 update_block (trial, trial);
2945 delete_insn (trial);
2946 if (slots_to_fill == ++slots_filled)
2952 mark_set_resources (trial, &set, 0, 1);
2953 mark_referenced_resources (trial, &needed, 1);
2957 /* If all needed slots haven't been filled, we come here. */
2959 /* Try to optimize case of jumping around a single insn. */
2960 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2961 if (slots_filled != slots_to_fill
2963 && GET_CODE (insn) == JUMP_INSN
2964 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2966 delay_list = optimize_skip (insn);
2972 /* Try to get insns from beyond the insn needing the delay slot.
2973 These insns can neither set nor reference resources set in insns being
2974 skipped, cannot set resources in the insn being skipped, and, if this
2975 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2976 call might not return).
2978 There used to be code which continued past the target label if
2979 we saw all uses of the target label. This code did not work,
2980 because it failed to account for some instructions which were
2981 both annulled and marked as from the target. This can happen as a
2982 result of optimize_skip. Since this code was redundant with
2983 fill_eager_delay_slots anyway, it was just deleted.  */
2985 if (slots_filled != slots_to_fill
2986 && (GET_CODE (insn) != JUMP_INSN
2987 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2988 && ! simplejump_p (insn)
2989 && JUMP_LABEL (insn) != 0)))
2992 int maybe_never = 0;
2993 struct resources needed_at_jump;
2995 CLEAR_RESOURCE (&needed);
2996 CLEAR_RESOURCE (&set);
2998 if (GET_CODE (insn) == CALL_INSN)
3000 mark_set_resources (insn, &set, 0, 1);
3001 mark_referenced_resources (insn, &needed, 1);
3006 mark_set_resources (insn, &set, 0, 1);
3007 mark_referenced_resources (insn, &needed, 1);
3008 if (GET_CODE (insn) == JUMP_INSN)
3009 target = JUMP_LABEL (insn);
3012 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
3014 rtx pat, trial_delay;
3016 next_trial = next_nonnote_insn (trial);
3018 if (GET_CODE (trial) == CODE_LABEL
3019 || GET_CODE (trial) == BARRIER)
3022 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
3023 pat = PATTERN (trial);
3025 /* Stand-alone USE and CLOBBER are just for flow. */
3026 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3029 /* If this already has filled delay slots, get the insn needing the delay slots.  */
3031 if (GET_CODE (pat) == SEQUENCE)
3032 trial_delay = XVECEXP (pat, 0, 0);
3034 trial_delay = trial;
3036 /* If this is a jump insn to our target, indicate that we have
3037 seen another jump to it. If we aren't handling a conditional
3038 jump, stop our search. Otherwise, compute the needs at its
3039 target and add them to NEEDED. */
3040 if (GET_CODE (trial_delay) == JUMP_INSN)
3044 else if (JUMP_LABEL (trial_delay) != target)
3046 mark_target_live_regs
3047 (next_active_insn (JUMP_LABEL (trial_delay)),
3049 needed.memory |= needed_at_jump.memory;
3050 needed.unch_memory |= needed_at_jump.unch_memory;
3051 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
3055 /* See if we have a resource problem before we try to split.  */
3058 && GET_CODE (pat) != SEQUENCE
3059 && ! insn_references_resource_p (trial, &set, 1)
3060 && ! insn_sets_resource_p (trial, &set, 1)
3061 && ! insn_sets_resource_p (trial, &needed, 1)
3063 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
3065 && ! (maybe_never && may_trap_p (pat))
3066 && (trial = try_split (pat, trial, 0))
3067 && eligible_for_delay (insn, slots_filled, trial, flags))
3069 next_trial = next_nonnote_insn (trial);
3070 delay_list = add_to_delay_list (trial, delay_list);
3073 if (reg_mentioned_p (cc0_rtx, pat))
3074 link_cc0_insns (trial);
3077 delete_insn (trial);
3078 if (slots_to_fill == ++slots_filled)
3083 mark_set_resources (trial, &set, 0, 1);
3084 mark_referenced_resources (trial, &needed, 1);
3086 /* Ensure we don't put insns between the setting of cc and the
3087 comparison by moving a setting of cc into an earlier delay
3088 slot since these insns could clobber the condition code. */
3091 /* If this is a call or jump, we might not get here. */
3092 if (GET_CODE (trial_delay) == CALL_INSN
3093 || GET_CODE (trial_delay) == JUMP_INSN)
3097 /* If there are slots left to fill and our search was stopped by an
3098 unconditional branch, try the insn at the branch target. We can
3099 redirect the branch if it works.
3101 Don't do this if the insn at the branch target is a branch. */
3102 if (slots_to_fill != slots_filled
3104 && GET_CODE (trial) == JUMP_INSN
3105 && simplejump_p (trial)
3106 && (target == 0 || JUMP_LABEL (trial) == target)
3107 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
3108 && ! (GET_CODE (next_trial) == INSN
3109 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
3110 && GET_CODE (next_trial) != JUMP_INSN
3111 && ! insn_references_resource_p (next_trial, &set, 1)
3112 && ! insn_sets_resource_p (next_trial, &set, 1)
3113 && ! insn_sets_resource_p (next_trial, &needed, 1)
3115 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
3117 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
3118 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
3119 && eligible_for_delay (insn, slots_filled, next_trial, flags))
3121 rtx new_label = next_active_insn (next_trial);
3124 new_label = get_label_before (new_label);
3126 new_label = find_end_label ();
3129 = add_to_delay_list (copy_rtx (next_trial), delay_list);
3131 reorg_redirect_jump (trial, new_label);
3133 /* If we merged because we both jumped to the same place,
3134 redirect the original insn also. */
3136 reorg_redirect_jump (insn, new_label);
3140 /* If this is an unconditional jump, then try to get insns from the
3141 target of the jump. */
3142 if (GET_CODE (insn) == JUMP_INSN
3143 && simplejump_p (insn)
3144 && slots_filled != slots_to_fill)
3146 = fill_slots_from_thread (insn, const_true_rtx,
3147 next_active_insn (JUMP_LABEL (insn)),
3149 own_thread_p (JUMP_LABEL (insn),
3150 JUMP_LABEL (insn), 0),
3151 0, slots_to_fill, &slots_filled);
3154 unfilled_slots_base[i]
3155 = emit_delay_sequence (insn, delay_list,
3156 slots_filled, slots_to_fill);
3158 if (slots_to_fill == slots_filled)
3159 unfilled_slots_base[i] = 0;
3161 note_delay_statistics (slots_filled, 0);
3164 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3165 /* See if the epilogue needs any delay slots.  Try to fill them if so.
3166 The only thing we can do is scan backwards from the end of the
3167 function.  If we did this in a previous pass, it is incorrect to do it again.  */
3169 if (current_function_epilogue_delay_list)
3172 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
3173 if (slots_to_fill == 0)
3177 CLEAR_RESOURCE (&set);
3179 /* The frame pointer and stack pointer are needed at the beginning of
3180 the epilogue, so instructions setting them cannot be put in the
3181 epilogue delay slot. However, everything else needed at function
3182 end is safe, so we don't want to use end_of_function_needs here. */
3183 CLEAR_RESOURCE (&needed);
3184 if (frame_pointer_needed)
3186 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
3187 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3188 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
3190 #ifdef EXIT_IGNORE_STACK
3191 if (! EXIT_IGNORE_STACK)
3193 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
3196 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
3198 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
3199 trial = PREV_INSN (trial))
3201 if (GET_CODE (trial) == NOTE)
3203 pat = PATTERN (trial);
3204 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3207 if (! insn_references_resource_p (trial, &set, 1)
3208 && ! insn_sets_resource_p (trial, &needed, 1)
3209 && ! insn_sets_resource_p (trial, &set, 1)
3211 /* Don't want to mess with cc0 here. */
3212 && ! reg_mentioned_p (cc0_rtx, pat)
3216 trial = try_split (pat, trial, 1);
3217 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
3219 /* Here as well we are searching backward, so put the
3220 insns we find on the head of the list. */
3222 current_function_epilogue_delay_list
3223 = gen_rtx (INSN_LIST, VOIDmode, trial,
3224 current_function_epilogue_delay_list);
3225 mark_referenced_resources (trial, &end_of_function_needs, 1);
3226 update_block (trial, trial);
3227 delete_insn (trial);
3229 /* Clear deleted bit so final.c will output the insn. */
3230 INSN_DELETED_P (trial) = 0;
3232 if (slots_to_fill == ++slots_filled)
3238 mark_set_resources (trial, &set, 0, 1);
3239 mark_referenced_resources (trial, &needed, 1);
3242 note_delay_statistics (slots_filled, 0);
3246 /* Try to find insns to place in delay slots.
3248 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
3249 or is an unconditional branch if CONDITION is const_true_rtx.
3250 *PSLOTS_FILLED is updated with the number of slots that we have filled.
3252 THREAD is a flow of control: the insns to be executed either when the
3253 branch is true or when it is false; THREAD_IF_TRUE says which.
3255 OPPOSITE_THREAD is the thread in the opposite direction. It is used
3256 to see if any potential delay slot insns set things needed there.
3258 LIKELY is non-zero if it is extremely likely that the branch will be
3259 taken and THREAD_IF_TRUE is set. This is used for the branch at the
3260 end of a loop back up to the top.
3262 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
3263 thread. I.e., it is the fallthrough code of our jump or the target of the
3264 jump when we are the only jump going there.
3266 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
3267 case, we can only take insns from the head of the thread for our delay
3268 slot. We then adjust the jump to point after the insns we have taken. */
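/* Illustrative sketch of the non-owned case: if the target thread is

       L1: insn1
	   insn2

   and insn1 is taken for the delay slot, the branch is redirected to a
   label placed before insn2.  */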
3271 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
3272 thread_if_true, own_thread, own_opposite_thread,
3273 slots_to_fill, pslots_filled)
3276 rtx thread, opposite_thread;
3279 int own_thread, own_opposite_thread;
3280 int slots_to_fill, *pslots_filled;
3284 struct resources opposite_needed, set, needed;
3290 /* Validate our arguments. */
3291 if ((condition == const_true_rtx && ! thread_if_true)
3292 || (! own_thread && ! thread_if_true))
3295 flags = get_jump_flags (insn, JUMP_LABEL (insn));
3297 /* If our thread is the end of subroutine, we can't get any delay insns from that.  */
3302 /* If this is an unconditional branch, nothing is needed at the
3303 opposite thread. Otherwise, compute what is needed there. */
3304 if (condition == const_true_rtx)
3305 CLEAR_RESOURCE (&opposite_needed);
3307 mark_target_live_regs (opposite_thread, &opposite_needed);
3309 /* If the insn at THREAD can be split, do it here to avoid having to
3310 update THREAD and NEW_THREAD if it is done in the loop below. Also
3311 initialize NEW_THREAD. */
3313 new_thread = thread = try_split (PATTERN (thread), thread, 0);
3315 /* Scan insns at THREAD.  We are looking for an insn that can be removed
3316 from THREAD (it neither sets nor references resources that were set
3317 ahead of it and it doesn't set anything needed by the insns ahead of
3318 it) and that either can be placed in an annulling insn or isn't
3319 needed at OPPOSITE_THREAD.  */
3321 CLEAR_RESOURCE (&needed);
3322 CLEAR_RESOURCE (&set);
3324 /* If we do not own this thread, we must stop as soon as we find
3325 something that we can't put in a delay slot, since all we can do
3326 is branch into THREAD at a later point. Therefore, labels stop
3327 the search if this is not the `true' thread. */
3329 for (trial = thread;
3330 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
3331 trial = next_nonnote_insn (trial))
3335 /* If we have passed a label, we no longer own this thread. */
3336 if (GET_CODE (trial) == CODE_LABEL)
3342 pat = PATTERN (trial);
3343 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3346 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
3347 don't separate or copy insns that set and use CC0. */
3348 if (! insn_references_resource_p (trial, &set, 1)
3349 && ! insn_sets_resource_p (trial, &set, 1)
3350 && ! insn_sets_resource_p (trial, &needed, 1)
3352 && ! (reg_mentioned_p (cc0_rtx, pat)
3353 && (! own_thread || ! sets_cc0_p (pat)))
3359 /* If TRIAL is redundant with some insn before INSN, we don't
3360 actually need to add it to the delay list; we can merely pretend
we did.  */
3362 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
3366 update_block (trial, thread);
3367 if (trial == thread)
3369 thread = next_active_insn (thread);
3370 if (new_thread == trial)
3371 new_thread = thread;
3374 delete_insn (trial);
3378 update_reg_unused_notes (prior_insn, trial);
3379 new_thread = next_active_insn (trial);
3385 /* There are two ways we can win: If TRIAL doesn't set anything
3386 needed at the opposite thread and can't trap, or if it can
3387 go into an annulled delay slot. */
3388 if (condition == const_true_rtx
3389 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
3390 && ! may_trap_p (pat)))
3393 trial = try_split (pat, trial, 0);
3394 if (new_thread == old_trial)
3396 if (thread == old_trial)
3398 pat = PATTERN (trial);
3399 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
3403 #ifdef ANNUL_IFTRUE_SLOTS
3406 #ifdef ANNUL_IFFALSE_SLOTS
3412 trial = try_split (pat, trial, 0);
3413 if (new_thread == old_trial)
3415 if (thread == old_trial)
3417 pat = PATTERN (trial);
3419 ? eligible_for_annul_false (insn, *pslots_filled, trial, flags)
3420 : eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
3428 if (reg_mentioned_p (cc0_rtx, pat))
3429 link_cc0_insns (trial);
3432 /* If we own this thread, delete the insn. If this is the
3433 destination of a branch, show that a basic block status
3434 may have been updated. In any case, mark the new
3435 starting point of this thread. */
3438 update_block (trial, thread);
3439 delete_insn (trial);
3442 new_thread = next_active_insn (trial);
3444 temp = own_thread ? trial : copy_rtx (trial);
3446 INSN_FROM_TARGET_P (temp) = 1;
3448 delay_list = add_to_delay_list (temp, delay_list);
3450 if (slots_to_fill == ++(*pslots_filled))
3452 /* Even though we have filled all the slots, we
3453 may be branching to a location that has a
3454 redundant insn. Skip any if so. */
3455 while (new_thread && ! own_thread
3456 && ! insn_sets_resource_p (new_thread, &set, 1)
3457 && ! insn_sets_resource_p (new_thread, &needed, 1)
3458 && ! insn_references_resource_p (new_thread,
3460 && redundant_insn (new_thread, insn, delay_list))
3461 new_thread = next_active_insn (new_thread);
3470 /* This insn can't go into a delay slot. */
3472 mark_set_resources (trial, &set, 0, 1);
3473 mark_referenced_resources (trial, &needed, 1);
3475 /* Ensure we don't put insns between the setting of cc and the comparison
3476 by moving a setting of cc into an earlier delay slot since these insns
3477 could clobber the condition code. */
3480 /* If this insn is a register-register copy and the next insn has
3481 a use of our destination, change it to use our source. That way,
3482 it will become a candidate for our delay slot the next time
3483 through this loop.  This case occurs commonly in loops that branch back to their entry.
3486 We could check for more complex cases than those tested below,
3487 but it doesn't seem worth it. It might also be a good idea to try
3488 to swap the two insns. That might do better.
3490 We can't do this if the next insn modifies our destination, because
3491 that would make the replacement into the insn invalid. We also can't
3492 do this if it modifies our source, because it might be an earlyclobber
3493 operand.  This latter test also prevents updating the contents of a PRE_INC.  */
3496 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
3497 && GET_CODE (SET_SRC (pat)) == REG
3498 && GET_CODE (SET_DEST (pat)) == REG)
3500 rtx next = next_nonnote_insn (trial);
3502 if (next && GET_CODE (next) == INSN
3503 && GET_CODE (PATTERN (next)) != USE
3504 && ! reg_set_p (SET_DEST (pat), next)
3505 && ! reg_set_p (SET_SRC (pat), next)
3506 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
3507 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
3511 /* If we stopped on a branch insn that has delay slots, see if we can
3512 steal some of the insns in those slots. */
3513 if (trial && GET_CODE (trial) == INSN
3514 && GET_CODE (PATTERN (trial)) == SEQUENCE
3515 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
3517 /* If this is the `true' thread, we will want to follow the jump,
3518 so we can only do this if we have taken everything up to here. */
3519 if (thread_if_true && trial == new_thread)
3521 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
3522 delay_list, &set, &needed,
3523 &opposite_needed, slots_to_fill,
3524 pslots_filled, &must_annul,
3526 else if (! thread_if_true)
3528 = steal_delay_list_from_fallthrough (insn, condition,
3530 delay_list, &set, &needed,
3531 &opposite_needed, slots_to_fill,
3532 pslots_filled, &must_annul);
3535 /* If we haven't found anything for this delay slot and it is very
3536 likely that the branch will be taken, see if the insn at our target
3537 increments or decrements a register with an increment that does not
3538 depend on the destination register. If so, try to place the opposite
3539 arithmetic insn after the jump insn and put the arithmetic insn in the
3540 delay slot. If we can't do this, return. */
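/* Illustrative sketch (hypothetical registers): if the insn at the
   target is "r1 = r1 + 4", emit "r1 = r1 - 4" after the branch and put
   "r1 = r1 + 4" in the delay slot; the fall-through path then nets to
   zero while the taken path still sees the increment.  */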
3541 if (delay_list == 0 && likely && new_thread && GET_CODE (new_thread) == INSN)
3543 rtx pat = PATTERN (new_thread);
3548 pat = PATTERN (trial);
3550 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
3551 || ! eligible_for_delay (insn, 0, trial, flags))
3554 dest = SET_DEST (pat), src = SET_SRC (pat);
3555 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
3556 && rtx_equal_p (XEXP (src, 0), dest)
3557 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
3559 rtx other = XEXP (src, 1);
3563 /* If this is a constant adjustment, use the same code with
3564 the negated constant.  Otherwise, reverse the sense of the arithmetic.  */
3566 if (GET_CODE (other) == CONST_INT)
3567 new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
3568 negate_rtx (GET_MODE (src), other));
3570 new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
3571 GET_MODE (src), dest, other);
3573 ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
3576 if (recog_memoized (ninsn) < 0
3577 || (insn_extract (ninsn),
3578 ! constrain_operands (INSN_CODE (ninsn), 1)))
3580 delete_insn (ninsn);
3586 update_block (trial, thread);
3587 delete_insn (trial);
3590 new_thread = next_active_insn (trial);
3592 ninsn = own_thread ? trial : copy_rtx (trial);
3594 INSN_FROM_TARGET_P (ninsn) = 1;
3596 delay_list = add_to_delay_list (ninsn, NULL_RTX);
3601 if (delay_list && must_annul)
3602 INSN_ANNULLED_BRANCH_P (insn) = 1;
3604 /* If we are to branch into the middle of this thread, find an appropriate
3605 label or make a new one if none, and redirect INSN to it. If we hit the
3606 end of the function, use the end-of-function label. */
3607 if (new_thread != thread)
3611 if (! thread_if_true)
3614 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
3615 && (simplejump_p (new_thread)
3616 || GET_CODE (PATTERN (new_thread)) == RETURN)
3617 && redirect_with_delay_list_safe_p (insn,
3618 JUMP_LABEL (new_thread),
3620 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3622 if (new_thread == 0)
3623 label = find_end_label ();
3624 else if (GET_CODE (new_thread) == CODE_LABEL)
3627 label = get_label_before (new_thread);
3629 reorg_redirect_jump (insn, label);
3635 /* Make another attempt to find insns to place in delay slots.
3637 We previously looked for insns located in front of the delay insn
3638 and, for non-jump delay insns, located behind the delay insn.
3640 Here only try to schedule jump insns and try to move insns from either
3641 the target or the following insns into the delay slot. If annulling is
3642 supported, we will be likely to do this. Otherwise, we can do this only
3646 fill_eager_delay_slots (first)
3651 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3653 for (i = 0; i < num_unfilled_slots; i++)
3656 rtx target_label, insn_at_target, fallthrough_insn;
3659 int own_fallthrough;
3660 int prediction, slots_to_fill, slots_filled;
3662 insn = unfilled_slots_base[i];
3664 || INSN_DELETED_P (insn)
3665 || GET_CODE (insn) != JUMP_INSN
3666 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3669 slots_to_fill = num_delay_slots (insn);
3670 if (slots_to_fill == 0)
3674 target_label = JUMP_LABEL (insn);
3675 condition = get_branch_condition (insn, target_label);
3680 /* Get the next active fallthrough and target insns and see if we own
3681 them. Then see whether the branch is likely true. We don't need
3682 to do a lot of this for unconditional branches. */
3684 insn_at_target = next_active_insn (target_label);
3685 own_target = own_thread_p (target_label, target_label, 0);
3687 if (condition == const_true_rtx)
3689 own_fallthrough = 0;
3690 fallthrough_insn = 0;
3695 fallthrough_insn = next_active_insn (insn);
3696 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3697 prediction = mostly_true_jump (insn, condition);
3700 /* If this insn is expected to branch, first try to get insns from our
3701 target, then our fallthrough insns.  If it is not expected to branch,
3702 try the other order.  */
3707 = fill_slots_from_thread (insn, condition, insn_at_target,
3708 fallthrough_insn, prediction == 2, 1,
3709 own_target, own_fallthrough,
3710 slots_to_fill, &slots_filled);
3712 if (delay_list == 0 && own_fallthrough)
3714 /* Even though we didn't find anything for delay slots,
3715 we might have found a redundant insn which we deleted
3716 from the thread that was filled. So we have to recompute
3717 the next insn at the target. */
3718 target_label = JUMP_LABEL (insn);
3719 insn_at_target = next_active_insn (target_label);
3722 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3723 insn_at_target, 0, 0,
3724 own_fallthrough, own_target,
3725 slots_to_fill, &slots_filled);
3730 if (own_fallthrough)
3732 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3733 insn_at_target, 0, 0,
3734 own_fallthrough, own_target,
3735 slots_to_fill, &slots_filled);
3737 if (delay_list == 0)
3739 = fill_slots_from_thread (insn, condition, insn_at_target,
3740 next_active_insn (insn), 0, 1,
3741 own_target, own_fallthrough,
3742 slots_to_fill, &slots_filled);
3746 unfilled_slots_base[i]
3747 = emit_delay_sequence (insn, delay_list,
3748 slots_filled, slots_to_fill);
3750 if (slots_to_fill == slots_filled)
3751 unfilled_slots_base[i] = 0;
3753 note_delay_statistics (slots_filled, 1);
3757 /* Once we have tried two ways to fill a delay slot, make a pass over the
3758 code to try to improve the results and to do such things as more jump threading.  */
3762 relax_delay_slots (first)
3765 register rtx insn, next, pat;
3766 register rtx trial, delay_insn, target_label;
3768 /* Look at every JUMP_INSN and see if we can improve it. */
3769 for (insn = first; insn; insn = next)
3773 next = next_active_insn (insn);
3775 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3776 the next insn, or jumps to a label that is not the last of a
3777 group of consecutive labels. */
3778 if (GET_CODE (insn) == JUMP_INSN
3779 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3780 && (target_label = JUMP_LABEL (insn)) != 0)
3782 target_label = follow_jumps (target_label);
3783 target_label = prev_label (next_active_insn (target_label));
3785 if (target_label == 0)
3786 target_label = find_end_label ();
3788 if (next_active_insn (target_label) == next
3789 && ! condjump_in_parallel_p (insn))
3795 if (target_label != JUMP_LABEL (insn))
3796 reorg_redirect_jump (insn, target_label);
3798 /* See if this jump branches around an unconditional jump.
3799 If so, invert this jump and point it to the target of the second jump.  */
3801 if (next && GET_CODE (next) == JUMP_INSN
3802 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3803 && next_active_insn (target_label) == next_active_insn (next)
3804 && no_labels_between_p (insn, next))
3806 rtx label = JUMP_LABEL (next);
3808 /* Be careful how we do this to avoid deleting code or
3809 labels that are momentarily dead.  See similar optimization in jump.c.
3812 We also need to ensure we properly handle the case when
3813 invert_jump fails. */
3815 ++LABEL_NUSES (target_label);
3817 ++LABEL_NUSES (label);
3819 if (invert_jump (insn, label))
3826 --LABEL_NUSES (label);
3828 if (--LABEL_NUSES (target_label) == 0)
3829 delete_insn (target_label);
3835 /* If this is an unconditional jump and the previous insn is a
3836 conditional jump, try reversing the condition of the previous
3837 insn and swapping our targets.  The next pass might be able to fill the slots.
3840 Don't do this if we expect the conditional branch to be true, because
3841 we would then be making the more common case longer. */
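/* E.g. (illustrative labels):

       beq  OTHER		bne  TARGET
       b    TARGET	==>	b    OTHER	*/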
3843 if (GET_CODE (insn) == JUMP_INSN
3844 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3845 && (other = prev_active_insn (insn)) != 0
3846 && (condjump_p (other) || condjump_in_parallel_p (other))
3847 && no_labels_between_p (other, insn)
3848 && 0 < mostly_true_jump (other,
3849 get_branch_condition (other,
3850 JUMP_LABEL (other))))
3852 rtx other_target = JUMP_LABEL (other);
3853 target_label = JUMP_LABEL (insn);
3855 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
3856 as we move the label. */
3858 ++LABEL_NUSES (other_target);
3860 if (invert_jump (other, target_label))
3861 reorg_redirect_jump (insn, other_target);
3864 --LABEL_NUSES (other_target);
3867 /* Now look only at cases where we have filled a delay slot. */
3868 if (GET_CODE (insn) != INSN
3869 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3872 pat = PATTERN (insn);
3873 delay_insn = XVECEXP (pat, 0, 0);
3875 /* See if the first insn in the delay slot is redundant with some
3876 previous insn. Remove it from the delay slot if so; then set up
3877 to reprocess this insn. */
3878 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3880 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3881 next = prev_active_insn (next);
3885 /* Now look only at the cases where we have a filled JUMP_INSN. */
3886 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3887 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3888 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3891 target_label = JUMP_LABEL (delay_insn);
3895 /* If this jump goes to another unconditional jump, thread it, but
3896 don't convert a jump into a RETURN here. */
3897 trial = follow_jumps (target_label);
3898 /* We use next_real_insn instead of next_active_insn, so that
3899 the special USE insns emitted by reorg won't be ignored.
3900 If they are ignored, then they will get deleted if target_label
3901 is now unreachable, and that would cause mark_target_live_regs to fail.  */
3903 trial = prev_label (next_real_insn (trial));
3904 if (trial == 0 && target_label != 0)
3905 trial = find_end_label ();
3907 if (trial != target_label
3908 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3910 reorg_redirect_jump (delay_insn, trial);
3911 target_label = trial;
3914 /* If the first insn at TARGET_LABEL is redundant with a previous
3915 insn, redirect the jump to the following insn and process again.  */
3916 trial = next_active_insn (target_label);
3917 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3918 && redundant_insn (trial, insn, 0))
3920 trial = next_active_insn (trial);
3922 target_label = find_end_label ();
3924 target_label = get_label_before (trial);
3925 reorg_redirect_jump (delay_insn, target_label);
3930 /* Similarly, if it is an unconditional jump with one insn in its
3931 delay list and that insn is redundant, thread the jump. */
3932 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3933 && XVECLEN (PATTERN (trial), 0) == 2
3934 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3935 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3936 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3937 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3939 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3940 if (target_label == 0)
3941 target_label = find_end_label ();
3943 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
3946 reorg_redirect_jump (delay_insn, target_label);
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && prev_active_insn (target_label) == insn
	  && ! condjump_in_parallel_p (delay_insn)
#ifdef HAVE_cc0
	  /* If the last insn in the delay slot sets CC0 for some insn,
	     various code assumes that it is in a delay slot.  We could
	     put it back where it belonged and delete the register notes,
	     but it doesn't seem worthwhile in this uncommon case.  */
	  && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
			      REG_CC_USER, NULL_RTX)
#endif
	  )
	{
	  int i;
	  /* All this insn does is execute its delay list and jump to the
	     following insn.  So delete the jump and just execute the delay
	     list insns.

	     We do this by deleting the INSN containing the SEQUENCE, then
	     re-emitting the insns separately, and then deleting the jump.
	     This allows the count of the jump target to be properly
	     decremented.  */

	  /* Clear the from target bit, since these insns are no longer
	     in delay slots.  */
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

	  trial = PREV_INSN (insn);
	  delete_insn (insn);
	  emit_insn_after (pat, trial);
	  delete_scheduled_jump (delay_insn);
	  continue;
	}
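      /* Note that delete_scheduled_jump, used above, does more than delete
	 the jump itself: on CC0 machines it also deletes the insn that set
	 the condition code for the jump, if that insn is not otherwise
	 needed.  */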
      /* See if this is an unconditional jump around a single insn which is
	 identical to the one in its delay slot.  In this case, we can just
	 delete the branch and the insn in its delay slot.  */
      if (next && GET_CODE (next) == INSN
	  && prev_label (next_active_insn (next)) == target_label
	  && simplejump_p (insn)
	  && XVECLEN (pat, 0) == 2
	  && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
	{
	  delete_insn (insn);
	  continue;
	}
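      /* That is, a sequence such as

		 b   L1	 [insn A]	==>	    insn A
		 insn A			     L1:
	     L1:

	 collapses to a single copy of A falling through into L1.  */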
      /* See if this jump (with its delay slots) branches around another
	 jump (without delay slots).  If so, invert this jump and point
	 it to the target of the second jump.  We cannot do this for
	 annulled jumps, though.  Again, don't convert a jump to a RETURN
	 here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && next && GET_CODE (next) == JUMP_INSN
	  && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	  && next_active_insn (target_label) == next_active_insn (next)
	  && no_labels_between_p (insn, next))
	{
	  rtx label = JUMP_LABEL (next);
	  rtx old_label = JUMP_LABEL (delay_insn);

	  if (label == 0)
	    label = find_end_label ();

	  if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
	    {
	      /* Be careful how we do this to avoid deleting code or labels
		 that are momentarily dead.  See similar optimization in
		 jump.c.  */
	      if (old_label)
		++LABEL_NUSES (old_label);

	      if (invert_jump (delay_insn, label))
		{
		  int i;

		  /* Must update the INSN_FROM_TARGET_P bits now that
		     the branch is reversed, so that mark_target_live_regs
		     will handle the delay slot insn correctly.  */
		  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
		    {
		      rtx slot = XVECEXP (PATTERN (insn), 0, i);
		      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
		    }

		  delete_insn (next);
		  next = insn;
		}

	      if (old_label && --LABEL_NUSES (old_label) == 0)
		delete_insn (old_label);
	      continue;
	    }
	}
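      /* The net effect of a successful inversion above:

	     insn:  beq	 L1  [slot]	==>	insn:  bne  L2	[slot]
	     next:  b	 L2
	 L1:				    L1:

	 with INSN_FROM_TARGET_P flipped on each delay slot insn to match
	 the reversed sense of the branch.  */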
      /* If we own the thread opposite the way this insn branches, see if we
	 can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	  && own_thread_p (NEXT_INSN (insn), 0, 1))
	try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	       && own_thread_p (target_label, target_label, 0))
	try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
	 NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}

#ifdef HAVE_return
/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */

static void
make_return_insns (first)
     rtx first;
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;
  int slots, i;
  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
      {
	real_return_label = get_label_before (insn);
	break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;

      /* Only look at filled JUMP_INSNs that go to the end of function
	 label.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE
	  || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
	continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);
      /* If we can't make the jump into a RETURN, try to redirect it to the
	 best RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
	{
	  /* Make sure redirecting the jump will not invalidate the delay
	     slot insns.  */
	  if (redirect_with_delay_slots_safe_p (jump_insn,
						real_return_label,
						insn))
	    reorg_redirect_jump (jump_insn, real_return_label);
	  continue;
	}
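      /* The eligibility test below mirrors the checks made when the delay
	 slots were originally filled: an insn in an annulled slot must pass
	 the annul-true or annul-false variant of the test, while any other
	 slot insn must pass plain eligible_for_delay.  */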
      /* See if this RETURN can accept the insns currently in its delay slot.
	 It can if it has more or an equal number of slots and the contents
	 of each are valid.  */

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
	{
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_false (jump_insn, i - 1,
					       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_true (jump_insn, i - 1,
					      XVECEXP (pat, 0, i), flags) :
#endif
		   eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
	      break;
	}
      else
	i = 0;

      if (i == XVECLEN (pat, 0))
	continue;
      /* We have to do something with this insn.  If it is an unconditional
	 RETURN, delete the SEQUENCE and output the individual insns,
	 followed by the RETURN.  Then set things up so we try to find
	 insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
	{
	  rtx prev = PREV_INSN (insn);

	  delete_insn (insn);
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

	  insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
	  emit_barrier_after (insn);

	  if (slots)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
      else
	/* It is probably more efficient to keep this with its current
	   delay slot as a branch to a RETURN.  */
	reorg_redirect_jump (jump_insn, real_return_label);
    }
  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_insn (real_return_label);

  fill_simple_delay_slots (first, 1);
  fill_simple_delay_slots (first, 0);
}
#endif /* HAVE_return */
/* Try to find insns to place in delay slots.  */

void
dbr_schedule (first, file)
     rtx first;
     FILE *file;
{
  rtx insn, next, epilogue_insn = 0;
  int i;
#if 0
  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;
#endif
  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
	max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
	epilogue_insn = insn;
    }

  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
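  /* After this, uid_to_ruid[INSN_UID (a)] < uid_to_ruid[INSN_UID (b)] tells
     whether A appears before B in the insn stream, without walking the
     chain of insns.  */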
  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }
  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      if (num_delay_slots (insn) > 0)
	obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && JUMP_LABEL (insn) != 0
	  && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
	      != JUMP_LABEL (insn)))
	redirect_jump (insn, target);
    }
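  /* So given consecutive labels

	 L1:
	 L2:  insn ...

     every jump to L1 has now been retargeted to L2, and two jumps to the
     same place always name the same label in the passes below.  */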
  /* Indicate what resources are required to be valid at the end of the
     current function.  The condition code never is and memory always is.
     If the frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is non-zero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK)
#endif
	SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (current_function_return_rtx != 0
      && GET_CODE (current_function_return_rtx) == REG)
    mark_referenced_resources (current_function_return_rtx,
			       &end_of_function_needs, 1);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i])
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);
  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)) != 0)
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);
  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table
    = (struct target_info **) alloca ((TARGET_HASH_PRIME
				       * sizeof (struct target_info *)));
  bzero ((char *) target_hash_table,
	 TARGET_HASH_PRIME * sizeof (struct target_info *));

  bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
  bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));

  /* Initialize the statistics for this function.  */
  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero ((char *) num_filled_delays, sizeof num_filled_delays);
  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (first, 1);
      fill_simple_delay_slots (first, 0);
      fill_eager_delay_slots (first);
      relax_delay_slots (first);
    }
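  /* A slot filled or a jump redirected on one pass can expose new
     opportunities (for example, relax_delay_slots may thread a jump so
     that a previously unfillable slot acquires a usable thread), which is
     why the whole sequence is simply run MAX_REORG_PASSES times.  */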
  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	  && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
	next = delete_insn (insn);
    }
  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);
  if (file)
    {
      register int i, j, need_comma;

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (file, ";; Reorg function #%d\n", i);

	      fprintf (file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (file, ", ");
		    need_comma = 1;
		    fprintf (file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (file, "\n");
	    }
	}
    }
}
#endif /* DELAY_SLOTS */