/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.
   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.
   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.
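   For instance, the successor edges of a block BB can be walked with
   a loop of the following shape (an illustrative sketch, using the
   edge fields this file manipulates):

	edge e;
	for (e = bb->succ; e; e = e->succ_next)
	  ...e->dest is a successor block of BB...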
   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.
   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
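   For example, whether register number REGNO is live on entry to a
   block BB could be tested with the regset macros from basic-block.h
   (an illustrative sketch, not code from this pass):

	if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
	  ...REGNO is live at the start of BB...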
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.
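   A consumer of this information can test whether a register REG dies
   in INSN with find_reg_note, for example (a sketch, not a quote from
   this file):

	if (find_reg_note (insn, REG_DEAD, reg))
	  ...the value in REG is not needed after INSN...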
   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
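   Roughly, such a forward scan updates the live set across one insn as

	live-after = (live-before + regs set by the insn,
				    excluding those with a REG_UNUSED note)
		     - regs with a REG_DEAD note on the insn

   (this is a paraphrase of the bookkeeping described above, not a
   formula taken from the code).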
   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
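   These vectors are accessed through accessor macros from regs.h, so a
   later pass might ask, for instance (an illustrative sketch):

	if (REG_N_SETS (regno) == 1 && REG_N_CALLS_CROSSED (regno) == 0)
	  ...REGNO is set exactly once and is never live across a call...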
   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
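
/* A minimal standalone sketch of the core backward dataflow step that
   life analysis performs within a block: a register is live before an
   insn if it is used by the insn, or live after it and not set by it.
   Register sets are modeled here as single machine words; the names
   are hypothetical and none of this is part of the pass itself.  */
#if 0
typedef unsigned long regset_word;

static regset_word
live_before_insn (live_after, def, use)
     regset_word live_after, def, use;
{
  /* live-before = use | (live-after & ~def)  */
  return use | (live_after & ~def);
}
#endif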
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"
#include "expr.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;
/* Number of basic blocks in the current function.  */

int n_basic_blocks;
/* Number of edges in the current function.  */

int n_edges;
/* The basic block array.  */

varray_type basic_block_info;
/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
  },
  {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
  }};
/* Nonzero if the second flow pass has completed.  */

int flow2_completed;
/* Maximum register number used in this function, plus one.  */

int max_regno;
/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;
/* Element N is the next insn that uses (hard or pseudo) register number N
   within the current basic block; or zero, if there is no such insn.
   This is valid only during the final backward scan in propagate_block.  */

static rtx *reg_next_use;
/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;
/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;
/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */

rtx regs_may_share;
/* Depth within loops of basic block being scanned for lifetime analysis,
   plus one.  This is the weight attached to references to registers.  */

static int loop_depth;
/* During propagate_block, this is non-zero if the value of CC0 is live.  */

static int cc0_live;
/* During propagate_block, this contains a list of all the MEMs we are
   tracking for dead store elimination.  */

static rtx mem_set_list;
/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;
/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;
/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
/* Forward declarations */
static int count_basic_blocks PARAMS ((rtx));
static rtx find_basic_blocks_1 PARAMS ((rtx));
static void create_basic_block PARAMS ((int, rtx, rtx, rtx));
static void clear_edges PARAMS ((void));
static void make_edges PARAMS ((rtx));
static void make_label_edge PARAMS ((sbitmap *, basic_block,
				     rtx, int));
static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
				  basic_block, rtx, int));
static void mark_critical_edges PARAMS ((void));
static void move_stray_eh_region_notes PARAMS ((void));
static void record_active_eh_regions PARAMS ((rtx));

static void commit_one_edge_insertion PARAMS ((edge));

static void delete_unreachable_blocks PARAMS ((void));
static void delete_eh_regions PARAMS ((void));
static int can_delete_note_p PARAMS ((rtx));
static int delete_block PARAMS ((basic_block));
static void expunge_block PARAMS ((basic_block));
static int can_delete_label_p PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static void merge_blocks_nomove PARAMS ((basic_block, basic_block));
static int merge_blocks PARAMS ((edge,basic_block,basic_block));
static void try_merge_blocks PARAMS ((void));
static void tidy_fallthru_edge PARAMS ((edge,basic_block,basic_block));
static void tidy_fallthru_edges PARAMS ((void));
static int verify_wide_reg_1 PARAMS ((rtx *, void *));
static void verify_wide_reg PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start PARAMS ((regset, basic_block));
static int set_noop_p PARAMS ((rtx));
static int noop_move_p PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg PARAMS ((rtx, void *));
static void mark_regs_live_at_end PARAMS ((regset));
static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
static void propagate_block PARAMS ((basic_block, regset,
				     regset, int));
static int insn_dead_p PARAMS ((rtx, regset, int, rtx));
static int libcall_dead_p PARAMS ((rtx, regset, rtx, rtx));
static void mark_set_regs PARAMS ((regset, regset, rtx,
				   rtx, regset));
static void mark_set_1 PARAMS ((regset, regset, rtx,
				rtx, regset));
#ifdef AUTO_INC_DEC
static void find_auto_inc PARAMS ((regset, rtx, rtx));
static int try_pre_increment_1 PARAMS ((rtx));
static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_regs PARAMS ((regset, regset, rtx, int, rtx));
void dump_flow_info PARAMS ((FILE *));
void debug_flow_info PARAMS ((void));
static void dump_edge_info PARAMS ((FILE *, edge, int));

static void count_reg_sets_1 PARAMS ((rtx));
static void count_reg_sets PARAMS ((rtx));
static void count_reg_references PARAMS ((rtx));
static void invalidate_mems_from_autoinc PARAMS ((rtx));
static void remove_fake_successors PARAMS ((basic_block));
static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build PARAMS ((struct loops *));
static int flow_loop_level_compute PARAMS ((struct loop *, int));
static int flow_loops_level_compute PARAMS ((struct loops *));
static basic_block get_common_dest PARAMS ((basic_block, basic_block));
static basic_block chain_reorder_blocks PARAMS ((edge, basic_block));
static void make_reorder_chain PARAMS ((basic_block));
static void fixup_reorder_chain PARAMS ((void));
/* This function is always defined so it can be called from the
   debugger, and it is declared extern so we don't get warnings about
   it being unused.  */
void verify_flow_info PARAMS ((void));
int flow_loop_outside_edge_p PARAMS ((const struct loop *, edge));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }
  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);
  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();

  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;

  compute_bb_for_insn (max_uid);
  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;
  rtx prev_call = NULL_RTX;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? XWINT (XEXP (note, 0), 0) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_had_abnormal_edge = 1;
	    }
	}
      else if (code != NOTE)
	prev_call = NULL_RTX;

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }
  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int call_has_abnormal_edge = 0;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);
      if (code == CALL_INSN)
	{
	  /* Record whether this call created an edge.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
	  call_has_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_list && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_has_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_has_abnormal_edge = 1;
	    }
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;
		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		next = flow_delete_insn (insn);
	      }

	    break;
	  }
	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN)
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }
	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;

	  /* ??? Make a special check for table jumps.  The way this
	     happens is truly and amazingly gross.  We are about to
	     create a basic block that contains just a code label and
	     an addr*vec jump insn.  Worse, an addr_diff_vec creates
	     its own natural loop.

	     Prevent this bit of brain damage, pasting things together
	     correctly in make_edges.

	     The correct solution involves emitting the table directly
	     on the tablejump instruction as a note, or JUMP_LABEL.  */

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      head = end = NULL;
	      n_basic_blocks--;
	      break;
	    }

	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN)
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  /* A basic block ends at a call that can either throw or
	     do a non-local goto.  */
	  if (call_has_abnormal_edge)
	    {
	    new_bb_inclusive:
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;

	    new_bb_exclusive:
	      create_basic_block (i++, head, end, bb_note);
	      head = end = NULL_RTX;
	      bb_note = NULL_RTX;
	      break;
	    }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}
      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);

  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

static void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      if (GET_CODE (head) == CODE_LABEL)
	add_insn_after (bb_note, head);
      else
	{
	  add_insn_before (bb_note, head);
	  head = bb_note;
	}
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
/* Free the memory associated with the edge structures.  */

void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);
	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }
	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}
      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* Add any appropriate EH edges.  We do this unconditionally
	     since there may be a REG_EH_REGION or REG_EH_RETHROW note
	     on the call, and this needn't be within an EH region.  */
	  make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);
	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */

	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || XINT (XEXP (note, 0), 0) >= 0)
		for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}
      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		     EDGE_ABNORMAL | EDGE_EH | is_call);
}
/* EH_REGION notes appearing between basic blocks is ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;
  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }
  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;
  /* Create the basic block note.

     Where we place the note can have a noticable impact on the generated
     code.  Consider this cfg:

			E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;
  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
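
/* A typical use is an illustrative sketch only: DEST, SRC, and the
   edge E are assumed to come from the caller's context, and
   gen_move_insn simply stands in for any insn-producing routine:

	insert_insn_on_edge (gen_move_insn (dest, src), e);
	...queue more insns on other edges as needed...
	commit_edge_insertions ();
*/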
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	before = bb->end;
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;
    }
  else
    {
      rtx last = emit_insns_after (insns, after);
      if (after == bb->end)
	bb->end = last;

      if (GET_CODE (last) == JUMP_INSN)
	{
	  if (returnjump_p (last))
	    {
	      /* ??? Remove all outgoing edges from BB and add one
		 for EXIT.  This is not currently a problem because
		 this only happens for the (single) epilogue, which
		 already has a fallthru edge to EXIT.  */

	      e = bb->succ;
	      if (e->dest != EXIT_BLOCK_PTR
		  || e->succ_next != NULL
		  || (e->flags & EDGE_FALLTHRU) == 0)
		abort ();
	      e->flags &= ~EDGE_FALLTHRU;

	      emit_barrier_after (last);
	      bb->end = last;
	    }
	  else
	    abort ();
	}
    }
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconcievable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static int
delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}

      /* This label may be referenced by code solely for its value, or
	 referenced by static data, or something.  We have determined
	 that it is not reachable, but cannot delete the label itself.
	 Save code space and continue to delete the balance of the block,
	 along with properly updating the cfg.  */
      if (!can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     insns thing.  */
	  if (insn == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Selectively unlink the insn chain.  Include any BARRIER that may
     follow the basic block.  */
  end = next_nonnote_insn (b->end);
  if (!end || GET_CODE (end) != BARRIER)
    end = b->end;
  flow_delete_insn_chain (insn, end);

 no_delete_insns:

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e ; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e ; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}
/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      BASIC_BLOCK (i) = x;
      x->index = i;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}
/* Delete INSN by patching it out.  Return the next insn.  */

rtx
flow_delete_insn (insn)
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  rtx next = NEXT_INSN (insn);

  PREV_INSN (insn) = NULL_RTX;
  NEXT_INSN (insn) = NULL_RTX;

  if (prev)
    NEXT_INSN (prev) = next;
  if (next)
    PREV_INSN (next) = prev;
  else
    set_last_insn (prev);

  if (GET_CODE (insn) == CODE_LABEL)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  return next;
}
/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  rtx x;

  if (LABEL_PRESERVE_P (label))
    return 0;

  for (x = forced_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = label_value_list; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;

  /* User declared labels must be preserved.  */
  if (LABEL_NAME (label) != 0)
    return 0;

  return 1;
}
/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_predecessor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;
  int index;

  start = a->head;
  end = a->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  if (end != PREV_INSN (b->head))
    reorder_insns (start, end, PREV_INSN (b->head));

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
	       a->index, b->index);
    }

  /* Swap the records for the two blocks around.  Although we are deleting B,
     A is now where B was and we want to compact the BB array from where
     A used to be.  */
  BASIC_BLOCK(a->index) = b;
  BASIC_BLOCK(b->index) = a;
  index = a->index;
  a->index = b->index;
  b->index = index;

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  return 1;
}
/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_successor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;

  start = b->head;
  end = b->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  reorder_insns (start, end, a->end);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
	       b->index, a->index);
    }

  return 1;
}
/* Blocks A and B are to be merged into a single block.  The insns
   are already contiguous, hence `nomove'.  */

static void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  edge e;
  rtx b_head, b_end, a_end;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  b_head = b->head;
  b_end = b->end;
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;
      b_head = flow_delete_insn (b_head);
    }

  /* Delete the basic block note.  */
  if (GET_CODE (b_head) == NOTE
      && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
    {
      if (b_head == b_end)
	b_empty = 1;
      b_head = flow_delete_insn (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  a_end = a->end;
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      prev = prev_nonnote_insn (a_end);
      if (!prev)
	prev = a->head;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */

      if (prev && sets_cc0_p (prev))
	{
	  rtx tmp = prev;
	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  flow_delete_insn (tmp);
	}
#endif

      /* Note that a->head != a->end, since we should have at least a
	 bb note plus the jump, so prev != insn.  */
      flow_delete_insn (a_end);
      a_end = prev;
    }

  /* By definition, there should only be one successor of A, and that is
     B.  Free that edge struct.  */
  n_edges--;
  free (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e ; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      BLOCK_FOR_INSN (b_head) = a;
      while (b_head != b_end)
	{
	  b_head = NEXT_INSN (b_head);
	  BLOCK_FOR_INSN (b_head) = a;
	}
    }
  a->end = a_end;

  /* Compact the basic block array.  */
  expunge_block (b);
}
/* Attempt to merge basic blocks that are potentially non-adjacent.
   Return true iff the attempt succeeded.  */

static int
merge_blocks (e, b, c)
     edge e;
     basic_block b, c;
{
  /* If B has a fallthru edge to C, no need to move anything.  */
  if (e->flags & EDGE_FALLTHRU)
    {
      /* If a label still appears somewhere and we cannot delete the label,
	 then we cannot merge the blocks.  The edge was tidied already.  */

      rtx insn, stop = NEXT_INSN (c->head);
      for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
	  return 0;

      merge_blocks_nomove (b, c);

      if (rtl_dump_file)
	{
	  fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
		   b->index, c->index);
	}

      return 1;
    }
  else
    {
      edge tmp_edge;
      basic_block d;
      int c_has_outgoing_fallthru;
      int b_has_incoming_fallthru;
2253 /* We must make sure to not munge nesting of exception regions,
2254 lexical blocks, and loop notes.
2256 The first is taken care of by requiring that the active eh
2257 region at the end of one block always matches the active eh
2258 region at the beginning of the next block.
2260 The later two are taken care of by squeezing out all the notes. */
/* ??? A throw/catch edge (or any abnormal edge) should rarely be
   executed, and we may want to treat blocks which have two out edges,
   one normal, one abnormal, as having only one edge for block
   merging purposes.  */
2267 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
if (tmp_edge->flags & EDGE_FALLTHRU)
  break;
2270 c_has_outgoing_fallthru = (tmp_edge != NULL);
2272 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
if (tmp_edge->flags & EDGE_FALLTHRU)
  break;
2275 b_has_incoming_fallthru = (tmp_edge != NULL);
2277 /* If B does not have an incoming fallthru, and the exception regions
match, then B can be moved immediately before C without introducing
or modifying jumps.

C cannot be the first block, so we do not have to worry about
2282 accessing a non-existent block. */
2283 d = BASIC_BLOCK (c->index - 1);
2284 if (! b_has_incoming_fallthru
2285 && d->eh_end == b->eh_beg
2286 && b->eh_end == c->eh_beg)
2287 return merge_blocks_move_predecessor_nojumps (b, c);
2289 /* Otherwise, we're going to try to move C after B. Make sure the
2290 exception regions match.
2292 If B is the last basic block, then we must not try to access the
2293 block structure for block B + 1. Luckily in that case we do not
2294 need to worry about matching exception regions. */
2295 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2296 if (b->eh_end == c->eh_beg
2297 && (d == NULL || c->eh_end == d->eh_beg))
2299 /* If C does not have an outgoing fallthru, then it can be moved
2300 immediately after B without introducing or modifying jumps. */
2301 if (! c_has_outgoing_fallthru)
2302 return merge_blocks_move_successor_nojumps (b, c);
2304 /* Otherwise, we'll need to insert an extra jump, and possibly
2305 a new block to contain it. */
2306 /* ??? Not implemented yet. */
2313 /* Top level driver for merge_blocks. */
2320 /* Attempt to merge blocks as made possible by edge removal. If a block
2321 has only one successor, and the successor has only one predecessor,
2322 they may be combined. */
2324 for (i = 0; i < n_basic_blocks; )
2326 basic_block c, b = BASIC_BLOCK (i);
/* A loop because chains of blocks might be combinable.  */
2330 while ((s = b->succ) != NULL
2331 && s->succ_next == NULL
2332 && (s->flags & EDGE_EH) == 0
2333 && (c = s->dest) != EXIT_BLOCK_PTR
2334 && c->pred->pred_next == NULL
2335 /* If the jump insn has side effects, we can't kill the edge. */
2336 && (GET_CODE (b->end) != JUMP_INSN
2337 || onlyjump_p (b->end))
2338 && merge_blocks (s, b, c))
2341 /* Don't get confused by the index shift caused by deleting blocks. */
2346 /* The given edge should potentially be a fallthru edge. If that is in
2347 fact true, delete the jump and barriers that are in the way. */
2350 tidy_fallthru_edge (e, b, c)
/* ??? In a late-running flow pass, other folks may have deleted basic
   blocks by nopping them out, leaving multiple BARRIERs between here
   and the target label.  They ought to be chastised and fixed.
2360 We can also wind up with a sequence of undeletable labels between
2361 one block and the next.
2363 So search through a sequence of barriers, labels, and notes for
2364 the head of block C and assert that we really do fall through. */
if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
  abort ();
2369 /* Remove what will soon cease being the jump insn from the source block.
If block B consisted only of this single jump, turn it into a deleted
note.  */
2373 if (GET_CODE (q) == JUMP_INSN)
2376 /* If this was a conditional jump, we need to also delete
2377 the insn that set cc0. */
2378 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2385 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2386 NOTE_SOURCE_FILE (q) = 0;
2389 b->end = q = PREV_INSN (q);
2392 /* Selectively unlink the sequence. */
2393 if (q != PREV_INSN (c->head))
2394 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2396 e->flags |= EDGE_FALLTHRU;
2399 /* Fix up edges that now fall through, or rather should now fall through
2400 but previously required a jump around now deleted blocks. Simplify
2401 the search by only examining blocks numerically adjacent, since this
2402 is how find_basic_blocks created them. */
2405 tidy_fallthru_edges ()
2409 for (i = 1; i < n_basic_blocks; ++i)
2411 basic_block b = BASIC_BLOCK (i - 1);
2412 basic_block c = BASIC_BLOCK (i);
/* We care about simple conditional or unconditional jumps with
   a single successor.
2418 If we had a conditional branch to the next instruction when
2419 find_basic_blocks was called, then there will only be one
2420 out edge for the block which ended with the conditional
2421 branch (since we do not create duplicate edges).
2423 Furthermore, the edge will be marked as a fallthru because we
2424 merge the flags for the duplicate edges. So we do not want to
2425 check that the edge is not a FALLTHRU edge. */
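/* As an illustration (insn, register and label numbers invented),
   a conditional branch to the immediately following label,

     (jump_insn 10 ... (set (pc)
	 (if_then_else (eq (reg:SI 58) (const_int 0))
	     (label_ref 12)
	     (pc))))
     (code_label 12 ...)

   yields a single edge from B to C carrying EDGE_FALLTHRU, since the
   taken and not-taken edges coincide and their flags were merged.  */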
2426 if ((s = b->succ) != NULL
2427 && s->succ_next == NULL
2429 /* If the jump insn has side effects, we can't tidy the edge. */
2430 && (GET_CODE (b->end) != JUMP_INSN
2431 || onlyjump_p (b->end)))
2432 tidy_fallthru_edge (s, b, c);
2436 /* Discover and record the loop depth at the head of each basic block. */
2439 calculate_loop_depth (dump)
2444 /* The loop infrastructure does the real job for us. */
2445 flow_loops_find (&loops);
2448 flow_loops_dump (&loops, dump, 0);
2450 flow_loops_free (&loops);
2453 /* Perform data flow analysis.
F is the first insn of the function and NREGS the number of register
numbers in use.  */
2458 life_analysis (f, nregs, file, remove_dead_code)
2462 int remove_dead_code;
2464 #ifdef ELIMINABLE_REGS
2466 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
/* Record which registers will be eliminated.  We use this in
   mark_used_regs.  */
2474 CLEAR_HARD_REG_SET (elim_reg_set);
2476 #ifdef ELIMINABLE_REGS
2477 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2478 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2480 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2483 /* We want alias analysis information for local dead store elimination. */
2484 init_alias_analysis ();
2487 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2491 if (! remove_dead_code)
2492 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
/* The post-reload life analysis must have (on a global basis) the
   same registers live as were computed by reload itself; otherwise
   offsets and such may be incorrect due to register elimination.

   Reload will mark some registers as live even though they do not
   appear in the rtl.  */
2501 if (reload_completed)
2502 flags &= ~PROP_REG_INFO;
2506 /* Always remove no-op moves. Do this before other processing so
2507 that we don't have to keep re-scanning them. */
2508 delete_noop_moves (f);
2510 /* Some targets can emit simpler epilogues if they know that sp was
2511 not ever modified during the function. After reload, of course,
2512 we've already emitted the epilogue so there's no sense searching. */
2513 if (! reload_completed)
2514 notice_stack_pointer_modification (f);
2516 /* Allocate and zero out data structures that will record the
2517 data from lifetime analysis. */
2518 allocate_reg_life_data ();
2519 allocate_bb_life_data ();
2520 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2521 all_blocks = sbitmap_alloc (n_basic_blocks);
2522 sbitmap_ones (all_blocks);
2524 /* Find the set of registers live on function exit. */
2525 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2527 /* "Update" life info from zero. It'd be nice to begin the
2528 relaxation with just the exit and noreturn blocks, but that set
2529 is not immediately handy. */
2531 if (flags & PROP_REG_INFO)
2532 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2533 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2536 sbitmap_free (all_blocks);
2537 free (reg_next_use);
2538 reg_next_use = NULL;
2539 end_alias_analysis ();
2542 dump_flow_info (file);
2544 free_basic_block_vars (1);
2547 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2548 Search for REGNO. If found, abort if it is not wider than word_mode. */
2551 verify_wide_reg_1 (px, pregno)
2556 int regno = *(int *) pregno;
2558 if (GET_CODE (x) == REG && REGNO (x) == regno)
if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
  abort ();
2567 /* A subroutine of verify_local_live_at_start. Search through insns
2568 between HEAD and END looking for register REGNO. */
2571 verify_wide_reg (regno, head, end)
2577 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
&& for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2582 head = NEXT_INSN (head);
/* We didn't find the register at all.  Something's way screwy.  */
abort ();
2589 /* A subroutine of update_life_info. Verify that there are no untoward
2590 changes in live_at_start during a local update. */
2593 verify_local_live_at_start (new_live_at_start, bb)
2594 regset new_live_at_start;
2597 if (reload_completed)
2599 /* After reload, there are no pseudos, nor subregs of multi-word
2600 registers. The regsets should exactly match. */
if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
  abort ();
2608 /* Find the set of changed registers. */
2609 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2611 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2613 /* No registers should die. */
if (REGNO_REG_SET_P (bb->global_live_at_start, i))
  abort ();
2616 /* Verify that the now-live register is wider than word_mode. */
2617 verify_wide_reg (i, bb->head, bb->end);
2622 /* Updates life information starting with the basic blocks set in BLOCKS.
2624 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2625 expecting local modifications to basic blocks. If we find extra
2626 registers live at the beginning of a block, then we either killed
2627 useful data, or we have a broken split that wants data not provided.
2628 If we find registers removed from live_at_start, that means we have
2629 a broken peephole that is killing a register it shouldn't.
2631 ??? This is not true in one situation -- when a pre-reload splitter
2632 generates subregs of a multi-word pseudo, current life analysis will
2633 lose the kill. So we _can_ have a pseudo go live. How irritating.
2635 BLOCK_FOR_INSN is assumed to be correct.
2637 PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
2638 up reg_next_use. Including PROP_REG_INFO does not properly refresh
2639 regs_ever_live unless the caller resets it to zero. */
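/* A typical local update after splitting insns in one basic block BB
   might look like this (an illustrative sketch only):

     sbitmap blocks = sbitmap_alloc (n_basic_blocks);
     sbitmap_zero (blocks);
     SET_BIT (blocks, bb->index);
     update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
     sbitmap_free (blocks);  */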
2642 update_life_info (blocks, extent, prop_flags)
2644 enum update_life_extent extent;
2648 regset_head tmp_head;
2651 tmp = INITIALIZE_REG_SET (tmp_head);
2653 /* For a global update, we go through the relaxation process again. */
2654 if (extent != UPDATE_LIFE_LOCAL)
2656 calculate_global_regs_live (blocks, blocks,
2657 prop_flags & PROP_SCAN_DEAD_CODE);
2659 /* If asked, remove notes from the blocks we'll update. */
2660 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2661 count_or_remove_death_notes (blocks, 1);
2664 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2666 basic_block bb = BASIC_BLOCK (i);
2668 COPY_REG_SET (tmp, bb->global_live_at_end);
2669 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2671 if (extent == UPDATE_LIFE_LOCAL)
2672 verify_local_live_at_start (tmp, bb);
2677 if (prop_flags & PROP_REG_INFO)
2679 /* The only pseudos that are live at the beginning of the function
2680 are those that were not set anywhere in the function. local-alloc
2681 doesn't know how to handle these correctly, so mark them as not
2682 local to any one basic block. */
2683 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2684 FIRST_PSEUDO_REGISTER, i,
2685 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2687 /* We have a problem with any pseudoreg that lives across the setjmp.
2688 ANSI says that if a user variable does not change in value between
2689 the setjmp and the longjmp, then the longjmp preserves it. This
2690 includes longjmp from a place where the pseudo appears dead.
2691 (In principle, the value still exists if it is in scope.)
2692 If the pseudo goes in a hard reg, some other value may occupy
2693 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2694 Conclusion: such a pseudo must not go in a hard reg. */
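/* For example, at the user level (compute and munge_and_longjmp are
   hypothetical helpers, the latter eventually calling longjmp):

     jmp_buf buf;
     int v = compute ();
     if (setjmp (buf))
       return v;
     munge_and_longjmp (buf);

   V does not change between the setjmp and the longjmp, so ANSI says
   the longjmp preserves it, even though v's pseudo may look dead at
   the point of the longjmp call.  */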
2695 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2696 FIRST_PSEUDO_REGISTER, i,
2698 if (regno_reg_rtx[i] != 0)
2700 REG_LIVE_LENGTH (i) = -1;
2701 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2707 /* Free the variables allocated by find_basic_blocks.
2709 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2712 free_basic_block_vars (keep_head_end_p)
2713 int keep_head_end_p;
2715 if (basic_block_for_insn)
2717 VARRAY_FREE (basic_block_for_insn);
2718 basic_block_for_insn = NULL;
2721 if (! keep_head_end_p)
2724 VARRAY_FREE (basic_block_info);
2727 ENTRY_BLOCK_PTR->aux = NULL;
2728 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2729 EXIT_BLOCK_PTR->aux = NULL;
2730 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2734 /* Return nonzero if the destination of SET equals the source. */
2739 rtx src = SET_SRC (set);
2740 rtx dst = SET_DEST (set);
2742 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2744 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2746 src = SUBREG_REG (src);
2747 dst = SUBREG_REG (dst);
2750 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2751 && REGNO (src) == REGNO (dst));
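/* For instance (pseudo numbers invented),

     (set (reg:SI 58) (reg:SI 58))

   is a no-op, while

     (set (subreg:SI (reg:DI 58) 1) (subreg:SI (reg:DI 58) 0))

   is not, since the two SUBREGs select different words.  */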
/* Return nonzero if an insn consists only of SETs, each of which
   only sets a value to itself.  */
2760 rtx pat = PATTERN (insn);
2762 /* Insns carrying these notes are useful later on. */
2763 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2766 if (GET_CODE (pat) == SET && set_noop_p (pat))
2769 if (GET_CODE (pat) == PARALLEL)
2772 /* If nothing but SETs of registers to themselves,
2773 this insn can also be deleted. */
2774 for (i = 0; i < XVECLEN (pat, 0); i++)
2776 rtx tem = XVECEXP (pat, 0, i);
2778 if (GET_CODE (tem) == USE
2779 || GET_CODE (tem) == CLOBBER)
2782 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2791 /* Delete any insns that copy a register to itself. */
2794 delete_noop_moves (f)
2798 for (insn = f; insn; insn = NEXT_INSN (insn))
2800 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2802 PUT_CODE (insn, NOTE);
2803 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2804 NOTE_SOURCE_FILE (insn) = 0;
2809 /* Determine if the stack pointer is constant over the life of the function.
2810 Only useful before prologues have been emitted. */
2813 notice_stack_pointer_modification_1 (x, pat, data)
2815 rtx pat ATTRIBUTE_UNUSED;
2816 void *data ATTRIBUTE_UNUSED;
2818 if (x == stack_pointer_rtx
2819 /* The stack pointer is only modified indirectly as the result
2820 of a push until later in flow. See the comments in rtl.texi
2821 regarding Embedded Side-Effects on Addresses. */
2822 || (GET_CODE (x) == MEM
2823 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2824 || GET_CODE (XEXP (x, 0)) == PRE_INC
2825 || GET_CODE (XEXP (x, 0)) == POST_DEC
2826 || GET_CODE (XEXP (x, 0)) == POST_INC)
2827 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2828 current_function_sp_is_unchanging = 0;
2832 notice_stack_pointer_modification (f)
/* Assume that the stack pointer is unchanging if alloca hasn't been
   used.  */
2839 current_function_sp_is_unchanging = !current_function_calls_alloca;
if (! current_function_sp_is_unchanging)
  return;
2843 for (insn = f; insn; insn = NEXT_INSN (insn))
2845 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2847 /* Check if insn modifies the stack pointer. */
note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
	     NULL);
if (! current_function_sp_is_unchanging)
  return;
2856 /* Mark a register in SET. Hard registers in large modes get all
2857 of their component registers set as well. */
2859 mark_reg (reg, xset)
2863 regset set = (regset) xset;
2864 int regno = REGNO (reg);
2866 if (GET_MODE (reg) == BLKmode)
2869 SET_REGNO_REG_SET (set, regno);
2870 if (regno < FIRST_PSEUDO_REGISTER)
2872 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
while (--n > 0)
  SET_REGNO_REG_SET (set, regno + n);
2878 /* Mark those regs which are needed at the end of the function as live
2879 at the end of the last basic block. */
2881 mark_regs_live_at_end (set)
2886 /* If exiting needs the right stack value, consider the stack pointer
2887 live at the end of the function. */
2888 if ((HAVE_epilogue && reload_completed)
2889 || ! EXIT_IGNORE_STACK
2890 || (! FRAME_POINTER_REQUIRED
2891 && ! current_function_calls_alloca
2892 && flag_omit_frame_pointer)
2893 || current_function_sp_is_unchanging)
2895 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2898 /* Mark the frame pointer if needed at the end of the function. If
2899 we end up eliminating it, it will be removed from the live list
2900 of each basic block by reload. */
2902 if (! reload_completed || frame_pointer_needed)
2904 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2905 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
/* If they are different, also mark the hard frame pointer as live.  */
SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif
2911 #ifdef PIC_OFFSET_TABLE_REGNUM
2912 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2913 /* Many architectures have a GP register even without flag_pic.
2914 Assume the pic register is not in use, or will be handled by
2915 other means, if it is not fixed. */
2916 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
#endif
#endif
2921 /* Mark all global registers, and all registers used by the epilogue
2922 as being live at the end of the function since they may be
2923 referenced by our caller. */
2924 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i]
#ifdef EPILOGUE_USES
    || EPILOGUE_USES (i)
#endif
    )
  SET_REGNO_REG_SET (set, i);
/* Mark all call-saved registers that we actually used.  */
2933 if (HAVE_epilogue && reload_completed)
2935 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2936 if (! call_used_regs[i] && regs_ever_live[i])
2937 SET_REGNO_REG_SET (set, i);
2940 /* Mark function return value. */
2941 diddle_return_value (mark_reg, set);
2944 /* Propagate global life info around the graph of basic blocks. Begin
2945 considering blocks with their corresponding bit set in BLOCKS_IN.
2946 BLOCKS_OUT is set for every block that was changed. */
2949 calculate_global_regs_live (blocks_in, blocks_out, flags)
2950 sbitmap blocks_in, blocks_out;
2953 basic_block *queue, *qhead, *qtail, *qend;
2954 regset tmp, new_live_at_end;
2955 regset_head tmp_head;
2956 regset_head new_live_at_end_head;
2959 tmp = INITIALIZE_REG_SET (tmp_head);
2960 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
2962 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
2963 because the `head == tail' style test for an empty queue doesn't
2964 work with a full queue. */
2965 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
qtail = queue;
qhead = qend = queue + n_basic_blocks + 2;
2969 /* Clear out the garbage that might be hanging out in bb->aux. */
2970 for (i = n_basic_blocks - 1; i >= 0; --i)
2971 BASIC_BLOCK (i)->aux = NULL;
2973 /* Queue the blocks set in the initial mask. Do this in reverse block
number order so that the first round is more likely to do useful
work.  We use AUX non-null to flag that the block is queued.  */
2976 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
2978 basic_block bb = BASIC_BLOCK (i);
2983 sbitmap_zero (blocks_out);
2985 while (qhead != qtail)
2987 int rescan, changed;
/* Begin by propagating live_at_start from the successor blocks.  */
2997 CLEAR_REG_SET (new_live_at_end);
2998 for (e = bb->succ; e ; e = e->succ_next)
3000 basic_block sb = e->dest;
3001 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3004 if (bb == ENTRY_BLOCK_PTR)
3006 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3010 /* On our first pass through this block, we'll go ahead and continue.
Recognize the first pass by local_set being NULL.  On subsequent passes, we
3012 get to skip out early if live_at_end wouldn't have changed. */
3014 if (bb->local_set == NULL)
3016 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3021 /* If any bits were removed from live_at_end, we'll have to
3022 rescan the block. This wouldn't be necessary if we had
3023 precalculated local_live, however with PROP_SCAN_DEAD_CODE
local_live is really dependent on live_at_end.  */
3025 CLEAR_REG_SET (tmp);
3026 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3027 new_live_at_end, BITMAP_AND_COMPL);
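/* A worked example (register numbers invented): if the old
   live_at_end is {1, 2} and new_live_at_end is {2, 3}, then
   old AND ~new = {1} is nonempty; register 1 ceased to be live
   at the end of the block, so we must rescan.  */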
/* Find the set of changed bits.  Take this opportunity to notice
   whether this set is empty and exit early if so.  */
3033 CLEAR_REG_SET (tmp);
3034 changed = bitmap_operation (tmp, bb->global_live_at_end,
3035 new_live_at_end, BITMAP_XOR);
3039 /* If any of the changed bits overlap with local_set,
3040 we'll have to rescan the block. Detect overlap by
3041 the AND with ~local_set turning off bits. */
rescan = bitmap_operation (tmp, tmp, bb->local_set,
			   BITMAP_AND_COMPL);
3047 /* Let our caller know that BB changed enough to require its
3048 death notes updated. */
3049 SET_BIT (blocks_out, bb->index);
3053 /* Add to live_at_start the set of all registers in
3054 new_live_at_end that aren't in the old live_at_end. */
bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
		  BITMAP_AND_COMPL);
3058 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
changed = bitmap_operation (bb->global_live_at_start,
			    bb->global_live_at_start,
			    tmp, BITMAP_IOR);
3068 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3070 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3071 into live_at_start. */
3072 propagate_block (bb, new_live_at_end, bb->local_set, flags);
/* If live_at_start didn't change, no need to go further.  */
if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
  continue;
3078 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3081 /* Queue all predecessors of BB so that we may re-examine
3082 their live_at_end. */
3083 for (e = bb->pred; e ; e = e->pred_next)
3085 basic_block pb = e->src;
3086 if (pb->aux == NULL)
3097 FREE_REG_SET (new_live_at_end);
3099 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3101 basic_block bb = BASIC_BLOCK (i);
3102 FREE_REG_SET (bb->local_set);
3108 /* Subroutines of life analysis. */
3110 /* Allocate the permanent data structures that represent the results
3111 of life analysis. Not static since used also for stupid life analysis. */
3114 allocate_bb_life_data ()
3118 for (i = 0; i < n_basic_blocks; i++)
3120 basic_block bb = BASIC_BLOCK (i);
3122 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3123 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3126 ENTRY_BLOCK_PTR->global_live_at_end
3127 = OBSTACK_ALLOC_REG_SET (function_obstack);
3128 EXIT_BLOCK_PTR->global_live_at_start
3129 = OBSTACK_ALLOC_REG_SET (function_obstack);
3131 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3135 allocate_reg_life_data ()
3139 /* Recalculate the register space, in case it has grown. Old style
3140 vector oriented regsets would set regset_{size,bytes} here also. */
3141 allocate_reg_info (max_regno, FALSE, FALSE);
3143 /* Reset all the data we'll collect in propagate_block and its
3145 for (i = 0; i < max_regno; i++)
3149 REG_N_DEATHS (i) = 0;
3150 REG_N_CALLS_CROSSED (i) = 0;
3151 REG_LIVE_LENGTH (i) = 0;
3152 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3156 /* Compute the registers live at the beginning of a basic block
3157 from those live at the end.
3159 When called, OLD contains those live at the end.
3160 On return, it contains those live at the beginning.
BB is the basic block; its head and end insns delimit the scan.

FLAGS selects the PROP_* operations to perform.  Under
PROP_KILL_DEAD_CODE we delete dead stores; under PROP_LOG_LINKS we
set up the logical links of instructions; under PROP_DEATH_NOTES we
record where registers die; and under PROP_AUTOINC we merge
instructions for autoincrement and autodecrement addresses.

SIGNIFICANT is nonzero only the first time for each basic block.
If it is nonzero, it points to a regset in which we store
a 1 for each register that is set within the block.  */
3176 propagate_block (bb, old, significant, flags)
3185 regset_head live_head;
3187 regset_head dead_head;
3189 /* Find the loop depth for this block. Ignore loop level changes in the
3190 middle of the basic block -- for register allocation purposes, the
important uses will be in the blocks wholly contained within the loop,
3192 not in the loop pre-header or post-trailer. */
3193 loop_depth = bb->loop_depth;
dead = INITIALIZE_REG_SET (dead_head);
live = INITIALIZE_REG_SET (live_head);
3200 if (flags & PROP_REG_INFO)
3204 /* Process the regs live at the end of the block.
3205 Mark them as not local to any one basic block. */
3206 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3208 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3212 /* Scan the block an insn at a time from end to beginning. */
3214 for (insn = bb->end; ; insn = prev)
3216 prev = PREV_INSN (insn);
3218 if (GET_CODE (insn) == NOTE)
3220 /* If this is a call to `setjmp' et al,
3221 warn if any non-volatile datum is live. */
3223 if ((flags & PROP_REG_INFO)
3224 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3225 IOR_REG_SET (regs_live_at_setjmp, old);
3228 /* Update the life-status of regs for this insn.
3229 First DEAD gets which regs are set in this insn
3230 then LIVE gets which regs are used in this insn.
3231 Then the regs live before the insn
3232 are those live after, with DEAD regs turned off,
3233 and then LIVE regs turned on. */
3235 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3238 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3239 int insn_is_dead = 0;
3240 int libcall_is_dead = 0;
3242 if (flags & PROP_SCAN_DEAD_CODE)
insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
			    REG_NOTES (insn));
3246 libcall_is_dead = (insn_is_dead && note != 0
&& libcall_dead_p (PATTERN (insn), old,
		   note, insn));
3251 /* We almost certainly don't want to delete prologue or epilogue
instructions.  Warn about probable compiler lossage.  */
3255 && (HAVE_epilogue || HAVE_prologue)
3256 && prologue_epilogue_contains (insn))
3258 if (flags & PROP_KILL_DEAD_CODE)
3260 warning ("ICE: would have deleted prologue/epilogue insn");
if (!inhibit_warnings)
  abort ();
3264 libcall_is_dead = insn_is_dead = 0;
3267 /* If an instruction consists of just dead store(s) on final pass,
3268 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3269 We could really delete it with delete_insn, but that
3270 can cause trouble for first or last insn in a basic block. */
3271 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
/* If the insn referred to a label, note that the label is
   used one less time.  */
3276 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3278 if (REG_NOTE_KIND (inote) == REG_LABEL)
3280 rtx label = XEXP (inote, 0);
3282 LABEL_NUSES (label)--;
3284 /* If this label was attached to an ADDR_VEC, it's
3285 safe to delete the ADDR_VEC. In fact, it's pretty
3286 much mandatory to delete it, because the ADDR_VEC may
3287 be referencing labels that no longer exist. */
3288 if (LABEL_NUSES (label) == 0
3289 && (next = next_nonnote_insn (label)) != NULL
3290 && GET_CODE (next) == JUMP_INSN
3291 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3292 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3294 rtx pat = PATTERN (next);
3295 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3296 int len = XVECLEN (pat, diff_vec_p);
3298 for (i = 0; i < len; i++)
3299 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3300 PUT_CODE (next, NOTE);
3301 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3302 NOTE_SOURCE_FILE (next) = 0;
3307 PUT_CODE (insn, NOTE);
3308 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3309 NOTE_SOURCE_FILE (insn) = 0;
3311 /* CC0 is now known to be dead. Either this insn used it,
3312 in which case it doesn't anymore, or clobbered it,
3313 so the next insn can't use it. */
3316 /* If this insn is copying the return value from a library call,
3317 delete the entire library call. */
3318 if (libcall_is_dead)
3320 rtx first = XEXP (note, 0);
3322 while (INSN_DELETED_P (first))
3323 first = NEXT_INSN (first);
3328 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3329 NOTE_SOURCE_FILE (p) = 0;
3335 CLEAR_REG_SET (dead);
3336 CLEAR_REG_SET (live);
3338 /* See if this is an increment or decrement that can be
merged into a following memory address.  */
#ifdef AUTO_INC_DEC
{
3342 register rtx x = single_set (insn);
3344 /* Does this instruction increment or decrement a register? */
3345 if (!reload_completed
3346 && (flags & PROP_AUTOINC)
3348 && GET_CODE (SET_DEST (x)) == REG
3349 && (GET_CODE (SET_SRC (x)) == PLUS
3350 || GET_CODE (SET_SRC (x)) == MINUS)
3351 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3352 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3353 /* Ok, look for a following memory ref we can combine with.
3354 If one is found, change the memory ref to a PRE_INC
3355 or PRE_DEC, cancel this insn, and return 1.
3356 Return 0 if nothing has been done. */
3357 && try_pre_increment_1 (insn))
3360 #endif /* AUTO_INC_DEC */
3362 /* If this is not the final pass, and this insn is copying the
3363 value of a library call and it's dead, don't scan the
3364 insns that perform the library call, so that the call's
3365 arguments are not marked live. */
3366 if (libcall_is_dead)
3368 /* Mark the dest reg as `significant'. */
3369 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3370 significant, flags);
3372 insn = XEXP (note, 0);
3373 prev = PREV_INSN (insn);
3375 else if (GET_CODE (PATTERN (insn)) == SET
3376 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3377 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3378 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3379 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3380 /* We have an insn to pop a constant amount off the stack.
3381 (Such insns use PLUS regardless of the direction of the stack,
3382 and any insn to adjust the stack by a constant is always a pop.)
3383 These insns, if not dead stores, have no effect on life. */
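/* For instance (illustrative; the register number, mode and amount
   are target specific),

     (set (reg:SI 7 sp) (plus:SI (reg:SI 7 sp) (const_int 8)))

   pops 8 bytes no matter which way the stack grows.  */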
3387 /* Any regs live at the time of a call instruction
3388 must not go in a register clobbered by calls.
3389 Find all regs now live and record this for them. */
3391 if (GET_CODE (insn) == CALL_INSN
3392 && (flags & PROP_REG_INFO))
3393 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3395 REG_N_CALLS_CROSSED (i)++;
3398 /* LIVE gets the regs used in INSN;
DEAD gets those set by it.  Dead insns don't make anything live.  */
3402 mark_set_regs (old, dead, PATTERN (insn),
3403 insn, significant, flags);
3405 /* If an insn doesn't use CC0, it becomes dead since we
3406 assume that every insn clobbers it. So show it dead here;
3407 mark_used_regs will set it live if it is referenced. */
3411 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3413 /* Sometimes we may have inserted something before INSN (such as
a move) when we make an auto-inc.  So ensure we will scan those
insns.  */
3417 prev = PREV_INSN (insn);
3420 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
for (note = CALL_INSN_FUNCTION_USAGE (insn);
     note;
3428 note = XEXP (note, 1))
3429 if (GET_CODE (XEXP (note, 0)) == USE)
mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
		flags, insn);
3433 /* Each call clobbers all call-clobbered regs that are not
3434 global or fixed. Note that the function-value reg is a
3435 call-clobbered reg, and mark_set_regs has already had
3436 a chance to handle it. */
3438 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (call_used_regs[i] && ! global_regs[i]
    && ! fixed_regs[i])
  {
3442 SET_REGNO_REG_SET (dead, i);
if (significant)
  SET_REGNO_REG_SET (significant, i);
3447 /* The stack ptr is used (honorarily) by a CALL insn. */
3448 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3450 /* Calls may also reference any of the global registers,
3451 so they are made live. */
3452 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i])
  mark_used_regs (old, live,
		  gen_rtx_REG (reg_raw_mode[i], i),
		  flags, insn);
3458 /* Calls also clobber memory. */
3459 free_EXPR_LIST_list (&mem_set_list);
3462 /* Update OLD for the registers used or set. */
3463 AND_COMPL_REG_SET (old, dead);
3464 IOR_REG_SET (old, live);
3468 /* On final pass, update counts of how many insns in which
3469 each reg is live. */
3470 if (flags & PROP_REG_INFO)
3471 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
if (insn == bb->head)
  break;
3478 FREE_REG_SET (dead);
3479 FREE_REG_SET (live);
3480 free_EXPR_LIST_list (&mem_set_list);
3483 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3484 (SET expressions whose destinations are registers dead after the insn).
3485 NEEDED is the regset that says which regs are alive after the insn.
3487 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3489 If X is the entire body of an insn, NOTES contains the reg notes
3490 pertaining to the insn. */
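/* For instance (pseudo numbers invented), a body
   (set (reg:SI 60) (reg:SI 61)) is dead when bit 60 is clear in
   NEEDED and pseudo 60 is none of the specially protected registers
   checked below.  */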
3493 insn_dead_p (x, needed, call_ok, notes)
3497 rtx notes ATTRIBUTE_UNUSED;
3499 enum rtx_code code = GET_CODE (x);
3502 /* If flow is invoked after reload, we must take existing AUTO_INC
expressions into account.  */
3504 if (reload_completed)
3506 for ( ; notes; notes = XEXP (notes, 1))
3508 if (REG_NOTE_KIND (notes) == REG_INC)
3510 int regno = REGNO (XEXP (notes, 0));
3512 /* Don't delete insns to set global regs. */
3513 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
|| REGNO_REG_SET_P (needed, regno))
  return 0;
3521 /* If setting something that's a reg or part of one,
3522 see if that register's altered value will be live. */
3526 rtx r = SET_DEST (x);
3529 if (GET_CODE (r) == CC0)
3533 /* A SET that is a subroutine call cannot be dead. */
3534 if (GET_CODE (SET_SRC (x)) == CALL)
3540 /* Don't eliminate loads from volatile memory or volatile asms. */
3541 else if (volatile_refs_p (SET_SRC (x)))
3544 if (GET_CODE (r) == MEM)
3548 if (MEM_VOLATILE_P (r))
3551 /* Walk the set of memory locations we are currently tracking
3552 and see if one is an identical match to this memory location.
3553 If so, this memory write is dead (remember, we're walking
backwards from the end of the block to the start).  */
3555 temp = mem_set_list;
if (rtx_equal_p (XEXP (temp, 0), r))
  return 1;
3560 temp = XEXP (temp, 1);
3565 while (GET_CODE (r) == SUBREG
3566 || GET_CODE (r) == STRICT_LOW_PART
|| GET_CODE (r) == ZERO_EXTRACT)
  r = XEXP (r, 0);
3570 if (GET_CODE (r) == REG)
3572 int regno = REGNO (r);
if (REGNO_REG_SET_P (needed, regno))
  return 0;
3578 /* If this is a hard register, verify that subsequent
3579 words are not needed. */
3580 if (regno < FIRST_PSEUDO_REGISTER)
3582 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
while (--n > 0)
  if (REGNO_REG_SET_P (needed, regno + n))
    return 0;
3589 /* Don't delete insns to set global regs. */
if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
  return 0;
3593 /* Make sure insns to set the stack pointer aren't deleted. */
if (regno == STACK_POINTER_REGNUM)
  return 0;
3597 /* Make sure insns to set the frame pointer aren't deleted. */
3598 if (regno == FRAME_POINTER_REGNUM
&& (! reload_completed || frame_pointer_needed))
  return 0;
3601 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3602 if (regno == HARD_FRAME_POINTER_REGNUM
&& (! reload_completed || frame_pointer_needed))
  return 0;
#endif
3607 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3608 /* Make sure insns to set arg pointer are never deleted
3609 (if the arg pointer isn't fixed, there will be a USE
3610 for it, so we can treat it normally). */
if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
  return 0;
#endif
/* Otherwise, the set is dead.  */
return 1;
3621 /* If performing several activities, insn is dead if each activity
3622 is individually dead. Also, CLOBBERs and USEs can be ignored; a
CLOBBER or USE that's inside a PARALLEL doesn't make the insn
needed.  */
3625 else if (code == PARALLEL)
3627 int i = XVECLEN (x, 0);
3629 for (i--; i >= 0; i--)
3630 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3631 && GET_CODE (XVECEXP (x, 0, i)) != USE
3632 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3638 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3639 is not necessarily true for hard registers. */
3640 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3641 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
&& ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
  return 1;
3645 /* We do not check other CLOBBER or USE here. An insn consisting of just
a CLOBBER or just a USE should not be deleted.  */

return 0;
3650 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3651 return 1 if the entire library call is dead.
3652 This is true if X copies a register (hard or pseudo)
3653 and if the hard return reg of the call insn is dead.
3654 (The caller should have tested the destination of X already for death.)
3656 If this insn doesn't just copy a register, then we don't
3657 have an ordinary libcall. In that case, cse could not have
3658 managed to substitute the source for the dest later on,
3659 so we can assume the libcall is dead.
3661 NEEDED is the bit vector of pseudoregs live before this insn.
3662 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
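/* Schematically (insn and register numbers invented), a libcall
   sequence looks like

     (insn 10 ...)				   REG_LIBCALL: insn 14
     (call_insn 12 ... (set (reg:SI 0) (call ...)))
     (insn 14 (set (reg:SI 58) (reg:SI 0)))	   REG_RETVAL: insn 10

   If insn 14 is dead and the hard return reg (reg:SI 0) is also dead
   after it, the whole sequence 10 ... 14 can be deleted.  */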
3665 libcall_dead_p (x, needed, note, insn)
3671 register RTX_CODE code = GET_CODE (x);
3675 register rtx r = SET_SRC (x);
3676 if (GET_CODE (r) == REG)
3678 rtx call = XEXP (note, 0);
3682 /* Find the call insn. */
3683 while (call != insn && GET_CODE (call) != CALL_INSN)
3684 call = NEXT_INSN (call);
3686 /* If there is none, do nothing special,
since ordinary death handling can understand these insns.  */
if (call == insn)
  return 0;
3691 /* See if the hard reg holding the value is dead.
3692 If this is a PARALLEL, find the call within it. */
3693 call_pat = PATTERN (call);
3694 if (GET_CODE (call_pat) == PARALLEL)
3696 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3697 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
&& GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
  break;
3701 /* This may be a library call that is returning a value
3702 via invisible pointer. Do nothing special, since
ordinary death handling can understand these insns.  */
if (i < 0)
  return 0;
3707 call_pat = XVECEXP (call_pat, 0, i);
3710 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3716 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3717 live at function entry. Don't count global register variables, variables
3718 in registers that can be used for function arg passing, or variables in
3719 fixed hard registers. */
3722 regno_uninitialized (regno)
3725 if (n_basic_blocks == 0
3726 || (regno < FIRST_PSEUDO_REGISTER
3727 && (global_regs[regno]
3728 || fixed_regs[regno]
|| FUNCTION_ARG_REGNO_P (regno))))
  return 0;
3732 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3735 /* 1 if register REGNO was alive at a place where `setjmp' was called
3736 and was set more than once or is an argument.
3737 Such regs may be clobbered by `longjmp'. */
3740 regno_clobbered_at_setjmp (regno)
if (n_basic_blocks == 0)
  return 0;
3746 return ((REG_N_SETS (regno) > 1
3747 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3748 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3751 /* INSN references memory, possibly using autoincrement addressing modes.
3752 Find any entries on the mem_set_list that need to be invalidated due
3753 to an address change. */
3755 invalidate_mems_from_autoinc (insn)
3758 rtx note = REG_NOTES (insn);
3759 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3761 if (REG_NOTE_KIND (note) == REG_INC)
3763 rtx temp = mem_set_list;
3764 rtx prev = NULL_RTX;
3769 next = XEXP (temp, 1);
3770 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3772 /* Splice temp out of list. */
if (prev)
  XEXP (prev, 1) = next;
else
  mem_set_list = next;
3777 free_EXPR_LIST_node (temp);
3787 /* Process the registers that are set within X. Their bits are set to
3788 1 in the regset DEAD, because they are dead prior to this insn.
3790 If INSN is nonzero, it is the insn being processed.
3792 FLAGS is the set of operations to perform. */
3795 mark_set_regs (needed, dead, x, insn, significant, flags)
3803 register RTX_CODE code = GET_CODE (x);
3805 if (code == SET || code == CLOBBER)
3806 mark_set_1 (needed, dead, x, insn, significant, flags);
3807 else if (code == PARALLEL)
3810 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3812 code = GET_CODE (XVECEXP (x, 0, i));
3813 if (code == SET || code == CLOBBER)
3814 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3815 significant, flags);
3820 /* Process a single SET rtx, X. */
3823 mark_set_1 (needed, dead, x, insn, significant, flags)
3831 register int regno = -1;
3832 register rtx reg = SET_DEST (x);
3834 /* Some targets place small structures in registers for
3835 return values of functions. We have to detect this
3836 case specially here to get correct flow information. */
3837 if (GET_CODE (reg) == PARALLEL
3838 && GET_MODE (reg) == BLKmode)
3842 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3843 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3844 significant, flags);
3848 /* Modifying just one hardware register of a multi-reg value
3849 or just a byte field of a register
3850 does not mean the value from before this insn is now dead.
But it does mean liveness of that register at the end of the block
is significant.
3854 Within mark_set_1, however, we treat it as if the register is
3855 indeed modified. mark_used_regs will, however, also treat this
3856 register as being used. Thus, we treat these insns as setting a
3857 new value for the register as a function of its old value. This
causes LOG_LINKS to be made appropriately, and this will help combine.  */
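/* E.g. (pseudo number invented): a store to (subreg:SI (reg:DI 60) 0)
   is treated here as a set of (reg:DI 60), while mark_used_regs also
   treats it as a use; the untouched word flows into the new value,
   as in a read-modify-write.  */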
3860 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3861 || GET_CODE (reg) == SIGN_EXTRACT
3862 || GET_CODE (reg) == STRICT_LOW_PART)
3863 reg = XEXP (reg, 0);
3865 /* If this set is a MEM, then it kills any aliased writes.
3866 If this set is a REG, then it kills any MEMs which use the reg. */
3867 if (flags & PROP_SCAN_DEAD_CODE)
3869 if (GET_CODE (reg) == MEM
3870 || GET_CODE (reg) == REG)
3872 rtx temp = mem_set_list;
3873 rtx prev = NULL_RTX;
3878 next = XEXP (temp, 1);
3879 if ((GET_CODE (reg) == MEM
3880 && output_dependence (XEXP (temp, 0), reg))
3881 || (GET_CODE (reg) == REG
3882 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3884 /* Splice this entry out of the list. */
if (prev)
  XEXP (prev, 1) = next;
else
  mem_set_list = next;
3889 free_EXPR_LIST_node (temp);
/* If the memory reference had embedded side effects (autoincrement
   address modes), then we may need to kill some entries on the
   memory set list.  */
3900 if (insn && GET_CODE (reg) == MEM)
3901 invalidate_mems_from_autoinc (insn);
3903 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3904 /* We do not know the size of a BLKmode store, so we do not track
3905 them for redundant store elimination. */
3906 && GET_MODE (reg) != BLKmode
3907 /* There are no REG_INC notes for SP, so we can't assume we'll see
3908 everything that invalidates it. To be safe, don't eliminate any
stores through SP; none of them should be redundant anyway.  */
3910 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3911 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
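/* An illustrative pair (pseudo numbers invented): once
   (set (mem:SI (reg:SI 60)) (reg:SI 61)) is on mem_set_list, an
   earlier (set (mem:SI (reg:SI 60)) (reg:SI 62)) seen later in this
   backward scan is a dead store, provided no intervening read or
   overlapping store has spliced the entry back out of the list.  */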
3914 if (GET_CODE (reg) == REG
3915 && (regno = REGNO (reg),
3916 ! (regno == FRAME_POINTER_REGNUM
3917 && (! reload_completed || frame_pointer_needed)))
3918 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3919 && ! (regno == HARD_FRAME_POINTER_REGNUM
3920 && (! reload_completed || frame_pointer_needed))
3922 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3923 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3925 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3926 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3928 int some_needed = REGNO_REG_SET_P (needed, regno);
3929 int some_not_needed = ! some_needed;
3931 /* Mark it as a significant register for this basic block. */
if (significant)
  SET_REGNO_REG_SET (significant, regno);
3935 /* Mark it as dead before this insn. */
3936 SET_REGNO_REG_SET (dead, regno);
3938 /* A hard reg in a wide mode may really be multiple registers.
3939 If so, mark all of them just like the first. */
3940 if (regno < FIRST_PSEUDO_REGISTER)
3944 /* Nothing below is needed for the stack pointer; get out asap.
E.g., log links aren't needed, since combine won't use them.  */
3946 if (regno == STACK_POINTER_REGNUM)
3949 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
while (--n > 0)
  {
    int regno_n = regno + n;
3953 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
if (significant)
  SET_REGNO_REG_SET (significant, regno_n);
3957 SET_REGNO_REG_SET (dead, regno_n);
3958 some_needed |= needed_regno;
3959 some_not_needed |= ! needed_regno;
3963 /* Additional data to record if this is the final pass. */
3964 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3965 | PROP_DEATH_NOTES | PROP_AUTOINC))
3968 register int blocknum = BLOCK_NUM (insn);
3971 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3972 y = reg_next_use[regno];
3974 /* If this is a hard reg, record this function uses the reg. */
3976 if (regno < FIRST_PSEUDO_REGISTER)
3979 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3981 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3982 for (i = regno; i < endregno; i++)
/* The next use is no longer "next", since a store intervenes.  */
3986 reg_next_use[i] = 0;
3989 if (flags & PROP_REG_INFO)
3990 for (i = regno; i < endregno; i++)
3992 regs_ever_live[i] = 1;
/* The next use is no longer "next", since a store intervenes.  */
4000 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4001 reg_next_use[regno] = 0;
4003 /* Keep track of which basic blocks each reg appears in. */
4005 if (flags & PROP_REG_INFO)
4007 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4008 REG_BASIC_BLOCK (regno) = blocknum;
4009 else if (REG_BASIC_BLOCK (regno) != blocknum)
4010 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4012 /* Count (weighted) references, stores, etc. This counts a
4013 register twice if it is modified, but that is correct. */
4014 REG_N_SETS (regno)++;
4015 REG_N_REFS (regno) += loop_depth + 1;
4017 /* The insns where a reg is live are normally counted
4018 elsewhere, but we want the count to include the insn
4019 where the reg is set, and the normal counting mechanism
4020 would not count it. */
4021 REG_LIVE_LENGTH (regno)++;
4025 if (! some_not_needed)
4027 if (flags & PROP_LOG_LINKS)
4029 /* Make a logical link from the next following insn
4030 that uses this register, back to this insn.
4031 The following insns have already been processed.
We don't build a LOG_LINK for hard registers contained
4034 in ASM_OPERANDs. If these registers get replaced,
4035 we might wind up changing the semantics of the insn,
4036 even if reload can make what appear to be valid
4037 assignments later. */
4038 if (y && (BLOCK_NUM (y) == blocknum)
4039 && (regno >= FIRST_PSEUDO_REGISTER
4040 || asm_noperands (PATTERN (y)) < 0))
4041 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4044 else if (! some_needed)
4046 if (flags & PROP_REG_INFO)
4047 REG_N_DEATHS (REGNO (reg))++;
4049 if (flags & PROP_DEATH_NOTES)
4051 /* Note that dead stores have already been deleted
4052 when possible. If we get here, we have found a
4053 dead store that cannot be eliminated (because the
4054 same insn does something useful). Indicate this
4055 by marking the reg being set as dying here. */
REG_NOTES (insn)
  = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4062 if (flags & PROP_DEATH_NOTES)
4064 /* This is a case where we have a multi-word hard register
4065 and some, but not all, of the words of the register are
4066 needed in subsequent insns. Write REG_UNUSED notes
for those parts that were not needed.  This case should be rare.  */
for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
     i >= 0; i--)
4074 if (!REGNO_REG_SET_P (needed, regno + i))
REG_NOTES (insn)
  = alloc_EXPR_LIST (REG_UNUSED,
		     gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
		     REG_NOTES (insn));
4084 else if (GET_CODE (reg) == REG)
4086 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4087 reg_next_use[regno] = 0;
4090 /* If this is the last pass and this is a SCRATCH, show it will be dying
4091 here and count it. */
4092 else if (GET_CODE (reg) == SCRATCH)
4094 if (flags & PROP_DEATH_NOTES)
REG_NOTES (insn)
  = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
/* X is a MEM found in INSN.  See if we can convert it into an
   auto-increment address.  */
4106 find_auto_inc (needed, x, insn)
4111 rtx addr = XEXP (x, 0);
4112 HOST_WIDE_INT offset = 0;
4115 /* Here we detect use of an index register which might be good for
4116 postincrement, postdecrement, preincrement, or predecrement. */
4118 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4119 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4121 if (GET_CODE (addr) == REG)
4124 register int size = GET_MODE_SIZE (GET_MODE (x));
4127 int regno = REGNO (addr);
4129 /* Is the next use an increment that might make auto-increment? */
4130 if ((incr = reg_next_use[regno]) != 0
4131 && (set = single_set (incr)) != 0
4132 && GET_CODE (set) == SET
4133 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4134 /* Can't add side effects to jumps; if reg is spilled and
4135 reloaded, there's no way to store back the altered value. */
4136 && GET_CODE (insn) != JUMP_INSN
4137 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4138 && XEXP (y, 0) == addr
4139 && GET_CODE (XEXP (y, 1)) == CONST_INT
4140 && ((HAVE_POST_INCREMENT
4141 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4142 || (HAVE_POST_DECREMENT
4143 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4144 || (HAVE_PRE_INCREMENT
4145 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4146 || (HAVE_PRE_DECREMENT
4147 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4148 /* Make sure this reg appears only once in this insn. */
4149 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4150 use != 0 && use != (rtx) 1))
4152 rtx q = SET_DEST (set);
4153 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4154 ? (offset ? PRE_INC : POST_INC)
4155 : (offset ? PRE_DEC : POST_DEC));
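/* A worked SImode example (size == 4, pseudo number invented):
   a use of (mem:SI (reg:SI 60)) gives OFFSET == 0, so an increment
   of 4 yields POST_INC, while a use of
   (mem:SI (plus:SI (reg:SI 60) (const_int 4))) gives OFFSET == SIZE
   and the same increment yields PRE_INC.  */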
4157 if (dead_or_set_p (incr, addr))
4159 /* This is the simple case. Try to make the auto-inc. If
4160 we can't, we are done. Otherwise, we will do any
4161 needed updates below. */
4162 if (! validate_change (insn, &XEXP (x, 0),
gen_rtx_fmt_e (inc_code, Pmode, addr),
		       0))
  return;
4167 else if (GET_CODE (q) == REG
/* PREV_INSN used here to check the semi-open interval
   [INSN, INCR).  */
4170 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4171 /* We must also check for sets of q as q may be
4172 a call clobbered hard register and there may
4173 be a call between PREV_INSN (insn) and incr. */
4174 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4176 /* We have *p followed sometime later by q = p+size.
4177 Both p and q must be live afterward,
4178 and q is not used between INSN and its assignment.
4179 Change it to q = p, ...*q..., q = q+size.
4180 Then fall into the usual case. */
4185 emit_move_insn (q, addr);
4186 insns = get_insns ();
4189 bb = BLOCK_FOR_INSN (insn);
4190 for (temp = insns; temp; temp = NEXT_INSN (temp))
4191 set_block_for_insn (temp, bb);
4193 /* If we can't make the auto-inc, or can't make the
4194 replacement into Y, exit. There's no point in making
4195 the change below if we can't do the auto-inc and doing
4196 so is not correct in the pre-inc case. */
4198 validate_change (insn, &XEXP (x, 0),
gen_rtx_fmt_e (inc_code, Pmode, q),
		 1);
4201 validate_change (incr, &XEXP (y, 0), q, 1);
if (! apply_change_group ())
  return;
4205 /* We now know we'll be doing this change, so emit the
4206 new insn(s) and do the updates. */
4207 emit_insns_before (insns, insn);
4209 if (BLOCK_FOR_INSN (insn)->head == insn)
4210 BLOCK_FOR_INSN (insn)->head = insns;
4212 /* INCR will become a NOTE and INSN won't contain a
4213 use of ADDR. If a use of ADDR was just placed in
4214 the insn before INSN, make that the next use.
4215 Otherwise, invalidate it. */
4216 if (GET_CODE (PREV_INSN (insn)) == INSN
4217 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4218 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4219 reg_next_use[regno] = PREV_INSN (insn);
4221 reg_next_use[regno] = 0;
4226 /* REGNO is now used in INCR which is below INSN, but
4227 it previously wasn't live here. If we don't mark
4228 it as needed, we'll put a REG_DEAD note for it
4229 on this insn, which is incorrect. */
4230 SET_REGNO_REG_SET (needed, regno);
4232 /* If there are any calls between INSN and INCR, show
4233 that REGNO now crosses them. */
4234 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4235 if (GET_CODE (temp) == CALL_INSN)
4236 REG_N_CALLS_CROSSED (regno)++;
4241 /* If we haven't returned, it means we were able to make the
4242 auto-inc, so update the status. First, record that this insn
4243 has an implicit side effect. */
REG_NOTES (insn)
  = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4248 /* Modify the old increment-insn to simply copy
4249 the already-incremented value of our register. */
if (! validate_change (incr, &SET_SRC (set), addr, 0))
  abort ();
4253 /* If that makes it a no-op (copying the register into itself) delete
it so it won't appear to be a "use" and a "set" of this
register.  */
4256 if (SET_DEST (set) == addr)
4258 PUT_CODE (incr, NOTE);
4259 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4260 NOTE_SOURCE_FILE (incr) = 0;
4263 if (regno >= FIRST_PSEUDO_REGISTER)
4265 /* Count an extra reference to the reg. When a reg is
4266 incremented, spilling it is worse, so we want to make
4267 that less likely. */
4268 REG_N_REFS (regno) += loop_depth + 1;
4270 /* Count the increment as a setting of the register,
4271 even though it isn't a SET in rtl. */
4272 REG_N_SETS (regno)++;
4277 #endif /* AUTO_INC_DEC */
4279 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4280 This is done assuming the registers needed from X
4281 are those that have 1-bits in NEEDED.
4283 FLAGS is the set of enabled operations.
INSN is the containing instruction.  If INSN is dead, this function
is not called.  */
4289 mark_used_regs (needed, live, x, flags, insn)
4296 register RTX_CODE code;
4301 code = GET_CODE (x);
/* If we are clobbering a MEM, mark any registers inside the address
   as being used.  */
4323 if (GET_CODE (XEXP (x, 0)) == MEM)
4324 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4328 /* Don't bother watching stores to mems if this is not the
4329 final pass. We'll not be deleting dead stores this round. */
4330 if (flags & PROP_SCAN_DEAD_CODE)
4332 /* Invalidate the data for the last MEM stored, but only if MEM is
4333 something that can be stored into. */
4334 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4335 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4336 ; /* needn't clear the memory set list */
4339 rtx temp = mem_set_list;
4340 rtx prev = NULL_RTX;
4345 next = XEXP (temp, 1);
4346 if (anti_dependence (XEXP (temp, 0), x))
4348 /* Splice temp out of the list. */
if (prev)
  XEXP (prev, 1) = next;
else
  mem_set_list = next;
4353 free_EXPR_LIST_node (temp);
/* If the memory reference had embedded side effects (autoincrement
   address modes), then we may need to kill some entries on the
   memory set list.  */
4365 invalidate_mems_from_autoinc (insn);
4369 if (flags & PROP_AUTOINC)
4370 find_auto_inc (needed, x, insn);
4375 if (GET_CODE (SUBREG_REG (x)) == REG
4376 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4377 && (GET_MODE_SIZE (GET_MODE (x))
4378 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4379 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
/* While we're here, optimize this case.  */
x = SUBREG_REG (x);

/* In case the SUBREG is not of a register, don't optimize.  */
4385 if (GET_CODE (x) != REG)
4387 mark_used_regs (needed, live, x, flags, insn);
4391 /* ... fall through ... */
/* We have found a register being used (other than being set)
   => mark it as needed.  */
4399 int some_needed = REGNO_REG_SET_P (needed, regno);
4400 int some_not_needed = ! some_needed;
4402 SET_REGNO_REG_SET (live, regno);
4404 /* A hard reg in a wide mode may really be multiple registers.
4405 If so, mark all of them just like the first. */
4406 if (regno < FIRST_PSEUDO_REGISTER)
4410 /* For stack ptr or fixed arg pointer,
4411 nothing below can be necessary, so waste no more time. */
4412 if (regno == STACK_POINTER_REGNUM
4413 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4414 || (regno == HARD_FRAME_POINTER_REGNUM
4415 && (! reload_completed || frame_pointer_needed))
4417 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4418 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4420 || (regno == FRAME_POINTER_REGNUM
4421 && (! reload_completed || frame_pointer_needed)))
4423 /* If this is a register we are going to try to eliminate,
4424 don't mark it live here. If we are successful in
4425 eliminating it, it need not be live unless it is used for
4426 pseudos, in which case it will have been set live when
4427 it was allocated to the pseudos. If the register will not
4428 be eliminated, reload will set it live at that point. */
4430 if ((flags & PROP_REG_INFO)
4431 && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4432 regs_ever_live[regno] = 1;
4435 /* No death notes for global register variables;
4436 their values are live after this function exits. */
4437 if (global_regs[regno])
4439 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4440 reg_next_use[regno] = insn;
4444 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4447 int regno_n = regno + n;
4448 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4450 SET_REGNO_REG_SET (live, regno_n);
4451 some_needed |= needed_regno;
4452 some_not_needed |= ! needed_regno;
4456 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4458 /* Record where each reg is used, so when the reg
4459 is set we know the next insn that uses it. */
4461 reg_next_use[regno] = insn;
4463 if (flags & PROP_REG_INFO)
4465 if (regno < FIRST_PSEUDO_REGISTER)
4467 /* If a hard reg is being used,
4468 record that this function does use it. */
4470 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4474 regs_ever_live[regno + --i] = 1;
4479 /* Keep track of which basic block each reg appears in. */
4481 register int blocknum = BLOCK_NUM (insn);
4483 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4484 REG_BASIC_BLOCK (regno) = blocknum;
4485 else if (REG_BASIC_BLOCK (regno) != blocknum)
4486 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4488 /* Count (weighted) number of uses of each reg. */
4490 REG_N_REFS (regno) += loop_depth + 1;
4494 /* Record and count the insns in which a reg dies.
4495 If it is used in this insn and was dead below the insn
4496 then it dies in this insn. If it was set in this insn,
4497 we do not make a REG_DEAD note; likewise if we already
4498 made such a note. */
4500 if (flags & PROP_DEATH_NOTES)
4503 && ! dead_or_set_p (insn, x)
4505 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4509 /* Check for the case where the register dying partially
4510 overlaps the register set by this insn. */
4511 if (regno < FIRST_PSEUDO_REGISTER
4512 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4514 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4516 some_needed |= dead_or_set_regno_p (insn, regno + n);
4519 /* If none of the words in X is needed, make a REG_DEAD
4520 note. Otherwise, we must make partial REG_DEAD notes. */
4524 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4525 REG_N_DEATHS (regno)++;
4531 /* Don't make a REG_DEAD note for a part of a register
4532 that is set in the insn. */
4534 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4536 if (!REGNO_REG_SET_P (needed, regno + i)
4537 && ! dead_or_set_regno_p (insn, regno + i))
4540 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4551 register rtx testreg = SET_DEST (x);
4554 /* If storing into MEM, don't show it as being used. But do
4555 show the address as being used. */
4556 if (GET_CODE (testreg) == MEM)
4559 if (flags & PROP_AUTOINC)
4560 find_auto_inc (needed, testreg, insn);
4562 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4563 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4567 /* Storing in STRICT_LOW_PART is like storing in a reg
4568 in that this SET might be dead, so ignore it in TESTREG,
4569 but in some other ways it is like using the reg.
4571 Storing in a SUBREG or a bit field is like storing the entire
4572 register in that if the register's value is not used
4573 then this SET is not needed. */
4574 while (GET_CODE (testreg) == STRICT_LOW_PART
4575 || GET_CODE (testreg) == ZERO_EXTRACT
4576 || GET_CODE (testreg) == SIGN_EXTRACT
4577 || GET_CODE (testreg) == SUBREG)
4579 if (GET_CODE (testreg) == SUBREG
4580 && GET_CODE (SUBREG_REG (testreg)) == REG
4581 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4582 && (GET_MODE_SIZE (GET_MODE (testreg))
4583 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4584 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4586 /* Modifying a single register in an alternate mode
4587 does not use any of the old value. But these other
4588 ways of storing in a register do use the old value. */
4589 if (GET_CODE (testreg) == SUBREG
4590 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4595 testreg = XEXP (testreg, 0);
4598 /* If this is a store into a register,
4599 recursively scan the value being stored. */
4601 if ((GET_CODE (testreg) == PARALLEL
4602 && GET_MODE (testreg) == BLKmode)
4603 || (GET_CODE (testreg) == REG
4604 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4605 && (! reload_completed || frame_pointer_needed)))
4606 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4607 && ! (regno == HARD_FRAME_POINTER_REGNUM
4608 && (! reload_completed || frame_pointer_needed))
4610 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4611 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4614 /* We used to exclude global_regs here, but that seems wrong.
4615 Storing in them is like storing in mem. */
4617 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4619 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4626 case UNSPEC_VOLATILE:
4630 /* Traditional and volatile asm instructions must be considered to use
4631 and clobber all hard registers, all pseudo-registers and all of
4632 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4634 Consider for instance a volatile asm that changes the fpu rounding
4635 mode. An insn should not be moved across this even if it only uses
4636 pseudo-regs because it might give an incorrectly rounded result.
4638 ?!? Unfortunately, marking all hard registers as live causes massive
4639 problems for the register allocator and marking all pseudos as live
4640 creates mountains of uninitialized variable warnings.
4642 So for now, just clear the memory set list and mark any regs
4643 we can find in ASM_OPERANDS as used. */
4644 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4645 free_EXPR_LIST_list (&mem_set_list);
4647 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4648 We cannot just fall through here since then we would be confused
4649 by the ASM_INPUT rtxs inside ASM_OPERANDS, which do not indicate
4650 traditional asms, unlike their normal usage. */
4651 if (code == ASM_OPERANDS)
4655 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4656 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4667 /* Recursively scan the operands of this expression. */
4670 register const char *fmt = GET_RTX_FORMAT (code);
4673 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4677 /* Tail recursive case: save a function call level. */
4683 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4685 else if (fmt[i] == 'E')
4688 for (j = 0; j < XVECLEN (x, i); j++)
4689 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
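/* The 'e'/'E' walk above is the generic rtx traversal idiom used all
   over this file (see also count_reg_references and
   find_use_as_address).  As a self-contained illustration, here is a
   minimal sketch of the idiom -- a hypothetical helper, not part of
   this pass -- that counts REG nodes in an expression:  */
#if 0
static int
count_regs_in_rtx (x)
     rtx x;
{
  register int i, j, count = 0;
  register const char *fmt;
  register RTX_CODE code;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    return 1;

  /* GET_RTX_FORMAT says which operands hold sub-expressions:
     'e' is a single rtx, 'E' is a vector of rtx.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        count += count_regs_in_rtx (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          count += count_regs_in_rtx (XVECEXP (x, i, j));
    }
  return count;
}
#endif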
4698 try_pre_increment_1 (insn)
4701 /* Find the next use of this reg. If in same basic block,
4702 make it do pre-increment or pre-decrement if appropriate. */
4703 rtx x = single_set (insn);
4704 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4705 * INTVAL (XEXP (SET_SRC (x), 1)));
4706 int regno = REGNO (SET_DEST (x));
4707 rtx y = reg_next_use[regno];
4709 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4710 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4711 mode would be better. */
4712 && ! dead_or_set_p (y, SET_DEST (x))
4713 && try_pre_increment (y, SET_DEST (x), amount))
4715 /* We have found a suitable auto-increment
4716 and already changed insn Y to do it.
4717 So flush this increment-instruction. */
4718 PUT_CODE (insn, NOTE);
4719 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4720 NOTE_SOURCE_FILE (insn) = 0;
4721 /* Count a reference to this reg for the increment
4722 insn we are deleting. When a reg is incremented,
4723 spilling it is worse, so we want to make that
4725 if (regno >= FIRST_PSEUDO_REGISTER)
4727 REG_N_REFS (regno) += loop_depth + 1;
4728 REG_N_SETS (regno)++;
4735 /* Try to change INSN so that it does pre-increment or pre-decrement
4736 addressing on register REG in order to add AMOUNT to REG.
4737 AMOUNT is negative for pre-decrement.
4738 Returns 1 if the change could be made.
4739 This checks everything about the validity of the result of modifying INSN. */
4742 try_pre_increment (insn, reg, amount)
4744 HOST_WIDE_INT amount;
4748 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4749 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4751 /* Nonzero if we can try to make a post-increment or post-decrement.
4752 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4753 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4754 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4757 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4760 /* From the sign of increment, see which possibilities are conceivable
4761 on this target machine. */
4762 if (HAVE_PRE_INCREMENT && amount > 0)
4764 if (HAVE_POST_INCREMENT && amount > 0)
4767 if (HAVE_PRE_DECREMENT && amount < 0)
4769 if (HAVE_POST_DECREMENT && amount < 0)
4772 if (! (pre_ok || post_ok))
4775 /* It is not safe to add a side effect to a jump insn
4776 because if the incremented register is spilled and must be reloaded
4777 there would be no way to store the incremented value back in memory. */
4779 if (GET_CODE (insn) == JUMP_INSN)
4784 use = find_use_as_address (PATTERN (insn), reg, 0);
4785 if (post_ok && (use == 0 || use == (rtx) 1))
4787 use = find_use_as_address (PATTERN (insn), reg, -amount);
4791 if (use == 0 || use == (rtx) 1)
4794 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4797 /* See if this combination of instruction and addressing mode exists. */
4798 if (! validate_change (insn, &XEXP (use, 0),
4799 gen_rtx_fmt_e (amount > 0
4800 ? (do_post ? POST_INC : PRE_INC)
4801 : (do_post ? POST_DEC : PRE_DEC),
4805 /* Record that this insn now has an implicit side effect on X. */
4806 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4810 #endif /* AUTO_INC_DEC */
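/* To make the transformation concrete: with HAVE_PRE_INCREMENT, the
   insn pair (register number chosen arbitrarily for illustration)

	(set (reg 1) (plus (reg 1) (const_int 4)))	; addl $4,r1
	(set ... (mem (reg 1)))				; movl (r1),...

   is rewritten so the second insn uses

	(mem (pre_inc (reg 1)))				; movl +(r1),...

   and the add is deleted by try_pre_increment_1.  With
   HAVE_POST_INCREMENT, a use of (mem (plus (reg 1) (const_int -4)))
   ahead of the add becomes (mem (post_inc (reg 1))) instead; this is
   just the movl -4(r1) example from the comments above spelled out
   in rtl.  */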
4812 /* Find the place in the rtx X where REG is used as a memory address.
4813 Return the MEM rtx that so uses it.
4814 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4815 (plus REG (const_int PLUSCONST)).
4817 If such an address does not appear, return 0.
4818 If REG appears more than once, or is used other than in such an address,
4822 find_use_as_address (x, reg, plusconst)
4825 HOST_WIDE_INT plusconst;
4827 enum rtx_code code = GET_CODE (x);
4828 const char *fmt = GET_RTX_FORMAT (code);
4830 register rtx value = 0;
4833 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4836 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4837 && XEXP (XEXP (x, 0), 0) == reg
4838 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4839 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4842 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4844 /* If REG occurs inside a MEM used in a bit-field reference,
4845 that is unacceptable. */
4846 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4847 return (rtx) (HOST_WIDE_INT) 1;
4851 return (rtx) (HOST_WIDE_INT) 1;
4853 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4857 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4861 return (rtx) (HOST_WIDE_INT) 1;
4863 else if (fmt[i] == 'E')
4866 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4868 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4872 return (rtx) (HOST_WIDE_INT) 1;
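/* find_use_as_address thus has a three-way result: 0 when REG is not
   used at all, the literal (rtx) 1 when REG is used but not solely as
   the desired address, and otherwise the single MEM that uses it.
   A caller checks it along these lines (sketch only; INSN and REG
   are assumed to be in scope):  */
#if 0
  {
    rtx use = find_use_as_address (PATTERN (insn), reg, 0);

    if (use == 0)
      ;		/* REG is not used as an address here.  */
    else if (use == (rtx) (HOST_WIDE_INT) 1)
      ;		/* REG is used, but not in a usable address.  */
    else
      ;		/* USE is the unique (mem (reg ...)) occurrence.  */
  }
#endif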
4880 /* Write information about registers and basic blocks into FILE.
4881 This is part of making a debugging dump. */
4884 dump_regset (r, outf)
4891 fputs (" (nil)", outf);
4895 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4897 fprintf (outf, " %d", i);
4898 if (i < FIRST_PSEUDO_REGISTER)
4899 fprintf (outf, " [%s]",
4908 dump_regset (r, stderr);
4909 putc ('\n', stderr);
4913 dump_flow_info (file)
4917 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4919 fprintf (file, "%d registers.\n", max_regno);
4920 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4923 enum reg_class class, altclass;
4924 fprintf (file, "\nRegister %d used %d times across %d insns",
4925 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4926 if (REG_BASIC_BLOCK (i) >= 0)
4927 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4929 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4930 (REG_N_SETS (i) == 1) ? "" : "s");
4931 if (REG_USERVAR_P (regno_reg_rtx[i]))
4932 fprintf (file, "; user var");
4933 if (REG_N_DEATHS (i) != 1)
4934 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4935 if (REG_N_CALLS_CROSSED (i) == 1)
4936 fprintf (file, "; crosses 1 call");
4937 else if (REG_N_CALLS_CROSSED (i))
4938 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4939 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4940 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4941 class = reg_preferred_class (i);
4942 altclass = reg_alternate_class (i);
4943 if (class != GENERAL_REGS || altclass != ALL_REGS)
4945 if (altclass == ALL_REGS || class == ALL_REGS)
4946 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4947 else if (altclass == NO_REGS)
4948 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4950 fprintf (file, "; pref %s, else %s",
4951 reg_class_names[(int) class],
4952 reg_class_names[(int) altclass]);
4954 if (REGNO_POINTER_FLAG (i))
4955 fprintf (file, "; pointer");
4956 fprintf (file, ".\n");
4959 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4960 for (i = 0; i < n_basic_blocks; i++)
4962 register basic_block bb = BASIC_BLOCK (i);
4965 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
4966 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
4968 fprintf (file, "Predecessors: ");
4969 for (e = bb->pred; e ; e = e->pred_next)
4970 dump_edge_info (file, e, 0);
4972 fprintf (file, "\nSuccessors: ");
4973 for (e = bb->succ; e ; e = e->succ_next)
4974 dump_edge_info (file, e, 1);
4976 fprintf (file, "\nRegisters live at start:");
4977 dump_regset (bb->global_live_at_start, file);
4979 fprintf (file, "\nRegisters live at end:");
4980 dump_regset (bb->global_live_at_end, file);
4991 dump_flow_info (stderr);
4995 dump_edge_info (file, e, do_succ)
5000 basic_block side = (do_succ ? e->dest : e->src);
5002 if (side == ENTRY_BLOCK_PTR)
5003 fputs (" ENTRY", file);
5004 else if (side == EXIT_BLOCK_PTR)
5005 fputs (" EXIT", file);
5007 fprintf (file, " %d", side->index);
5011 static const char * const bitnames[] = {
5012 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5015 int i, flags = e->flags;
5019 for (i = 0; flags; i++)
5020 if (flags & (1 << i))
5026 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5027 fputs (bitnames[i], file);
5029 fprintf (file, "%d", i);
5037 /* Print out one basic block with live information at start and end. */
5047 fprintf (outf, ";; Basic block %d, loop depth %d",
5048 bb->index, bb->loop_depth - 1);
5049 if (bb->eh_beg != -1 || bb->eh_end != -1)
5050 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5053 fputs (";; Predecessors: ", outf);
5054 for (e = bb->pred; e ; e = e->pred_next)
5055 dump_edge_info (outf, e, 0);
5058 fputs (";; Registers live at start:", outf);
5059 dump_regset (bb->global_live_at_start, outf);
5062 for (insn = bb->head, last = NEXT_INSN (bb->end);
5064 insn = NEXT_INSN (insn))
5065 print_rtl_single (outf, insn);
5067 fputs (";; Registers live at end:", outf);
5068 dump_regset (bb->global_live_at_end, outf);
5071 fputs (";; Successors: ", outf);
5072 for (e = bb->succ; e; e = e->succ_next)
5073 dump_edge_info (outf, e, 1);
5081 dump_bb (bb, stderr);
5088 dump_bb (BASIC_BLOCK(n), stderr);
5091 /* Like print_rtl, but also print out live information for the start of each
5095 print_rtl_with_bb (outf, rtx_first)
5099 register rtx tmp_rtx;
5102 fprintf (outf, "(nil)\n");
5106 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5107 int max_uid = get_max_uid ();
5108 basic_block *start = (basic_block *)
5109 xcalloc (max_uid, sizeof (basic_block));
5110 basic_block *end = (basic_block *)
5111 xcalloc (max_uid, sizeof (basic_block));
5112 enum bb_state *in_bb_p = (enum bb_state *)
5113 xcalloc (max_uid, sizeof (enum bb_state));
5115 for (i = n_basic_blocks - 1; i >= 0; i--)
5117 basic_block bb = BASIC_BLOCK (i);
5120 start[INSN_UID (bb->head)] = bb;
5121 end[INSN_UID (bb->end)] = bb;
5122 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5124 enum bb_state state = IN_MULTIPLE_BB;
5125 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5127 in_bb_p[INSN_UID(x)] = state;
5134 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5139 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5141 fprintf (outf, ";; Start of basic block %d, registers live:",
5143 dump_regset (bb->global_live_at_start, outf);
5147 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5148 && GET_CODE (tmp_rtx) != NOTE
5149 && GET_CODE (tmp_rtx) != BARRIER)
5150 fprintf (outf, ";; Insn is not within a basic block\n");
5151 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5152 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5154 did_output = print_rtl_single (outf, tmp_rtx);
5156 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5157 fprintf (outf, ";; End of basic block %d\n", bb->index);
5168 if (current_function_epilogue_delay_list != 0)
5170 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5171 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5172 tmp_rtx = XEXP (tmp_rtx, 1))
5173 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5177 /* Compute dominator relationships using new flow graph structures. */
5179 compute_flow_dominators (dominators, post_dominators)
5180 sbitmap *dominators;
5181 sbitmap *post_dominators;
5184 sbitmap *temp_bitmap;
5186 basic_block *worklist, *tos;
5188 /* Allocate a worklist array/queue. Entries are only added to the
5189 list if they were not already on the list. So the size is
5190 bounded by the number of basic blocks. */
5191 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5194 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5195 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5199 /* The optimistic setting of dominators requires us to put every
5200 block on the work list initially. */
5201 for (bb = 0; bb < n_basic_blocks; bb++)
5203 *tos++ = BASIC_BLOCK (bb);
5204 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5207 /* We want a maximal solution, so initially assume everything dominates
5209 sbitmap_vector_ones (dominators, n_basic_blocks);
5211 /* Mark successors of the entry block so we can identify them below. */
5212 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5213 e->dest->aux = ENTRY_BLOCK_PTR;
5215 /* Iterate until the worklist is empty. */
5216 while (tos != worklist)
5218 /* Take the first entry off the worklist. */
5219 basic_block b = *--tos;
5222 /* Compute the intersection of the dominators of all the
5225 If one of the predecessor blocks is the ENTRY block, then the
5226 intersection of the dominators of the predecessor blocks is
5227 defined as the null set. We can identify such blocks by the
5228 special value in the AUX field in the block structure. */
5229 if (b->aux == ENTRY_BLOCK_PTR)
5231 /* Do not clear the aux field for blocks which are
5232 successors of the ENTRY block. That way we never
5233 add them to the worklist again.
5235 The intersection of the dominators of the preds of this block is
5236 defined as the null set. */
5237 sbitmap_zero (temp_bitmap[bb]);
5241 /* Clear the aux field of this block so it can be added to
5242 the worklist again if necessary. */
5244 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5247 /* Make sure each block always dominates itself. */
5248 SET_BIT (temp_bitmap[bb], bb);
5250 /* If the out state of this block changed, then we need to
5251 add the successors of this block to the worklist if they
5252 are not already on the worklist. */
5253 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5255 for (e = b->succ; e; e = e->succ_next)
5257 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5267 if (post_dominators)
5269 /* The optimistic setting of dominators requires us to put every
5270 block on the work list initially. */
5271 for (bb = 0; bb < n_basic_blocks; bb++)
5273 *tos++ = BASIC_BLOCK (bb);
5274 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5277 /* We want a maximal solution, so initially assume everything post
5278 dominates everything else. */
5279 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5281 /* Mark predecessors of the exit block so we can identify them below. */
5282 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5283 e->src->aux = EXIT_BLOCK_PTR;
5285 /* Iterate until the worklist is empty. */
5286 while (tos != worklist)
5288 /* Take the first entry off the worklist. */
5289 basic_block b = *--tos;
5292 /* Compute the intersection of the post dominators of all the
5295 If one of the successor blocks is the EXIT block, then the
5296 intersection of the post dominators of the successor blocks is
5297 defined as the null set. We can identify such blocks by the
5298 special value in the AUX field in the block structure. */
5299 if (b->aux == EXIT_BLOCK_PTR)
5301 /* Do not clear the aux field for blocks which are
5302 predecessors of the EXIT block. That way we never
5303 add them to the worklist again.
5305 The intersection of the post dominators of the succs of this block is
5306 defined as the null set. */
5307 sbitmap_zero (temp_bitmap[bb]);
5311 /* Clear the aux field of this block so it can be added to
5312 the worklist again if necessary. */
5314 sbitmap_intersection_of_succs (temp_bitmap[bb],
5315 post_dominators, bb);
5318 /* Make sure each block always post dominates itself. */
5319 SET_BIT (temp_bitmap[bb], bb);
5321 /* If the out state of this block changed, then we need to
5322 add the predecessors of this block to the worklist if they
5323 are not already on the worklist. */
5324 if (sbitmap_a_and_b (post_dominators[bb],
5325 post_dominators[bb],
5328 for (e = b->pred; e; e = e->pred_next)
5330 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
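/* Both directions above are instances of one worklist scheme: seed
   every block, pop a block, recompute its set from its neighbors, and
   re-queue only the neighbors whose input may have changed.  A
   stripped-down sketch of the forward case (the ENTRY-successor
   special case is omitted; this is exposition, not a replacement):  */
#if 0
  tos = worklist;
  for (bb = 0; bb < n_basic_blocks; bb++)
    *tos++ = BASIC_BLOCK (bb);	/* Seed: queue everything.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;
      b->aux = NULL;		/* B is no longer queued.  */

      /* Meet over predecessors, then add this block itself.  */
      sbitmap_intersection_of_preds (temp_bitmap[b->index],
				     dominators, b->index);
      SET_BIT (temp_bitmap[b->index], b->index);

      /* Re-queue successors only if our solution shrank.  */
      if (sbitmap_a_and_b (dominators[b->index], dominators[b->index],
			   temp_bitmap[b->index]))
	for (e = b->succ; e; e = e->succ_next)
	  if (! e->dest->aux && e->dest != EXIT_BLOCK_PTR)
	    {
	      *tos++ = e->dest;
	      e->dest->aux = e;	/* Any non-null marker will do.  */
	    }
    }
#endif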
5342 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5345 compute_immediate_dominators (idom, dominators)
5347 sbitmap *dominators;
5352 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5354 /* Begin with tmp(n) = dom(n) - { n }. */
5355 for (b = n_basic_blocks; --b >= 0; )
5357 sbitmap_copy (tmp[b], dominators[b]);
5358 RESET_BIT (tmp[b], b);
5361 /* Subtract out all of our dominator's dominators. */
5362 for (b = n_basic_blocks; --b >= 0; )
5364 sbitmap tmp_b = tmp[b];
5367 for (s = n_basic_blocks; --s >= 0; )
5368 if (TEST_BIT (tmp_b, s))
5369 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5372 /* Find the one bit set in the bitmap and put it in the output array. */
5373 for (b = n_basic_blocks; --b >= 0; )
5376 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5379 sbitmap_vector_free (tmp);
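/* A worked example (added for exposition): in the diamond CFG
   0->1, 0->2, 1->3, 2->3 the dominator sets are dom(0)={0},
   dom(1)={0,1}, dom(2)={0,2}, dom(3)={0,3}.  Then tmp(3) starts as
   {0}; nothing is subtracted since tmp(0) is empty, so idom[3] = 0.
   In a chain 0->1->2, tmp(2) starts as {0,1} and subtracting
   tmp(1)={0} leaves {1}, so idom[2] = 1.  Each tmp(b) ends with
   exactly the one bit naming b's immediate dominator.  */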
5382 /* Count for a single SET rtx, X. */
5385 count_reg_sets_1 (x)
5389 register rtx reg = SET_DEST (x);
5391 /* Find the register that's set/clobbered. */
5392 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5393 || GET_CODE (reg) == SIGN_EXTRACT
5394 || GET_CODE (reg) == STRICT_LOW_PART)
5395 reg = XEXP (reg, 0);
5397 if (GET_CODE (reg) == PARALLEL
5398 && GET_MODE (reg) == BLKmode)
5401 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5402 count_reg_sets_1 (XVECEXP (reg, 0, i));
5406 if (GET_CODE (reg) == REG)
5408 regno = REGNO (reg);
5409 if (regno >= FIRST_PSEUDO_REGISTER)
5411 /* Count (weighted) references, stores, etc. This counts a
5412 register twice if it is modified, but that is correct. */
5413 REG_N_SETS (regno)++;
5414 REG_N_REFS (regno) += loop_depth + 1;
5419 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5420 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5426 register RTX_CODE code = GET_CODE (x);
5428 if (code == SET || code == CLOBBER)
5429 count_reg_sets_1 (x);
5430 else if (code == PARALLEL)
5433 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5435 code = GET_CODE (XVECEXP (x, 0, i));
5436 if (code == SET || code == CLOBBER)
5437 count_reg_sets_1 (XVECEXP (x, 0, i));
5442 /* Increment REG_N_REFS by the current loop depth for each register reference
5446 count_reg_references (x)
5449 register RTX_CODE code;
5452 code = GET_CODE (x);
5472 /* If we are clobbering a MEM, mark any registers inside the address
5473 as being used. */
5474 if (GET_CODE (XEXP (x, 0)) == MEM)
5475 count_reg_references (XEXP (XEXP (x, 0), 0));
5479 /* While we're here, optimize this case. */
5482 /* In case the SUBREG is not of a register, don't optimize. */
5483 if (GET_CODE (x) != REG)
5485 count_reg_references (x);
5489 /* ... fall through ... */
5492 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5493 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5498 register rtx testreg = SET_DEST (x);
5501 /* If storing into MEM, don't show it as being used. But do
5502 show the address as being used. */
5503 if (GET_CODE (testreg) == MEM)
5505 count_reg_references (XEXP (testreg, 0));
5506 count_reg_references (SET_SRC (x));
5510 /* Storing in STRICT_LOW_PART is like storing in a reg
5511 in that this SET might be dead, so ignore it in TESTREG,
5512 but in some other ways it is like using the reg.
5514 Storing in a SUBREG or a bit field is like storing the entire
5515 register in that if the register's value is not used
5516 then this SET is not needed. */
5517 while (GET_CODE (testreg) == STRICT_LOW_PART
5518 || GET_CODE (testreg) == ZERO_EXTRACT
5519 || GET_CODE (testreg) == SIGN_EXTRACT
5520 || GET_CODE (testreg) == SUBREG)
5522 /* Modifying a single register in an alternate mode
5523 does not use any of the old value. But these other
5524 ways of storing in a register do use the old value. */
5525 if (GET_CODE (testreg) == SUBREG
5526 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5531 testreg = XEXP (testreg, 0);
5534 /* If this is a store into a register,
5535 recursively scan the value being stored. */
5537 if ((GET_CODE (testreg) == PARALLEL
5538 && GET_MODE (testreg) == BLKmode)
5539 || GET_CODE (testreg) == REG)
5541 count_reg_references (SET_SRC (x));
5543 count_reg_references (SET_DEST (x));
5553 /* Recursively scan the operands of this expression. */
5556 register const char *fmt = GET_RTX_FORMAT (code);
5559 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5563 /* Tail recursive case: save a function call level. */
5569 count_reg_references (XEXP (x, i));
5571 else if (fmt[i] == 'E')
5574 for (j = 0; j < XVECLEN (x, i); j++)
5575 count_reg_references (XVECEXP (x, i, j));
5581 /* Recompute register set/reference counts immediately prior to register
5584 This avoids problems with set/reference counts changing to/from values
5585 which have special meanings to the register allocators.
5587 Additionally, the reference counts are the primary component used by the
5588 register allocators to prioritize pseudos for allocation to hard regs.
5589 More accurate reference counts generally lead to better register allocation.
5591 F is the first insn to be scanned.
5593 LOOP_STEP denotes how much loop_depth should be incremented per
5594 loop nesting level in order to increase the ref count more for
5595 references in a loop.
5597 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5598 possibly other information which is used by the register allocators. */
5601 recompute_reg_usage (f, loop_step)
5602 rtx f ATTRIBUTE_UNUSED;
5603 int loop_step ATTRIBUTE_UNUSED;
5609 /* Clear out the old data. */
5610 max_reg = max_reg_num ();
5611 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5617 /* Scan each insn in the chain and count how many times each register is
5619 for (index = 0; index < n_basic_blocks; index++)
5621 basic_block bb = BASIC_BLOCK (index);
5622 loop_depth = bb->loop_depth;
5623 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5625 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5629 /* This call will increment REG_N_SETS for each SET or CLOBBER
5630 of a register in INSN. It will also increment REG_N_REFS
5631 by the loop depth for each set of a register in INSN. */
5632 count_reg_sets (PATTERN (insn));
5634 /* count_reg_sets does not detect autoincrement address modes, so
5635 detect them here by looking at the notes attached to INSN. */
5636 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5638 if (REG_NOTE_KIND (links) == REG_INC)
5639 /* Count (weighted) references, stores, etc. This counts a
5640 register twice if it is modified, but that is correct. */
5641 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5644 /* This call will increment REG_N_REFS by the current loop depth for
5645 each reference to a register in INSN. */
5646 count_reg_references (PATTERN (insn));
5648 /* count_reg_references will not include counts for arguments to
5649 function calls, so detect them here by examining the
5650 CALL_INSN_FUNCTION_USAGE data. */
5651 if (GET_CODE (insn) == CALL_INSN)
5655 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5657 note = XEXP (note, 1))
5658 if (GET_CODE (XEXP (note, 0)) == USE)
5659 count_reg_references (XEXP (XEXP (note, 0), 0));
5662 if (insn == bb->end)
5668 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5669 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5670 of the number of registers that died. */
5673 count_or_remove_death_notes (blocks, kill)
5679 for (i = n_basic_blocks - 1; i >= 0; --i)
5684 if (blocks && ! TEST_BIT (blocks, i))
5687 bb = BASIC_BLOCK (i);
5689 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5691 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5693 rtx *pprev = &REG_NOTES (insn);
5698 switch (REG_NOTE_KIND (link))
5701 if (GET_CODE (XEXP (link, 0)) == REG)
5703 rtx reg = XEXP (link, 0);
5706 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5709 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5717 rtx next = XEXP (link, 1);
5718 free_EXPR_LIST_node (link);
5719 *pprev = link = next;
5725 pprev = &XEXP (link, 1);
5732 if (insn == bb->end)
5740 /* Record INSN's block as BB. */
5743 set_block_for_insn (insn, bb)
5747 size_t uid = INSN_UID (insn);
5748 if (uid >= basic_block_for_insn->num_elements)
5752 /* Add one-eighth the size so we don't keep calling xrealloc. */
5753 new_size = uid + (uid + 7) / 8;
5755 VARRAY_GROW (basic_block_for_insn, new_size);
5757 VARRAY_BB (basic_block_for_insn, uid) = bb;
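/* For example, with uid == 1600 the new size is
   1600 + (1600 + 7) / 8 == 1800, i.e. each growth step adds roughly
   one-eighth slack, so the total cost of repeated xrealloc calls
   stays linear in the final size.  (Arithmetic example only.)  */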
5760 /* Record INSN's block number as BB. */
5761 /* ??? This has got to go. */
5764 set_block_num (insn, bb)
5768 set_block_for_insn (insn, BASIC_BLOCK (bb));
5771 /* Verify the CFG consistency. This function checks some CFG invariants and
5772 aborts when something is wrong. The hope is that it will help to
5773 convert many optimization passes to preserve CFG consistency.
5775 Currently it does the following checks:
5777 - test head/end pointers
5778 - overlapping of basic blocks
5779 - edge list correctness
5780 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5781 - tails of basic blocks (ensure that the boundary is necessary)
5782 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5783 and NOTE_INSN_BASIC_BLOCK
5784 - check that all insns are in the basic blocks
5785 (except the switch handling code, barriers and notes)
5787 In the future it can be extended to check a lot of other stuff as well
5788 (reachability of basic blocks, life information, etc. etc.). */
5793 const int max_uid = get_max_uid ();
5794 const rtx rtx_first = get_insns ();
5795 basic_block *bb_info;
5799 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5801 /* First pass checks head/end pointers and sets the bb_info array used by
5803 for (i = n_basic_blocks - 1; i >= 0; i--)
5805 basic_block bb = BASIC_BLOCK (i);
5807 /* Check the head pointer and make sure that it is pointing into
5809 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5814 error ("Head insn %d for block %d not found in the insn stream.",
5815 INSN_UID (bb->head), bb->index);
5819 /* Check the end pointer and make sure that it is pointing into
5821 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5823 if (bb_info[INSN_UID (x)] != NULL)
5825 error ("Insn %d is in multiple basic blocks (%d and %d)",
5826 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5829 bb_info[INSN_UID (x)] = bb;
5836 error ("End insn %d for block %d not found in the insn stream.",
5837 INSN_UID (bb->end), bb->index);
5842 /* Now check the basic blocks (boundaries etc.) */
5843 for (i = n_basic_blocks - 1; i >= 0; i--)
5845 basic_block bb = BASIC_BLOCK (i);
5846 /* Check correctness of edge lists. */
5854 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5856 fprintf (stderr, "Predecessor: ");
5857 dump_edge_info (stderr, e, 0);
5858 fprintf (stderr, "\nSuccessor: ");
5859 dump_edge_info (stderr, e, 1);
5863 if (e->dest != EXIT_BLOCK_PTR)
5865 edge e2 = e->dest->pred;
5866 while (e2 && e2 != e)
5870 error ("Basic block %i edge lists are corrupted", bb->index);
5882 error ("Basic block %d pred edge is corrupted", bb->index);
5883 fputs ("Predecessor: ", stderr);
5884 dump_edge_info (stderr, e, 0);
5885 fputs ("\nSuccessor: ", stderr);
5886 dump_edge_info (stderr, e, 1);
5887 fputc ('\n', stderr);
5890 if (e->src != ENTRY_BLOCK_PTR)
5892 edge e2 = e->src->succ;
5893 while (e2 && e2 != e)
5897 error ("Basic block %i edge lists are corrupted", bb->index);
5904 /* OK; pointers are correct. Now check the header of the basic
5905 block. It ought to contain an optional CODE_LABEL followed
5906 by NOTE_BASIC_BLOCK. */
5908 if (GET_CODE (x) == CODE_LABEL)
5912 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5918 if (GET_CODE (x) != NOTE
5919 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5920 || NOTE_BASIC_BLOCK (x) != bb)
5922 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5929 /* Do checks for empty blocks here. */
5936 if (GET_CODE (x) == NOTE
5937 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5939 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5940 INSN_UID (x), bb->index);
5947 if (GET_CODE (x) == JUMP_INSN
5948 || GET_CODE (x) == CODE_LABEL
5949 || GET_CODE (x) == BARRIER)
5951 error ("In basic block %d:", bb->index);
5952 fatal_insn ("Flow control insn inside a basic block", x);
5963 if (!bb_info[INSN_UID (x)])
5965 switch (GET_CODE (x))
5972 /* An addr_vec is placed outside any basic block. */
5974 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5975 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5976 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5981 /* But in any case, non-deletable labels can appear anywhere. */
5985 fatal_insn ("Insn outside basic block", x);
5999 /* Functions to access an edge list with a vector representation.
6000 Enough data is kept such that given an index number, the
6001 pred and succ that edge represents can be determined, or
6002 given a pred and a succ, its index number can be returned.
6003 This allows algorithms which consume a lot of memory to
6004 represent the normally full matrix of edge (pred,succ) with a
6005 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6006 wasted space in the client code due to sparse flow graphs. */
6008 /* This function initializes the edge list. Basically the entire
6009 flowgraph is processed, and all edges are assigned a number,
6010 and the data structure is filled in. */
6014 struct edge_list *elist;
6020 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6024 /* Determine the number of edges in the flow graph by counting successor
6025 edges on each basic block. */
6026 for (x = 0; x < n_basic_blocks; x++)
6028 basic_block bb = BASIC_BLOCK (x);
6030 for (e = bb->succ; e; e = e->succ_next)
6033 /* Don't forget successors of the entry block. */
6034 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6037 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6038 elist->num_blocks = block_count;
6039 elist->num_edges = num_edges;
6040 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6044 /* Follow successors of the entry block, and register these edges. */
6045 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6047 elist->index_to_edge[num_edges] = e;
6051 for (x = 0; x < n_basic_blocks; x++)
6053 basic_block bb = BASIC_BLOCK (x);
6055 /* Follow all successors of blocks, and register these edges. */
6056 for (e = bb->succ; e; e = e->succ_next)
6058 elist->index_to_edge[num_edges] = e;
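/* Typical client usage of the edge list built above (sketch; assumes
   the public entry point is create_edge_list and that a dump file F
   is open):  */
#if 0
  {
    struct edge_list *elist = create_edge_list ();
    int ix = EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (0));

    if (ix != EDGE_INDEX_NO_EDGE)
      fprintf (f, "entry->0 is edge %d of %d\n", ix, NUM_EDGES (elist));
    free_edge_list (elist);
  }
#endif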
6065 /* This function frees the memory associated with an edge list. */
6067 free_edge_list (elist)
6068 struct edge_list *elist;
6072 free (elist->index_to_edge);
6077 /* This function provides debug output showing an edge list. */
6079 print_edge_list (f, elist)
6081 struct edge_list *elist;
6084 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6085 elist->num_blocks - 2, elist->num_edges);
6087 for (x = 0; x < elist->num_edges; x++)
6089 fprintf (f, " %-4d - edge(", x);
6090 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6091 fprintf (f,"entry,");
6093 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6095 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6096 fprintf (f,"exit)\n");
6098 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6102 /* This function provides an internal consistency check of an edge list,
6103 verifying that all edges are present, and that there are no
6106 verify_edge_list (f, elist)
6108 struct edge_list *elist;
6110 int x, pred, succ, index;
6113 for (x = 0; x < n_basic_blocks; x++)
6115 basic_block bb = BASIC_BLOCK (x);
6117 for (e = bb->succ; e; e = e->succ_next)
6119 pred = e->src->index;
6120 succ = e->dest->index;
6121 index = EDGE_INDEX (elist, e->src, e->dest);
6122 if (index == EDGE_INDEX_NO_EDGE)
6124 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6127 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6128 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6129 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6130 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6131 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6132 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6135 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6137 pred = e->src->index;
6138 succ = e->dest->index;
6139 index = EDGE_INDEX (elist, e->src, e->dest);
6140 if (index == EDGE_INDEX_NO_EDGE)
6142 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6145 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6146 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6147 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6148 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6149 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6150 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6152 /* We've verified that all the edges are in the list; now let's make sure
6153 there are no spurious edges in the list. */
6155 for (pred = 0 ; pred < n_basic_blocks; pred++)
6156 for (succ = 0 ; succ < n_basic_blocks; succ++)
6158 basic_block p = BASIC_BLOCK (pred);
6159 basic_block s = BASIC_BLOCK (succ);
6163 for (e = p->succ; e; e = e->succ_next)
6169 for (e = s->pred; e; e = e->pred_next)
6175 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6176 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6177 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6179 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6180 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6181 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6182 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6183 BASIC_BLOCK (succ)));
6185 for (succ = 0 ; succ < n_basic_blocks; succ++)
6187 basic_block p = ENTRY_BLOCK_PTR;
6188 basic_block s = BASIC_BLOCK (succ);
6192 for (e = p->succ; e; e = e->succ_next)
6198 for (e = s->pred; e; e = e->pred_next)
6204 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6205 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6206 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6208 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6209 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6210 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6211 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6212 BASIC_BLOCK (succ)));
6214 for (pred = 0 ; pred < n_basic_blocks; pred++)
6216 basic_block p = BASIC_BLOCK (pred);
6217 basic_block s = EXIT_BLOCK_PTR;
6221 for (e = p->succ; e; e = e->succ_next)
6227 for (e = s->pred; e; e = e->pred_next)
6233 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6234 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6235 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6237 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6238 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6239 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6240 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6245 /* This routine will determine what, if any, edge there is between
6246 a specified predecessor and successor. */
6249 find_edge_index (edge_list, pred, succ)
6250 struct edge_list *edge_list;
6251 basic_block pred, succ;
6254 for (x = 0; x < NUM_EDGES (edge_list); x++)
6256 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6257 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6260 return (EDGE_INDEX_NO_EDGE);
6263 /* This function will remove an edge from the flow graph. */
6268 edge last_pred = NULL;
6269 edge last_succ = NULL;
6271 basic_block src, dest;
6274 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6280 last_succ->succ_next = e->succ_next;
6282 src->succ = e->succ_next;
6284 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6290 last_pred->pred_next = e->pred_next;
6292 dest->pred = e->pred_next;
6298 /* This routine will remove any fake successor edges for a basic block.
6299 When the edge is removed, it is also removed from whatever predecessor
6302 remove_fake_successors (bb)
6306 for (e = bb->succ; e ; )
6310 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6315 /* This routine will remove all fake edges from the flow graph. If
6316 we remove all fake successors, it will automatically remove all
6317 fake predecessors. */
6319 remove_fake_edges ()
6323 for (x = 0; x < n_basic_blocks; x++)
6324 remove_fake_successors (BASIC_BLOCK (x));
6326 /* We've handled all successors except the entry block's. */
6327 remove_fake_successors (ENTRY_BLOCK_PTR);
6330 /* This function will add a fake edge between any block which has no
6331 successors, and the exit block. Some data flow equations require these
6334 add_noreturn_fake_exit_edges ()
6338 for (x = 0; x < n_basic_blocks; x++)
6339 if (BASIC_BLOCK (x)->succ == NULL)
6340 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6343 /* Dump the list of basic blocks in the bitmap NODES. */
6345 flow_nodes_print (str, nodes, file)
6347 const sbitmap nodes;
6352 fprintf (file, "%s { ", str);
6353 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6354 fputs ("}\n", file);
6358 /* Dump the list of exiting edges in the array EDGES. */
6360 flow_exits_print (str, edges, num_edges, file)
6368 fprintf (file, "%s { ", str);
6369 for (i = 0; i < num_edges; i++)
6370 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6371 fputs ("}\n", file);
6375 /* Dump loop related CFG information. */
6377 flow_loops_cfg_dump (loops, file)
6378 const struct loops *loops;
6383 if (! loops->num || ! file || ! loops->cfg.dom)
6386 for (i = 0; i < n_basic_blocks; i++)
6390 fprintf (file, ";; %d succs { ", i);
6391 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6392 fprintf (file, "%d ", succ->dest->index);
6393 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6397 /* Dump the DFS node order. */
6398 if (loops->cfg.dfs_order)
6400 fputs (";; DFS order: ", file);
6401 for (i = 0; i < n_basic_blocks; i++)
6402 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6408 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6410 flow_loop_nested_p (outer, loop)
6414 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6418 /* Dump the loop information specified by LOOPS to the stream FILE. */
6420 flow_loops_dump (loops, file, verbose)
6421 const struct loops *loops;
6428 num_loops = loops->num;
6429 if (! num_loops || ! file)
6432 fprintf (file, ";; %d loops found, %d levels\n",
6433 num_loops, loops->levels);
6435 for (i = 0; i < num_loops; i++)
6437 struct loop *loop = &loops->array[i];
6439 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6440 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6441 loop->header->index, loop->latch->index,
6442 loop->pre_header ? loop->pre_header->index : -1,
6443 loop->depth, loop->level,
6444 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6445 fprintf (file, ";; %d", loop->num_nodes);
6446 flow_nodes_print (" nodes", loop->nodes, file);
6447 fprintf (file, ";; %d", loop->num_exits);
6448 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6454 for (j = 0; j < i; j++)
6456 struct loop *oloop = &loops->array[j];
6458 if (loop->header == oloop->header)
6463 smaller = loop->num_nodes < oloop->num_nodes;
6465 /* If the union of LOOP and OLOOP is different than
6466 the larger of LOOP and OLOOP then LOOP and OLOOP
6467 must be disjoint. */
6468 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6469 smaller ? oloop : loop);
6470 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6471 loop->header->index, i, j,
6472 disjoint ? "disjoint" : "nested");
6479 /* Print diagnostics to compare our concept of a loop with
6480 what the loop notes say. */
6481 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6482 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6483 != NOTE_INSN_LOOP_BEG)
6484 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6485 INSN_UID (PREV_INSN (loop->first->head)));
6486 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6487 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6488 != NOTE_INSN_LOOP_END)
6489 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6490 INSN_UID (NEXT_INSN (loop->last->end)));
6495 flow_loops_cfg_dump (loops, file);
6499 /* Free all the memory allocated for LOOPS. */
6501 flow_loops_free (loops)
6502 struct loops *loops;
6511 /* Free the loop descriptors. */
6512 for (i = 0; i < loops->num; i++)
6514 struct loop *loop = &loops->array[i];
6517 sbitmap_free (loop->nodes);
6521 free (loops->array);
6522 loops->array = NULL;
6525 sbitmap_vector_free (loops->cfg.dom);
6526 if (loops->cfg.dfs_order)
6527 free (loops->cfg.dfs_order);
6529 sbitmap_free (loops->shared_headers);
6534 /* Find the exits from the loop using the bitmap of loop nodes NODES
6535 and store in EXITS array. Return the number of exits from the
6538 flow_loop_exits_find (nodes, exits)
6539 const sbitmap nodes;
6548 /* Check all nodes within the loop to see if there are any
6549 successors not in the loop. Note that a node may have multiple
6552 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6553 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6555 basic_block dest = e->dest;
6557 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6565 *exits = (edge *) xmalloc (num_exits * sizeof (edge));
6567 /* Store all exiting edges into an array. */
6569 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6570 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6572 basic_block dest = e->dest;
6574 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6575 (*exits)[num_exits++] = e;
6583 /* Find the nodes contained within the loop with header HEADER and
6584 latch LATCH and store in NODES. Return the number of nodes within
6587 flow_loop_nodes_find (header, latch, nodes)
6596 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6599 /* Start with only the loop header in the set of loop nodes. */
6600 sbitmap_zero (nodes);
6601 SET_BIT (nodes, header->index);
6603 header->loop_depth++;
6605 /* Push the loop latch on to the stack. */
6606 if (! TEST_BIT (nodes, latch->index))
6608 SET_BIT (nodes, latch->index);
6609 latch->loop_depth++;
6611 stack[sp++] = latch;
6620 for (e = node->pred; e; e = e->pred_next)
6622 basic_block ancestor = e->src;
6624 /* If this ancestor is not marked as part of the loop, add it to
6625 the set of loop nodes and push it onto the stack. */
6626 if (ancestor != ENTRY_BLOCK_PTR
6627 && ! TEST_BIT (nodes, ancestor->index))
6629 SET_BIT (nodes, ancestor->index);
6630 ancestor->loop_depth++;
6632 stack[sp++] = ancestor;
6641 /* Compute the depth first search order and store in the array
6642 DFS_ORDER, marking visited nodes in a local bitmap. Returns the
6643 number of nodes visited. */
6645 flow_depth_first_order_compute (dfs_order)
6654 /* Allocate stack for back-tracking up CFG. */
6655 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6658 /* Allocate bitmap to track nodes that have been visited. */
6659 visited = sbitmap_alloc (n_basic_blocks);
6661 /* None of the nodes in the CFG have been visited yet. */
6662 sbitmap_zero (visited);
6664 /* Start with the first successor edge from the entry block. */
6665 e = ENTRY_BLOCK_PTR->succ;
6668 basic_block src = e->src;
6669 basic_block dest = e->dest;
6671 /* Mark that we have visited this node. */
6672 if (src != ENTRY_BLOCK_PTR)
6673 SET_BIT (visited, src->index);
6675 /* If this node has not been visited before, push the current
6676 edge on to the stack and proceed with the first successor
6677 edge of this node. */
6678 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6686 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6689 /* DEST has no successors (for example, a non-returning
6690 function is called) so do not push the current edge
6691 but carry on with its next successor. */
6692 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6693 SET_BIT (visited, dest->index);
6696 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6698 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6700 /* Pop edge off stack. */
6708 sbitmap_free (visited);
6710 /* The number of nodes visited should not be greater than
6712 if (dfsnum > n_basic_blocks)
6715 /* There are some nodes left in the CFG that are unreachable. */
6716 if (dfsnum < n_basic_blocks)
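/* The loop above is a non-recursive DFS: an explicit edge stack
   replaces the call stack, and a block is numbered when its last
   successor edge is popped, so dfs_order holds reverse completion
   numbers and outer loop headers sort before inner ones.  The same
   numbering, written recursively for clarity (exposition only; the
   real code avoids recursion to bound stack usage):  */
#if 0
static int dfs_next_num;

static void
dfs_number (bb, dfs_order, visited)
     basic_block bb;
     int *dfs_order;
     sbitmap visited;
{
  edge e;

  SET_BIT (visited, bb->index);
  for (e = bb->succ; e; e = e->succ_next)
    if (e->dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, e->dest->index))
      dfs_number (e->dest, dfs_order, visited);

  /* Number on the way out => reverse completion order.  */
  dfs_order[bb->index] = n_basic_blocks - ++dfs_next_num;
}
#endif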
6722 /* Return the block for the pre-header of the loop with header
6723 HEADER where DOM specifies the dominator information. Return NULL if
6724 there is no pre-header. */
6726 flow_loop_pre_header_find (header, dom)
6730 basic_block pre_header;
6733 /* If block p is a predecessor of the header and is the only block
6734 that the header does not dominate, then it is the pre-header. */
6736 for (e = header->pred; e; e = e->pred_next)
6738 basic_block node = e->src;
6740 if (node != ENTRY_BLOCK_PTR
6741 && ! TEST_BIT (dom[node->index], header->index))
6743 if (pre_header == NULL)
6747 /* There are multiple edges into the header from outside
6748 the loop so there is no pre-header block. */
6758 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6759 previously added. The insertion algorithm assumes that the loops
6760 are added in the order found by a depth first search of the CFG. */
6762 flow_loop_tree_node_add (prevloop, loop)
6763 struct loop *prevloop;
6767 if (flow_loop_nested_p (prevloop, loop))
6769 prevloop->inner = loop;
6770 loop->outer = prevloop;
6774 while (prevloop->outer)
6776 if (flow_loop_nested_p (prevloop->outer, loop))
6778 prevloop->next = loop;
6779 loop->outer = prevloop->outer;
6782 prevloop = prevloop->outer;
6785 prevloop->next = loop;
6790 /* Build the loop hierarchy tree for LOOPS. */
6792 flow_loops_tree_build (loops)
6793 struct loops *loops;
6798 num_loops = loops->num;
6802 /* Root the loop hierarchy tree with the first loop found.
6803 Since we used a depth first search this should be the
6805 loops->tree = &loops->array[0];
6806 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6808 /* Add the remaining loops to the tree. */
6809 for (i = 1; i < num_loops; i++)
6810 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6814 /* Helper function to compute loop nesting depth and enclosed loop level
6815 for the natural loop specified by LOOP at the loop depth DEPTH.
6816 Returns the loop level. */
6818 flow_loop_level_compute (loop, depth)
6828 /* Traverse loop tree assigning depth and computing level as the
6829 maximum level of all the inner loops of this loop. The loop
6830 level is equivalent to the height of the loop in the loop tree
6831 and corresponds to the number of enclosed loop levels (including
6833 for (inner = loop->inner; inner; inner = inner->next)
6837 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6842 loop->level = level;
6843 loop->depth = depth;
6848 /* Compute the loop nesting depth and enclosed loop level for the loop
6849 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
6853 flow_loops_level_compute (loops)
6854 struct loops *loops;
6860 /* Traverse all the outer level loops. */
6861 for (loop = loops->tree; loop; loop = loop->next)
6863 level = flow_loop_level_compute (loop, 1);
6871 /* Find all the natural loops in the function and save in LOOPS structure
6872 and recalculate loop_depth information in basic block structures.
6873 Return the number of natural loops found. */
6876 flow_loops_find (loops)
6877 struct loops *loops;
6888 loops->array = NULL;
6892 /* Taking care of this degenerate case makes the rest of
6893 this code simpler. */
6894 if (n_basic_blocks == 0)
6897 /* Compute the dominators. */
6898 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6899 compute_flow_dominators (dom, NULL);
6901 /* Count the number of loop edges (back edges). This should be the
6902 same as the number of natural loops. Also clear each loop_depth;
6903 as we work from inner->outer in a loop nest, we call
6904 flow_loop_nodes_find, which will increment loop_depth for nodes
6905 within the current loop, which happens to enclose inner loops. */
6908 for (b = 0; b < n_basic_blocks; b++)
6910 BASIC_BLOCK (b)->loop_depth = 0;
6911 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6913 basic_block latch = e->src;
6915 /* Look for back edges where a predecessor is dominated
6916 by this block. A natural loop has a single entry
6917 node (header) that dominates all the nodes in the
6918 loop. It also has a single back edge to the header
6919 from a latch node. Note that multiple natural loops
6920 may share the same header. */
6921 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6928 /* Compute depth first search order of the CFG so that outer
6929 natural loops will be found before inner natural loops. */
6930 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6931 flow_depth_first_order_compute (dfs_order);
6933 /* Allocate loop structures. */
6935 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6937 headers = sbitmap_alloc (n_basic_blocks);
6938 sbitmap_zero (headers);
6940 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6941 sbitmap_zero (loops->shared_headers);
6943 /* Find and record information about all the natural loops
6946 for (b = 0; b < n_basic_blocks; b++)
6950 /* Search the nodes of the CFG in DFS order so that we can find
6951 outer loops first. */
6952 header = BASIC_BLOCK (dfs_order[b]);
6954 /* Look for all the possible latch blocks for this header. */
6955 for (e = header->pred; e; e = e->pred_next)
6957 basic_block latch = e->src;
6959 /* Look for back edges where a predecessor is dominated
6960 by this block. A natural loop has a single entry
6961 node (header) that dominates all the nodes in the
6963 loop. It also has a single back edge to the header
6963 from a latch node. Note that multiple natural loops
6964 may share the same header. */
6965 if (latch != ENTRY_BLOCK_PTR
6966 && TEST_BIT (dom[latch->index], header->index))
6970 loop = loops->array + num_loops;
6972 loop->header = header;
6973 loop->latch = latch;
6975 /* Keep track of blocks that are loop headers so
6976 that we can tell which loops should be merged. */
6977 if (TEST_BIT (headers, header->index))
6978 SET_BIT (loops->shared_headers, header->index);
6979 SET_BIT (headers, header->index);
6981 /* Find nodes contained within the loop. */
6982 loop->nodes = sbitmap_alloc (n_basic_blocks);
6984 = flow_loop_nodes_find (header, latch, loop->nodes);
6986 /* Compute first and last blocks within the loop.
6987 These are often the same as the loop header and
6988 loop latch respectively, but this is not always
6991 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
6993 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
6995 /* Find edges which exit the loop. Note that a node
6996 may have several exit edges. */
6998 = flow_loop_exits_find (loop->nodes, &loop->exits);
7000 /* Look to see if the loop has a pre-header node. */
7002 = flow_loop_pre_header_find (header, dom);
      /* Natural loops with shared headers may either be disjoint or
	 nested.  Disjoint loops with shared headers cannot be inner
	 loops and should be merged.  For now just mark loops that share
	 headers.  */
      for (i = 0; i < num_loops; i++)
	if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
	  loops->array[i].shared = 1;

      sbitmap_free (headers);
    }

  loops->num = num_loops;

  /* Save CFG derived information to avoid recomputing it.  */
  loops->cfg.dom = dom;
  loops->cfg.dfs_order = dfs_order;

  /* Build the loop hierarchy tree.  */
  flow_loops_tree_build (loops);

  /* Assign the loop nesting depth and enclosed loop level for each
     loop.  */
  loops->levels = flow_loops_level_compute (loops);

  return num_loops;
}
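/* As an illustration, a caller that wants loop information follows the
   same protocol reorder_basic_blocks uses below:

	struct loops loops_info;

	num_loops = flow_loops_find (&loops_info);
	... examine loops_info.array[0 .. num_loops - 1] ...
	flow_loops_free (&loops_info);
*/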
/* Return non-zero if edge E enters the header of LOOP from outside
   of LOOP.  */

int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     const edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}
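/* For example, a branch predictor can use this predicate to tell a
   loop-entry edge apart from the latch's back edge when estimating
   that a loop branch is usually taken.  */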
/* Per-block bookkeeping used while reordering; one of these records is
   hung off each basic block's AUX field for the duration of the pass.  */

typedef struct reorder_block_def {
  int flags;
  int index;
  basic_block add_jump;
  edge succ;
  rtx end;
  int block_begin;
  int block_end;
} *reorder_block_def;

#define REORDER_BLOCK_HEAD	0x1
#define REORDER_BLOCK_VISITED	0x2
#define REORDER_MOVED_BLOCK_END 0x3

#define REORDER_BLOCK_FLAGS(bb) \
  ((reorder_block_def) (bb)->aux)->flags

#define REORDER_BLOCK_INDEX(bb) \
  ((reorder_block_def) (bb)->aux)->index

#define REORDER_BLOCK_ADD_JUMP(bb) \
  ((reorder_block_def) (bb)->aux)->add_jump

#define REORDER_BLOCK_SUCC(bb) \
  ((reorder_block_def) (bb)->aux)->succ

#define REORDER_BLOCK_OLD_END(bb) \
  ((reorder_block_def) (bb)->aux)->end

#define REORDER_BLOCK_BEGIN(bb) \
  ((reorder_block_def) (bb)->aux)->block_begin

#define REORDER_BLOCK_END(bb) \
  ((reorder_block_def) (bb)->aux)->block_end
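/* As an illustration, once reorder_basic_blocks below has hung an aux
   record off every block, marking a block visited and assigning its new
   position is simply

	REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
	REORDER_BLOCK_INDEX (bb) = reorder_index++;

   which is exactly what make_reorder_chain does.  */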
static int reorder_index;
static basic_block reorder_last_visited;

enum reorder_skip_type {REORDER_SKIP_BEFORE, REORDER_SKIP_AFTER,
			REORDER_SKIP_BLOCK_END};

static rtx skip_insns_between_block	PARAMS ((basic_block,
						 enum reorder_skip_type));
/* Skip over insns BEFORE or AFTER BB which are typically associated with
   basic block BB.  */

static rtx
skip_insns_between_block (bb, skip_type)
     basic_block bb;
     enum reorder_skip_type skip_type;
{
  rtx insn, last_insn;

  if (skip_type == REORDER_SKIP_BEFORE)
    {
      if (bb == ENTRY_BLOCK_PTR)
	return 0;

      last_insn = bb->head;
      for (insn = PREV_INSN (bb->head);
	   insn && insn != BASIC_BLOCK (bb->index - 1)->end;
	   last_insn = insn, insn = PREV_INSN (insn))
	{
	  if (NEXT_INSN (insn) != last_insn)
	    break;

	  /* Skip over notes that stay with this block, stopping at
	     notes that delimit the previous block.  */
	  if (GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
	    continue;

	  break;
	}
    }
  else
    {
      last_insn = bb->end;

      if (bb == EXIT_BLOCK_PTR)
	return 0;

      for (insn = NEXT_INSN (bb->end);
	   insn;
	   last_insn = insn, insn = NEXT_INSN (insn))
	{
	  if (bb->index + 1 != n_basic_blocks
	      && insn == BASIC_BLOCK (bb->index + 1)->head)
	    break;

	  /* A barrier, a jump, or a loop or scope end stays with
	     this block.  */
	  if (GET_CODE (insn) == BARRIER
	      || GET_CODE (insn) == JUMP_INSN
	      || (GET_CODE (insn) == NOTE
		  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
		      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)))
	    continue;

	  /* So does a dispatch table emitted just after the block.  */
	  if (GET_CODE (insn) == CODE_LABEL
	      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
	      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
		  || GET_CODE (PATTERN
			       (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	    {
	      insn = NEXT_INSN (insn);
	      continue;
	    }

	  break;
	}

      if (skip_type == REORDER_SKIP_BLOCK_END)
	{
	  int found_block_end = 0;

	  for (; insn; last_insn = insn, insn = NEXT_INSN (insn))
	    {
	      if (bb->index + 1 != n_basic_blocks
		  && insn == BASIC_BLOCK (bb->index + 1)->head)
		break;

	      if (GET_CODE (insn) == NOTE
		  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
		{
		  found_block_end = 1;
		  continue;
		}

	      if (GET_CODE (insn) == NOTE
		  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
		continue;

	      if (GET_CODE (insn) == NOTE
		  && NOTE_LINE_NUMBER (insn) >= 0
		  && NEXT_INSN (insn)
		  && (NOTE_LINE_NUMBER (NEXT_INSN (insn))
		      == NOTE_INSN_BLOCK_END))
		continue;

	      break;
	    }

	  if (! found_block_end)
	    last_insn = 0;
	}
    }

  return last_insn;
}
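/* For example, a NOTE_INSN_LOOP_BEG emitted just ahead of a loop header
   is included by REORDER_SKIP_BEFORE, and a BARRIER or dispatch table
   following a block's final jump is included by REORDER_SKIP_AFTER, so
   such insns travel with the block when it is moved.  */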
/* Return the common destination, if any, for blocks BB0 and BB1,
   or null if they share none.  */

static basic_block
get_common_dest (bb0, bb1)
     basic_block bb0, bb1;
{
  edge e0, e1;

  for (e0 = bb0->succ; e0; e0 = e0->succ_next)
    for (e1 = bb1->succ; e1; e1 = e1->succ_next)
      if (e0->dest == e1->dest)
	return e0->dest;

  return 0;
}
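/* For example, in an if-then with no else, the test block and its then
   block both reach the join block, so get_common_dest returns non-null;
   the classification code below uses this to recognize the
   NO_ELSE_BLOCK shape.  */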
/* Move the destination block for edge E after chain end block CEB.
   Adding jumps and labels is deferred until fixup_reorder_chain.  */

static basic_block
chain_reorder_blocks (e, ceb)
     edge e;
     basic_block ceb;
{
  basic_block sb = e->src;
  basic_block db = e->dest;
  rtx cebe_insn, cebbe_insn, dbh_insn, dbe_insn;
  edge ee, last_edge;

  enum cond_types {NO_COND, PREDICT_THEN_WITH_ELSE, PREDICT_ELSE,
		   PREDICT_THEN_NO_ELSE, PREDICT_NOT_THEN_NO_ELSE};
  enum cond_types cond_type;
  enum cond_block_types {NO_COND_BLOCK, THEN_BLOCK, ELSE_BLOCK,
			 NO_ELSE_BLOCK};
  enum cond_block_types cond_block_type;

  if (rtl_dump_file)
    fprintf (rtl_dump_file,
	     "Edge from basic block %d to basic block %d last visited %d\n",
	     sb->index, db->index, ceb->index);

  dbh_insn = skip_insns_between_block (db, REORDER_SKIP_BEFORE);
  cebe_insn = skip_insns_between_block (ceb, REORDER_SKIP_AFTER);
  cebbe_insn = skip_insns_between_block (ceb, REORDER_SKIP_BLOCK_END);

  {
    int block_begins = 0;
    rtx insn;

    /* Count the lexical scopes opened in the block being moved.  */
    for (insn = dbh_insn; insn && insn != db->end; insn = NEXT_INSN (insn))
      {
	if (GET_CODE (insn) == NOTE
	    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  block_begins += 1;
      }
    REORDER_BLOCK_BEGIN (sb) = block_begins;
  }

  if (cebbe_insn)
    {
      int block_ends = 0;
      rtx insn;

      /* Count the lexical scopes closed at the end of the chain.  */
      for (insn = cebe_insn; insn; insn = NEXT_INSN (insn))
	{
	  if (PREV_INSN (insn) == cebbe_insn)
	    break;
	  if (GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    block_ends += 1;
	}
      REORDER_BLOCK_END (ceb) = block_ends;
    }

  /* Blocks are already in original order; nothing to move.  */
  if (sb->index == ceb->index
      && ceb->index + 1 == db->index && NEXT_INSN (cebe_insn))
    return db;
  /* Get the type of block and type of condition.  */
  cond_type = NO_COND;
  cond_block_type = NO_COND_BLOCK;
  if (GET_CODE (sb->end) == JUMP_INSN && ! simplejump_p (sb->end)
      && condjump_p (sb->end))
    {
      if (e->flags & EDGE_FALLTHRU)
	cond_block_type = THEN_BLOCK;
      else if (get_common_dest (sb->succ->dest, sb))
	cond_block_type = NO_ELSE_BLOCK;
      else
	cond_block_type = ELSE_BLOCK;

      if (sb->succ->succ_next
	  && get_common_dest (sb->succ->dest, sb))
	{
	  if (cond_block_type == THEN_BLOCK)
	    {
	      if (! (REORDER_BLOCK_FLAGS (sb->succ->succ_next->dest)
		     & REORDER_BLOCK_VISITED))
		cond_type = PREDICT_THEN_NO_ELSE;
	      else
		cond_type = PREDICT_NOT_THEN_NO_ELSE;
	    }
	  else if (cond_block_type == NO_ELSE_BLOCK)
	    {
	      if (! (REORDER_BLOCK_FLAGS (sb->succ->dest)
		     & REORDER_BLOCK_VISITED))
		cond_type = PREDICT_NOT_THEN_NO_ELSE;
	      else
		cond_type = PREDICT_THEN_NO_ELSE;
	    }
	}
      else
	{
	  if (cond_block_type == THEN_BLOCK)
	    {
	      if (! (REORDER_BLOCK_FLAGS (sb->succ->succ_next->dest)
		     & REORDER_BLOCK_VISITED))
		cond_type = PREDICT_THEN_WITH_ELSE;
	      else
		cond_type = PREDICT_ELSE;
	    }
	  else if (cond_block_type == ELSE_BLOCK
		   && sb->succ->dest != EXIT_BLOCK_PTR)
	    {
	      if (! (REORDER_BLOCK_FLAGS (sb->succ->dest)
		     & REORDER_BLOCK_VISITED))
		cond_type = PREDICT_ELSE;
	      else
		cond_type = PREDICT_THEN_WITH_ELSE;
	    }
	}
    }
  if (rtl_dump_file)
    {
      static const char * cond_type_str [] = {"not cond jump", "predict then",
					      "predict else",
					      "predict then w/o else",
					      "predict not then w/o else"};
      static const char * cond_block_type_str [] = {"not then or else block",
						    "then block", "else block",
						    "then w/o else block"};

      fprintf (rtl_dump_file, "     %s (looking at %s)\n",
	       cond_type_str[(int) cond_type],
	       cond_block_type_str[(int) cond_block_type]);
    }
  /* Reflect that the then block will move and we'll jump to it.  */
  if (cond_block_type != THEN_BLOCK
      && (cond_type == PREDICT_ELSE
	  || cond_type == PREDICT_NOT_THEN_NO_ELSE))
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file,
		 "     then jump from block %d to block %d\n",
		 sb->index, sb->succ->dest->index);

      /* Jump to reordered then block.  */
      REORDER_BLOCK_ADD_JUMP (sb) = sb->succ->dest;
    }

  /* Reflect that the then block will jump back when we have no else.  */
  if (cond_block_type != THEN_BLOCK
      && cond_type == PREDICT_NOT_THEN_NO_ELSE)
    {
      for (ee = sb->succ->dest->succ;
	   ee && ! (ee->flags & EDGE_FALLTHRU);
	   ee = ee->succ_next)
	continue;

      if (ee && ! (GET_CODE (sb->succ->dest->end) == JUMP_INSN
		   && ! simplejump_p (sb->succ->dest->end)))
	REORDER_BLOCK_ADD_JUMP (sb->succ->dest) = ee->dest;
    }
  /* Reflect that the else block will jump back.  */
  if (cond_block_type == ELSE_BLOCK
      && (cond_type == PREDICT_THEN_WITH_ELSE || cond_type == PREDICT_ELSE))
    {
      last_edge = db->succ;

      if (last_edge
	  && last_edge->dest != EXIT_BLOCK_PTR
	  && GET_CODE (last_edge->dest->head) == CODE_LABEL
	  && ! (GET_CODE (db->end) == JUMP_INSN))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file,
		     "     else jump from block %d to block %d\n",
		     db->index, last_edge->dest->index);

	  REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
	}
    }

  /* This block's successor has already been reordered.  This can happen
     when we reorder a chain starting at a then or else.  */
  for (last_edge = db->succ;
       last_edge && ! (last_edge->flags & EDGE_FALLTHRU);
       last_edge = last_edge->succ_next)
    continue;

  if (last_edge
      && last_edge->dest != EXIT_BLOCK_PTR
      && (REORDER_BLOCK_FLAGS (last_edge->dest)
	  & REORDER_BLOCK_VISITED))
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file,
		 "     end of chain jump from block %d to block %d\n",
		 db->index, last_edge->dest->index);

      REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
    }
  dbh_insn = skip_insns_between_block (db, REORDER_SKIP_BEFORE);
  cebe_insn = skip_insns_between_block (ceb, REORDER_SKIP_AFTER);
  dbe_insn = skip_insns_between_block (db, REORDER_SKIP_AFTER);

  /* Leave behind any lexical block markers.  */
  if (debug_info_level > DINFO_LEVEL_TERSE
      && ceb->index + 1 < db->index)
    {
      rtx insn, last_insn = get_last_insn ();

      insn = NEXT_INSN (ceb->end);
      if (! insn)
	insn = REORDER_BLOCK_OLD_END (ceb);

      if (NEXT_INSN (cebe_insn) == 0)
	set_last_insn (cebe_insn);
      for (; insn && insn != db->head;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	    cebe_insn = emit_note_after (NOTE_INSN_BLOCK_BEG, cebe_insn);

	  if (GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    cebe_insn = emit_note_after (NOTE_INSN_BLOCK_END, cebe_insn);
	}
      set_last_insn (last_insn);
    }

  /* Rechain predicted block.  */
  NEXT_INSN (cebe_insn) = dbh_insn;
  PREV_INSN (dbh_insn) = cebe_insn;

  REORDER_BLOCK_OLD_END (db) = NEXT_INSN (dbe_insn);
  if (db->index != n_basic_blocks - 1)
    NEXT_INSN (dbe_insn) = 0;

  return db;
}
/* Reorder blocks starting at block BB.  */

static void
make_reorder_chain (bb)
     basic_block bb;
{
  edge e;
  basic_block visited_edge = NULL;
  rtx block_end;
  int probability;

  if (bb == EXIT_BLOCK_PTR)
    return;

  /* Find the most probable block.  */
  e = bb->succ;
  block_end = bb->end;
  if (GET_CODE (block_end) == JUMP_INSN && condjump_p (block_end))
    {
      rtx note = find_reg_note (block_end, REG_BR_PROB, 0);

      if (note)
	probability = XINT (XEXP (note, 0), 0);
      else
	probability = 0;

      /* Prefer the branch edge when the branch is predicted taken.  */
      if (probability >= REG_BR_PROB_BASE / 2)
	e = bb->succ->succ_next;
    }
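  /* For example, with REG_BR_PROB_BASE of 10000, a REG_BR_PROB note
     value of 7000 means the branch is taken an estimated 70% of the
     time, so the test above picks the second successor edge (the
     branch) over the first (the fallthrough).  */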
  /* Add the chosen successor to the chain and recurse on it.  */
  if (e && e->dest != EXIT_BLOCK_PTR
      && e->dest != e->src
      && (! (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
	  || (REORDER_BLOCK_FLAGS (e->dest) == REORDER_BLOCK_HEAD)))
    {
      if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
	{
	  REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_HEAD;
	  REORDER_BLOCK_INDEX (bb) = reorder_index++;
	  REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
	}

      if (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
	REORDER_BLOCK_FLAGS (e->dest) &= ~REORDER_BLOCK_HEAD;

      REORDER_BLOCK_SUCC (bb) = e;

      visited_edge = e->dest;

      reorder_last_visited = chain_reorder_blocks (e, bb);

      if (e->dest
	  && ! (REORDER_BLOCK_FLAGS (e->dest)
		& REORDER_BLOCK_VISITED))
	make_reorder_chain (e->dest);
    }
  else if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
    {
      REORDER_BLOCK_INDEX (bb) = reorder_index++;
      REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
    }

  /* Recurse on the successors.  */
  for (e = bb->succ; e; e = e->succ_next)
    {
      if (e->dest && e->dest == EXIT_BLOCK_PTR)
	continue;

      if (e->dest
	  && e->dest != e->src
	  && e->dest != visited_edge
	  && ! (REORDER_BLOCK_FLAGS (e->dest)
		& REORDER_BLOCK_VISITED))
	{
	  reorder_last_visited
	    = chain_reorder_blocks (e, reorder_last_visited);
	  make_reorder_chain (e->dest);
	}
    }
}
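/* As an illustration, for an if-then-else whose else arm is the likely
   one, the walk above starts at the test block, chains the else block
   directly after it, and only then recurses into the then block, which
   is moved out of line and reached through a jump added later by
   fixup_reorder_chain.  */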
/* Fixup jumps and labels after reordering basic blocks.  */

static void
fixup_reorder_chain ()
{
  int i, j;
  rtx insn;

  /* Set the new last insn.  */
  for (i = 0;
       i < n_basic_blocks - 1
	 && REORDER_BLOCK_INDEX (BASIC_BLOCK (i)) != n_basic_blocks;
       i++)
    continue;

  for (insn = BASIC_BLOCK (i)->head;
       NEXT_INSN (insn) != 0;
       insn = NEXT_INSN (insn))
    continue;

  set_last_insn (insn);
  /* Add jumps and labels to fixup blocks.  */
  for (i = 0; i < n_basic_blocks - 1; i++)
    {
      if (REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i)))
	{
	  rtx new_label = gen_label_rtx ();
	  rtx label_insn, jump_insn, barrier_insn;

	  label_insn = emit_label_before (new_label,
			  REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i))->head);
	  REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i))->head = label_insn;

	  jump_insn = emit_jump_insn_after (gen_jump (label_insn),
					    BASIC_BLOCK (i)->end);
	  JUMP_LABEL (jump_insn) = label_insn;
	  ++LABEL_NUSES (label_insn);
	  barrier_insn = emit_barrier_after (jump_insn);
	  if (GET_CODE (BASIC_BLOCK (i)->end) != JUMP_INSN)
	    BASIC_BLOCK (i)->end = barrier_insn;

	  /* Add block for jump.  Typically this is when a then is not
	     predicted and we are jumping to the moved then block.  */
	  if (GET_CODE (BASIC_BLOCK (i)->end) == JUMP_INSN)
	    {
	      basic_block b;

	      b = (basic_block) obstack_alloc (function_obstack, sizeof (*b));
	      VARRAY_GROW (basic_block_info, ++n_basic_blocks);
	      BASIC_BLOCK (n_basic_blocks - 1) = b;
	      b->index = n_basic_blocks - 1;
	      b->head = emit_note_before (NOTE_INSN_BASIC_BLOCK, jump_insn);
	      NOTE_BASIC_BLOCK (b->head) = b;
	      b->end = barrier_insn;

	      {
		basic_block nb = BASIC_BLOCK (n_basic_blocks - 1);

		nb->global_live_at_start
		  = OBSTACK_ALLOC_REG_SET (function_obstack);
		nb->global_live_at_end
		  = OBSTACK_ALLOC_REG_SET (function_obstack);

		COPY_REG_SET (nb->global_live_at_start,
			      BASIC_BLOCK (i)->global_live_at_start);
		COPY_REG_SET (nb->global_live_at_end,
			      BASIC_BLOCK (i)->global_live_at_start);
		if (BASIC_BLOCK (i)->local_set)
		  {
		    nb->local_set
		      = OBSTACK_ALLOC_REG_SET (function_obstack);
		    COPY_REG_SET (nb->local_set, BASIC_BLOCK (i)->local_set);
		  }
		else
		  BASIC_BLOCK (nb->index)->local_set = 0;

		nb->aux = xcalloc (1, sizeof (struct reorder_block_def));
		REORDER_BLOCK_INDEX (BASIC_BLOCK (n_basic_blocks - 1))
		  = REORDER_BLOCK_INDEX (BASIC_BLOCK (i)) + 1;

		/* Relink to new block.  */
		nb->succ = BASIC_BLOCK (i)->succ;

		make_edge (0, BASIC_BLOCK (i), nb, 0);
		BASIC_BLOCK (i)->succ->succ_next
		  = BASIC_BLOCK (i)->succ->succ_next->succ_next;
		nb->succ->succ_next = 0;

		/* Fix reorder block index to reflect new block.  */
		for (j = 0; j < n_basic_blocks - 1; j++)
		  {
		    basic_block bbj = BASIC_BLOCK (j);
		    basic_block bbi = BASIC_BLOCK (i);

		    if (REORDER_BLOCK_INDEX (bbj)
			>= REORDER_BLOCK_INDEX (bbi) + 1)
		      REORDER_BLOCK_INDEX (bbj)++;
		  }
	      }
	    }
	}
    }
}
/* Reorder basic blocks.  */

void
reorder_basic_blocks ()
{
  int i, j;
  struct loops loops_info;
  int num_loops;
  rtx last_insn;

  if (profile_arc_flag)
    return;

  if (n_basic_blocks <= 1)
    return;

  /* Exception edges are not currently handled.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      edge e;

      for (e = BASIC_BLOCK (i)->succ; e && ! (e->flags & EDGE_EH);
	   e = e->succ_next)
	continue;

      if (e && (e->flags & EDGE_EH))
	return;
    }

  reorder_index = 0;

  /* Find natural loops using the CFG.  */
  num_loops = flow_loops_find (&loops_info);

  /* Dump loop information.  */
  flow_loops_dump (&loops_info, rtl_dump_file, 0);

  /* Estimate using heuristics if no profiling info is available.  */
  if (! flag_branch_probabilities)
    estimate_probability (&loops_info);

  reorder_last_visited = BASIC_BLOCK (0);

  for (i = 0; i < n_basic_blocks; i++)
    BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));

  last_insn
    = NEXT_INSN (skip_insns_between_block (BASIC_BLOCK (n_basic_blocks - 1),
					   REORDER_SKIP_AFTER));

  make_reorder_chain (BASIC_BLOCK (0));

  fixup_reorder_chain ();
#ifdef ENABLE_CHECKING
  /* Verify that the insn chain is still properly doubly linked.  */
  {
    rtx insn, last_insn;

    last_insn = get_insns ();
    for (insn = NEXT_INSN (get_insns ());
	 insn && PREV_INSN (insn) == last_insn
	   && NEXT_INSN (PREV_INSN (insn)) == insn;
	 last_insn = insn,
	   insn = NEXT_INSN (insn))
      continue;

    if (insn)
      {
	if (rtl_dump_file)
	  fprintf (rtl_dump_file, "insn chaining error at %d\n",
		   INSN_UID (last_insn));
	abort ();
      }
  }
#endif
  /* Put basic_block_info in the new order.  */
  for (i = 0; i < n_basic_blocks - 1; i++)
    {
      for (j = i; i != REORDER_BLOCK_INDEX (BASIC_BLOCK (j)); j++)
	continue;

      if (REORDER_BLOCK_INDEX (BASIC_BLOCK (j)) == i
	  && i != j)
	{
	  basic_block tempbb;
	  int temprbi;
	  int rbi = REORDER_BLOCK_INDEX (BASIC_BLOCK (j));

	  temprbi = BASIC_BLOCK (rbi)->index;
	  BASIC_BLOCK (rbi)->index = BASIC_BLOCK (j)->index;
	  BASIC_BLOCK (j)->index = temprbi;

	  tempbb = BASIC_BLOCK (rbi);
	  BASIC_BLOCK (rbi) = BASIC_BLOCK (j);
	  BASIC_BLOCK (j) = tempbb;
	}
    }

  NEXT_INSN (BASIC_BLOCK (n_basic_blocks - 1)->end) = last_insn;

  for (i = 0; i < n_basic_blocks - 1; i++)
    free (BASIC_BLOCK (i)->aux);

  /* Free loop information.  */
  flow_loops_free (&loops_info);
}