/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.
   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.
   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
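
   For example, whether register N is live on entry to basic block B can
   be tested with the regset accessors from basic-block.h (a sketch):

	if (REGNO_REG_SET_P (BASIC_BLOCK (B)->global_live_at_start, N))
	  ... register N is live at the head of block B ...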
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.
   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
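
   As an illustrative sketch only (not the code propagate_block actually
   uses), a forward scan over a block BB with a regset LIVE seeded from
   BB->global_live_at_start first clears each register named in a
   REG_DEAD note (its last use is in that insn), then sets each register
   the insn sets:

	for (insn = BB->head; ; insn = NEXT_INSN (insn))
	  {
	    rtx note;
	    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	      if (REG_NOTE_KIND (note) == REG_DEAD)
		CLEAR_REGNO_REG_SET (LIVE, REGNO (XEXP (note, 0)));
	    if (GET_CODE (PATTERN (insn)) == SET
		&& GET_CODE (SET_DEST (PATTERN (insn))) == REG)
	      SET_REGNO_REG_SET (LIVE, REGNO (SET_DEST (PATTERN (insn))));
	    if (insn == BB->end)
	      break;
	  }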
   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
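
   As an illustration (a sketch; the exact rtl depends on the target),
   the insn pair

	(set (mem:SI (reg:SI 100)) (reg:SI 101))
	(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   is rewritten as the single insn

	(set (mem:SI (post_inc:SI (reg:SI 100))) (reg:SI 101))

   with a REG_INC note for (reg:SI 100) added to its REG_NOTES.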
   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "basic-block.h"
#include "insn-config.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif

#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;
/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
   },
   {NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
   }};
/* Nonzero if the second flow pass has completed.  */

int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;
/* Element N is the next insn that uses (hard or pseudo) register number N
   within the current basic block; or zero, if there is no such insn.
   This is valid only during the final backward scan in propagate_block.  */

static rtx *reg_next_use;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */

rtx regs_may_share;

/* Depth within loops of basic block being scanned for lifetime analysis,
   plus one.  This is the weight attached to references to registers.  */

static int loop_depth;
/* During propagate_block, this is non-zero if the value of CC0 is live.  */

static int cc0_live;

/* During propagate_block, this contains a list of all the MEMs we are
   tracking for dead store elimination.  */

static rtx mem_set_list;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
/* Forward declarations */
static int count_basic_blocks PARAMS ((rtx));
static rtx find_basic_blocks_1 PARAMS ((rtx));
static void create_basic_block PARAMS ((int, rtx, rtx, rtx));
static void clear_edges PARAMS ((void));
static void make_edges PARAMS ((rtx));
static void make_label_edge PARAMS ((sbitmap *, basic_block, rtx, int));
static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
				  basic_block, rtx, int));
static void mark_critical_edges PARAMS ((void));
static void move_stray_eh_region_notes PARAMS ((void));
static void record_active_eh_regions PARAMS ((rtx));

static void commit_one_edge_insertion PARAMS ((edge));

static void delete_unreachable_blocks PARAMS ((void));
static void delete_eh_regions PARAMS ((void));
static int can_delete_note_p PARAMS ((rtx));
static int delete_block PARAMS ((basic_block));
static void expunge_block PARAMS ((basic_block));
static int can_delete_label_p PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static void merge_blocks_nomove PARAMS ((basic_block, basic_block));
static int merge_blocks PARAMS ((edge,basic_block,basic_block));
static void try_merge_blocks PARAMS ((void));
static void tidy_fallthru_edge PARAMS ((edge,basic_block,basic_block));
static void tidy_fallthru_edges PARAMS ((void));
static int verify_wide_reg_1 PARAMS ((rtx *, void *));
static void verify_wide_reg PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start PARAMS ((regset, basic_block));
static int set_noop_p PARAMS ((rtx));
static int noop_move_p PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg PARAMS ((rtx, void *));
static void mark_regs_live_at_end PARAMS ((regset));
static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
static void propagate_block PARAMS ((basic_block, regset,
				     regset, int));
static int insn_dead_p PARAMS ((rtx, regset, int, rtx));
static int libcall_dead_p PARAMS ((rtx, regset, rtx, rtx));
static void mark_set_regs PARAMS ((regset, regset, rtx,
				   rtx, regset));
static void mark_set_1 PARAMS ((regset, regset, rtx,
				rtx, regset));
static void find_auto_inc PARAMS ((regset, rtx, rtx));
static int try_pre_increment_1 PARAMS ((rtx));
static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
static void mark_used_regs PARAMS ((regset, regset, rtx, int, rtx));
void dump_flow_info PARAMS ((FILE *));
void debug_flow_info PARAMS ((void));
static void dump_edge_info PARAMS ((FILE *, edge, int));

static void count_reg_sets_1 PARAMS ((rtx));
static void count_reg_sets PARAMS ((rtx));
static void count_reg_references PARAMS ((rtx));
static void invalidate_mems_from_autoinc PARAMS ((rtx));
static void remove_fake_successors PARAMS ((basic_block));
static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build PARAMS ((struct loops *));
static int flow_loop_level_compute PARAMS ((struct loop *, int));
static int flow_loops_level_compute PARAMS ((struct loops *));
static basic_block get_common_dest PARAMS ((basic_block, basic_block));
static basic_block chain_reorder_blocks PARAMS ((edge, basic_block));
static void make_reorder_chain PARAMS ((basic_block));
static void fixup_reorder_chain PARAMS ((void));

/* This function is always defined so it can be called from the
   debugger, and it is declared extern so we don't get warnings about
   it being unused.  */
void verify_flow_info PARAMS ((void));
int flow_loop_outside_edge_p PARAMS ((const struct loop *, edge));
void clear_log_links PARAMS ((rtx));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();

  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;
  rtx prev_call = NULL_RTX;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? XWINT (XEXP (note, 0), 0) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_had_abnormal_edge = 1;
	    }
	  prev_call = insn;
	}
      else if (code != NOTE)
	prev_call = NULL_RTX;

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
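/* As an example of the counting rule above (a sketch, not real output),
   the insn stream

	(code_label 10) (insn a) (jump_insn b) (barrier)
	(code_label 11) (insn c)

   yields a count of two: a label always begins a block, and the first
   real insn after a jump or barrier also begins one, so one block
   starts at label 10 and the other at label 11.  */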
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int call_has_abnormal_edge = 0;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if (code == CALL_INSN)
	{
	  /* Record whether this call created an edge.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
	  call_has_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_list && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_has_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_has_abnormal_edge = 1;
	    }
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;
		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		next = flow_delete_insn (insn);
	      }

	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN)
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }
	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN)
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  /* A basic block ends at a call that can either throw or
	     do a non-local goto.  */
	  if (call_has_abnormal_edge)
	    {
	    new_bb_inclusive:
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;

	    new_bb_exclusive:
	      create_basic_block (i++, head, end, bb_note);
	      head = end = NULL_RTX;
	      bb_note = NULL_RTX;
	      break;
	    }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);

  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}
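/* Note that the list returned above is consumed by make_edges: a computed
   jump is treated as potentially reaching every label on label_value_list
   as well as every label on forced_labels.  */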
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

static void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && ! bb->aux)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      if (GET_CODE (head) == CODE_LABEL)
	add_insn_after (bb_note, head);
      else
	{
	  add_insn_before (bb_note, head);
	  head = bb_note;
	}
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* Add any appropriate EH edges.  We do this unconditionally
	     since there may be a REG_EH_REGION or REG_EH_RETHROW note
	     on the call, and this needn't be within an EH region.  */
	  make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || XINT (XEXP (note, 0), 0) >= 0)
		for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
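/* For example, the fallthru edge out of the entry block is created in
   make_edges above with

	make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0),
		   EDGE_FALLTHRU);

   Passing a null EDGE_CACHE simply disables the duplicate-edge
   short-cut; the succ list is then searched on every call.  */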
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		     EDGE_ABNORMAL | EDGE_EH | is_call);
}
/* EH_REGION notes appearing between basic blocks is ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue the note.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	for (e = bb->succ; e ; e = e->succ_next)
	  {
	    /* (2) Critical edges must have a destination with multiple
	       predecessors.  Note that we know there is at least one
	       predecessor -- the edge we followed to get here.  */
	    if (e->dest->pred->pred_next)
	      e->flags |= EDGE_CRITICAL;
	    else
	      e->flags &= ~EDGE_CRITICAL;
	  }
      else
	for (e = bb->succ; e ; e = e->succ_next)
	  e->flags &= ~EDGE_CRITICAL;

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
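/* To illustrate the definition used above: an edge is critical iff its
   source has two or more successors and its destination has two or more
   predecessors, e.g. the taken edge of a conditional jump whose target
   label is also reached by fallthru from elsewhere.  Instructions cannot
   be placed on such an edge without splitting it (see split_edge).  */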
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
	         way to do the bulk of the work here is to recursively
	         call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticable impact on the generated
     code.  Consider this cfg:

		        E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
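/* Typical usage is two-phase (a sketch; gen_move_insn merely stands in
   for whatever pattern a caller wants placed on the edge):

	insert_insn_on_edge (gen_move_insn (dest, src), e);
	... queue more insns on this or other edges ...
	commit_edge_insertions ();

   Nothing appears in the insn stream or the CFG until the final call.  */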
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	before = bb->end;
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;
    }
  else
    {
      rtx last = emit_insns_after (insns, after);
      if (after == bb->end)
	{
	  bb->end = last;

	  if (GET_CODE (last) == JUMP_INSN)
	    {
	      if (returnjump_p (last))
		{
		  /* ??? Remove all outgoing edges from BB and add one
		     for EXIT.  This is not currently a problem because
		     this only happens for the (single) epilogue, which
		     already has a fallthru edge to EXIT.  */

		  e = bb->succ;
		  if (e->dest != EXIT_BLOCK_PTR
		      || e->succ_next != NULL
		      || (e->flags & EDGE_FALLTHRU) == 0)
		    abort ();
		  e->flags &= ~EDGE_FALLTHRU;

		  emit_barrier_after (last);
		}
	      else
		abort ();
	    }
	}
    }
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconcievable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static int
delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}

      /* This label may be referenced by code solely for its value, or
	 referenced by static data, or something.  We have determined
	 that it is not reachable, but cannot delete the label itself.
	 Save code space and continue to delete the balance of the block,
	 along with properly updating the cfg.  */
      if (!can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     insns thing.  */
	  if (insn == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Selectively unlink the insn chain.  Include any BARRIER that may
     follow the basic block.  */
  end = next_nonnote_insn (b->end);
  if (!end || GET_CODE (end) != BARRIER)
    end = b->end;
  flow_delete_insn_chain (insn, end);

 no_delete_insns:

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e ; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e ; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}
/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      x->index = i;
      BASIC_BLOCK (i) = x;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}
/* Delete INSN by patching it out.  Return the next insn.  */

rtx
flow_delete_insn (insn)
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  rtx next = NEXT_INSN (insn);

  PREV_INSN (insn) = NULL_RTX;
  NEXT_INSN (insn) = NULL_RTX;

  if (prev)
    NEXT_INSN (prev) = next;
  if (next)
    PREV_INSN (next) = prev;
  else
    set_last_insn (prev);

  if (GET_CODE (insn) == CODE_LABEL)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  return next;
}
/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  rtx x;

  if (LABEL_PRESERVE_P (label))
    return 0;

  for (x = forced_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = label_value_list; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;

  /* User declared labels must be preserved.  */
  if (LABEL_NAME (label) != 0)
    return 0;

  return 1;
}
/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_predecessor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;
  int index;

  start = a->head;
  end = a->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  if (end != PREV_INSN (b->head))
    reorder_insns (start, end, PREV_INSN (b->head));

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
	       a->index, b->index);
    }

  /* Swap the records for the two blocks around.  Although we are deleting B,
     A is now where B was and we want to compact the BB array from where
     A used to be.  */
  BASIC_BLOCK(a->index) = b;
  BASIC_BLOCK(b->index) = a;
  index = a->index;
  a->index = b->index;
  b->index = index;

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  return 1;
}
/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_successor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;

  start = b->head;
  end = b->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  reorder_insns (start, end, a->end);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
	       b->index, a->index);
    }

  return 1;
}
/* Blocks A and B are to be merged into a single block.  The insns
   are already contiguous, hence `nomove'.  */

static void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  edge e;
  rtx b_head, b_end, a_end;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  b_head = b->head;
  b_end = b->end;
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;
      b_head = flow_delete_insn (b_head);
    }

  /* Delete the basic block note.  */
  if (GET_CODE (b_head) == NOTE
      && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
    {
      if (b_head == b_end)
	b_empty = 1;
      b_head = flow_delete_insn (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  a_end = a->end;
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      prev = prev_nonnote_insn (a_end);
      if (!prev)
	prev = a->head;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */

      if (prev && sets_cc0_p (prev))
	{
	  rtx tmp = prev;
	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  flow_delete_insn (tmp);
	}
#endif

      /* Note that a->head != a->end, since we should have at least a
	 bb note plus the jump, so prev != insn.  */
      flow_delete_insn (a_end);
      a_end = prev;
    }

  /* By definition, there should only be one successor of A, and that is
     B.  Free that edge struct.  */
  n_edges--;
  free (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e ; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      BLOCK_FOR_INSN (b_head) = a;
      while (b_head != b_end)
	{
	  b_head = NEXT_INSN (b_head);
	  BLOCK_FOR_INSN (b_head) = a;
	}
      a->end = b_end;
    }

  /* Compact the basic block array.  */
  expunge_block (b);
}
2219 /* Attempt to merge basic blocks that are potentially non-adjacent.
2220 Return true iff the attempt succeeded. */
2223 merge_blocks (e, b, c)
2227 /* If B has a fallthru edge to C, no need to move anything. */
2228 if (e->flags & EDGE_FALLTHRU)
2230 /* If a label still appears somewhere and we cannot delete the label,
2231 then we cannot merge the blocks. The edge was tidied already. */
2233 rtx insn, stop = NEXT_INSN (c->head);
2234 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2235 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2238 merge_blocks_nomove (b, c);
2242 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2243 b->index, c->index);
2252 int c_has_outgoing_fallthru;
2253 int b_has_incoming_fallthru;
2255 /* We must make sure to not munge nesting of exception regions,
2256 lexical blocks, and loop notes.
2258 The first is taken care of by requiring that the active eh
2259 region at the end of one block always matches the active eh
2260 region at the beginning of the next block.
2262 The latter two are taken care of by squeezing out all the notes. */
2264 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2265 executed and we may want to treat blocks which have two out
2266 edges, one normal, one abnormal as only having one edge for
2267 block merging purposes. */
2269 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2270 if (tmp_edge->flags & EDGE_FALLTHRU)
2272 c_has_outgoing_fallthru = (tmp_edge != NULL);
2274 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2275 if (tmp_edge->flags & EDGE_FALLTHRU)
2277 b_has_incoming_fallthru = (tmp_edge != NULL);
2279 /* If B does not have an incoming fallthru, and the exception regions
2280 match, then it can be moved immediately before C without introducing
2283 C can not be the first block, so we do not have to worry about
2284 accessing a non-existent block. */
2285 d = BASIC_BLOCK (c->index - 1);
2286 if (! b_has_incoming_fallthru
2287 && d->eh_end == b->eh_beg
2288 && b->eh_end == c->eh_beg)
2289 return merge_blocks_move_predecessor_nojumps (b, c);
2291 /* Otherwise, we're going to try to move C after B. Make sure the
2292 exception regions match.
2294 If B is the last basic block, then we must not try to access the
2295 block structure for block B + 1. Luckily in that case we do not
2296 need to worry about matching exception regions. */
2297 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2298 if (b->eh_end == c->eh_beg
2299 && (d == NULL || c->eh_end == d->eh_beg))
2301 /* If C does not have an outgoing fallthru, then it can be moved
2302 immediately after B without introducing or modifying jumps. */
2303 if (! c_has_outgoing_fallthru)
2304 return merge_blocks_move_successor_nojumps (b, c);
2306 /* Otherwise, we'll need to insert an extra jump, and possibly
2307 a new block to contain it. */
2308 /* ??? Not implemented yet. */
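/* To recap the decision above: when B has no incoming fallthru and the
   EH regions line up, B is moved up before C; when C has no outgoing
   fallthru and the EH regions line up, C is moved down after B; in all
   other cases an extra jump (and possibly a new block to hold it)
   would be required, so we give up.  */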
2315 /* Top level driver for merge_blocks. */
2322 /* Attempt to merge blocks as made possible by edge removal. If a block
2323 has only one successor, and the successor has only one predecessor,
2324 they may be combined. */
2326 for (i = 0; i < n_basic_blocks; )
2328 basic_block c, b = BASIC_BLOCK (i);
2331 /* A loop because chains of blocks might be combinable. */
2332 while ((s = b->succ) != NULL
2333 && s->succ_next == NULL
2334 && (s->flags & EDGE_EH) == 0
2335 && (c = s->dest) != EXIT_BLOCK_PTR
2336 && c->pred->pred_next == NULL
2337 /* If the jump insn has side effects, we can't kill the edge. */
2338 && (GET_CODE (b->end) != JUMP_INSN
2339 || onlyjump_p (b->end))
2340 && merge_blocks (s, b, c))
2343 /* Don't get confused by the index shift caused by deleting blocks. */
2348 /* The given edge should potentially be a fallthru edge. If that is in
2349 fact true, delete the jump and barriers that are in the way. */
2352 tidy_fallthru_edge (e, b, c)
2358 /* ??? In a late-running flow pass, other folks may have deleted basic
2359 blocks by nopping out blocks, leaving multiple BARRIERs between here
2360 and the target label. They ought to be chastised and fixed.
2362 We can also wind up with a sequence of undeletable labels between
2363 one block and the next.
2365 So search through a sequence of barriers, labels, and notes for
2366 the head of block C and assert that we really do fall through. */
2368 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2371 /* Remove what will soon cease being the jump insn from the source block.
2372 If block B consisted only of this single jump, turn it into a deleted
2375 if (GET_CODE (q) == JUMP_INSN)
2378 /* If this was a conditional jump, we need to also delete
2379 the insn that set cc0. */
2380 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2387 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2388 NOTE_SOURCE_FILE (q) = 0;
2391 b->end = q = PREV_INSN (q);
2394 /* Selectively unlink the sequence. */
2395 if (q != PREV_INSN (c->head))
2396 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2398 e->flags |= EDGE_FALLTHRU;
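/* Illustrative sketch: if B ends in (jump_insn L), followed only by
   barriers and notes before C's (code_label L), the jump and barriers
   are removed and the B->C edge simply becomes a fallthru:

     before:  B: ... (jump_insn L) (barrier)  C: (code_label L) ...
     after:   B: ...                          C: (code_label L) ...  */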
2401 /* Fix up edges that now fall through, or rather should now fall through
2402 but previously required a jump around now deleted blocks. Simplify
2403 the search by only examining blocks numerically adjacent, since this
2404 is how find_basic_blocks created them. */
2407 tidy_fallthru_edges ()
2411 for (i = 1; i < n_basic_blocks; ++i)
2413 basic_block b = BASIC_BLOCK (i - 1);
2414 basic_block c = BASIC_BLOCK (i);
2417 /* We care about simple conditional or unconditional jumps with
2420 If we had a conditional branch to the next instruction when
2421 find_basic_blocks was called, then there will only be one
2422 out edge for the block which ended with the conditional
2423 branch (since we do not create duplicate edges).
2425 Furthermore, the edge will be marked as a fallthru because we
2426 merge the flags for the duplicate edges. So we do not want to
2427 check that the edge is not a FALLTHRU edge. */
2428 if ((s = b->succ) != NULL
2429 && s->succ_next == NULL
2431 /* If the jump insn has side effects, we can't tidy the edge. */
2432 && (GET_CODE (b->end) != JUMP_INSN
2433 || onlyjump_p (b->end)))
2434 tidy_fallthru_edge (s, b, c);
2438 /* Discover and record the loop depth at the head of each basic block. */
2441 calculate_loop_depth (dump)
2446 /* The loop infrastructure does the real job for us. */
2447 flow_loops_find (&loops);
2450 flow_loops_dump (&loops, dump, 0);
2452 flow_loops_free (&loops);
2455 /* Perform data flow analysis.
2456 F is the first insn of the function and NREGS the number of register numbers
2460 life_analysis (f, nregs, file, remove_dead_code)
2464 int remove_dead_code;
2466 #ifdef ELIMINABLE_REGS
2468 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2473 /* Record which registers will be eliminated. We use this in
2476 CLEAR_HARD_REG_SET (elim_reg_set);
2478 #ifdef ELIMINABLE_REGS
2479 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2480 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2482 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2485 /* We want alias analysis information for local dead store elimination. */
2486 init_alias_analysis ();
2489 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2493 if (! remove_dead_code)
2494 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2497 /* The post-reload life analysis must have (on a global basis) the same
2498 registers live as were computed by reload itself; otherwise, elimination
2499 offsets and such may be incorrect.
2501 Reload will mark some registers live even though they do not
2502 appear in the rtl. */
2503 if (reload_completed)
2504 flags &= ~PROP_REG_INFO;
2508 /* Always remove no-op moves. Do this before other processing so
2509 that we don't have to keep re-scanning them. */
2510 delete_noop_moves (f);
2512 /* Some targets can emit simpler epilogues if they know that sp was
2513 not ever modified during the function. After reload, of course,
2514 we've already emitted the epilogue so there's no sense searching. */
2515 if (! reload_completed)
2516 notice_stack_pointer_modification (f);
2518 /* Allocate and zero out data structures that will record the
2519 data from lifetime analysis. */
2520 allocate_reg_life_data ();
2521 allocate_bb_life_data ();
2522 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2523 all_blocks = sbitmap_alloc (n_basic_blocks);
2524 sbitmap_ones (all_blocks);
2526 /* Find the set of registers live on function exit. */
2527 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2529 /* "Update" life info from zero. It'd be nice to begin the
2530 relaxation with just the exit and noreturn blocks, but that set
2531 is not immediately handy. */
2533 if (flags & PROP_REG_INFO)
2534 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2535 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2538 sbitmap_free (all_blocks);
2539 free (reg_next_use);
2540 reg_next_use = NULL;
2541 end_alias_analysis ();
2544 dump_flow_info (file);
2546 free_basic_block_vars (1);
2549 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2550 Search for REGNO. If found, abort if it is not wider than word_mode. */
2553 verify_wide_reg_1 (px, pregno)
2558 int regno = *(int *) pregno;
2560 if (GET_CODE (x) == REG && REGNO (x) == regno)
2562 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2569 /* A subroutine of verify_local_live_at_start. Search through insns
2570 between HEAD and END looking for register REGNO. */
2573 verify_wide_reg (regno, head, end)
2579 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2580 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2584 head = NEXT_INSN (head);
2587 /* We didn't find the register at all. Something's way screwy. */
2591 /* A subroutine of update_life_info. Verify that there are no untoward
2592 changes in live_at_start during a local update. */
2595 verify_local_live_at_start (new_live_at_start, bb)
2596 regset new_live_at_start;
2599 if (reload_completed)
2601 /* After reload, there are no pseudos, nor subregs of multi-word
2602 registers. The regsets should exactly match. */
2603 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2610 /* Find the set of changed registers. */
2611 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2613 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2615 /* No registers should die. */
2616 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2618 /* Verify that the now-live register is wider than word_mode. */
2619 verify_wide_reg (i, bb->head, bb->end);
2624 /* Updates life information starting with the basic blocks set in BLOCKS.
2626 If LOCAL_ONLY, such as after splitting or peepholing, we are only
2627 expecting local modifications to basic blocks. If we find extra
2628 registers live at the beginning of a block, then we either killed
2629 useful data, or we have a broken split that wants data not provided.
2630 If we find registers removed from live_at_start, that means we have
2631 a broken peephole that is killing a register it shouldn't.
2633 ??? This is not true in one situation -- when a pre-reload splitter
2634 generates subregs of a multi-word pseudo, current life analysis will
2635 lose the kill. So we _can_ have a pseudo go live. How irritating.
2637 BLOCK_FOR_INSN is assumed to be correct.
2639 PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
2640 up reg_next_use. Including PROP_REG_INFO does not properly refresh
2641 regs_ever_live unless the caller resets it to zero. */
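/* A typical local update after splitting insns in one block might
   look like this (illustrative sketch only):

     sbitmap blocks = sbitmap_alloc (n_basic_blocks);
     sbitmap_zero (blocks);
     SET_BIT (blocks, bb->index);
     update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
     sbitmap_free (blocks);  */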
2644 update_life_info (blocks, extent, prop_flags)
2646 enum update_life_extent extent;
2650 regset_head tmp_head;
2653 tmp = INITIALIZE_REG_SET (tmp_head);
2655 /* For a global update, we go through the relaxation process again. */
2656 if (extent != UPDATE_LIFE_LOCAL)
2658 calculate_global_regs_live (blocks, blocks,
2659 prop_flags & PROP_SCAN_DEAD_CODE);
2661 /* If asked, remove notes from the blocks we'll update. */
2662 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2663 count_or_remove_death_notes (blocks, 1);
2666 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2668 basic_block bb = BASIC_BLOCK (i);
2670 COPY_REG_SET (tmp, bb->global_live_at_end);
2671 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2673 if (extent == UPDATE_LIFE_LOCAL)
2674 verify_local_live_at_start (tmp, bb);
2679 if (prop_flags & PROP_REG_INFO)
2681 /* The only pseudos that are live at the beginning of the function
2682 are those that were not set anywhere in the function. local-alloc
2683 doesn't know how to handle these correctly, so mark them as not
2684 local to any one basic block. */
2685 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2686 FIRST_PSEUDO_REGISTER, i,
2687 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2689 /* We have a problem with any pseudoreg that lives across the setjmp.
2690 ANSI says that if a user variable does not change in value between
2691 the setjmp and the longjmp, then the longjmp preserves it. This
2692 includes longjmp from a place where the pseudo appears dead.
2693 (In principle, the value still exists if it is in scope.)
2694 If the pseudo goes in a hard reg, some other value may occupy
2695 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2696 Conclusion: such a pseudo must not go in a hard reg. */
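/* For instance (illustrative):

     int x = 1;
     if (setjmp (buf) == 0)
       longjmp (buf, 1);    [x appears dead at the longjmp site]
     return x;              [yet ANSI requires that x still be 1]

   If x's pseudo went in a hard reg, that reg could be reused while x
   appears dead, and the longjmp would return with x clobbered.  */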
2697 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2698 FIRST_PSEUDO_REGISTER, i,
2700 if (regno_reg_rtx[i] != 0)
2702 REG_LIVE_LENGTH (i) = -1;
2703 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2709 /* Free the variables allocated by find_basic_blocks.
2711 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2714 free_basic_block_vars (keep_head_end_p)
2715 int keep_head_end_p;
2717 if (basic_block_for_insn)
2719 VARRAY_FREE (basic_block_for_insn);
2720 basic_block_for_insn = NULL;
2723 if (! keep_head_end_p)
2726 VARRAY_FREE (basic_block_info);
2729 ENTRY_BLOCK_PTR->aux = NULL;
2730 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2731 EXIT_BLOCK_PTR->aux = NULL;
2732 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2736 /* Return nonzero if the destination of SET equals the source. */
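/* For instance, (set (reg:SI 42) (reg:SI 42)) is a no-op, as is a
   copy between SUBREGs of the same register with equal SUBREG_WORD.  */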
2741 rtx src = SET_SRC (set);
2742 rtx dst = SET_DEST (set);
2744 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2746 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2748 src = SUBREG_REG (src);
2749 dst = SUBREG_REG (dst);
2752 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2753 && REGNO (src) == REGNO (dst));
2756 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2762 rtx pat = PATTERN (insn);
2764 /* Insns carrying these notes are useful later on. */
2765 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2768 if (GET_CODE (pat) == SET && set_noop_p (pat))
2771 if (GET_CODE (pat) == PARALLEL)
2774 /* If nothing but SETs of registers to themselves,
2775 this insn can also be deleted. */
2776 for (i = 0; i < XVECLEN (pat, 0); i++)
2778 rtx tem = XVECEXP (pat, 0, i);
2780 if (GET_CODE (tem) == USE
2781 || GET_CODE (tem) == CLOBBER)
2784 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2793 /* Delete any insns that copy a register to itself. */
2796 delete_noop_moves (f)
2800 for (insn = f; insn; insn = NEXT_INSN (insn))
2802 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2804 PUT_CODE (insn, NOTE);
2805 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2806 NOTE_SOURCE_FILE (insn) = 0;
2811 /* Determine if the stack pointer is constant over the life of the function.
2812 Only useful before prologues have been emitted. */
2815 notice_stack_pointer_modification_1 (x, pat, data)
2817 rtx pat ATTRIBUTE_UNUSED;
2818 void *data ATTRIBUTE_UNUSED;
2820 if (x == stack_pointer_rtx
2821 /* The stack pointer is only modified indirectly as the result
2822 of a push until later in flow. See the comments in rtl.texi
2823 regarding Embedded Side-Effects on Addresses. */
2824 || (GET_CODE (x) == MEM
2825 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2826 || GET_CODE (XEXP (x, 0)) == PRE_INC
2827 || GET_CODE (XEXP (x, 0)) == POST_DEC
2828 || GET_CODE (XEXP (x, 0)) == POST_INC)
2829 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2830 current_function_sp_is_unchanging = 0;
2834 notice_stack_pointer_modification (f)
2839 /* Assume that the stack pointer is unchanging if alloca hasn't
2841 current_function_sp_is_unchanging = !current_function_calls_alloca;
2842 if (! current_function_sp_is_unchanging)
2845 for (insn = f; insn; insn = NEXT_INSN (insn))
2847 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2849 /* Check if insn modifies the stack pointer. */
2850 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2852 if (! current_function_sp_is_unchanging)
2858 /* Mark a register in SET. Hard registers in large modes get all
2859 of their component registers set as well. */
2861 mark_reg (reg, xset)
2865 regset set = (regset) xset;
2866 int regno = REGNO (reg);
2868 if (GET_MODE (reg) == BLKmode)
2871 SET_REGNO_REG_SET (set, regno);
2872 if (regno < FIRST_PSEUDO_REGISTER)
2874 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2876 SET_REGNO_REG_SET (set, regno + n);
2880 /* Mark those regs which are needed at the end of the function as live
2881 at the end of the last basic block. */
2883 mark_regs_live_at_end (set)
2888 /* If exiting needs the right stack value, consider the stack pointer
2889 live at the end of the function. */
2890 if ((HAVE_epilogue && reload_completed)
2891 || ! EXIT_IGNORE_STACK
2892 || (! FRAME_POINTER_REQUIRED
2893 && ! current_function_calls_alloca
2894 && flag_omit_frame_pointer)
2895 || current_function_sp_is_unchanging)
2897 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2900 /* Mark the frame pointer if needed at the end of the function. If
2901 we end up eliminating it, it will be removed from the live list
2902 of each basic block by reload. */
2904 if (! reload_completed || frame_pointer_needed)
2906 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2907 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2908 /* If they are different, also mark the hard frame pointer as live. */
2909 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2913 #ifdef PIC_OFFSET_TABLE_REGNUM
2914 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2915 /* Many architectures have a GP register even without flag_pic.
2916 Assume the pic register is not in use, or will be handled by
2917 other means, if it is not fixed. */
2918 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2919 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2923 /* Mark all global registers, and all registers used by the epilogue
2924 as being live at the end of the function since they may be
2925 referenced by our caller. */
2926 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2928 #ifdef EPILOGUE_USES
2929 || EPILOGUE_USES (i)
2932 SET_REGNO_REG_SET (set, i);
2934 /* Mark all call-saved registers that we actually used. */
2935 if (HAVE_epilogue && reload_completed)
2937 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2938 if (! call_used_regs[i] && regs_ever_live[i])
2939 SET_REGNO_REG_SET (set, i);
2942 /* Mark function return value. */
2943 diddle_return_value (mark_reg, set);
2946 /* Propagate global life info around the graph of basic blocks. Begin
2947 considering blocks with their corresponding bit set in BLOCKS_IN.
2948 BLOCKS_OUT is set for every block that was changed. */
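/* This solves the classic backward dataflow equations by iterating
   over a worklist until a fixed point is reached:

     live_at_end (B)   = union of live_at_start (S) over successors S
     live_at_start (B) = use (B) | (live_at_end (B) & ~def (B))

   where propagate_block computes the second equation insn by insn.  */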
2951 calculate_global_regs_live (blocks_in, blocks_out, flags)
2952 sbitmap blocks_in, blocks_out;
2955 basic_block *queue, *qhead, *qtail, *qend;
2956 regset tmp, new_live_at_end;
2957 regset_head tmp_head;
2958 regset_head new_live_at_end_head;
2961 tmp = INITIALIZE_REG_SET (tmp_head);
2962 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
2964 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
2965 because the `head == tail' style test for an empty queue doesn't
2966 work with a full queue. */
2967 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
2969 qhead = qend = queue + n_basic_blocks + 2;
2971 /* Clear out the garbage that might be hanging out in bb->aux. */
2972 for (i = n_basic_blocks - 1; i >= 0; --i)
2973 BASIC_BLOCK (i)->aux = NULL;
2975 /* Queue the blocks set in the initial mask. Do this in reverse block
2976 number order so that the first round is more likely to do
2977 useful work. We use non-null AUX to flag that the block is queued. */
2978 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
2980 basic_block bb = BASIC_BLOCK (i);
2985 sbitmap_zero (blocks_out);
2987 while (qhead != qtail)
2989 int rescan, changed;
2998 /* Begin by propagating live_at_start from the successor blocks. */
2999 CLEAR_REG_SET (new_live_at_end);
3000 for (e = bb->succ; e ; e = e->succ_next)
3002 basic_block sb = e->dest;
3003 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3006 if (bb == ENTRY_BLOCK_PTR)
3008 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3012 /* On our first pass through this block, we'll go ahead and continue.
3013 Recognize first pass by local_set NULL. On subsequent passes, we
3014 get to skip out early if live_at_end wouldn't have changed. */
3016 if (bb->local_set == NULL)
3018 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3023 /* If any bits were removed from live_at_end, we'll have to
3024 rescan the block. This wouldn't be necessary if we had
3025 precalculated local_live; however, with PROP_SCAN_DEAD_CODE
3026 local_live is really dependent on live_at_end. */
3027 CLEAR_REG_SET (tmp);
3028 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3029 new_live_at_end, BITMAP_AND_COMPL);
3033 /* Find the set of changed bits. Take this opportunity
3034 to notice whether this set is empty and, if so, exit early. */
3035 CLEAR_REG_SET (tmp);
3036 changed = bitmap_operation (tmp, bb->global_live_at_end,
3037 new_live_at_end, BITMAP_XOR);
3041 /* If any of the changed bits overlap with local_set,
3042 we'll have to rescan the block. Detect overlap by
3043 the AND with ~local_set turning off bits. */
3044 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3049 /* Let our caller know that BB changed enough to require its
3050 death notes updated. */
3051 SET_BIT (blocks_out, bb->index);
3055 /* Add to live_at_start the set of all registers in
3056 new_live_at_end that aren't in the old live_at_end. */
3058 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3060 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3062 changed = bitmap_operation (bb->global_live_at_start,
3063 bb->global_live_at_start,
3070 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3072 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3073 into live_at_start. */
3074 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3076 /* If live_at_start didn't change, no need to go farther. */
3077 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3080 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3083 /* Queue all predecessors of BB so that we may re-examine
3084 their live_at_end. */
3085 for (e = bb->pred; e ; e = e->pred_next)
3087 basic_block pb = e->src;
3088 if (pb->aux == NULL)
3099 FREE_REG_SET (new_live_at_end);
3101 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3103 basic_block bb = BASIC_BLOCK (i);
3104 FREE_REG_SET (bb->local_set);
3110 /* Subroutines of life analysis. */
3112 /* Allocate the permanent data structures that represent the results
3113 of life analysis. Not static since used also for stupid life analysis. */
3116 allocate_bb_life_data ()
3120 for (i = 0; i < n_basic_blocks; i++)
3122 basic_block bb = BASIC_BLOCK (i);
3124 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3125 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3128 ENTRY_BLOCK_PTR->global_live_at_end
3129 = OBSTACK_ALLOC_REG_SET (function_obstack);
3130 EXIT_BLOCK_PTR->global_live_at_start
3131 = OBSTACK_ALLOC_REG_SET (function_obstack);
3133 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3137 allocate_reg_life_data ()
3141 /* Recalculate the register space, in case it has grown. Old style
3142 vector oriented regsets would set regset_{size,bytes} here also. */
3143 allocate_reg_info (max_regno, FALSE, FALSE);
3145 /* Reset all the data we'll collect in propagate_block and its
3147 for (i = 0; i < max_regno; i++)
3151 REG_N_DEATHS (i) = 0;
3152 REG_N_CALLS_CROSSED (i) = 0;
3153 REG_LIVE_LENGTH (i) = 0;
3154 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3158 /* Compute the registers live at the beginning of a basic block
3159 from those live at the end.
3161 When called, OLD contains those live at the end.
3162 On return, it contains those live at the beginning.
3163 BB is the basic block; its head and end insns delimit the scan.
3165 FLAGS is the set of PROP_* operations to perform. With
3166 PROP_KILL_DEAD_CODE we delete dead stores; with PROP_LOG_LINKS we
3167 set up the logical links of instructions; with PROP_AUTOINC we
3168 merge instructions for autoincrement and autodecrement addresses;
3169 with PROP_DEATH_NOTES we add REG_DEAD and REG_UNUSED notes.
3171 SIGNIFICANT is nonzero only the first time for each basic block.
3172 If it is nonzero, it points to a regset in which we store
3173 a 1 for each register that is set within the block. */
3178 propagate_block (bb, old, significant, flags)
3187 regset_head live_head;
3189 regset_head dead_head;
3191 /* Find the loop depth for this block. Ignore loop level changes in the
3192 middle of the basic block -- for register allocation purposes, the
3193 important uses will be in the blocks wholly contained within the loop,
3194 not in the loop pre-header or post-trailer. */
3195 loop_depth = bb->loop_depth;
3197 dead = INITIALIZE_REG_SET (dead_head);
3198 live = INITIALIZE_REG_SET (live_head);
3202 if (flags & PROP_REG_INFO)
3206 /* Process the regs live at the end of the block.
3207 Mark them as not local to any one basic block. */
3208 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3210 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3214 /* Scan the block an insn at a time from end to beginning. */
3216 for (insn = bb->end; ; insn = prev)
3218 prev = PREV_INSN (insn);
3220 if (GET_CODE (insn) == NOTE)
3222 /* If this is a call to `setjmp' et al,
3223 warn if any non-volatile datum is live. */
3225 if ((flags & PROP_REG_INFO)
3226 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3227 IOR_REG_SET (regs_live_at_setjmp, old);
3230 /* Update the life-status of regs for this insn.
3231 First DEAD gets which regs are set in this insn
3232 then LIVE gets which regs are used in this insn.
3233 Then the regs live before the insn
3234 are those live after, with DEAD regs turned off,
3235 and then LIVE regs turned on. */
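/* That is: live_before = (live_after & ~DEAD) | LIVE.  */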
3237 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3240 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3241 int insn_is_dead = 0;
3242 int libcall_is_dead = 0;
3244 if (flags & PROP_SCAN_DEAD_CODE)
3246 insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
3248 libcall_is_dead = (insn_is_dead && note != 0
3249 && libcall_dead_p (PATTERN (insn), old,
3253 /* We almost certainly don't want to delete prologue or epilogue
3254 instructions. Warn about probable compiler lossage. */
3257 && (HAVE_epilogue || HAVE_prologue)
3258 && prologue_epilogue_contains (insn))
3260 if (flags & PROP_KILL_DEAD_CODE)
3262 warning ("ICE: would have deleted prologue/epilogue insn");
3263 if (!inhibit_warnings)
3266 libcall_is_dead = insn_is_dead = 0;
3269 /* If an instruction consists of just dead store(s) on final pass,
3270 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3271 We could really delete it with delete_insn, but that
3272 can cause trouble for first or last insn in a basic block. */
3273 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3276 /* If the insn referred to a label, note that the label is
3278 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3280 if (REG_NOTE_KIND (inote) == REG_LABEL)
3282 rtx label = XEXP (inote, 0);
3284 LABEL_NUSES (label)--;
3286 /* If this label was attached to an ADDR_VEC, it's
3287 safe to delete the ADDR_VEC. In fact, it's pretty
3288 much mandatory to delete it, because the ADDR_VEC may
3289 be referencing labels that no longer exist. */
3290 if (LABEL_NUSES (label) == 0
3291 && (next = next_nonnote_insn (label)) != NULL
3292 && GET_CODE (next) == JUMP_INSN
3293 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3294 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3296 rtx pat = PATTERN (next);
3297 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3298 int len = XVECLEN (pat, diff_vec_p);
3300 for (i = 0; i < len; i++)
3301 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3302 PUT_CODE (next, NOTE);
3303 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3304 NOTE_SOURCE_FILE (next) = 0;
3309 PUT_CODE (insn, NOTE);
3310 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3311 NOTE_SOURCE_FILE (insn) = 0;
3313 /* CC0 is now known to be dead. Either this insn used it,
3314 in which case it doesn't anymore, or clobbered it,
3315 so the next insn can't use it. */
3318 /* If this insn is copying the return value from a library call,
3319 delete the entire library call. */
3320 if (libcall_is_dead)
3322 rtx first = XEXP (note, 0);
3324 while (INSN_DELETED_P (first))
3325 first = NEXT_INSN (first);
3330 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3331 NOTE_SOURCE_FILE (p) = 0;
3337 CLEAR_REG_SET (dead);
3338 CLEAR_REG_SET (live);
3340 /* See if this is an increment or decrement that can be
3341 merged into a following memory address. */
3344 register rtx x = single_set (insn);
3346 /* Does this instruction increment or decrement a register? */
3347 if (!reload_completed
3348 && (flags & PROP_AUTOINC)
3350 && GET_CODE (SET_DEST (x)) == REG
3351 && (GET_CODE (SET_SRC (x)) == PLUS
3352 || GET_CODE (SET_SRC (x)) == MINUS)
3353 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3354 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3355 /* Ok, look for a following memory ref we can combine with.
3356 If one is found, change the memory ref to a PRE_INC
3357 or PRE_DEC, cancel this insn, and return 1.
3358 Return 0 if nothing has been done. */
3359 && try_pre_increment_1 (insn))
3362 #endif /* AUTO_INC_DEC */
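/* Illustrative example: on a HAVE_PRE_INCREMENT target, the pair

     (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))
     (set (reg:SI 101) (mem:SI (reg:SI 100)))

   can be collapsed into

     (set (reg:SI 101) (mem:SI (pre_inc:SI (reg:SI 100))))

   with the separate increment insn cancelled.  */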
3364 /* If this is not the final pass, and this insn is copying the
3365 value of a library call and it's dead, don't scan the
3366 insns that perform the library call, so that the call's
3367 arguments are not marked live. */
3368 if (libcall_is_dead)
3370 /* Mark the dest reg as `significant'. */
3371 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3372 significant, flags);
3374 insn = XEXP (note, 0);
3375 prev = PREV_INSN (insn);
3377 else if (GET_CODE (PATTERN (insn)) == SET
3378 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3379 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3380 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3381 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3382 /* We have an insn to pop a constant amount off the stack.
3383 (Such insns use PLUS regardless of the direction of the stack,
3384 and any insn to adjust the stack by a constant is always a pop.)
3385 These insns, if not dead stores, have no effect on life. */
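/* For example, (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int 8)))
   pops eight bytes no matter which way the stack grows.  */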
3389 /* Any regs live at the time of a call instruction
3390 must not go in a register clobbered by calls.
3391 Find all regs now live and record this for them. */
3393 if (GET_CODE (insn) == CALL_INSN
3394 && (flags & PROP_REG_INFO))
3395 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3397 REG_N_CALLS_CROSSED (i)++;
3400 /* LIVE gets the regs used in INSN;
3401 DEAD gets those set by it. Dead insns don't make anything
3404 mark_set_regs (old, dead, PATTERN (insn),
3405 insn, significant, flags);
3407 /* If an insn doesn't use CC0, it becomes dead since we
3408 assume that every insn clobbers it. So show it dead here;
3409 mark_used_regs will set it live if it is referenced. */
3413 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3415 /* Sometimes we may have inserted something before INSN (such as
3416 a move) when we make an auto-inc. So ensure we will scan
3419 prev = PREV_INSN (insn);
3422 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3428 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3430 note = XEXP (note, 1))
3431 if (GET_CODE (XEXP (note, 0)) == USE)
3432 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3435 /* Each call clobbers all call-clobbered regs that are not
3436 global or fixed. Note that the function-value reg is a
3437 call-clobbered reg, and mark_set_regs has already had
3438 a chance to handle it. */
3440 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3441 if (call_used_regs[i] && ! global_regs[i]
3444 SET_REGNO_REG_SET (dead, i);
3446 SET_REGNO_REG_SET (significant, i);
3449 /* The stack ptr is used (honorarily) by a CALL insn. */
3450 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3452 /* Calls may also reference any of the global registers,
3453 so they are made live. */
3454 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3456 mark_used_regs (old, live,
3457 gen_rtx_REG (reg_raw_mode[i], i),
3460 /* Calls also clobber memory. */
3461 free_EXPR_LIST_list (&mem_set_list);
3464 /* Update OLD for the registers used or set. */
3465 AND_COMPL_REG_SET (old, dead);
3466 IOR_REG_SET (old, live);
3470 /* On final pass, update counts of how many insns in which
3471 each reg is live. */
3472 if (flags & PROP_REG_INFO)
3473 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
3476 if (insn == bb->head)
3480 FREE_REG_SET (dead);
3481 FREE_REG_SET (live);
3482 free_EXPR_LIST_list (&mem_set_list);
3485 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3486 (SET expressions whose destinations are registers dead after the insn).
3487 NEEDED is the regset that says which regs are alive after the insn.
3489 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3491 If X is the entire body of an insn, NOTES contains the reg notes
3492 pertaining to the insn. */
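/* For example, (set (reg:SI 117) (plus:SI (reg:SI 118) (const_int 1)))
   is dead when pseudo 117 has no bit set in NEEDED, i.e. is not live
   after the insn.  */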
3495 insn_dead_p (x, needed, call_ok, notes)
3499 rtx notes ATTRIBUTE_UNUSED;
3501 enum rtx_code code = GET_CODE (x);
3504 /* If flow is invoked after reload, we must take existing AUTO_INC
3505 expressions into account. */
3506 if (reload_completed)
3508 for ( ; notes; notes = XEXP (notes, 1))
3510 if (REG_NOTE_KIND (notes) == REG_INC)
3512 int regno = REGNO (XEXP (notes, 0));
3514 /* Don't delete insns to set global regs. */
3515 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3516 || REGNO_REG_SET_P (needed, regno))
3523 /* If setting something that's a reg or part of one,
3524 see if that register's altered value will be live. */
3528 rtx r = SET_DEST (x);
3531 if (GET_CODE (r) == CC0)
3535 /* A SET that is a subroutine call cannot be dead. */
3536 if (GET_CODE (SET_SRC (x)) == CALL)
3542 /* Don't eliminate loads from volatile memory or volatile asms. */
3543 else if (volatile_refs_p (SET_SRC (x)))
3546 if (GET_CODE (r) == MEM)
3550 if (MEM_VOLATILE_P (r))
3553 /* Walk the set of memory locations we are currently tracking
3554 and see if one is an identical match to this memory location.
3555 If so, this memory write is dead (remember, we're walking
3556 backwards from the end of the block to the start). */
3557 temp = mem_set_list;
3560 if (rtx_equal_p (XEXP (temp, 0), r))
3562 temp = XEXP (temp, 1);
3567 while (GET_CODE (r) == SUBREG
3568 || GET_CODE (r) == STRICT_LOW_PART
3569 || GET_CODE (r) == ZERO_EXTRACT)
3572 if (GET_CODE (r) == REG)
3574 int regno = REGNO (r);
3577 if (REGNO_REG_SET_P (needed, regno))
3580 /* If this is a hard register, verify that subsequent
3581 words are not needed. */
3582 if (regno < FIRST_PSEUDO_REGISTER)
3584 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3587 if (REGNO_REG_SET_P (needed, regno+n))
3591 /* Don't delete insns to set global regs. */
3592 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3595 /* Make sure insns to set the stack pointer aren't deleted. */
3596 if (regno == STACK_POINTER_REGNUM)
3599 /* Make sure insns to set the frame pointer aren't deleted. */
3600 if (regno == FRAME_POINTER_REGNUM
3601 && (! reload_completed || frame_pointer_needed))
3603 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3604 if (regno == HARD_FRAME_POINTER_REGNUM
3605 && (! reload_completed || frame_pointer_needed))
3609 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3610 /* Make sure insns to set arg pointer are never deleted
3611 (if the arg pointer isn't fixed, there will be a USE
3612 for it, so we can treat it normally). */
3613 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3617 /* Otherwise, the set is dead. */
3623 /* If performing several activities, insn is dead if each activity
3624 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3625 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3627 else if (code == PARALLEL)
3629 int i = XVECLEN (x, 0);
3631 for (i--; i >= 0; i--)
3632 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3633 && GET_CODE (XVECEXP (x, 0, i)) != USE
3634 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3640 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3641 is not necessarily true for hard registers. */
3642 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3643 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3644 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3647 /* We do not check other CLOBBER or USE here. An insn consisting of just
3648 a CLOBBER or just a USE should not be deleted. */
3652 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3653 return 1 if the entire library call is dead.
3654 This is true if X copies a register (hard or pseudo)
3655 and if the hard return reg of the call insn is dead.
3656 (The caller should have tested the destination of X already for death.)
3658 If this insn doesn't just copy a register, then we don't
3659 have an ordinary libcall. In that case, cse could not have
3660 managed to substitute the source for the dest later on,
3661 so we can assume the libcall is dead.
3663 NEEDED is the bit vector of pseudoregs live before this insn.
3664 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
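/* Schematically (with illustrative insn numbers):

     (insn 10 ...)                               REG_LIBCALL -> insn 14
     (call_insn 12 (set (reg:SI 0) (call ...)))
     (insn 14 (set (reg:SI 117) (reg:SI 0)))     REG_RETVAL -> insn 10

   Here X is the body of insn 14; having already found pseudo 117 dead,
   we look for the call and test whether its hard return reg (reg 0)
   is needed afterward.  */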
3667 libcall_dead_p (x, needed, note, insn)
3673 register RTX_CODE code = GET_CODE (x);
3677 register rtx r = SET_SRC (x);
3678 if (GET_CODE (r) == REG)
3680 rtx call = XEXP (note, 0);
3684 /* Find the call insn. */
3685 while (call != insn && GET_CODE (call) != CALL_INSN)
3686 call = NEXT_INSN (call);
3688 /* If there is none, do nothing special,
3689 since ordinary death handling can understand these insns. */
3693 /* See if the hard reg holding the value is dead.
3694 If this is a PARALLEL, find the call within it. */
3695 call_pat = PATTERN (call);
3696 if (GET_CODE (call_pat) == PARALLEL)
3698 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3699 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3700 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3703 /* This may be a library call that is returning a value
3704 via an invisible pointer. Do nothing special, since
3705 ordinary death handling can understand these insns. */
3709 call_pat = XVECEXP (call_pat, 0, i);
3712 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3718 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3719 live at function entry. Don't count global register variables, variables
3720 in registers that can be used for function arg passing, or variables in
3721 fixed hard registers. */
3724 regno_uninitialized (regno)
3727 if (n_basic_blocks == 0
3728 || (regno < FIRST_PSEUDO_REGISTER
3729 && (global_regs[regno]
3730 || fixed_regs[regno]
3731 || FUNCTION_ARG_REGNO_P (regno))))
3734 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3737 /* 1 if register REGNO was alive at a place where `setjmp' was called
3738 and was set more than once or is an argument.
3739 Such regs may be clobbered by `longjmp'. */
3742 regno_clobbered_at_setjmp (regno)
3745 if (n_basic_blocks == 0)
3748 return ((REG_N_SETS (regno) > 1
3749 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3750 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3753 /* INSN references memory, possibly using autoincrement addressing modes.
3754 Find any entries on the mem_set_list that need to be invalidated due
3755 to an address change. */
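/* E.g. if INSN has a REG_INC note for (reg:SI 100), a tracked entry
   such as (mem:SI (plus:SI (reg:SI 100) (const_int 8))) no longer
   names the same location and must be dropped from mem_set_list.  */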
3757 invalidate_mems_from_autoinc (insn)
3760 rtx note;
3761 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3763 if (REG_NOTE_KIND (note) == REG_INC)
3765 rtx temp = mem_set_list;
3766 rtx prev = NULL_RTX;
3771 next = XEXP (temp, 1);
3772 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3774 /* Splice temp out of list. */
3776 XEXP (prev, 1) = next;
3778 mem_set_list = next;
3779 free_EXPR_LIST_node (temp);
3789 /* Process the registers that are set within X. Their bits are set to
3790 1 in the regset DEAD, because they are dead prior to this insn.
3792 If INSN is nonzero, it is the insn being processed.
3794 FLAGS is the set of operations to perform. */
3797 mark_set_regs (needed, dead, x, insn, significant, flags)
3805 register RTX_CODE code = GET_CODE (x);
3807 if (code == SET || code == CLOBBER)
3808 mark_set_1 (needed, dead, x, insn, significant, flags);
3809 else if (code == PARALLEL)
3812 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3814 code = GET_CODE (XVECEXP (x, 0, i));
3815 if (code == SET || code == CLOBBER)
3816 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3817 significant, flags);
3822 /* Process a single SET rtx, X. */
3825 mark_set_1 (needed, dead, x, insn, significant, flags)
3833 register int regno = -1;
3834 register rtx reg = SET_DEST (x);
3836 /* Some targets place small structures in registers for
3837 return values of functions. We have to detect this
3838 case specially here to get correct flow information. */
3839 if (GET_CODE (reg) == PARALLEL
3840 && GET_MODE (reg) == BLKmode)
3844 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3845 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3846 significant, flags);
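/* Such a destination might look like (illustrative):

     (parallel:BLK [(expr_list (reg:DI 3) (const_int 0))
                    (expr_list (reg:DI 4) (const_int 8))])

   where each inner register receives part of the structure.  */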
3850 /* Modifying just one hardware register of a multi-reg value
3851 or just a byte field of a register
3852 does not mean the value from before this insn is now dead.
3853 But it does mean liveness of that register at the end of the block
3856 Within mark_set_1, however, we treat it as if the register is
3857 indeed modified. mark_used_regs will, however, also treat this
3858 register as being used. Thus, we treat these insns as setting a
3859 new value for the register as a function of its old value. This
3860 causes LOG_LINKS to be made appropriately, and this will help combine. */
3862 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3863 || GET_CODE (reg) == SIGN_EXTRACT
3864 || GET_CODE (reg) == STRICT_LOW_PART)
3865 reg = XEXP (reg, 0);
3867 /* If this set is a MEM, then it kills any aliased writes.
3868 If this set is a REG, then it kills any MEMs which use the reg. */
3869 if (flags & PROP_SCAN_DEAD_CODE)
3871 if (GET_CODE (reg) == MEM
3872 || GET_CODE (reg) == REG)
3874 rtx temp = mem_set_list;
3875 rtx prev = NULL_RTX;
3880 next = XEXP (temp, 1);
3881 if ((GET_CODE (reg) == MEM
3882 && output_dependence (XEXP (temp, 0), reg))
3883 || (GET_CODE (reg) == REG
3884 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3886 /* Splice this entry out of the list. */
3888 XEXP (prev, 1) = next;
3890 mem_set_list = next;
3891 free_EXPR_LIST_node (temp);
3899 /* If the memory reference had embedded side effects (autoincrement
3900 address modes), then we may need to kill some entries on the
3902 if (insn && GET_CODE (reg) == MEM)
3903 invalidate_mems_from_autoinc (insn);
3905 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3906 /* We do not know the size of a BLKmode store, so we do not track
3907 them for redundant store elimination. */
3908 && GET_MODE (reg) != BLKmode
3909 /* There are no REG_INC notes for SP, so we can't assume we'll see
3910 everything that invalidates it. To be safe, don't eliminate any
3911 stores through SP; none of them should be redundant anyway. */
3912 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3913 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3916 if (GET_CODE (reg) == REG
3917 && (regno = REGNO (reg),
3918 ! (regno == FRAME_POINTER_REGNUM
3919 && (! reload_completed || frame_pointer_needed)))
3920 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3921 && ! (regno == HARD_FRAME_POINTER_REGNUM
3922 && (! reload_completed || frame_pointer_needed))
3924 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3925 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3927 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3928 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3930 int some_needed = REGNO_REG_SET_P (needed, regno);
3931 int some_not_needed = ! some_needed;
3933 /* Mark it as a significant register for this basic block. */
3935 SET_REGNO_REG_SET (significant, regno);
3937 /* Mark it as dead before this insn. */
3938 SET_REGNO_REG_SET (dead, regno);
3940 /* A hard reg in a wide mode may really be multiple registers.
3941 If so, mark all of them just like the first. */
3942 if (regno < FIRST_PSEUDO_REGISTER)
3946 /* Nothing below is needed for the stack pointer; get out asap.
3947 E.g., log links aren't needed, since combine won't use them. */
3948 if (regno == STACK_POINTER_REGNUM)
3951 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3954 int regno_n = regno + n;
3955 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3957 SET_REGNO_REG_SET (significant, regno_n);
3959 SET_REGNO_REG_SET (dead, regno_n);
3960 some_needed |= needed_regno;
3961 some_not_needed |= ! needed_regno;
3965 /* Additional data to record if this is the final pass. */
3966 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3967 | PROP_DEATH_NOTES | PROP_AUTOINC))
3970 register int blocknum = BLOCK_NUM (insn);
3973 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3974 y = reg_next_use[regno];
3976 /* If this is a hard reg, record this function uses the reg. */
3978 if (regno < FIRST_PSEUDO_REGISTER)
3981 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3983 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3984 for (i = regno; i < endregno; i++)
3986 /* The next use is no longer "next", since a store
3988 reg_next_use[i] = 0;
3991 if (flags & PROP_REG_INFO)
3992 for (i = regno; i < endregno; i++)
3994 regs_ever_live[i] = 1;
4000 /* The next use is no longer "next", since a store
4002 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4003 reg_next_use[regno] = 0;
4005 /* Keep track of which basic blocks each reg appears in. */
4007 if (flags & PROP_REG_INFO)
4009 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4010 REG_BASIC_BLOCK (regno) = blocknum;
4011 else if (REG_BASIC_BLOCK (regno) != blocknum)
4012 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4014 /* Count (weighted) references, stores, etc. This counts a
4015 register twice if it is modified, but that is correct. */
4016 REG_N_SETS (regno)++;
4017 REG_N_REFS (regno) += loop_depth + 1;
4019 /* The insns where a reg is live are normally counted
4020 elsewhere, but we want the count to include the insn
4021 where the reg is set, and the normal counting mechanism
4022 would not count it. */
4023 REG_LIVE_LENGTH (regno)++;
4027 if (! some_not_needed)
4029 if (flags & PROP_LOG_LINKS)
4031 /* Make a logical link from the next following insn
4032 that uses this register, back to this insn.
4033 The following insns have already been processed.
4035 We don't build a LOG_LINK for hard registers contained
4036 in ASM_OPERANDs. If these registers get replaced,
4037 we might wind up changing the semantics of the insn,
4038 even if reload can make what appear to be valid
4039 assignments later. */
4040 if (y && (BLOCK_NUM (y) == blocknum)
4041 && (regno >= FIRST_PSEUDO_REGISTER
4042 || asm_noperands (PATTERN (y)) < 0))
4043 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4046 else if (! some_needed)
4048 if (flags & PROP_REG_INFO)
4049 REG_N_DEATHS (REGNO (reg))++;
4051 if (flags & PROP_DEATH_NOTES)
4053 /* Note that dead stores have already been deleted
4054 when possible. If we get here, we have found a
4055 dead store that cannot be eliminated (because the
4056 same insn does something useful). Indicate this
4057 by marking the reg being set as dying here. */
4059 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4064 if (flags & PROP_DEATH_NOTES)
4066 /* This is a case where we have a multi-word hard register
4067 and some, but not all, of the words of the register are
4068 needed in subsequent insns. Write REG_UNUSED notes
4069 for those parts that were not needed. This case should
4074 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4076 if (!REGNO_REG_SET_P (needed, regno + i))
4080 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4086 else if (GET_CODE (reg) == REG)
4088 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4089 reg_next_use[regno] = 0;
4092 /* If this is the last pass and this is a SCRATCH, show it will be dying
4093 here and count it. */
4094 else if (GET_CODE (reg) == SCRATCH)
4096 if (flags & PROP_DEATH_NOTES)
4098 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4104 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4108 find_auto_inc (needed, x, insn)
4113 rtx addr = XEXP (x, 0);
4114 HOST_WIDE_INT offset = 0;
4117 /* Here we detect use of an index register which might be good for
4118 postincrement, postdecrement, preincrement, or predecrement. */
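/* For a four-byte access, the cases recognized below are:

     address in MEM     increment insn    replacement
     (reg)              reg = reg + 4     (post_inc (reg))
     (reg)              reg = reg - 4     (post_dec (reg))
     (plus (reg) 4)     reg = reg + 4     (pre_inc (reg))
     (plus (reg) -4)    reg = reg - 4     (pre_dec (reg))  */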
4120 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4121 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4123 if (GET_CODE (addr) == REG)
4126 register int size = GET_MODE_SIZE (GET_MODE (x));
4129 int regno = REGNO (addr);
4131 /* Is the next use an increment that might make auto-increment? */
4132 if ((incr = reg_next_use[regno]) != 0
4133 && (set = single_set (incr)) != 0
4134 && GET_CODE (set) == SET
4135 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4136 /* Can't add side effects to jumps; if reg is spilled and
4137 reloaded, there's no way to store back the altered value. */
4138 && GET_CODE (insn) != JUMP_INSN
4139 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4140 && XEXP (y, 0) == addr
4141 && GET_CODE (XEXP (y, 1)) == CONST_INT
4142 && ((HAVE_POST_INCREMENT
4143 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4144 || (HAVE_POST_DECREMENT
4145 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4146 || (HAVE_PRE_INCREMENT
4147 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4148 || (HAVE_PRE_DECREMENT
4149 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4150 /* Make sure this reg appears only once in this insn. */
4151 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4152 use != 0 && use != (rtx) 1))
4154 rtx q = SET_DEST (set);
4155 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4156 ? (offset ? PRE_INC : POST_INC)
4157 : (offset ? PRE_DEC : POST_DEC));
4159 if (dead_or_set_p (incr, addr))
4161 /* This is the simple case. Try to make the auto-inc. If
4162 we can't, we are done. Otherwise, we will do any
4163 needed updates below. */
4164 if (! validate_change (insn, &XEXP (x, 0),
4165 gen_rtx_fmt_e (inc_code, Pmode, addr),
4169 else if (GET_CODE (q) == REG
4170 /* PREV_INSN used here to check the semi-open interval
4172 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4173 /* We must also check for sets of q as q may be
4174 a call clobbered hard register and there may
4175 be a call between PREV_INSN (insn) and incr. */
4176 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4178 /* We have *p followed sometime later by q = p+size.
4179 Both p and q must be live afterward,
4180 and q is not used between INSN and its assignment.
4181 Change it to q = p, ...*q..., q = q+size.
4182 Then fall into the usual case. */
4187 emit_move_insn (q, addr);
4188 insns = get_insns ();
4191 bb = BLOCK_FOR_INSN (insn);
4192 for (temp = insns; temp; temp = NEXT_INSN (temp))
4193 set_block_for_insn (temp, bb);
4195 /* If we can't make the auto-inc, or can't make the
4196 replacement into Y, exit. There's no point in making
4197 the change below if we can't do the auto-inc and doing
4198 so is not correct in the pre-inc case. */
4200 validate_change (insn, &XEXP (x, 0),
4201 gen_rtx_fmt_e (inc_code, Pmode, q),
4203 validate_change (incr, &XEXP (y, 0), q, 1);
4204 if (! apply_change_group ())
4207 /* We now know we'll be doing this change, so emit the
4208 new insn(s) and do the updates. */
4209 emit_insns_before (insns, insn);
4211 if (BLOCK_FOR_INSN (insn)->head == insn)
4212 BLOCK_FOR_INSN (insn)->head = insns;
4214 /* INCR will become a NOTE and INSN won't contain a
4215 use of ADDR. If a use of ADDR was just placed in
4216 the insn before INSN, make that the next use.
4217 Otherwise, invalidate it. */
4218 if (GET_CODE (PREV_INSN (insn)) == INSN
4219 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4220 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4221 reg_next_use[regno] = PREV_INSN (insn);
4223 reg_next_use[regno] = 0;
4228 /* REGNO is now used in INCR which is below INSN, but
4229 it previously wasn't live here. If we don't mark
4230 it as needed, we'll put a REG_DEAD note for it
4231 on this insn, which is incorrect. */
4232 SET_REGNO_REG_SET (needed, regno);
4234 /* If there are any calls between INSN and INCR, show
4235 that REGNO now crosses them. */
4236 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4237 if (GET_CODE (temp) == CALL_INSN)
4238 REG_N_CALLS_CROSSED (regno)++;
4243 /* If we haven't returned, it means we were able to make the
4244 auto-inc, so update the status. First, record that this insn
4245 has an implicit side effect. */
4248 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4250 /* Modify the old increment-insn to simply copy
4251 the already-incremented value of our register. */
4252 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4255 /* If that makes it a no-op (copying the register into itself), delete
4256 it so it won't appear to be a "use" and a "set" of this
4258 if (SET_DEST (set) == addr)
4260 PUT_CODE (incr, NOTE);
4261 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4262 NOTE_SOURCE_FILE (incr) = 0;
4265 if (regno >= FIRST_PSEUDO_REGISTER)
4267 /* Count an extra reference to the reg. When a reg is
4268 incremented, spilling it is worse, so we want to make
4269 that less likely. */
4270 REG_N_REFS (regno) += loop_depth + 1;
4272 /* Count the increment as a setting of the register,
4273 even though it isn't a SET in rtl. */
4274 REG_N_SETS (regno)++;
4279 #endif /* AUTO_INC_DEC */
4281 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4282 This is done assuming the registers needed from X
4283 are those that have 1-bits in NEEDED.
4285 FLAGS is the set of enabled operations.
4287 INSN is the containing instruction. If INSN is dead, this function is not
4291 mark_used_regs (needed, live, x, flags, insn)
4298 register RTX_CODE code;
4303 code = GET_CODE (x);
4323 /* If we are clobbering a MEM, mark any registers inside the address
4325 if (GET_CODE (XEXP (x, 0)) == MEM)
4326 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4330 /* Don't bother watching stores to mems if this is not the
4331 final pass. We'll not be deleting dead stores this round. */
4332 if (flags & PROP_SCAN_DEAD_CODE)
4334 /* Invalidate the data for the last MEM stored, but only if MEM is
4335 something that can be stored into. */
4336 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4337 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4338 ; /* needn't clear the memory set list */
4341 rtx temp = mem_set_list;
4342 rtx prev = NULL_RTX;
4347 next = XEXP (temp, 1);
4348 if (anti_dependence (XEXP (temp, 0), x))
4350 /* Splice temp out of the list. */
4352 XEXP (prev, 1) = next;
4354 mem_set_list = next;
4355 free_EXPR_LIST_node (temp);
4363 /* If the memory reference had embedded side effects (autoincrement
4364 address modes), then we may need to kill some entries on the
4367 invalidate_mems_from_autoinc (insn);
4371 if (flags & PROP_AUTOINC)
4372 find_auto_inc (needed, x, insn);
4377 if (GET_CODE (SUBREG_REG (x)) == REG
4378 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4379 && (GET_MODE_SIZE (GET_MODE (x))
4380 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4381 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4383 /* While we're here, optimize this case. */
4386 /* If the SUBREG is not of a register, don't optimize. */
4387 if (GET_CODE (x) != REG)
4389 mark_used_regs (needed, live, x, flags, insn);
4393 /* ... fall through ... */
4396 /* Here we see a register used (rather than set)
4397 => mark it as needed. */
4401 int some_needed = REGNO_REG_SET_P (needed, regno);
4402 int some_not_needed = ! some_needed;
4404 SET_REGNO_REG_SET (live, regno);
4406 /* A hard reg in a wide mode may really be multiple registers.
4407 If so, mark all of them just like the first. */
4408 if (regno < FIRST_PSEUDO_REGISTER)
4412 /* For stack ptr or fixed arg pointer,
4413 nothing below can be necessary, so waste no more time. */
4414 if (regno == STACK_POINTER_REGNUM
4415 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4416 || (regno == HARD_FRAME_POINTER_REGNUM
4417 && (! reload_completed || frame_pointer_needed))
4419 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4420 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4422 || (regno == FRAME_POINTER_REGNUM
4423 && (! reload_completed || frame_pointer_needed)))
4425 /* If this is a register we are going to try to eliminate,
4426 don't mark it live here. If we are successful in
4427 eliminating it, it need not be live unless it is used for
4428 pseudos, in which case it will have been set live when
4429 it was allocated to the pseudos. If the register will not
4430 be eliminated, reload will set it live at that point. */
4432 if ((flags & PROP_REG_INFO)
4433 && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4434 regs_ever_live[regno] = 1;
4437 /* No death notes for global register variables;
4438 their values are live after this function exits. */
4439 if (global_regs[regno])
4441 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4442 reg_next_use[regno] = insn;
4446 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4449 int regno_n = regno + n;
4450 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4452 SET_REGNO_REG_SET (live, regno_n);
4453 some_needed |= needed_regno;
4454 some_not_needed |= ! needed_regno;
4458 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4460 /* Record where each reg is used, so when the reg
4461 is set we know the next insn that uses it. */
4463 reg_next_use[regno] = insn;
4465 if (flags & PROP_REG_INFO)
4467 if (regno < FIRST_PSEUDO_REGISTER)
4469 /* If a hard reg is being used,
4470 record that this function does use it. */
4472 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4476 regs_ever_live[regno + --i] = 1;
4481 /* Keep track of which basic block each reg appears in. */
4483 register int blocknum = BLOCK_NUM (insn);
4485 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4486 REG_BASIC_BLOCK (regno) = blocknum;
4487 else if (REG_BASIC_BLOCK (regno) != blocknum)
4488 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4490 /* Count (weighted) number of uses of each reg. */
4492 REG_N_REFS (regno) += loop_depth + 1;
4496 /* Record and count the insns in which a reg dies.
4497 If it is used in this insn and was dead below the insn
4498 then it dies in this insn. If it was set in this insn,
4499 we do not make a REG_DEAD note; likewise if we already
4500 made such a note. */
4502 if (flags & PROP_DEATH_NOTES)
4505 && ! dead_or_set_p (insn, x)
4507 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4511 /* Check for the case where the register dying partially
4512 overlaps the register set by this insn. */
4513 if (regno < FIRST_PSEUDO_REGISTER
4514 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4516 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4518 some_needed |= dead_or_set_regno_p (insn, regno + n);
4521 /* If none of the words in X is needed, make a REG_DEAD
4522 note. Otherwise, we must make partial REG_DEAD notes. */
4526 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4527 REG_N_DEATHS (regno)++;
4533 /* Don't make a REG_DEAD note for a part of a register
4534 that is set in the insn. */
4536 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4538 if (!REGNO_REG_SET_P (needed, regno + i)
4539 && ! dead_or_set_regno_p (insn, regno + i))
4542 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4553 register rtx testreg = SET_DEST (x);
4556 /* If storing into MEM, don't show it as being used. But do
4557 show the address as being used. */
4558 if (GET_CODE (testreg) == MEM)
4561 if (flags & PROP_AUTOINC)
4562 find_auto_inc (needed, testreg, insn);
4564 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4565 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4569 /* Storing in STRICT_LOW_PART is like storing in a reg
4570 in that this SET might be dead, so ignore it in TESTREG,
4571 but in some other ways it is like using the reg.
4573 Storing in a SUBREG or a bit field is like storing the entire
4574 register in that if the register's value is not used
4575 then this SET is not needed. */
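/* Illustrative sketch (not part of the original sources): for a store
   such as

     (set (strict_low_part (subreg:QI (reg:SI 65) 0)) (reg:QI 66))

   only the low byte of pseudo 65 is replaced, so the rest of the old
   value is still used; a full-width (set (reg:SI 65) ...) would use
   none of it.  The loop below peels off these wrappers to find the
   register actually being stored into.  */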
4576 while (GET_CODE (testreg) == STRICT_LOW_PART
4577 || GET_CODE (testreg) == ZERO_EXTRACT
4578 || GET_CODE (testreg) == SIGN_EXTRACT
4579 || GET_CODE (testreg) == SUBREG)
4581 if (GET_CODE (testreg) == SUBREG
4582 && GET_CODE (SUBREG_REG (testreg)) == REG
4583 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4584 && (GET_MODE_SIZE (GET_MODE (testreg))
4585 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4586 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4588 /* Modifying a single register in an alternate mode
4589 does not use any of the old value. But these other
4590 ways of storing in a register do use the old value. */
4591 if (GET_CODE (testreg) == SUBREG
4592 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4597 testreg = XEXP (testreg, 0);
4600 /* If this is a store into a register,
4601 recursively scan the value being stored. */
4603 if ((GET_CODE (testreg) == PARALLEL
4604 && GET_MODE (testreg) == BLKmode)
4605 || (GET_CODE (testreg) == REG
4606 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4607 && (! reload_completed || frame_pointer_needed)))
4608 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4609 && ! (regno == HARD_FRAME_POINTER_REGNUM
4610 && (! reload_completed || frame_pointer_needed))
4612 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4613 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4616 /* We used to exclude global_regs here, but that seems wrong.
4617 Storing in them is like storing in mem. */
4619 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4621 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4628 case UNSPEC_VOLATILE:
4632 /* Traditional and volatile asm instructions must be considered to use
4633 and clobber all hard registers, all pseudo-registers and all of
4634 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4636 Consider for instance a volatile asm that changes the fpu rounding
4637 mode. An insn should not be moved across this even if it only uses
4638 pseudo-regs because it might give an incorrectly rounded result.
4640 ?!? Unfortunately, marking all hard registers as live causes massive
4641 problems for the register allocator and marking all pseudos as live
4642 creates mountains of uninitialized variable warnings.
4644 So for now, just clear the memory set list and mark any regs
4645 we can find in ASM_OPERANDS as used. */
4646 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4647 free_EXPR_LIST_list (&mem_set_list);
4649 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4650 We can not just fall through here since then we would be confused
4651 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
4652 a traditional asm, unlike its normal usage. */
4653 if (code == ASM_OPERANDS)
4657 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4658 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4669 /* Recursively scan the operands of this expression. */
4672 register const char *fmt = GET_RTX_FORMAT (code);
4675 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4679 /* Tail recursive case: save a function call level. */
4685 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4687 else if (fmt[i] == 'E')
4690 for (j = 0; j < XVECLEN (x, i); j++)
4691 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4700 try_pre_increment_1 (insn)
4703 /* Find the next use of this reg. If in same basic block,
4704 make it do pre-increment or pre-decrement if appropriate. */
4705 rtx x = single_set (insn);
4706 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4707 * INTVAL (XEXP (SET_SRC (x), 1)));
4708 int regno = REGNO (SET_DEST (x));
4709 rtx y = reg_next_use[regno];
4711 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4712 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4713 mode would be better. */
4714 && ! dead_or_set_p (y, SET_DEST (x))
4715 && try_pre_increment (y, SET_DEST (x), amount))
4717 /* We have found a suitable auto-increment
4718 and already changed insn Y to do it.
4719 So flush this increment-instruction. */
4720 PUT_CODE (insn, NOTE);
4721 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4722 NOTE_SOURCE_FILE (insn) = 0;
4723 /* Count a reference to this reg for the increment
4724 insn we are deleting. When a reg is incremented,
4725 spilling it is worse, so we want to make that less likely. */
4727 if (regno >= FIRST_PSEUDO_REGISTER)
4729 REG_N_REFS (regno) += loop_depth + 1;
4730 REG_N_SETS (regno)++;
4737 /* Try to change INSN so that it does pre-increment or pre-decrement
4738 addressing on register REG in order to add AMOUNT to REG.
4739 AMOUNT is negative for pre-decrement.
4740 Returns 1 if the change could be made.
4741 This checks everything about the validity of the result of modifying INSN. */
4744 try_pre_increment (insn, reg, amount)
4746 HOST_WIDE_INT amount;
4750 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4751 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4753 /* Nonzero if we can try to make a post-increment or post-decrement.
4754 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4755 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4756 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4759 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4762 /* From the sign of increment, see which possibilities are conceivable
4763 on this target machine. */
4764 if (HAVE_PRE_INCREMENT && amount > 0)
4766 if (HAVE_POST_INCREMENT && amount > 0)
4769 if (HAVE_PRE_DECREMENT && amount < 0)
4771 if (HAVE_POST_DECREMENT && amount < 0)
4774 if (! (pre_ok || post_ok))
4777 /* It is not safe to add a side effect to a jump insn
4778 because if the incremented register is spilled and must be reloaded
4779 there would be no way to store the incremented value back in memory. */
4781 if (GET_CODE (insn) == JUMP_INSN)
4786 use = find_use_as_address (PATTERN (insn), reg, 0);
4787 if (post_ok && (use == 0 || use == (rtx) 1))
4789 use = find_use_as_address (PATTERN (insn), reg, -amount);
4793 if (use == 0 || use == (rtx) 1)
4796 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4799 /* See if this combination of instruction and addressing mode exists. */
4800 if (! validate_change (insn, &XEXP (use, 0),
4801 gen_rtx_fmt_e (amount > 0
4802 ? (do_post ? POST_INC : PRE_INC)
4803 : (do_post ? POST_DEC : PRE_DEC),
4807 /* Record that this insn now has an implicit side effect on X. */
4808 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4812 #endif /* AUTO_INC_DEC */
4814 /* Find the place in the rtx X where REG is used as a memory address.
4815 Return the MEM rtx that so uses it.
4816 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4817 (plus REG (const_int PLUSCONST)).
4819 If such an address does not appear, return 0.
4820 If REG appears more than once, or is used other than in such an address, return (rtx) 1. */
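/* For illustration (hypothetical rtl, not from the original sources):
   with PLUSCONST == 4, scanning

     (set (reg:SI 2) (mem:SI (plus:SI (reg:SI 1) (const_int 4))))

   for (reg:SI 1) returns the inner MEM, since the register is used
   exactly once and only inside an address of the required form.  */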
4824 find_use_as_address (x, reg, plusconst)
4827 HOST_WIDE_INT plusconst;
4829 enum rtx_code code = GET_CODE (x);
4830 const char *fmt = GET_RTX_FORMAT (code);
4832 register rtx value = 0;
4835 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4838 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4839 && XEXP (XEXP (x, 0), 0) == reg
4840 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4841 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4844 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4846 /* If REG occurs inside a MEM used in a bit-field reference,
4847 that is unacceptable. */
4848 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4849 return (rtx) (HOST_WIDE_INT) 1;
4853 return (rtx) (HOST_WIDE_INT) 1;
4855 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4859 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4863 return (rtx) (HOST_WIDE_INT) 1;
4865 else if (fmt[i] == 'E')
4868 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4870 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4874 return (rtx) (HOST_WIDE_INT) 1;
4882 /* Write information about registers and basic blocks into FILE.
4883 This is part of making a debugging dump. */
4886 dump_regset (r, outf)
4893 fputs (" (nil)", outf);
4897 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4899 fprintf (outf, " %d", i);
4900 if (i < FIRST_PSEUDO_REGISTER)
4901 fprintf (outf, " [%s]",
4910 dump_regset (r, stderr);
4911 putc ('\n', stderr);
4915 dump_flow_info (file)
4919 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4921 fprintf (file, "%d registers.\n", max_regno);
4922 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4925 enum reg_class class, altclass;
4926 fprintf (file, "\nRegister %d used %d times across %d insns",
4927 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4928 if (REG_BASIC_BLOCK (i) >= 0)
4929 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4931 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4932 (REG_N_SETS (i) == 1) ? "" : "s");
4933 if (REG_USERVAR_P (regno_reg_rtx[i]))
4934 fprintf (file, "; user var");
4935 if (REG_N_DEATHS (i) != 1)
4936 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4937 if (REG_N_CALLS_CROSSED (i) == 1)
4938 fprintf (file, "; crosses 1 call");
4939 else if (REG_N_CALLS_CROSSED (i))
4940 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4941 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4942 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4943 class = reg_preferred_class (i);
4944 altclass = reg_alternate_class (i);
4945 if (class != GENERAL_REGS || altclass != ALL_REGS)
4947 if (altclass == ALL_REGS || class == ALL_REGS)
4948 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4949 else if (altclass == NO_REGS)
4950 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4952 fprintf (file, "; pref %s, else %s",
4953 reg_class_names[(int) class],
4954 reg_class_names[(int) altclass]);
4956 if (REGNO_POINTER_FLAG (i))
4957 fprintf (file, "; pointer");
4958 fprintf (file, ".\n");
4961 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4962 for (i = 0; i < n_basic_blocks; i++)
4964 register basic_block bb = BASIC_BLOCK (i);
4967 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
4968 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
4970 fprintf (file, "Predecessors: ");
4971 for (e = bb->pred; e ; e = e->pred_next)
4972 dump_edge_info (file, e, 0);
4974 fprintf (file, "\nSuccessors: ");
4975 for (e = bb->succ; e ; e = e->succ_next)
4976 dump_edge_info (file, e, 1);
4978 fprintf (file, "\nRegisters live at start:");
4979 dump_regset (bb->global_live_at_start, file);
4981 fprintf (file, "\nRegisters live at end:");
4982 dump_regset (bb->global_live_at_end, file);
4993 dump_flow_info (stderr);
4997 dump_edge_info (file, e, do_succ)
5002 basic_block side = (do_succ ? e->dest : e->src);
5004 if (side == ENTRY_BLOCK_PTR)
5005 fputs (" ENTRY", file);
5006 else if (side == EXIT_BLOCK_PTR)
5007 fputs (" EXIT", file);
5009 fprintf (file, " %d", side->index);
5013 static const char * const bitnames[] = {
5014 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5017 int i, flags = e->flags;
5021 for (i = 0; flags; i++)
5022 if (flags & (1 << i))
5028 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5029 fputs (bitnames[i], file);
5031 fprintf (file, "%d", i);
5039 /* Print out one basic block with live information at start and end. */
5049 fprintf (outf, ";; Basic block %d, loop depth %d",
5050 bb->index, bb->loop_depth - 1);
5051 if (bb->eh_beg != -1 || bb->eh_end != -1)
5052 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5055 fputs (";; Predecessors: ", outf);
5056 for (e = bb->pred; e ; e = e->pred_next)
5057 dump_edge_info (outf, e, 0);
5060 fputs (";; Registers live at start:", outf);
5061 dump_regset (bb->global_live_at_start, outf);
5064 for (insn = bb->head, last = NEXT_INSN (bb->end);
5066 insn = NEXT_INSN (insn))
5067 print_rtl_single (outf, insn);
5069 fputs (";; Registers live at end:", outf);
5070 dump_regset (bb->global_live_at_end, outf);
5073 fputs (";; Successors: ", outf);
5074 for (e = bb->succ; e; e = e->succ_next)
5075 dump_edge_info (outf, e, 1);
5083 dump_bb (bb, stderr);
5090 dump_bb (BASIC_BLOCK(n), stderr);
5093 /* Like print_rtl, but also print out live information for the start of each
5097 print_rtl_with_bb (outf, rtx_first)
5101 register rtx tmp_rtx;
5104 fprintf (outf, "(nil)\n");
5108 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5109 int max_uid = get_max_uid ();
5110 basic_block *start = (basic_block *)
5111 xcalloc (max_uid, sizeof (basic_block));
5112 basic_block *end = (basic_block *)
5113 xcalloc (max_uid, sizeof (basic_block));
5114 enum bb_state *in_bb_p = (enum bb_state *)
5115 xcalloc (max_uid, sizeof (enum bb_state));
5117 for (i = n_basic_blocks - 1; i >= 0; i--)
5119 basic_block bb = BASIC_BLOCK (i);
5122 start[INSN_UID (bb->head)] = bb;
5123 end[INSN_UID (bb->end)] = bb;
5124 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5126 enum bb_state state = IN_MULTIPLE_BB;
5127 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5129 in_bb_p[INSN_UID(x)] = state;
5136 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5141 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5143 fprintf (outf, ";; Start of basic block %d, registers live:",
5145 dump_regset (bb->global_live_at_start, outf);
5149 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5150 && GET_CODE (tmp_rtx) != NOTE
5151 && GET_CODE (tmp_rtx) != BARRIER)
5152 fprintf (outf, ";; Insn is not within a basic block\n");
5153 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5154 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5156 did_output = print_rtl_single (outf, tmp_rtx);
5158 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5159 fprintf (outf, ";; End of basic block %d\n", bb->index);
5170 if (current_function_epilogue_delay_list != 0)
5172 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5173 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5174 tmp_rtx = XEXP (tmp_rtx, 1))
5175 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5179 /* Compute dominator relationships using new flow graph structures. */
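/* A small worked example (not from the original sources): for the CFG

     ENTRY -> 0;  0 -> 1;  0 -> 2;  1 -> 3;  2 -> 3

   the iteration below converges to dom(0) = {0}, dom(1) = {0,1},
   dom(2) = {0,2} and dom(3) = {0,3}; block 3 is reached along two
   paths, so only itself and block 0 dominate it.  */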
5181 compute_flow_dominators (dominators, post_dominators)
5182 sbitmap *dominators;
5183 sbitmap *post_dominators;
5186 sbitmap *temp_bitmap;
5188 basic_block *worklist, *tos;
5190 /* Allocate a worklist array/queue. Entries are only added to the
5191 list if they were not already on the list. So the size is
5192 bounded by the number of basic blocks. */
5193 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5196 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5197 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5201 /* The optimistic setting of dominators requires us to put every
5202 block on the work list initially. */
5203 for (bb = 0; bb < n_basic_blocks; bb++)
5205 *tos++ = BASIC_BLOCK (bb);
5206 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5209 /* We want a maximal solution, so initially assume everything dominates everything else. */
5211 sbitmap_vector_ones (dominators, n_basic_blocks);
5213 /* Mark successors of the entry block so we can identify them below. */
5214 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5215 e->dest->aux = ENTRY_BLOCK_PTR;
5217 /* Iterate until the worklist is empty. */
5218 while (tos != worklist)
5220 /* Take the first entry off the worklist. */
5221 basic_block b = *--tos;
5224 /* Compute the intersection of the dominators of all the predecessor blocks.
5227 If one of the predecessor blocks is the ENTRY block, then the
5228 intersection of the dominators of the predecessor blocks is
5229 defined as the null set. We can identify such blocks by the
5230 special value in the AUX field in the block structure. */
5231 if (b->aux == ENTRY_BLOCK_PTR)
5233 /* Do not clear the aux field for blocks which are
5234 successors of the ENTRY block. That way we never
5235 add them to the worklist again.
5237 The intersect of dominators of the preds of this block is
5238 defined as the null set. */
5239 sbitmap_zero (temp_bitmap[bb]);
5243 /* Clear the aux field of this block so it can be added to
5244 the worklist again if necessary. */
5246 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5249 /* Make sure each block always dominates itself. */
5250 SET_BIT (temp_bitmap[bb], bb);
5252 /* If the out state of this block changed, then we need to
5253 add the successors of this block to the worklist if they
5254 are not already on the worklist. */
5255 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5257 for (e = b->succ; e; e = e->succ_next)
5259 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5269 if (post_dominators)
5271 /* The optimistic setting of dominators requires us to put every
5272 block on the work list initially. */
5273 for (bb = 0; bb < n_basic_blocks; bb++)
5275 *tos++ = BASIC_BLOCK (bb);
5276 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5279 /* We want a maximal solution, so initially assume everything post
5280 dominates everything else. */
5281 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5283 /* Mark predecessors of the exit block so we can identify them below. */
5284 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5285 e->src->aux = EXIT_BLOCK_PTR;
5287 /* Iterate until the worklist is empty. */
5288 while (tos != worklist)
5290 /* Take the first entry off the worklist. */
5291 basic_block b = *--tos;
5294 /* Compute the intersection of the post dominators of all the successor blocks.
5297 If one of the successor blocks is the EXIT block, then the
5298 intersection of the dominators of the successor blocks is
5299 defined as the null set. We can identify such blocks by the
5300 special value in the AUX field in the block structure. */
5301 if (b->aux == EXIT_BLOCK_PTR)
5303 /* Do not clear the aux field for blocks which are
5304 predecessors of the EXIT block. That way we never
5305 add them to the worklist again.
5307 The intersect of dominators of the succs of this block is
5308 defined as the null set. */
5309 sbitmap_zero (temp_bitmap[bb]);
5313 /* Clear the aux field of this block so it can be added to
5314 the worklist again if necessary. */
5316 sbitmap_intersection_of_succs (temp_bitmap[bb],
5317 post_dominators, bb);
5320 /* Make sure each block always post dominates itself. */
5321 SET_BIT (temp_bitmap[bb], bb);
5323 /* If the out state of this block changed, then we need to
5324 add the predecessors of this block to the worklist if they
5325 are not already on the worklist. */
5326 if (sbitmap_a_and_b (post_dominators[bb],
5327 post_dominators[bb],
5330 for (e = b->pred; e; e = e->pred_next)
5332 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5344 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
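/* Worked example (not from the original sources): continuing the CFG
   sketched before compute_flow_dominators, dom(3) = {0,3} gives
   tmp(3) = {0} after removing block 3 itself, and tmp(0) is empty, so
   nothing further is subtracted and the one surviving bit yields
   idom[3] = 0.  */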
5347 compute_immediate_dominators (idom, dominators)
5349 sbitmap *dominators;
5354 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5356 /* Begin with tmp(n) = dom(n) - { n }. */
5357 for (b = n_basic_blocks; --b >= 0; )
5359 sbitmap_copy (tmp[b], dominators[b]);
5360 RESET_BIT (tmp[b], b);
5363 /* Subtract out all of our dominator's dominators. */
5364 for (b = n_basic_blocks; --b >= 0; )
5366 sbitmap tmp_b = tmp[b];
5369 for (s = n_basic_blocks; --s >= 0; )
5370 if (TEST_BIT (tmp_b, s))
5371 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5374 /* Find the one bit set in the bitmap and put it in the output array. */
5375 for (b = n_basic_blocks; --b >= 0; )
5378 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5381 sbitmap_vector_free (tmp);
5384 /* Count for a single SET rtx, X. */
5387 count_reg_sets_1 (x)
5391 register rtx reg = SET_DEST (x);
5393 /* Find the register that's set/clobbered. */
5394 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5395 || GET_CODE (reg) == SIGN_EXTRACT
5396 || GET_CODE (reg) == STRICT_LOW_PART)
5397 reg = XEXP (reg, 0);
5399 if (GET_CODE (reg) == PARALLEL
5400 && GET_MODE (reg) == BLKmode)
5403 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5404 count_reg_sets_1 (XVECEXP (reg, 0, i));
5408 if (GET_CODE (reg) == REG)
5410 regno = REGNO (reg);
5411 if (regno >= FIRST_PSEUDO_REGISTER)
5413 /* Count (weighted) references, stores, etc. This counts a
5414 register twice if it is modified, but that is correct. */
5415 REG_N_SETS (regno)++;
5416 REG_N_REFS (regno) += loop_depth + 1;
5421 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5422 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5428 register RTX_CODE code = GET_CODE (x);
5430 if (code == SET || code == CLOBBER)
5431 count_reg_sets_1 (x);
5432 else if (code == PARALLEL)
5435 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5437 code = GET_CODE (XVECEXP (x, 0, i));
5438 if (code == SET || code == CLOBBER)
5439 count_reg_sets_1 (XVECEXP (x, 0, i));
5444 /* Increment REG_N_REFS by the current loop depth for each register reference found in X. */
5448 count_reg_references (x)
5451 register RTX_CODE code;
5454 code = GET_CODE (x);
5474 /* If we are clobbering a MEM, mark any registers inside the address
5476 if (GET_CODE (XEXP (x, 0)) == MEM)
5477 count_reg_references (XEXP (XEXP (x, 0), 0));
5481 /* While we're here, optimize this case. */
5484 /* In case the SUBREG is not of a register, don't optimize */
5485 if (GET_CODE (x) != REG)
5487 count_reg_references (x);
5491 /* ... fall through ... */
5494 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5495 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5500 register rtx testreg = SET_DEST (x);
5503 /* If storing into MEM, don't show it as being used. But do
5504 show the address as being used. */
5505 if (GET_CODE (testreg) == MEM)
5507 count_reg_references (XEXP (testreg, 0));
5508 count_reg_references (SET_SRC (x));
5512 /* Storing in STRICT_LOW_PART is like storing in a reg
5513 in that this SET might be dead, so ignore it in TESTREG,
5514 but in some other ways it is like using the reg.
5516 Storing in a SUBREG or a bit field is like storing the entire
5517 register in that if the register's value is not used
5518 then this SET is not needed. */
5519 while (GET_CODE (testreg) == STRICT_LOW_PART
5520 || GET_CODE (testreg) == ZERO_EXTRACT
5521 || GET_CODE (testreg) == SIGN_EXTRACT
5522 || GET_CODE (testreg) == SUBREG)
5524 /* Modifying a single register in an alternate mode
5525 does not use any of the old value. But these other
5526 ways of storing in a register do use the old value. */
5527 if (GET_CODE (testreg) == SUBREG
5528 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5533 testreg = XEXP (testreg, 0);
5536 /* If this is a store into a register,
5537 recursively scan the value being stored. */
5539 if ((GET_CODE (testreg) == PARALLEL
5540 && GET_MODE (testreg) == BLKmode)
5541 || GET_CODE (testreg) == REG)
5543 count_reg_references (SET_SRC (x));
5545 count_reg_references (SET_DEST (x));
5555 /* Recursively scan the operands of this expression. */
5558 register const char *fmt = GET_RTX_FORMAT (code);
5561 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5565 /* Tail recursive case: save a function call level. */
5571 count_reg_references (XEXP (x, i));
5573 else if (fmt[i] == 'E')
5576 for (j = 0; j < XVECLEN (x, i); j++)
5577 count_reg_references (XVECEXP (x, i, j));
5583 /* Recompute register set/reference counts immediately prior to register allocation.
5586 This avoids problems with set/reference counts changing to/from values
5587 which have special meanings to the register allocators.
5589 Additionally, the reference counts are the primary component used by the
5590 register allocators to prioritize pseudos for allocation to hard regs.
5591 More accurate reference counts generally lead to better register allocation.
5593 F is the first insn to be scanned.
5595 LOOP_STEP denotes how much loop_depth should be incremented per
5596 loop nesting level in order to increase the ref count more for
5597 references in a loop.
5599 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5600 possibly other information which is used by the register allocators. */
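/* A sketch of the weighting (assumed figures, not from the original
   sources): with a loop_depth of 2, each reference adds
   loop_depth + 1 == 3 to REG_N_REFS, so a pseudo referenced once in a
   doubly nested loop is prioritized like one referenced three times at
   the top level.  */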
5603 recompute_reg_usage (f, loop_step)
5604 rtx f ATTRIBUTE_UNUSED;
5605 int loop_step ATTRIBUTE_UNUSED;
5611 /* Clear out the old data. */
5612 max_reg = max_reg_num ();
5613 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5619 /* Scan each insn in the chain and count how many times each register is set or used. */
5621 for (index = 0; index < n_basic_blocks; index++)
5623 basic_block bb = BASIC_BLOCK (index);
5624 loop_depth = bb->loop_depth;
5625 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5627 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5631 /* This call will increment REG_N_SETS for each SET or CLOBBER
5632 of a register in INSN. It will also increment REG_N_REFS
5633 by the loop depth for each set of a register in INSN. */
5634 count_reg_sets (PATTERN (insn));
5636 /* count_reg_sets does not detect autoincrement address modes, so
5637 detect them here by looking at the notes attached to INSN. */
5638 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5640 if (REG_NOTE_KIND (links) == REG_INC)
5641 /* Count (weighted) references, stores, etc. This counts a
5642 register twice if it is modified, but that is correct. */
5643 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5646 /* This call will increment REG_N_REFS by the current loop depth for
5647 each reference to a register in INSN. */
5648 count_reg_references (PATTERN (insn));
5650 /* count_reg_references will not include counts for arguments to
5651 function calls, so detect them here by examining the
5652 CALL_INSN_FUNCTION_USAGE data. */
5653 if (GET_CODE (insn) == CALL_INSN)
5657 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5659 note = XEXP (note, 1))
5660 if (GET_CODE (XEXP (note, 0)) == USE)
5661 count_reg_references (XEXP (XEXP (note, 0), 0));
5664 if (insn == bb->end)
5670 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5671 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5672 of the number of registers that died. */
5675 count_or_remove_death_notes (blocks, kill)
5681 for (i = n_basic_blocks - 1; i >= 0; --i)
5686 if (blocks && ! TEST_BIT (blocks, i))
5689 bb = BASIC_BLOCK (i);
5691 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5693 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5695 rtx *pprev = &REG_NOTES (insn);
5700 switch (REG_NOTE_KIND (link))
5703 if (GET_CODE (XEXP (link, 0)) == REG)
5705 rtx reg = XEXP (link, 0);
5708 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5711 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5719 rtx next = XEXP (link, 1);
5720 free_EXPR_LIST_node (link);
5721 *pprev = link = next;
5727 pprev = &XEXP (link, 1);
5734 if (insn == bb->end)
5742 /* Record INSN's block as BB. */
5745 set_block_for_insn (insn, bb)
5749 size_t uid = INSN_UID (insn);
5750 if (uid >= basic_block_for_insn->num_elements)
5754 /* Add one-eighth the size so we don't keep calling xrealloc. */
5755 new_size = uid + (uid + 7) / 8;
5757 VARRAY_GROW (basic_block_for_insn, new_size);
5759 VARRAY_BB (basic_block_for_insn, uid) = bb;
5762 /* Record INSN's block number as BB. */
5763 /* ??? This has got to go. */
5766 set_block_num (insn, bb)
5770 set_block_for_insn (insn, BASIC_BLOCK (bb));
5773 /* Verify CFG consistency. This function checks some CFG invariants and
5774 aborts when something is wrong. The hope is that it will help to
5775 convert many optimization passes to keep the CFG consistent.
5777 Currently it does the following checks:
5779 - test head/end pointers
5780 - overlapping of basic blocks
5781 - edge list correctness
5782 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5783 - tails of basic blocks (ensure that the boundary is necessary)
5784 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5785 and NOTE_INSN_BASIC_BLOCK
5786 - check that all insns are in the basic blocks
5787 (except the switch handling code, barriers and notes)
5789 In the future it can be extended to check a lot of other stuff as well
5790 (reachability of basic blocks, life information, etc. etc.). */
5795 const int max_uid = get_max_uid ();
5796 const rtx rtx_first = get_insns ();
5797 basic_block *bb_info;
5801 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5803 /* First pass checks head/end pointers and sets the bb_info array used by
5805 for (i = n_basic_blocks - 1; i >= 0; i--)
5807 basic_block bb = BASIC_BLOCK (i);
5809 /* Check the head pointer and make sure that it is pointing into the insn list. */
5811 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5816 error ("Head insn %d for block %d not found in the insn stream.",
5817 INSN_UID (bb->head), bb->index);
5821 /* Check the end pointer and make sure that it is pointing into the insn list. */
5823 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5825 if (bb_info[INSN_UID (x)] != NULL)
5827 error ("Insn %d is in multiple basic blocks (%d and %d)",
5828 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5831 bb_info[INSN_UID (x)] = bb;
5838 error ("End insn %d for block %d not found in the insn stream.",
5839 INSN_UID (bb->end), bb->index);
5844 /* Now check the basic blocks (boundaries etc.) */
5845 for (i = n_basic_blocks - 1; i >= 0; i--)
5847 basic_block bb = BASIC_BLOCK (i);
5848 /* Check correctness of edge lists
5856 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5858 fprintf (stderr, "Predecessor: ");
5859 dump_edge_info (stderr, e, 0);
5860 fprintf (stderr, "\nSuccessor: ");
5861 dump_edge_info (stderr, e, 1);
5865 if (e->dest != EXIT_BLOCK_PTR)
5867 edge e2 = e->dest->pred;
5868 while (e2 && e2 != e)
5872 error ("Basic block %i edge lists are corrupted", bb->index);
5884 error ("Basic block %d pred edge is corrupted", bb->index);
5885 fputs ("Predecessor: ", stderr);
5886 dump_edge_info (stderr, e, 0);
5887 fputs ("\nSuccessor: ", stderr);
5888 dump_edge_info (stderr, e, 1);
5889 fputc ('\n', stderr);
5892 if (e->src != ENTRY_BLOCK_PTR)
5894 edge e2 = e->src->succ;
5895 while (e2 && e2 != e)
5899 error ("Basic block %i edge lists are corrupted", bb->index);
5906 /* OK, the pointers are correct. Now check the header of the basic
5907 block. It ought to contain an optional CODE_LABEL followed
5908 by a NOTE_INSN_BASIC_BLOCK note.
5910 if (GET_CODE (x) == CODE_LABEL)
5914 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5920 if (GET_CODE (x) != NOTE
5921 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5922 || NOTE_BASIC_BLOCK (x) != bb)
5924 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5931 /* Do checks for empty blocks here */
5938 if (GET_CODE (x) == NOTE
5939 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5941 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5942 INSN_UID (x), bb->index);
5949 if (GET_CODE (x) == JUMP_INSN
5950 || GET_CODE (x) == CODE_LABEL
5951 || GET_CODE (x) == BARRIER)
5953 error ("In basic block %d:", bb->index);
5954 fatal_insn ("Flow control insn inside a basic block", x);
5965 if (!bb_info[INSN_UID (x)])
5967 switch (GET_CODE (x))
5974 /* An addr_vec is placed outside any basic block.
5976 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5977 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5978 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5983 /* But in any case, non-deletable labels can appear anywhere. */
5987 fatal_insn ("Insn outside basic block", x);
6001 /* Functions to access an edge list with a vector representation.
6002 Enough data is kept such that given an index number, the
6003 pred and succ that edge represents can be determined, or
6004 given a pred and a succ, its index number can be returned.
6005 This allows algorithms which consume a lot of memory to
6006 represent the normally full matrix of edge (pred,succ) with a
6007 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6008 wasted space in the client code due to sparse flow graphs. */
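/* Usage sketch (hypothetical client code, not from the original
   sources; assumes the list constructor defined below is named
   create_edge_list as in the GCC sources):

     struct edge_list *elist = create_edge_list ();
     int ix = EDGE_INDEX (elist, pred_bb, succ_bb);

     if (ix != EDGE_INDEX_NO_EDGE)
       ...use IX as an index into a per-edge vector...
     free_edge_list (elist);

   Only NUM_EDGES (elist) slots are needed, rather than one slot per
   (pred, succ) pair.  */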
6010 /* This function initializes the edge list. Basically the entire
6011 flowgraph is processed, and all edges are assigned a number,
6012 and the data structure is filled in.
6016 struct edge_list *elist;
6022 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6026 /* Determine the number of edges in the flow graph by counting successor
6027 edges on each basic block. */
6028 for (x = 0; x < n_basic_blocks; x++)
6030 basic_block bb = BASIC_BLOCK (x);
6032 for (e = bb->succ; e; e = e->succ_next)
6035 /* Don't forget successors of the entry block. */
6036 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6039 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6040 elist->num_blocks = block_count;
6041 elist->num_edges = num_edges;
6042 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6046 /* Follow successors of the entry block, and register these edges. */
6047 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6049 elist->index_to_edge[num_edges] = e;
6053 for (x = 0; x < n_basic_blocks; x++)
6055 basic_block bb = BASIC_BLOCK (x);
6057 /* Follow all successors of blocks, and register these edges. */
6058 for (e = bb->succ; e; e = e->succ_next)
6060 elist->index_to_edge[num_edges] = e;
6067 /* This function frees the memory associated with an edge list. */
6069 free_edge_list (elist)
6070 struct edge_list *elist;
6074 free (elist->index_to_edge);
6079 /* This function provides debug output showing an edge list. */
6081 print_edge_list (f, elist)
6083 struct edge_list *elist;
6086 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6087 elist->num_blocks - 2, elist->num_edges);
6089 for (x = 0; x < elist->num_edges; x++)
6091 fprintf (f, " %-4d - edge(", x);
6092 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6093 fprintf (f,"entry,");
6095 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6097 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6098 fprintf (f,"exit)\n");
6100 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6104 /* This function provides an internal consistency check of an edge list,
6105 verifying that all edges are present, and that there are no
6108 verify_edge_list (f, elist)
6110 struct edge_list *elist;
6112 int x, pred, succ, index;
6115 for (x = 0; x < n_basic_blocks; x++)
6117 basic_block bb = BASIC_BLOCK (x);
6119 for (e = bb->succ; e; e = e->succ_next)
6121 pred = e->src->index;
6122 succ = e->dest->index;
6123 index = EDGE_INDEX (elist, e->src, e->dest);
6124 if (index == EDGE_INDEX_NO_EDGE)
6126 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6129 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6130 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6131 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6132 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6133 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6134 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6137 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6139 pred = e->src->index;
6140 succ = e->dest->index;
6141 index = EDGE_INDEX (elist, e->src, e->dest);
6142 if (index == EDGE_INDEX_NO_EDGE)
6144 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6147 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6148 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6149 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6150 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6151 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6152 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6154 /* We've verified that all the edges are in the list; now let's make sure
6155 there are no spurious edges in the list. */
6157 for (pred = 0 ; pred < n_basic_blocks; pred++)
6158 for (succ = 0 ; succ < n_basic_blocks; succ++)
6160 basic_block p = BASIC_BLOCK (pred);
6161 basic_block s = BASIC_BLOCK (succ);
6165 for (e = p->succ; e; e = e->succ_next)
6171 for (e = s->pred; e; e = e->pred_next)
6177 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6178 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6179 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6181 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6182 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6183 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6184 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6185 BASIC_BLOCK (succ)));
6187 for (succ = 0 ; succ < n_basic_blocks; succ++)
6189 basic_block p = ENTRY_BLOCK_PTR;
6190 basic_block s = BASIC_BLOCK (succ);
6194 for (e = p->succ; e; e = e->succ_next)
6200 for (e = s->pred; e; e = e->pred_next)
6206 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6207 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6208 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6210 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6211 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6212 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6213 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6214 BASIC_BLOCK (succ)));
6216 for (pred = 0 ; pred < n_basic_blocks; pred++)
6218 basic_block p = BASIC_BLOCK (pred);
6219 basic_block s = EXIT_BLOCK_PTR;
6223 for (e = p->succ; e; e = e->succ_next)
6229 for (e = s->pred; e; e = e->pred_next)
6235 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6236 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6237 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6239 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6240 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6241 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6242 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6247 /* This routine will determine what, if any, edge there is between
6248 a specified predecessor and successor. */
6251 find_edge_index (edge_list, pred, succ)
6252 struct edge_list *edge_list;
6253 basic_block pred, succ;
6256 for (x = 0; x < NUM_EDGES (edge_list); x++)
6258 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6259 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6262 return (EDGE_INDEX_NO_EDGE);
6265 /* This function will remove an edge from the flow graph. */
6270 edge last_pred = NULL;
6271 edge last_succ = NULL;
6273 basic_block src, dest;
6276 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6282 last_succ->succ_next = e->succ_next;
6284 src->succ = e->succ_next;
6286 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6292 last_pred->pred_next = e->pred_next;
6294 dest->pred = e->pred_next;
6300 /* This routine will remove any fake successor edges for a basic block.
6301 When the edge is removed, it is also removed from whatever predecessor list it is in. */
6304 remove_fake_successors (bb)
6308 for (e = bb->succ; e ; )
6312 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6317 /* This routine will remove all fake edges from the flow graph.
6318 Removing all fake successors automatically removes all
6319 fake predecessors as well. */
6321 remove_fake_edges ()
6325 for (x = 0; x < n_basic_blocks; x++)
6326 remove_fake_successors (BASIC_BLOCK (x));
6328 /* We've handled all successors except the entry block's. */
6329 remove_fake_successors (ENTRY_BLOCK_PTR);
6332 /* This function will add a fake edge between any block which has no
6333 successors and the exit block. Some data flow equations require these edges to exist. */
6336 add_noreturn_fake_exit_edges ()
6340 for (x = 0; x < n_basic_blocks; x++)
6341 if (BASIC_BLOCK (x)->succ == NULL)
6342 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6345 /* Dump the list of basic blocks in the bitmap NODES. */
6347 flow_nodes_print (str, nodes, file)
6349 const sbitmap nodes;
6354 fprintf (file, "%s { ", str);
6355 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6356 fputs ("}\n", file);
6360 /* Dump the list of exiting edges in the array EDGES. */
6362 flow_exits_print (str, edges, num_edges, file)
6370 fprintf (file, "%s { ", str);
6371 for (i = 0; i < num_edges; i++)
6372 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6373 fputs ("}\n", file);
6377 /* Dump loop related CFG information. */
6379 flow_loops_cfg_dump (loops, file)
6380 const struct loops *loops;
6385 if (! loops->num || ! file || ! loops->cfg.dom)
6388 for (i = 0; i < n_basic_blocks; i++)
6392 fprintf (file, ";; %d succs { ", i);
6393 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6394 fprintf (file, "%d ", succ->dest->index);
6395 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6399 /* Dump the DFS node order. */
6400 if (loops->cfg.dfs_order)
6402 fputs (";; DFS order: ", file);
6403 for (i = 0; i < n_basic_blocks; i++)
6404 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6410 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6412 flow_loop_nested_p (outer, loop)
6416 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6420 /* Dump the loop information specified by LOOPS to the stream FILE. */
6422 flow_loops_dump (loops, file, verbose)
6423 const struct loops *loops;
6430 num_loops = loops->num;
6431 if (! num_loops || ! file)
6434 fprintf (file, ";; %d loops found, %d levels\n",
6435 num_loops, loops->levels);
6437 for (i = 0; i < num_loops; i++)
6439 struct loop *loop = &loops->array[i];
6441 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6442 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6443 loop->header->index, loop->latch->index,
6444 loop->pre_header ? loop->pre_header->index : -1,
6445 loop->depth, loop->level,
6446 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6447 fprintf (file, ";; %d", loop->num_nodes);
6448 flow_nodes_print (" nodes", loop->nodes, file);
6449 fprintf (file, ";; %d", loop->num_exits);
6450 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6456 for (j = 0; j < i; j++)
6458 struct loop *oloop = &loops->array[j];
6460 if (loop->header == oloop->header)
6465 smaller = loop->num_nodes < oloop->num_nodes;
6467 /* If the union of LOOP and OLOOP is different from
6468 the larger of LOOP and OLOOP then LOOP and OLOOP
6469 must be disjoint. */
6470 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6471 smaller ? oloop : loop);
6472 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6473 loop->header->index, i, j,
6474 disjoint ? "disjoint" : "nested");
6481 /* Print diagnostics to compare our concept of a loop with
6482 what the loop notes say. */
6483 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6484 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6485 != NOTE_INSN_LOOP_BEG)
6486 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6487 INSN_UID (PREV_INSN (loop->first->head)));
6488 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6489 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6490 != NOTE_INSN_LOOP_END)
6491 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6492 INSN_UID (NEXT_INSN (loop->last->end)));
6497 flow_loops_cfg_dump (loops, file);
6501 /* Free all the memory allocated for LOOPS. */
6503 flow_loops_free (loops)
6504 struct loops *loops;
6513 /* Free the loop descriptors. */
6514 for (i = 0; i < loops->num; i++)
6516 struct loop *loop = &loops->array[i];
6519 sbitmap_free (loop->nodes);
6523 free (loops->array);
6524 loops->array = NULL;
6527 sbitmap_vector_free (loops->cfg.dom);
6528 if (loops->cfg.dfs_order)
6529 free (loops->cfg.dfs_order);
6531 sbitmap_free (loops->shared_headers);
6536 /* Find the exits from the loop using the bitmap of loop nodes NODES
6537 and store in EXITS array. Return the number of exits from the
6540 flow_loop_exits_find (nodes, exits)
6541 const sbitmap nodes;
6550 /* Check all nodes within the loop to see if there are any
6551 successors not in the loop. Note that a node may have multiple exit edges. */
6554 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6555 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6557 basic_block dest = e->dest;
6559 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6567 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6569 /* Store all exiting edges into an array. */
6571 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6572 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6574 basic_block dest = e->dest;
6576 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6577 (*exits)[num_exits++] = e;
6585 /* Find the nodes contained within the loop with header HEADER and
6586 latch LATCH and store in NODES. Return the number of nodes within the loop. */
6589 flow_loop_nodes_find (header, latch, nodes)
6598 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6601 /* Start with only the loop header in the set of loop nodes. */
6602 sbitmap_zero (nodes);
6603 SET_BIT (nodes, header->index);
6605 header->loop_depth++;
6607 /* Push the loop latch on to the stack. */
6608 if (! TEST_BIT (nodes, latch->index))
6610 SET_BIT (nodes, latch->index);
6611 latch->loop_depth++;
6613 stack[sp++] = latch;
6622 for (e = node->pred; e; e = e->pred_next)
6624 basic_block ancestor = e->src;
6626 /* If the ancestor is not yet marked as part of the loop, add it
6627 to the set of loop nodes and push it on to the stack. */
6628 if (ancestor != ENTRY_BLOCK_PTR
6629 && ! TEST_BIT (nodes, ancestor->index))
6631 SET_BIT (nodes, ancestor->index);
6632 ancestor->loop_depth++;
6634 stack[sp++] = ancestor;
6643 /* Compute the depth first search order and store in the array
6644 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6645 number of nodes visited. */
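/* Numbering sketch (not from the original sources): each node is
   assigned n_basic_blocks - completion_number, so the last node to
   finish gets the smallest dfs_order value.  For ENTRY -> 0 -> 1 -> 2,
   block 2 finishes first and gets dfs_order 2, block 1 gets 1 and
   block 0 gets 0, i.e. a reverse completion order.  */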
6647 flow_depth_first_order_compute (dfs_order)
6656 /* Allocate stack for back-tracking up CFG. */
6657 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6660 /* Allocate bitmap to track nodes that have been visited. */
6661 visited = sbitmap_alloc (n_basic_blocks);
6663 /* None of the nodes in the CFG have been visited yet. */
6664 sbitmap_zero (visited);
6666 /* Start with the first successor edge from the entry block. */
6667 e = ENTRY_BLOCK_PTR->succ;
6670 basic_block src = e->src;
6671 basic_block dest = e->dest;
6673 /* Mark that we have visited this node. */
6674 if (src != ENTRY_BLOCK_PTR)
6675 SET_BIT (visited, src->index);
6677 /* If this node has not been visited before, push the current
6678 edge on to the stack and proceed with the first successor
6679 edge of this node. */
6680 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6688 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6691 /* DEST has no successors (for example, a non-returning
6692 function is called) so do not push the current edge
6693 but carry on with its next successor. */
6694 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6695 SET_BIT (visited, dest->index);
6698 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6700 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6702 /* Pop edge off stack. */
6710 sbitmap_free (visited);
6712 /* The number of nodes visited should not be greater than n_basic_blocks. */
6714 if (dfsnum > n_basic_blocks)
6717 /* There are some nodes left in the CFG that are unreachable. */
6718 if (dfsnum < n_basic_blocks)
6724 /* Return the block for the pre-header of the loop with header
6725 HEADER where DOM specifies the dominator information. Return NULL if
6726 there is no pre-header. */
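/* Example (not from the original sources): in

     ENTRY -> 0;  0 -> 1;  1 -> 2;  2 -> 1

   block 1 is a loop header with predecessors 0 and 2.  Block 2 is
   dominated by the header but block 0 is not, so block 0 is the unique
   pre-header.  A second edge into block 1 from outside the loop would
   make the function return NULL.  */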
6728 flow_loop_pre_header_find (header, dom)
6732 basic_block pre_header;
6735 /* If block p is a predecessor of the header and is the only block
6736 that the header does not dominate, then it is the pre-header. */
6738 for (e = header->pred; e; e = e->pred_next)
6740 basic_block node = e->src;
6742 if (node != ENTRY_BLOCK_PTR
6743 && ! TEST_BIT (dom[node->index], header->index))
6745 if (pre_header == NULL)
6749 /* There are multiple edges into the header from outside
6750 the loop so there is no pre-header block. */
6760 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6761 previously added. The insertion algorithm assumes that the loops
6762 are added in the order found by a depth first search of the CFG. */
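/* Insertion sketch (not from the original sources): if PREVLOOP
   contains LOOP, LOOP becomes PREVLOOP's inner child; otherwise we
   walk out through PREVLOOP's ancestors until one contains LOOP and
   link LOOP in as a sibling at that level.  E.g. after adding loop A
   and its inner loop B, a second inner loop C of A is linked as
   B->next with C->outer == A.  */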
6764 flow_loop_tree_node_add (prevloop, loop)
6765 struct loop *prevloop;
6769 if (flow_loop_nested_p (prevloop, loop))
6771 prevloop->inner = loop;
6772 loop->outer = prevloop;
6776 while (prevloop->outer)
6778 if (flow_loop_nested_p (prevloop->outer, loop))
6780 prevloop->next = loop;
6781 loop->outer = prevloop->outer;
6784 prevloop = prevloop->outer;
6787 prevloop->next = loop;
6792 /* Build the loop hierarchy tree for LOOPS. */
6794 flow_loops_tree_build (loops)
6795 struct loops *loops;
6800 num_loops = loops->num;
6804 /* Root the loop hierarchy tree with the first loop found.
6805 Since we used a depth first search this should be the outermost loop. */
6807 loops->tree = &loops->array[0];
6808 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6810 /* Add the remaining loops to the tree. */
6811 for (i = 1; i < num_loops; i++)
6812 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6816 /* Helper function to compute loop nesting depth and enclosed loop level
6817 for the natural loop specified by LOOP at the loop depth DEPTH.
6818 Returns the loop level. */
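/* Example (not from the original sources): for a triple nest
   A { B { C } } the depths assigned are A:1, B:2, C:3, while the
   levels are C:1, B:2, A:3; depth counts enclosing loops from the
   outside in, level counts enclosed loop levels from the inside out.  */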
6820 flow_loop_level_compute (loop, depth)
6830 /* Traverse loop tree assigning depth and computing level as the
6831 maximum level of all the inner loops of this loop. The loop
6832 level is equivalent to the height of the loop in the loop tree
6833 and corresponds to the number of enclosed loop levels (including itself). */
6835 for (inner = loop->inner; inner; inner = inner->next)
6839 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6844 loop->level = level;
6845 loop->depth = depth;
6850 /* Compute the loop nesting depth and enclosed loop level for the loop
6851 hierarchy tree specified by LOOPS. Return the maximum enclosed loop level. */
6855 flow_loops_level_compute (loops)
6856 struct loops *loops;
6862 /* Traverse all the outer level loops. */
6863 for (loop = loops->tree; loop; loop = loop->next)
6865 level = flow_loop_level_compute (loop, 1);
6873 /* Find all the natural loops in the function and save in LOOPS structure
6874 and recalculate loop_depth information in basic block structures.
6875 Return the number of natural loops found. */
6878 flow_loops_find (loops)
6879 struct loops *loops;
6890 loops->array = NULL;
6894 /* Taking care of this degenerate case makes the rest of
6895 this code simpler. */
6896 if (n_basic_blocks == 0)
6899 /* Compute the dominators. */
6900 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6901 compute_flow_dominators (dom, NULL);
6903 /* Count the number of loop edges (back edges). This should be the
6904 same as the number of natural loops. Also clear the loop_depth
6905 of each block; as we work through a loop nest we call
6906 flow_loop_nodes_find, which will increment loop_depth for nodes
6907 within the current loop, which happens to enclose inner loops. */
6910 for (b = 0; b < n_basic_blocks; b++)
6912 BASIC_BLOCK (b)->loop_depth = 0;
6913 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6915 basic_block latch = e->src;
6917 /* Look for back edges where a predecessor is dominated
6918 by this block. A natural loop has a single entry
6919 node (header) that dominates all the nodes in the
6920 loop. It also has a single back edge to the header
6921 from a latch node. Note that multiple natural loops
6922 may share the same header. */
6923 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6930 /* Compute depth first search order of the CFG so that outer
6931 natural loops will be found before inner natural loops. */
6932 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6933 flow_depth_first_order_compute (dfs_order);
6935 /* Allocate loop structures. */
6937 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6939 headers = sbitmap_alloc (n_basic_blocks);
6940 sbitmap_zero (headers);
6942 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6943 sbitmap_zero (loops->shared_headers);
6945 /* Find and record information about all the natural loops
6948 for (b = 0; b < n_basic_blocks; b++)
6952 /* Search the nodes of the CFG in DFS order so that we can find
6953 outer loops first. */
6954 header = BASIC_BLOCK (dfs_order[b]);
6956 /* Look for all the possible latch blocks for this header. */
6957 for (e = header->pred; e; e = e->pred_next)
6959 basic_block latch = e->src;
6961 /* Look for back edges where a predecessor is dominated
6962 by this block. A natural loop has a single entry
6963 node (header) that dominates all the nodes in the
6964 loop. It also has a single back edge to the header
6965 from a latch node. Note that multiple natural loops
6966 may share the same header. */
6967 if (latch != ENTRY_BLOCK_PTR
6968 && TEST_BIT (dom[latch->index], header->index))
6972 loop = loops->array + num_loops;
6974 loop->header = header;
6975 loop->latch = latch;
6977 /* Keep track of blocks that are loop headers so
6978 that we can tell which loops should be merged. */
6979 if (TEST_BIT (headers, header->index))
6980 SET_BIT (loops->shared_headers, header->index);
6981 SET_BIT (headers, header->index);
6983 /* Find nodes contained within the loop. */
6984 loop->nodes = sbitmap_alloc (n_basic_blocks);
6986 = flow_loop_nodes_find (header, latch, loop->nodes);
6988 /* Compute first and last blocks within the loop.
6989 These are often the same as the loop header and
6990 loop latch respectively, but this is not always the case. */
6993 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
6995 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
6997 /* Find edges which exit the loop. Note that a node
6998 may have several exit edges. */
7000 = flow_loop_exits_find (loop->nodes, &loop->exits);
7002 /* Look to see if the loop has a pre-header node. */
7004 = flow_loop_pre_header_find (header, dom);
7011 /* Natural loops with shared headers may either be disjoint or
7012 nested. Disjoint loops with shared headers cannot be inner
7013 loops and should be merged. For now just mark loops that share
7015 for (i = 0; i < num_loops; i++)
7016 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7017 loops->array[i].shared = 1;
7019 sbitmap_free (headers);
7022 loops->num = num_loops;
7024 /* Save CFG derived information to avoid recomputing it. */
7025 loops->cfg.dom = dom;
7026 loops->cfg.dfs_order = dfs_order;
7028 /* Build the loop hierarchy tree. */
7029 flow_loops_tree_build (loops);
7031 /* Assign the loop nesting depth and enclosed loop level for each
7033 loops->levels = flow_loops_level_compute (loops);
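
/* A minimal usage sketch, for illustration only (the function name
   example_dump_natural_loops is hypothetical and nothing in the
   compiler calls it): once the CFG exists, flow_loops_find fills in a
   caller-provided struct loops whose array can then be walked, and
   flow_loops_free releases the memory again.  */

#if 0
static void
example_dump_natural_loops (file)
     FILE *file;
{
  struct loops loops;
  int num_loops, i;

  num_loops = flow_loops_find (&loops);
  for (i = 0; i < num_loops; i++)
    fprintf (file, "loop %d: header %d, latch %d, %d nodes\n",
	     i, loops.array[i].header->index,
	     loops.array[i].latch->index,
	     loops.array[i].num_nodes);
  flow_loops_free (&loops);
}
#endif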
/* Return non-zero if edge E enters the header of LOOP from outside of
   LOOP.  */

int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     const edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}
/* Per-basic-block bookkeeping for the block reordering pass, hung off
   each block's aux field.  */
typedef struct reorder_block_def {
  int flags;
  int index;
  basic_block add_jump;
  edge succ;
  rtx end;
  int block_begin;
  int block_end;
} *reorder_block_def;

#define REORDER_BLOCK_HEAD	0x1
#define REORDER_BLOCK_VISITED	0x2
#define REORDER_MOVED_BLOCK_END	0x3

#define REORDER_BLOCK_FLAGS(bb) \
  ((reorder_block_def) (bb)->aux)->flags

#define REORDER_BLOCK_INDEX(bb) \
  ((reorder_block_def) (bb)->aux)->index

#define REORDER_BLOCK_ADD_JUMP(bb) \
  ((reorder_block_def) (bb)->aux)->add_jump

#define REORDER_BLOCK_SUCC(bb) \
  ((reorder_block_def) (bb)->aux)->succ

#define REORDER_BLOCK_OLD_END(bb) \
  ((reorder_block_def) (bb)->aux)->end

#define REORDER_BLOCK_BEGIN(bb) \
  ((reorder_block_def) (bb)->aux)->block_begin

#define REORDER_BLOCK_END(bb) \
  ((reorder_block_def) (bb)->aux)->block_end
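
/* A minimal sketch of the aux convention assumed by the macros above
   (illustrative only; the real allocation happens in
   reorder_basic_blocks below): every block's aux field is pointed at a
   zero-initialized reorder_block_def, after which the accessors behave
   like ordinary lvalues.  */

#if 0
static void
example_init_reorder_aux (bb)
     basic_block bb;
{
  bb->aux = xcalloc (1, sizeof (struct reorder_block_def));
  REORDER_BLOCK_INDEX (bb) = 0;
  REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
}
#endif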
static int reorder_index;
static basic_block reorder_last_visited;

enum reorder_skip_type {REORDER_SKIP_BEFORE, REORDER_SKIP_AFTER,
			REORDER_SKIP_BLOCK_END};

static rtx skip_insns_between_block	PARAMS ((basic_block,
						 enum reorder_skip_type));
/* Skip over insns BEFORE or AFTER BB which are typically associated with
   basic block BB.  */

static rtx
skip_insns_between_block (bb, skip_type)
     basic_block bb;
     enum reorder_skip_type skip_type;
{
  rtx insn, last_insn;

  if (skip_type == REORDER_SKIP_BEFORE)
    {
      if (bb == ENTRY_BLOCK_PTR)
	return 0;

      last_insn = bb->head;
      for (insn = PREV_INSN (bb->head);
	   insn && insn != BASIC_BLOCK (bb->index - 1)->end;
	   last_insn = insn, insn = PREV_INSN (insn))
	{
	  if (NEXT_INSN (insn) != last_insn)
	    break;

	  if (GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
	    continue;

	  break;
	}
    }
  else
    {
      last_insn = bb->end;

      if (bb == EXIT_BLOCK_PTR)
	return 0;

      for (insn = NEXT_INSN (bb->end);
	   insn;
	   last_insn = insn, insn = NEXT_INSN (insn))
	{
	  if (bb->index + 1 != n_basic_blocks
	      && insn == BASIC_BLOCK (bb->index + 1)->head)
	    break;

	  if (GET_CODE (insn) == BARRIER
	      || GET_CODE (insn) == JUMP_INSN
	      || (GET_CODE (insn) == NOTE
		  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
		      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)))
	    continue;

	  /* Skip over a jump table, keeping its code label with it.  */
	  if (GET_CODE (insn) == CODE_LABEL
	      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
	      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
		  || GET_CODE (PATTERN
			       (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	    {
	      insn = NEXT_INSN (insn);
	      continue;
	    }

	  break;
	}

      if (skip_type == REORDER_SKIP_BLOCK_END)
	{
	  int found_block_end = 0;

	  for (; insn; last_insn = insn, insn = NEXT_INSN (insn))
	    {
	      if (bb->index + 1 != n_basic_blocks
		  && insn == BASIC_BLOCK (bb->index + 1)->head)
		break;

	      if (GET_CODE (insn) == NOTE
		  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
		{
		  found_block_end = 1;
		  continue;
		}

	      if (GET_CODE (insn) == NOTE
		  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
		continue;

	      if (GET_CODE (insn) == NOTE
		  && NOTE_LINE_NUMBER (insn) >= 0
		  && NEXT_INSN (insn)
		  && GET_CODE (NEXT_INSN (insn)) == NOTE
		  && (NOTE_LINE_NUMBER (NEXT_INSN (insn))
		      == NOTE_INSN_BLOCK_END))
		continue;

	      break;
	    }

	  if (! found_block_end)
	    last_insn = 0;
	}
    }

  return last_insn;
}
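
/* Illustrative fragment (hypothetical; not part of the pass) showing
   what the three skip types yield for a block BB: the first insn
   associated with BB including any leading notes, the last insn
   including any trailing barrier, jump, or jump table, and the last
   insn extended over trailing NOTE_INSN_BLOCK_END notes (or zero if
   there are none).  */

#if 0
  {
    basic_block bb = BASIC_BLOCK (0);
    rtx first, last, past_block_end;

    first = skip_insns_between_block (bb, REORDER_SKIP_BEFORE);
    last = skip_insns_between_block (bb, REORDER_SKIP_AFTER);
    past_block_end = skip_insns_between_block (bb, REORDER_SKIP_BLOCK_END);
  }
#endif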
/* Return the common destination for blocks BB0 and BB1, or zero if
   they have none.  */

static basic_block
get_common_dest (bb0, bb1)
     basic_block bb0, bb1;
{
  edge e0, e1;

  for (e0 = bb0->succ; e0; e0 = e0->succ_next)
    for (e1 = bb1->succ; e1; e1 = e1->succ_next)
      if (e0->dest == e1->dest)
	return e0->dest;

  return 0;
}
/* Move the destination block for edge E after chain end block CEB.
   Adding jumps and labels is deferred until fixup_reorder_chain.  */

static basic_block
chain_reorder_blocks (e, ceb)
     edge e;
     basic_block ceb;
{
  basic_block sb = e->src;
  basic_block db = e->dest;
  rtx cebe_insn, cebbe_insn, dbh_insn, dbe_insn;
  edge ee, last_edge;

  enum cond_types {NO_COND, PREDICT_THEN_WITH_ELSE, PREDICT_ELSE,
		   PREDICT_THEN_NO_ELSE, PREDICT_NOT_THEN_NO_ELSE};
  enum cond_types cond_type;
  enum cond_block_types {NO_COND_BLOCK, THEN_BLOCK, ELSE_BLOCK,
			 NO_ELSE_BLOCK};
  enum cond_block_types cond_block_type;

  if (rtl_dump_file)
    fprintf (rtl_dump_file,
	     "Edge from basic block %d to basic block %d last visited %d\n",
	     sb->index, db->index, ceb->index);

  dbh_insn = skip_insns_between_block (db, REORDER_SKIP_BEFORE);
  cebe_insn = skip_insns_between_block (ceb, REORDER_SKIP_AFTER);
  cebbe_insn = skip_insns_between_block (ceb, REORDER_SKIP_BLOCK_END);

  {
    int block_begins = 0;
    rtx insn;

    for (insn = dbh_insn; insn && insn != db->end; insn = NEXT_INSN (insn))
      {
	if (GET_CODE (insn) == NOTE
	    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    block_begins += 1;
	    break;
	  }
      }
    REORDER_BLOCK_BEGIN (sb) = block_begins;
  }

  if (cebbe_insn)
    {
      int block_ends = 0;
      rtx insn;

      for (insn = cebe_insn; insn; insn = NEXT_INSN (insn))
	{
	  if (PREV_INSN (insn) == cebbe_insn)
	    break;
	  if (GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    {
	      block_ends += 1;
	      continue;
	    }
	}
      REORDER_BLOCK_END (ceb) = block_ends;
    }

  /* Blocks are in original order.  */
  if (sb->index == ceb->index
      && ceb->index + 1 == db->index && NEXT_INSN (cebe_insn))
    return db;

  /* Get the type of block and type of condition.  */
  cond_type = NO_COND;
  cond_block_type = NO_COND_BLOCK;
  if (GET_CODE (sb->end) == JUMP_INSN && ! simplejump_p (sb->end)
      && condjump_p (sb->end))
    {
      if (e->flags & EDGE_FALLTHRU)
	cond_block_type = THEN_BLOCK;
      else if (get_common_dest (sb->succ->dest, sb))
	cond_block_type = NO_ELSE_BLOCK;
      else
	cond_block_type = ELSE_BLOCK;

      if (sb->succ->succ_next
	  && get_common_dest (sb->succ->dest, sb))
	{
	  if (cond_block_type == THEN_BLOCK)
	    {
	      if (! (REORDER_BLOCK_FLAGS (sb->succ->succ_next->dest)
		     & REORDER_BLOCK_VISITED))
		cond_type = PREDICT_THEN_NO_ELSE;
	      else
		cond_type = PREDICT_NOT_THEN_NO_ELSE;
	    }
	  else if (cond_block_type == NO_ELSE_BLOCK)
	    {
	      if (! (REORDER_BLOCK_FLAGS (sb->succ->dest)
		     & REORDER_BLOCK_VISITED))
		cond_type = PREDICT_NOT_THEN_NO_ELSE;
	      else
		cond_type = PREDICT_THEN_NO_ELSE;
	    }
	}
      else
	{
	  if (cond_block_type == THEN_BLOCK)
	    {
	      if (! (REORDER_BLOCK_FLAGS (sb->succ->succ_next->dest)
		     & REORDER_BLOCK_VISITED))
		cond_type = PREDICT_THEN_WITH_ELSE;
	      else
		cond_type = PREDICT_ELSE;
	    }
	  else if (cond_block_type == ELSE_BLOCK
		   && sb->succ->dest != EXIT_BLOCK_PTR)
	    {
	      if (! (REORDER_BLOCK_FLAGS (sb->succ->dest)
		     & REORDER_BLOCK_VISITED))
		cond_type = PREDICT_ELSE;
	      else
		cond_type = PREDICT_THEN_WITH_ELSE;
	    }
	}
    }

  if (rtl_dump_file)
    {
      static const char * cond_type_str [] = {"not cond jump", "predict then",
					      "predict else",
					      "predict then w/o else",
					      "predict not then w/o else"};
      static const char * cond_block_type_str [] = {"not then or else block",
						    "then block",
						    "else block",
						    "then w/o else block"};

      fprintf (rtl_dump_file, "     %s (looking at %s)\n",
	       cond_type_str[(int)cond_type],
	       cond_block_type_str[(int)cond_block_type]);
    }

  /* Reflect that the then block will move and we'll jump to it.  */
  if (cond_block_type != THEN_BLOCK
      && (cond_type == PREDICT_ELSE
	  || cond_type == PREDICT_NOT_THEN_NO_ELSE))
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file,
		 "    then jump from block %d to block %d\n",
		 sb->index, sb->succ->dest->index);

      /* Jump to reordered then block.  */
      REORDER_BLOCK_ADD_JUMP (sb) = sb->succ->dest;
    }

  /* Reflect that the then block will jump back when we have no else.  */
  if (cond_block_type != THEN_BLOCK
      && cond_type == PREDICT_NOT_THEN_NO_ELSE)
    {
      for (ee = sb->succ->dest->succ;
	   ee && ! (ee->flags & EDGE_FALLTHRU);
	   ee = ee->succ_next)
	continue;

      if (ee && ! (GET_CODE (sb->succ->dest->end) == JUMP_INSN
		   && ! simplejump_p (sb->succ->dest->end)))
	REORDER_BLOCK_ADD_JUMP (sb->succ->dest) = ee->dest;
    }

  /* Reflect that the else block will jump back.  */
  if (cond_block_type == ELSE_BLOCK
      && (cond_type == PREDICT_THEN_WITH_ELSE || cond_type == PREDICT_ELSE))
    {
      last_edge = db->succ;

      if (last_edge
	  && last_edge->dest != EXIT_BLOCK_PTR
	  && GET_CODE (last_edge->dest->head) == CODE_LABEL
	  && ! (GET_CODE (db->end) == JUMP_INSN))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file,
		     "     else jump from block %d to block %d\n",
		     db->index, last_edge->dest->index);

	  REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
	}
    }

  /* This block's successor has already been reordered.  This can happen
     when we reorder a chain starting at a then or else.  */
  for (last_edge = db->succ;
       last_edge && ! (last_edge->flags & EDGE_FALLTHRU);
       last_edge = last_edge->succ_next)
    continue;

  if (last_edge
      && last_edge->dest != EXIT_BLOCK_PTR
      && (REORDER_BLOCK_FLAGS (last_edge->dest)
	  & REORDER_BLOCK_VISITED))
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file,
		 "     end of chain jump from block %d to block %d\n",
		 db->index, last_edge->dest->index);

      REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
    }

  dbh_insn = skip_insns_between_block (db, REORDER_SKIP_BEFORE);
  cebe_insn = skip_insns_between_block (ceb, REORDER_SKIP_AFTER);
  dbe_insn = skip_insns_between_block (db, REORDER_SKIP_AFTER);

  /* Leave behind any lexical block markers.  */
  if (debug_info_level > DINFO_LEVEL_TERSE
      && ceb->index + 1 < db->index)
    {
      rtx insn, last_insn = get_last_insn ();

      insn = NEXT_INSN (ceb->end);
      if (! insn)
	insn = REORDER_BLOCK_OLD_END (ceb);

      if (NEXT_INSN (cebe_insn) == 0)
	set_last_insn (cebe_insn);
      for (; insn && insn != db->head/*dbh_insn*/;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
	    {
	      cebe_insn = emit_note_after (NOTE_INSN_BLOCK_BEG, cebe_insn);
	      delete_insn (insn);
	    }
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
	    {
	      cebe_insn = emit_note_after (NOTE_INSN_BLOCK_END, cebe_insn);
	      delete_insn (insn);
	    }
	}
      set_last_insn (last_insn);
    }

  /* Rechain predicted block.  */
  NEXT_INSN (cebe_insn) = dbh_insn;
  PREV_INSN (dbh_insn) = cebe_insn;

  REORDER_BLOCK_OLD_END (db) = NEXT_INSN (dbe_insn);
  if (db->index != n_basic_blocks - 1)
    NEXT_INSN (dbe_insn) = 0;

  return db;
}
/* Reorder blocks starting at block BB.  */

static void
make_reorder_chain (bb)
     basic_block bb;
{
  edge e;
  basic_block visited_edge = NULL;
  rtx block_end;
  int probability;

  if (bb == EXIT_BLOCK_PTR)
    return;

  /* Find the most probable block.  */
  e = bb->succ;
  block_end = bb->end;
  if (GET_CODE (block_end) == JUMP_INSN && condjump_p (block_end))
    {
      rtx note = find_reg_note (block_end, REG_BR_PROB, 0);

      if (note)
	probability = XINT (XEXP (note, 0), 0);
      else
	probability = 0;

      if (probability >= REG_BR_PROB_BASE / 2)
	e = bb->succ->succ_next;
    }

  /* Add chosen successor to chain and recurse on it.  */
  if (e && e->dest != EXIT_BLOCK_PTR
      && e->dest != e->src
      && (! (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
	  || (REORDER_BLOCK_FLAGS (e->dest) == REORDER_BLOCK_HEAD)))
    {
      if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
	{
	  REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_HEAD;
	  REORDER_BLOCK_INDEX (bb) = reorder_index++;
	  REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
	}

      if (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
	REORDER_BLOCK_FLAGS (e->dest) &= ~REORDER_BLOCK_HEAD;

      REORDER_BLOCK_SUCC (bb) = e;

      visited_edge = e->dest;

      reorder_last_visited = chain_reorder_blocks (e, bb);

      if (e->dest
	  && ! (REORDER_BLOCK_FLAGS (e->dest)
		& REORDER_BLOCK_VISITED))
	make_reorder_chain (e->dest);
    }
  else
    {
      if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
	{
	  REORDER_BLOCK_INDEX (bb) = reorder_index++;
	  REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
	}
    }

  /* Recurse on the successors.  */
  for (e = bb->succ; e; e = e->succ_next)
    {
      if (e->dest && e->dest == EXIT_BLOCK_PTR)
	continue;

      if (e->dest
	  && e->dest != e->src
	  && e->dest != visited_edge
	  && ! (REORDER_BLOCK_FLAGS (e->dest)
		& REORDER_BLOCK_VISITED))
	{
	  reorder_last_visited
	    = chain_reorder_blocks (e, reorder_last_visited);
	  make_reorder_chain (e->dest);
	}
    }
}
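
/* A sketch of the successor-selection heuristic used above
   (illustrative only; the function name example_likely_succ is
   hypothetical).  A REG_BR_PROB note holds the probability, scaled by
   REG_BR_PROB_BASE, that the conditional branch is taken; at or above
   half the base the branch target (assumed here, as above, to be
   reached through the second successor edge) is the likelier
   destination, otherwise the fallthru edge is preferred.  */

#if 0
static edge
example_likely_succ (bb)
     basic_block bb;
{
  rtx note = find_reg_note (bb->end, REG_BR_PROB, 0);

  if (note && XINT (XEXP (note, 0), 0) >= REG_BR_PROB_BASE / 2)
    return bb->succ->succ_next;	/* Branch predicted taken.  */
  return bb->succ;		/* Branch predicted not taken.  */
}
#endif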
/* Fixup jumps and labels after reordering basic blocks.  */

static void
fixup_reorder_chain ()
{
  int i, j;
  rtx insn;

  /* Set the new last insn.  */
  for (i = 0;
       i < n_basic_blocks - 1
	 && REORDER_BLOCK_INDEX (BASIC_BLOCK (i)) != n_basic_blocks;
       i++)
    continue;

  for (insn = BASIC_BLOCK (i)->head;
       NEXT_INSN (insn) != 0;
       insn = NEXT_INSN (insn))
    continue;

  set_last_insn (insn);

  /* Add jumps and labels to fixup blocks.  */
  for (i = 0; i < n_basic_blocks - 1; i++)
    {
      if (REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i)))
	{
	  rtx new_label = gen_label_rtx ();
	  rtx label_insn, jump_insn, barrier_insn;

	  label_insn = emit_label_before (new_label,
			  REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i))->head);
	  REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i))->head = label_insn;

	  jump_insn = emit_jump_insn_after (gen_jump (label_insn),
					    BASIC_BLOCK (i)->end);
	  JUMP_LABEL (jump_insn) = label_insn;
	  ++LABEL_NUSES (label_insn);
	  barrier_insn = emit_barrier_after (jump_insn);
	  if (GET_CODE (BASIC_BLOCK (i)->end) != JUMP_INSN)
	    BASIC_BLOCK (i)->end = barrier_insn;
	  /* Add block for jump.  Typically this is when a then is not
	     predicted and we are jumping to the moved then block.  */
	  else
	    {
	      basic_block b;

	      b = (basic_block) obstack_alloc (function_obstack,
					       sizeof (*b));
	      VARRAY_GROW (basic_block_info, ++n_basic_blocks);
	      BASIC_BLOCK (n_basic_blocks - 1) = b;
	      b->index = n_basic_blocks - 1;
	      b->head = emit_note_before (NOTE_INSN_BASIC_BLOCK, jump_insn);
	      NOTE_BASIC_BLOCK (b->head) = b;
	      b->end = barrier_insn;

	      {
		basic_block nb = BASIC_BLOCK (n_basic_blocks - 1);

		nb->global_live_at_start
		  = OBSTACK_ALLOC_REG_SET (function_obstack);
		nb->global_live_at_end
		  = OBSTACK_ALLOC_REG_SET (function_obstack);

		COPY_REG_SET (nb->global_live_at_start,
			      BASIC_BLOCK (i)->global_live_at_start);
		COPY_REG_SET (nb->global_live_at_end,
			      BASIC_BLOCK (i)->global_live_at_start);

		if (BASIC_BLOCK (i)->local_set)
		  {
		    nb->local_set
		      = OBSTACK_ALLOC_REG_SET (function_obstack);
		    COPY_REG_SET (nb->local_set, BASIC_BLOCK (i)->local_set);
		  }
		else
		  BASIC_BLOCK (nb->index)->local_set = 0;

		nb->aux = xcalloc (1, sizeof (struct reorder_block_def));
		REORDER_BLOCK_INDEX (BASIC_BLOCK (n_basic_blocks - 1))
		  = REORDER_BLOCK_INDEX (BASIC_BLOCK (i)) + 1;

		/* Relink to new block.  */
		nb->succ = BASIC_BLOCK (i)->succ;
		nb->succ->src = nb;

		make_edge (0, BASIC_BLOCK (i), nb, 0);
		BASIC_BLOCK (i)->succ->succ_next
		  = BASIC_BLOCK (i)->succ->succ_next->succ_next;
		nb->succ->succ_next = 0;

		/* Fix reorder block index to reflect new block.  */
		for (j = 0; j < n_basic_blocks - 1; j++)
		  {
		    basic_block bbj = BASIC_BLOCK (j);
		    basic_block bbi = BASIC_BLOCK (i);

		    if (REORDER_BLOCK_INDEX (bbj)
			>= REORDER_BLOCK_INDEX (bbi) + 1)
		      REORDER_BLOCK_INDEX (bbj)++;
		  }
	      }
	    }
	}
    }
}
/* Reorder basic blocks.  */

void
reorder_basic_blocks ()
{
  int i, j;
  struct loops loops_info;
  int num_loops;
  rtx last_insn;

  if (profile_arc_flag)
    return;

  if (n_basic_blocks <= 1)
    return;

  /* Exception edges are not currently handled.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      edge e;

      for (e = BASIC_BLOCK (i)->succ; e && ! (e->flags & EDGE_EH);
	   e = e->succ_next)
	continue;

      if (e && (e->flags & EDGE_EH))
	return;
    }

  reorder_index = 0;

  /* Find natural loops using the CFG.  */
  num_loops = flow_loops_find (&loops_info);

  /* Dump loop information.  */
  flow_loops_dump (&loops_info, rtl_dump_file, 0);

  /* Estimate using heuristics if no profiling info is available.  */
  if (! flag_branch_probabilities)
    estimate_probability (&loops_info);

  reorder_last_visited = BASIC_BLOCK (0);

  for (i = 0; i < n_basic_blocks; i++)
    BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));

  last_insn
    = NEXT_INSN (skip_insns_between_block (BASIC_BLOCK (n_basic_blocks - 1),
					   REORDER_SKIP_AFTER));

  make_reorder_chain (BASIC_BLOCK (0));

  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  /* Verify that the insn chain is properly doubly-linked.  */
  {
    rtx insn, last_insn;

    last_insn = get_insns ();
    for (insn = NEXT_INSN (get_insns ());
	 insn && PREV_INSN (insn) == last_insn
	   && NEXT_INSN (PREV_INSN (insn)) == insn;
	 last_insn = insn,
	   insn = NEXT_INSN (insn))
      continue;

    if (insn)
      {
	if (rtl_dump_file)
	  fprintf (rtl_dump_file, "insn chaining error at %d\n",
		   INSN_UID (last_insn));
	abort ();
      }
  }
#endif

  /* Put basic_block_info in new order.  */
  for (i = 0; i < n_basic_blocks - 1; i++)
    {
      for (j = i; i != REORDER_BLOCK_INDEX (BASIC_BLOCK (j)); j++)
	continue;

      if (REORDER_BLOCK_INDEX (BASIC_BLOCK (j)) == i
	  && i != j)
	{
	  basic_block tempbb;
	  int temprbi;
	  int rbi = REORDER_BLOCK_INDEX (BASIC_BLOCK (j));

	  temprbi = BASIC_BLOCK (rbi)->index;
	  BASIC_BLOCK (rbi)->index = BASIC_BLOCK (j)->index;
	  BASIC_BLOCK (j)->index = temprbi;
	  tempbb = BASIC_BLOCK (rbi);
	  BASIC_BLOCK (rbi) = BASIC_BLOCK (j);
	  BASIC_BLOCK (j) = tempbb;
	}
    }

  NEXT_INSN (BASIC_BLOCK (n_basic_blocks - 1)->end) = last_insn;

  for (i = 0; i < n_basic_blocks; i++)
    free (BASIC_BLOCK (i)->aux);

  /* Free loop information.  */
  flow_loops_free (&loops_info);
}
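
/* Hypothetical driver fragment, for illustration only (the real call
   site is in toplev.c and the details here are assumptions): the pass
   requires an up-to-date CFG and per-block life information, is
   controlled by -freorder-blocks, and must run before the CFG is torn
   down.  */

#if 0
  if (flag_reorder_blocks)
    reorder_basic_blocks ();
#endif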
/* Clear LOG_LINKS fields of insns in a chain.  */

void
clear_log_links (insns)
     rtx insns;
{
  rtx i;

  for (i = insns; i; i = NEXT_INSN (i))
    if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
      LOG_LINKS (i) = 0;
}