1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-99, 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "basic-block.h"
127 #include "insn-config.h"
129 #include "hard-reg-set.h"
132 #include "function.h"
136 #include "insn-flags.h"
141 #define obstack_chunk_alloc xmalloc
142 #define obstack_chunk_free free
145 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
146 the stack pointer does not matter. The value is tested only in
147 functions that have frame pointers.
148 No definition is equivalent to always zero. */
149 #ifndef EXIT_IGNORE_STACK
150 #define EXIT_IGNORE_STACK 0
153 #ifndef HAVE_epilogue
154 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
161 /* The contents of the current function definition are allocated
162 in this obstack, and all are freed at the end of the function.
163 For top-level functions, this is temporary_obstack.
164 Separate obstacks are made for nested functions. */
166 extern struct obstack *function_obstack;
168 /* Number of basic blocks in the current function. */
172 /* Number of edges in the current function. */
176 /* The basic block array. */
178 varray_type basic_block_info;
180 /* The special entry and exit blocks. */
182 struct basic_block_def entry_exit_blocks[2]
187 NULL, /* local_set */
188 NULL, /* global_live_at_start */
189 NULL, /* global_live_at_end */
191 ENTRY_BLOCK, /* index */
193 -1, -1 /* eh_beg, eh_end */
200 NULL, /* local_set */
201 NULL, /* global_live_at_start */
202 NULL, /* global_live_at_end */
204 EXIT_BLOCK, /* index */
206 -1, -1 /* eh_beg, eh_end */
210 /* Nonzero if the second flow pass has completed. */
213 /* Maximum register number used in this function, plus one. */
217 /* Indexed by n, giving various register information */
219 varray_type reg_n_info;
221 /* Size of the reg_n_info table. */
223 unsigned int reg_n_max;
225 /* Element N is the next insn that uses (hard or pseudo) register number N
226 within the current basic block; or zero, if there is no such insn.
227 This is valid only during the final backward scan in propagate_block. */
229 static rtx *reg_next_use;
231 /* Size of a regset for the current function,
232 in (1) bytes and (2) elements. */
237 /* Regset of regs live when calls to `setjmp'-like functions happen. */
238 /* ??? Does this exist only for the setjmp-clobbered warning message? */
240 regset regs_live_at_setjmp;
242 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
243 that have to go in the same hard reg.
244 The first two regs in the list are a pair, and the next two
245 are another pair, etc. */
248 /* Depth within loops of basic block being scanned for lifetime analysis,
249 plus one. This is the weight attached to references to registers. */
251 static int loop_depth;
253 /* During propagate_block, this is non-zero if the value of CC0 is live. */
257 /* During propagate_block, this contains a list of all the MEMs we are
258 tracking for dead store elimination. */
260 static rtx mem_set_list;
262 /* Set of registers that may be eliminable. These are handled specially
263 in updating regs_ever_live. */
265 static HARD_REG_SET elim_reg_set;
267 /* The basic block structure for every insn, indexed by uid. */
269 varray_type basic_block_for_insn;
271 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
272 /* ??? Should probably be using LABEL_NUSES instead. It would take a
273 bit of surgery to be able to use or co-opt the routines in jump. */
275 static rtx label_value_list;
277 /* Forward declarations */
278 static int count_basic_blocks PARAMS ((rtx));
279 static rtx find_basic_blocks_1 PARAMS ((rtx));
280 static void create_basic_block PARAMS ((int, rtx, rtx, rtx));
281 static void clear_edges PARAMS ((void));
282 static void make_edges PARAMS ((rtx));
283 static void make_label_edge PARAMS ((sbitmap *, basic_block,
285 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
286 basic_block, rtx, int));
287 static void mark_critical_edges PARAMS ((void));
288 static void move_stray_eh_region_notes PARAMS ((void));
289 static void record_active_eh_regions PARAMS ((rtx));
291 static void commit_one_edge_insertion PARAMS ((edge));
293 static void delete_unreachable_blocks PARAMS ((void));
294 static void delete_eh_regions PARAMS ((void));
295 static int can_delete_note_p PARAMS ((rtx));
296 static int delete_block PARAMS ((basic_block));
297 static void expunge_block PARAMS ((basic_block));
298 static int can_delete_label_p PARAMS ((rtx));
299 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
301 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
303 static void merge_blocks_nomove PARAMS ((basic_block, basic_block));
304 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
305 static void try_merge_blocks PARAMS ((void));
306 static void tidy_fallthru_edge PARAMS ((edge,basic_block,basic_block));
307 static void tidy_fallthru_edges PARAMS ((void));
308 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
309 static void verify_wide_reg PARAMS ((int, rtx, rtx));
310 static void verify_local_live_at_start PARAMS ((regset, basic_block));
311 static int set_noop_p PARAMS ((rtx));
312 static int noop_move_p PARAMS ((rtx));
313 static void delete_noop_moves PARAMS ((rtx));
314 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
315 static void notice_stack_pointer_modification PARAMS ((rtx));
316 static void mark_reg PARAMS ((rtx, void *));
317 static void mark_regs_live_at_end PARAMS ((regset));
318 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
319 static void propagate_block PARAMS ((basic_block, regset,
321 static int insn_dead_p PARAMS ((rtx, regset, int, rtx));
322 static int libcall_dead_p PARAMS ((rtx, regset, rtx, rtx));
323 static void mark_set_regs PARAMS ((regset, regset, rtx,
325 static void mark_set_1 PARAMS ((regset, regset, rtx,
328 static void find_auto_inc PARAMS ((regset, rtx, rtx));
329 static int try_pre_increment_1 PARAMS ((rtx));
330 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
332 static void mark_used_regs PARAMS ((regset, regset, rtx, int, rtx));
333 void dump_flow_info PARAMS ((FILE *));
334 void debug_flow_info PARAMS ((void));
335 static void dump_edge_info PARAMS ((FILE *, edge, int));
337 static void count_reg_sets_1 PARAMS ((rtx));
338 static void count_reg_sets PARAMS ((rtx));
339 static void count_reg_references PARAMS ((rtx));
340 static void invalidate_mems_from_autoinc PARAMS ((rtx));
341 static void remove_fake_successors PARAMS ((basic_block));
342 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
343 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
344 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
345 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
346 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
347 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
348 static int flow_depth_first_order_compute PARAMS ((int *));
349 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
350 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
351 static void flow_loops_tree_build PARAMS ((struct loops *));
352 static int flow_loop_level_compute PARAMS ((struct loop *, int));
353 static int flow_loops_level_compute PARAMS ((struct loops *));
354 static basic_block get_common_dest PARAMS ((basic_block, basic_block));
355 static basic_block chain_reorder_blocks PARAMS ((edge, basic_block));
356 static void make_reorder_chain PARAMS ((basic_block));
357 static void fixup_reorder_chain PARAMS ((void));
359 /* This function is always defined so it can be called from the
360 debugger, and it is declared extern so we don't get warnings about
362 void verify_flow_info PARAMS ((void));
363 int flow_loop_outside_edge_p PARAMS ((const struct loop *, edge));
365 /* Find basic blocks of the current function.
366 F is the first insn of the function and NREGS the number of register
370 find_basic_blocks (f, nregs, file)
372 int nregs ATTRIBUTE_UNUSED;
373 FILE *file ATTRIBUTE_UNUSED;
377 /* Flush out existing data. */
378 if (basic_block_info != NULL)
384 /* Clear bb->aux on all extant basic blocks. We'll use this as a
385 tag for reuse during create_basic_block, just in case some pass
386 copies around basic block notes improperly. */
387 for (i = 0; i < n_basic_blocks; ++i)
388 BASIC_BLOCK (i)->aux = NULL;
390 VARRAY_FREE (basic_block_info);
393 n_basic_blocks = count_basic_blocks (f);
395 /* Size the basic block table. The actual structures will be allocated
396 by find_basic_blocks_1, since we want to keep the structure pointers
397 stable across calls to find_basic_blocks. */
398 /* ??? This whole issue would be much simpler if we called find_basic_blocks
399 exactly once, and thereafter we don't have a single long chain of
400 instructions at all until close to the end of compilation when we
401 actually lay them out. */
403 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
405 label_value_list = find_basic_blocks_1 (f);
407 /* Record the block to which an insn belongs. */
408 /* ??? This should be done another way, by which (perhaps) a label is
409 tagged directly with the basic block that it starts. It is used for
410 more than that currently, but IMO that is the only valid use. */
412 max_uid = get_max_uid ();
414 /* Leave space for insns life_analysis makes in some cases for auto-inc.
415 These cases are rare, so we don't need too much space. */
416 max_uid += max_uid / 10;
419 compute_bb_for_insn (max_uid);
421 /* Discover the edges of our cfg. */
422 record_active_eh_regions (f);
423 make_edges (label_value_list);
425 /* Do very simple cleanup now, for the benefit of code that runs between
426 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
427 tidy_fallthru_edges ();
429 mark_critical_edges ();
431 #ifdef ENABLE_CHECKING
436 /* Count the basic blocks of the function. */
439 count_basic_blocks (f)
443 register RTX_CODE prev_code;
444 register int count = 0;
446 int call_had_abnormal_edge = 0;
447 rtx prev_call = NULL_RTX;
449 prev_code = JUMP_INSN;
450 for (insn = f; insn; insn = NEXT_INSN (insn))
452 register RTX_CODE code = GET_CODE (insn);
454 if (code == CODE_LABEL
455 || (GET_RTX_CLASS (code) == 'i'
456 && (prev_code == JUMP_INSN
457 || prev_code == BARRIER
458 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
463 /* Record whether this call created an edge. */
464 if (code == CALL_INSN)
466 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
467 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
469 call_had_abnormal_edge = 0;
471 /* If there is a specified EH region, we have an edge. */
472 if (eh_region && region > 0)
473 call_had_abnormal_edge = 1;
476 /* If there is a nonlocal goto label and the specified
477 region number isn't -1, we have an edge. (0 means
478 no throw, but might have a nonlocal goto). */
479 if (nonlocal_goto_handler_labels && region >= 0)
480 call_had_abnormal_edge = 1;
483 else if (code != NOTE)
484 prev_call = NULL_RTX;
488 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
490 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
495 /* The rest of the compiler works a bit smoother when we don't have to
496 check for the edge case of do-nothing functions with no basic blocks. */
499 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
506 /* Find all basic blocks of the function whose first insn is F.
508 Collect and return a list of labels whose addresses are taken. This
509 will be used in make_edges for use with computed gotos. */
512 find_basic_blocks_1 (f)
515 register rtx insn, next;
516 int call_has_abnormal_edge = 0;
518 rtx bb_note = NULL_RTX;
519 rtx eh_list = NULL_RTX;
520 rtx label_value_list = NULL_RTX;
524 /* We process the instructions in a slightly different way than we did
525 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
526 closed out the previous block, so that it gets attached at the proper
527 place. Since this form should be equivalent to the previous,
528 count_basic_blocks continues to use the old form as a check. */
530 for (insn = f; insn; insn = next)
532 enum rtx_code code = GET_CODE (insn);
534 next = NEXT_INSN (insn);
536 if (code == CALL_INSN)
538 /* Record whether this call created an edge. */
539 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
540 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
541 call_has_abnormal_edge = 0;
543 /* If there is an EH region, we have an edge. */
544 if (eh_list && region > 0)
545 call_has_abnormal_edge = 1;
548 /* If there is a nonlocal goto label and the specified
549 region number isn't -1, we have an edge. (0 means
550 no throw, but might have a nonlocal goto). */
551 if (nonlocal_goto_handler_labels && region >= 0)
552 call_has_abnormal_edge = 1;
560 int kind = NOTE_LINE_NUMBER (insn);
562 /* Keep a LIFO list of the currently active exception notes. */
563 if (kind == NOTE_INSN_EH_REGION_BEG)
564 eh_list = alloc_INSN_LIST (insn, eh_list);
565 else if (kind == NOTE_INSN_EH_REGION_END)
568 eh_list = XEXP (eh_list, 1);
569 free_INSN_LIST_node (t);
572 /* Look for basic block notes with which to keep the
573 basic_block_info pointers stable. Unthread the note now;
574 we'll put it back at the right place in create_basic_block.
575 Or not at all if we've already found a note in this block. */
576 else if (kind == NOTE_INSN_BASIC_BLOCK)
578 if (bb_note == NULL_RTX)
580 next = flow_delete_insn (insn);
587 /* A basic block starts at a label. If we've closed one off due
588 to a barrier or some such, no need to do it again. */
589 if (head != NULL_RTX)
591 /* While we now have edge lists with which other portions of
592 the compiler might determine a call ending a basic block
593 does not imply an abnormal edge, it will be a bit before
594 everything can be updated. So continue to emit a noop at
595 the end of such a block. */
596 if (GET_CODE (end) == CALL_INSN)
598 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
599 end = emit_insn_after (nop, end);
602 create_basic_block (i++, head, end, bb_note);
609 /* A basic block ends at a jump. */
610 if (head == NULL_RTX)
614 /* ??? Make a special check for table jumps. The way this
615 happens is truly and amazingly gross. We are about to
616 create a basic block that contains just a code label and
617 an addr*vec jump insn. Worse, an addr_diff_vec creates
618 its own natural loop.
620 Prevent this bit of brain damage, pasting things together
621 correctly in make_edges.
623 The correct solution involves emitting the table directly
624 on the tablejump instruction as a note, or JUMP_LABEL. */
626 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
627 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
635 goto new_bb_inclusive;
638 /* A basic block ends at a barrier. It may be that an unconditional
639 jump already closed the basic block -- no need to do it again. */
640 if (head == NULL_RTX)
643 /* While we now have edge lists with which other portions of the
644 compiler might determine a call ending a basic block does not
645 imply an abnormal edge, it will be a bit before everything can
646 be updated. So continue to emit a noop at the end of such a
648 if (GET_CODE (end) == CALL_INSN)
650 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
651 end = emit_insn_after (nop, end);
653 goto new_bb_exclusive;
656 /* A basic block ends at a call that can either throw or
657 do a non-local goto. */
658 if (call_has_abnormal_edge)
661 if (head == NULL_RTX)
666 create_basic_block (i++, head, end, bb_note);
667 head = end = NULL_RTX;
674 if (GET_RTX_CLASS (code) == 'i')
676 if (head == NULL_RTX)
683 if (GET_RTX_CLASS (code) == 'i')
687 /* Make a list of all labels referred to other than by jumps
688 (which just don't have the REG_LABEL notes).
690 Make a special exception for labels followed by an ADDR*VEC,
691 as this would be a part of the tablejump setup code.
693 Make a special exception for the eh_return_stub_label, which
694 we know isn't part of any otherwise visible control flow. */
696 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
697 if (REG_NOTE_KIND (note) == REG_LABEL)
699 rtx lab = XEXP (note, 0), next;
701 if (lab == eh_return_stub_label)
703 else if ((next = next_nonnote_insn (lab)) != NULL
704 && GET_CODE (next) == JUMP_INSN
705 && (GET_CODE (PATTERN (next)) == ADDR_VEC
706 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
710 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
715 if (head != NULL_RTX)
716 create_basic_block (i++, head, end, bb_note);
718 if (i != n_basic_blocks)
721 return label_value_list;
724 /* Tidy the CFG by deleting unreachable code and whatnot. */
730 delete_unreachable_blocks ();
731 move_stray_eh_region_notes ();
732 record_active_eh_regions (f);
734 mark_critical_edges ();
736 /* Kill the data we won't maintain. */
737 label_value_list = NULL_RTX;
740 /* Create a new basic block consisting of the instructions between
741 HEAD and END inclusive. Reuses the note and basic block struct
742 in BB_NOTE, if any. */
745 create_basic_block (index, head, end, bb_note)
747 rtx head, end, bb_note;
752 && ! RTX_INTEGRATED_P (bb_note)
753 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
756 /* If we found an existing note, thread it back onto the chain. */
758 if (GET_CODE (head) == CODE_LABEL)
759 add_insn_after (bb_note, head);
762 add_insn_before (bb_note, head);
768 /* Otherwise we must create a note and a basic block structure.
769 Since we allow basic block structs in rtl, give the struct
770 the same lifetime by allocating it off the function obstack
771 rather than using malloc. */
773 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
774 memset (bb, 0, sizeof (*bb));
776 if (GET_CODE (head) == CODE_LABEL)
777 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
780 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
783 NOTE_BASIC_BLOCK (bb_note) = bb;
786 /* Always include the bb note in the block. */
787 if (NEXT_INSN (end) == bb_note)
793 BASIC_BLOCK (index) = bb;
795 /* Tag the block so that we know it has been used when considering
796 other basic block notes. */
800 /* Records the basic block struct in BB_FOR_INSN, for every instruction
801 indexed by INSN_UID. MAX is the size of the array. */
804 compute_bb_for_insn (max)
809 if (basic_block_for_insn)
810 VARRAY_FREE (basic_block_for_insn);
811 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
813 for (i = 0; i < n_basic_blocks; ++i)
815 basic_block bb = BASIC_BLOCK (i);
822 int uid = INSN_UID (insn);
824 VARRAY_BB (basic_block_for_insn, uid) = bb;
827 insn = NEXT_INSN (insn);
832 /* Free the memory associated with the edge structures. */
840 for (i = 0; i < n_basic_blocks; ++i)
842 basic_block bb = BASIC_BLOCK (i);
844 for (e = bb->succ; e ; e = n)
854 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
860 ENTRY_BLOCK_PTR->succ = 0;
861 EXIT_BLOCK_PTR->pred = 0;
866 /* Identify the edges between basic blocks.
868 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
869 that are otherwise unreachable may be reachable with a non-local goto.
871 BB_EH_END is an array indexed by basic block number in which we record
872 the list of exception regions active at the end of the basic block. */
875 make_edges (label_value_list)
876 rtx label_value_list;
879 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
880 sbitmap *edge_cache = NULL;
882 /* Assume no computed jump; revise as we create edges. */
883 current_function_has_computed_jump = 0;
885 /* Heavy use of computed goto in machine-generated code can lead to
886 nearly fully-connected CFGs. In that case we spend a significant
887 amount of time searching the edge lists for duplicates. */
888 if (forced_labels || label_value_list)
890 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
891 sbitmap_vector_zero (edge_cache, n_basic_blocks);
894 /* By nature of the way these get numbered, block 0 is always the entry. */
895 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
897 for (i = 0; i < n_basic_blocks; ++i)
899 basic_block bb = BASIC_BLOCK (i);
902 int force_fallthru = 0;
904 /* Examine the last instruction of the block, and discover the
905 ways we can leave the block. */
908 code = GET_CODE (insn);
911 if (code == JUMP_INSN)
915 /* ??? Recognize a tablejump and do the right thing. */
916 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
917 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
918 && GET_CODE (tmp) == JUMP_INSN
919 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
920 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
925 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
926 vec = XVEC (PATTERN (tmp), 0);
928 vec = XVEC (PATTERN (tmp), 1);
930 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
931 make_label_edge (edge_cache, bb,
932 XEXP (RTVEC_ELT (vec, j), 0), 0);
934 /* Some targets (eg, ARM) emit a conditional jump that also
935 contains the out-of-range target. Scan for these and
936 add an edge if necessary. */
937 if ((tmp = single_set (insn)) != NULL
938 && SET_DEST (tmp) == pc_rtx
939 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
940 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
941 make_label_edge (edge_cache, bb,
942 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
944 #ifdef CASE_DROPS_THROUGH
945 /* Silly VAXen. The ADDR_VEC is going to be in the way of
946 us naturally detecting fallthru into the next block. */
951 /* If this is a computed jump, then mark it as reaching
952 everything on the label_value_list and forced_labels list. */
953 else if (computed_jump_p (insn))
955 current_function_has_computed_jump = 1;
957 for (x = label_value_list; x; x = XEXP (x, 1))
958 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
960 for (x = forced_labels; x; x = XEXP (x, 1))
961 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
964 /* Returns create an exit out. */
965 else if (returnjump_p (insn))
966 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
968 /* Otherwise, we have a plain conditional or unconditional jump. */
971 if (! JUMP_LABEL (insn))
973 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
977 /* If this is a CALL_INSN, then mark it as reaching the active EH
978 handler for this CALL_INSN. If we're handling asynchronous
979 exceptions then any insn can reach any of the active handlers.
981 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
983 if (code == CALL_INSN || asynchronous_exceptions)
985 /* If there's an EH region active at the end of a block,
986 add the appropriate edges. */
988 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
990 /* If we have asynchronous exceptions, do the same for *all*
991 exception regions active in the block. */
992 if (asynchronous_exceptions
993 && bb->eh_beg != bb->eh_end)
996 make_eh_edge (edge_cache, eh_nest_info, bb,
997 NULL_RTX, bb->eh_beg);
999 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1000 if (GET_CODE (x) == NOTE
1001 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1002 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1004 int region = NOTE_EH_HANDLER (x);
1005 make_eh_edge (edge_cache, eh_nest_info, bb,
1010 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1012 /* ??? This could be made smarter: in some cases it's possible
1013 to tell that certain calls will not do a nonlocal goto.
1015 For example, if the nested functions that do the nonlocal
1016 gotos do not have their addresses taken, then only calls to
1017 those functions or to other nested functions that use them
1018 could possibly do nonlocal gotos. */
1019 /* We do know that a REG_EH_REGION note with a value less
1020 than 0 is guaranteed not to perform a non-local goto. */
1021 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1022 if (!note || XINT (XEXP (note, 0), 0) >= 0)
1023 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1024 make_label_edge (edge_cache, bb, XEXP (x, 0),
1025 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1029 /* We know something about the structure of the function __throw in
1030 libgcc2.c. It is the only function that ever contains eh_stub
1031 labels. It modifies its return address so that the last block
1032 returns to one of the eh_stub labels within it. So we have to
1033 make additional edges in the flow graph. */
1034 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1035 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1037 /* Find out if we can drop through to the next block. */
1038 insn = next_nonnote_insn (insn);
1039 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1040 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1041 else if (i + 1 < n_basic_blocks)
1043 rtx tmp = BLOCK_HEAD (i + 1);
1044 if (GET_CODE (tmp) == NOTE)
1045 tmp = next_nonnote_insn (tmp);
1046 if (force_fallthru || insn == tmp)
1047 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1051 free_eh_nesting_info (eh_nest_info);
1053 sbitmap_vector_free (edge_cache);
1056 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1057 about the edge that is accumulated between calls. */
1060 make_edge (edge_cache, src, dst, flags)
1061 sbitmap *edge_cache;
1062 basic_block src, dst;
1068 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1069 many edges to them, and we didn't allocate memory for it. */
1070 use_edge_cache = (edge_cache
1071 && src != ENTRY_BLOCK_PTR
1072 && dst != EXIT_BLOCK_PTR);
1074 /* Make sure we don't add duplicate edges. */
1075 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1076 for (e = src->succ; e ; e = e->succ_next)
1083 e = (edge) xcalloc (1, sizeof (*e));
1086 e->succ_next = src->succ;
1087 e->pred_next = dst->pred;
1096 SET_BIT (edge_cache[src->index], dst->index);
1099 /* Create an edge from a basic block to a label. */
1102 make_label_edge (edge_cache, src, label, flags)
1103 sbitmap *edge_cache;
1108 if (GET_CODE (label) != CODE_LABEL)
1111 /* If the label was never emitted, this insn is junk, but avoid a
1112 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1113 as a result of a syntax error and a diagnostic has already been
1116 if (INSN_UID (label) == 0)
1119 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1122 /* Create the edges generated by INSN in REGION. */
1125 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1126 sbitmap *edge_cache;
1127 eh_nesting_info *eh_nest_info;
1132 handler_info **handler_list;
1135 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1136 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1139 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1140 EDGE_ABNORMAL | EDGE_EH | is_call);
1144 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1145 dangerous if we intend to move basic blocks around. Move such notes
1146 into the following block. */
1149 move_stray_eh_region_notes ()
1154 if (n_basic_blocks < 2)
1157 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1158 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1160 rtx insn, next, list = NULL_RTX;
1162 b1 = BASIC_BLOCK (i);
1163 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1165 next = NEXT_INSN (insn);
1166 if (GET_CODE (insn) == NOTE
1167 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1168 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1170 /* Unlink from the insn chain. */
1171 NEXT_INSN (PREV_INSN (insn)) = next;
1172 PREV_INSN (next) = PREV_INSN (insn);
1175 NEXT_INSN (insn) = list;
1180 if (list == NULL_RTX)
1183 /* Find where to insert these things. */
1185 if (GET_CODE (insn) == CODE_LABEL)
1186 insn = NEXT_INSN (insn);
1190 next = NEXT_INSN (list);
1191 add_insn_after (list, insn);
1197 /* Recompute eh_beg/eh_end for each basic block. */
1200 record_active_eh_regions (f)
1203 rtx insn, eh_list = NULL_RTX;
1205 basic_block bb = BASIC_BLOCK (0);
1207 for (insn = f; insn ; insn = NEXT_INSN (insn))
1209 if (bb->head == insn)
1210 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1212 if (GET_CODE (insn) == NOTE)
1214 int kind = NOTE_LINE_NUMBER (insn);
1215 if (kind == NOTE_INSN_EH_REGION_BEG)
1216 eh_list = alloc_INSN_LIST (insn, eh_list);
1217 else if (kind == NOTE_INSN_EH_REGION_END)
1219 rtx t = XEXP (eh_list, 1);
1220 free_INSN_LIST_node (eh_list);
1225 if (bb->end == insn)
1227 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1229 if (i == n_basic_blocks)
1231 bb = BASIC_BLOCK (i);
1236 /* Identify critical edges and set the bits appropriately. */
1239 mark_critical_edges ()
1241 int i, n = n_basic_blocks;
1244 /* We begin with the entry block. This is not terribly important now,
1245 but could be if a front end (Fortran) implemented alternate entry
1247 bb = ENTRY_BLOCK_PTR;
1254 /* (1) Critical edges must have a source with multiple successors. */
1255 if (bb->succ && bb->succ->succ_next)
1257 for (e = bb->succ; e ; e = e->succ_next)
1259 /* (2) Critical edges must have a destination with multiple
1260 predecessors. Note that we know there is at least one
1261 predecessor -- the edge we followed to get here. */
1262 if (e->dest->pred->pred_next)
1263 e->flags |= EDGE_CRITICAL;
1265 e->flags &= ~EDGE_CRITICAL;
1270 for (e = bb->succ; e ; e = e->succ_next)
1271 e->flags &= ~EDGE_CRITICAL;
1276 bb = BASIC_BLOCK (i);
1280 /* Split a (typically critical) edge. Return the new block.
1281 Abort on abnormal edges.
1283 ??? The code generally expects to be called on critical edges.
1284 The case of a block ending in an unconditional jump to a
1285 block with multiple predecessors is not handled optimally. */
1288 split_edge (edge_in)
1291 basic_block old_pred, bb, old_succ;
1296 /* Abnormal edges cannot be split. */
1297 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1300 old_pred = edge_in->src;
1301 old_succ = edge_in->dest;
1303 /* Remove the existing edge from the destination's pred list. */
1306 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1308 *pp = edge_in->pred_next;
1309 edge_in->pred_next = NULL;
1312 /* Create the new structures. */
1313 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1314 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1317 memset (bb, 0, sizeof (*bb));
1318 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1319 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1321 /* ??? This info is likely going to be out of date very soon. */
1322 if (old_succ->global_live_at_start)
1324 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1325 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1329 CLEAR_REG_SET (bb->global_live_at_start);
1330 CLEAR_REG_SET (bb->global_live_at_end);
1335 bb->succ = edge_out;
1338 edge_in->flags &= ~EDGE_CRITICAL;
1340 edge_out->pred_next = old_succ->pred;
1341 edge_out->succ_next = NULL;
1343 edge_out->dest = old_succ;
1344 edge_out->flags = EDGE_FALLTHRU;
1345 edge_out->probability = REG_BR_PROB_BASE;
1347 old_succ->pred = edge_out;
1349 /* Tricky case -- if there existed a fallthru into the successor
1350 (and we're not it) we must add a new unconditional jump around
1351 the new block we're actually interested in.
1353 Further, if that edge is critical, this means a second new basic
1354 block must be created to hold it. In order to simplify correct
1355 insn placement, do this before we touch the existing basic block
1356 ordering for the block we were really wanting. */
1357 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1360 for (e = edge_out->pred_next; e ; e = e->pred_next)
1361 if (e->flags & EDGE_FALLTHRU)
1366 basic_block jump_block;
1369 if ((e->flags & EDGE_CRITICAL) == 0)
1371 /* Non critical -- we can simply add a jump to the end
1372 of the existing predecessor. */
1373 jump_block = e->src;
1377 /* We need a new block to hold the jump. The simplest
1378 way to do the bulk of the work here is to recursively
1380 jump_block = split_edge (e);
1381 e = jump_block->succ;
1384 /* Now add the jump insn ... */
1385 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1387 jump_block->end = pos;
1388 if (basic_block_for_insn)
1389 set_block_for_insn (pos, jump_block);
1390 emit_barrier_after (pos);
1392 /* ... let jump know that label is in use, ... */
1393 JUMP_LABEL (pos) = old_succ->head;
1394 ++LABEL_NUSES (old_succ->head);
1396 /* ... and clear fallthru on the outgoing edge. */
1397 e->flags &= ~EDGE_FALLTHRU;
1399 /* Continue splitting the interesting edge. */
1403 /* Place the new block just in front of the successor. */
1404 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1405 if (old_succ == EXIT_BLOCK_PTR)
1406 j = n_basic_blocks - 1;
1408 j = old_succ->index;
1409 for (i = n_basic_blocks - 1; i > j; --i)
1411 basic_block tmp = BASIC_BLOCK (i - 1);
1412 BASIC_BLOCK (i) = tmp;
1415 BASIC_BLOCK (i) = bb;
1418 /* Create the basic block note.
1420 Where we place the note can have a noticable impact on the generated
1421 code. Consider this cfg:
1432 If we need to insert an insn on the edge from block 0 to block 1,
1433 we want to ensure the instructions we insert are outside of any
1434 loop notes that physically sit between block 0 and block 1. Otherwise
1435 we confuse the loop optimizer into thinking the loop is a phony. */
1436 if (old_succ != EXIT_BLOCK_PTR
1437 && PREV_INSN (old_succ->head)
1438 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1439 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1440 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1441 PREV_INSN (old_succ->head));
1442 else if (old_succ != EXIT_BLOCK_PTR)
1443 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1445 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1446 NOTE_BASIC_BLOCK (bb_note) = bb;
1447 bb->head = bb->end = bb_note;
1449 /* Not quite simple -- for non-fallthru edges, we must adjust the
1450 predecessor's jump instruction to target our new block. */
1451 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1453 rtx tmp, insn = old_pred->end;
1454 rtx old_label = old_succ->head;
1455 rtx new_label = gen_label_rtx ();
1457 if (GET_CODE (insn) != JUMP_INSN)
1460 /* ??? Recognize a tablejump and adjust all matching cases. */
1461 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1462 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1463 && GET_CODE (tmp) == JUMP_INSN
1464 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1465 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1470 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1471 vec = XVEC (PATTERN (tmp), 0);
1473 vec = XVEC (PATTERN (tmp), 1);
1475 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1476 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1478 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1479 --LABEL_NUSES (old_label);
1480 ++LABEL_NUSES (new_label);
1483 /* Handle casesi dispatch insns */
1484 if ((tmp = single_set (insn)) != NULL
1485 && SET_DEST (tmp) == pc_rtx
1486 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1487 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1488 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1490 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1492 --LABEL_NUSES (old_label);
1493 ++LABEL_NUSES (new_label);
1498 /* This would have indicated an abnormal edge. */
1499 if (computed_jump_p (insn))
1502 /* A return instruction can't be redirected. */
1503 if (returnjump_p (insn))
1506 /* If the insn doesn't go where we think, we're confused. */
1507 if (JUMP_LABEL (insn) != old_label)
1510 redirect_jump (insn, new_label);
1513 emit_label_before (new_label, bb_note);
1514 bb->head = new_label;
1520 /* Queue instructions for insertion on an edge between two basic blocks.
1521 The new instructions and basic blocks (if any) will not appear in the
1522 CFG until commit_edge_insertions is called. */
1525 insert_insn_on_edge (pattern, e)
1529 /* We cannot insert instructions on an abnormal critical edge.
1530 It will be easier to find the culprit if we die now. */
1531 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1532 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1535 if (e->insns == NULL_RTX)
1538 push_to_sequence (e->insns);
1540 emit_insn (pattern);
1542 e->insns = get_insns ();
1546 /* Update the CFG for the instructions queued on edge E. */
1549 commit_one_edge_insertion (e)
1552 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1555 /* Pull the insns off the edge now since the edge might go away. */
1557 e->insns = NULL_RTX;
1559 /* Figure out where to put these things. If the destination has
1560 one predecessor, insert there. Except for the exit block. */
1561 if (e->dest->pred->pred_next == NULL
1562 && e->dest != EXIT_BLOCK_PTR)
1566 /* Get the location correct wrt a code label, and "nice" wrt
1567 a basic block note, and before everything else. */
1569 if (GET_CODE (tmp) == CODE_LABEL)
1570 tmp = NEXT_INSN (tmp);
1571 if (GET_CODE (tmp) == NOTE
1572 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1573 tmp = NEXT_INSN (tmp);
1574 if (tmp == bb->head)
1577 after = PREV_INSN (tmp);
1580 /* If the source has one successor and the edge is not abnormal,
1581 insert there. Except for the entry block. */
1582 else if ((e->flags & EDGE_ABNORMAL) == 0
1583 && e->src->succ->succ_next == NULL
1584 && e->src != ENTRY_BLOCK_PTR)
1587 /* It is possible to have a non-simple jump here. Consider a target
1588 where some forms of unconditional jumps clobber a register. This
1589 happens on the fr30 for example.
1591 We know this block has a single successor, so we can just emit
1592 the queued insns before the jump. */
1593 if (GET_CODE (bb->end) == JUMP_INSN)
1599 /* We'd better be fallthru, or we've lost track of what's what. */
1600 if ((e->flags & EDGE_FALLTHRU) == 0)
1607 /* Otherwise we must split the edge. */
1610 bb = split_edge (e);
1614 /* Now that we've found the spot, do the insertion. */
1616 /* Set the new block number for these insns, if structure is allocated. */
1617 if (basic_block_for_insn)
1620 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1621 set_block_for_insn (i, bb);
1626 emit_insns_before (insns, before);
1627 if (before == bb->head)
1632 rtx last = emit_insns_after (insns, after);
1633 if (after == bb->end)
1637 if (GET_CODE (last) == JUMP_INSN)
1639 if (returnjump_p (last))
1641 /* ??? Remove all outgoing edges from BB and add one
1642 for EXIT. This is not currently a problem because
1643 this only happens for the (single) epilogue, which
1644 already has a fallthru edge to EXIT. */
1647 if (e->dest != EXIT_BLOCK_PTR
1648 || e->succ_next != NULL
1649 || (e->flags & EDGE_FALLTHRU) == 0)
1651 e->flags &= ~EDGE_FALLTHRU;
1653 emit_barrier_after (last);
1662 /* Update the CFG for all queued instructions. */
1665 commit_edge_insertions ()
1670 #ifdef ENABLE_CHECKING
1671 verify_flow_info ();
1675 bb = ENTRY_BLOCK_PTR;
1680 for (e = bb->succ; e ; e = next)
1682 next = e->succ_next;
1684 commit_one_edge_insertion (e);
1687 if (++i >= n_basic_blocks)
1689 bb = BASIC_BLOCK (i);
1693 /* Delete all unreachable basic blocks. */
1696 delete_unreachable_blocks ()
1698 basic_block *worklist, *tos;
1699 int deleted_handler;
1704 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1706 /* Use basic_block->aux as a marker. Clear them all. */
1708 for (i = 0; i < n; ++i)
1709 BASIC_BLOCK (i)->aux = NULL;
1711 /* Add our starting points to the worklist. Almost always there will
1712 be only one. It isn't inconcievable that we might one day directly
1713 support Fortran alternate entry points. */
1715 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1719 /* Mark the block with a handy non-null value. */
1723 /* Iterate: find everything reachable from what we've already seen. */
1725 while (tos != worklist)
1727 basic_block b = *--tos;
1729 for (e = b->succ; e ; e = e->succ_next)
1737 /* Delete all unreachable basic blocks. Count down so that we don't
1738 interfere with the block renumbering that happens in delete_block. */
1740 deleted_handler = 0;
1742 for (i = n - 1; i >= 0; --i)
1744 basic_block b = BASIC_BLOCK (i);
1747 /* This block was found. Tidy up the mark. */
1750 deleted_handler |= delete_block (b);
1753 tidy_fallthru_edges ();
1755 /* If we deleted an exception handler, we may have EH region begin/end
1756 blocks to remove as well. */
1757 if (deleted_handler)
1758 delete_eh_regions ();
1763 /* Find EH regions for which there is no longer a handler, and delete them. */
1766 delete_eh_regions ()
1770 update_rethrow_references ();
1772 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1773 if (GET_CODE (insn) == NOTE)
1775 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1776 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1778 int num = NOTE_EH_HANDLER (insn);
1779 /* A NULL handler indicates a region is no longer needed,
1780 as long as it isn't the target of a rethrow. */
1781 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1783 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1784 NOTE_SOURCE_FILE (insn) = 0;
1790 /* Return true if NOTE is not one of the ones that must be kept paired,
1791 so that we may simply delete them. */
1794 can_delete_note_p (note)
1797 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1798 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1801 /* Unlink a chain of insns between START and FINISH, leaving notes
1802 that must be paired. */
1805 flow_delete_insn_chain (start, finish)
1808 /* Unchain the insns one by one. It would be quicker to delete all
1809 of these with a single unchaining, rather than one at a time, but
1810 we need to keep the NOTE's. */
1816 next = NEXT_INSN (start);
1817 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1819 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1822 next = flow_delete_insn (start);
1824 if (start == finish)
1830 /* Delete the insns in a (non-live) block. We physically delete every
1831 non-deleted-note insn, and update the flow graph appropriately.
1833 Return nonzero if we deleted an exception handler. */
1835 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1836 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1842 int deleted_handler = 0;
1845 /* If the head of this block is a CODE_LABEL, then it might be the
1846 label for an exception handler which can't be reached.
1848 We need to remove the label from the exception_handler_label list
1849 and remove the associated NOTE_INSN_EH_REGION_BEG and
1850 NOTE_INSN_EH_REGION_END notes. */
1854 never_reached_warning (insn);
1856 if (GET_CODE (insn) == CODE_LABEL)
1858 rtx x, *prev = &exception_handler_labels;
1860 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1862 if (XEXP (x, 0) == insn)
1864 /* Found a match, splice this label out of the EH label list. */
1865 *prev = XEXP (x, 1);
1866 XEXP (x, 1) = NULL_RTX;
1867 XEXP (x, 0) = NULL_RTX;
1869 /* Remove the handler from all regions */
1870 remove_handler (insn);
1871 deleted_handler = 1;
1874 prev = &XEXP (x, 1);
1877 /* This label may be referenced by code solely for its value, or
1878 referenced by static data, or something. We have determined
1879 that it is not reachable, but cannot delete the label itself.
1880 Save code space and continue to delete the balance of the block,
1881 along with properly updating the cfg. */
1882 if (!can_delete_label_p (insn))
1884 /* If we've only got one of these, skip the whole deleting
1887 goto no_delete_insns;
1888 insn = NEXT_INSN (insn);
1892 /* Selectively unlink the insn chain. Include any BARRIER that may
1893 follow the basic block. */
1894 end = next_nonnote_insn (b->end);
1895 if (!end || GET_CODE (end) != BARRIER)
1897 flow_delete_insn_chain (insn, end);
1901 /* Remove the edges into and out of this block. Note that there may
1902 indeed be edges in, if we are removing an unreachable loop. */
1906 for (e = b->pred; e ; e = next)
1908 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1911 next = e->pred_next;
1915 for (e = b->succ; e ; e = next)
1917 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1920 next = e->succ_next;
1929 /* Remove the basic block from the array, and compact behind it. */
1932 return deleted_handler;
1935 /* Remove block B from the basic block array and compact behind it. */
1941 int i, n = n_basic_blocks;
1943 for (i = b->index; i + 1 < n; ++i)
1945 basic_block x = BASIC_BLOCK (i + 1);
1946 BASIC_BLOCK (i) = x;
1950 basic_block_info->num_elements--;
1954 /* Delete INSN by patching it out. Return the next insn. */
1957 flow_delete_insn (insn)
1960 rtx prev = PREV_INSN (insn);
1961 rtx next = NEXT_INSN (insn);
1963 PREV_INSN (insn) = NULL_RTX;
1964 NEXT_INSN (insn) = NULL_RTX;
1967 NEXT_INSN (prev) = next;
1969 PREV_INSN (next) = prev;
1971 set_last_insn (prev);
1973 if (GET_CODE (insn) == CODE_LABEL)
1974 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1976 /* If deleting a jump, decrement the use count of the label. Deleting
1977 the label itself should happen in the normal course of block merging. */
1978 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1979 LABEL_NUSES (JUMP_LABEL (insn))--;
1984 /* True if a given label can be deleted. */
1987 can_delete_label_p (label)
1992 if (LABEL_PRESERVE_P (label))
1995 for (x = forced_labels; x ; x = XEXP (x, 1))
1996 if (label == XEXP (x, 0))
1998 for (x = label_value_list; x ; x = XEXP (x, 1))
1999 if (label == XEXP (x, 0))
2001 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2002 if (label == XEXP (x, 0))
2005 /* User declared labels must be preserved. */
2006 if (LABEL_NAME (label) != 0)
2012 /* Blocks A and B are to be merged into a single block. A has no incoming
2013 fallthru edge, so it can be moved before B without adding or modifying
2014 any jumps (aside from the jump from A to B). */
2017 merge_blocks_move_predecessor_nojumps (a, b)
2020 rtx start, end, barrier;
2026 /* We want to delete the BARRIER after the end of the insns we are
2027 going to move. If we don't find a BARRIER, then do nothing. This
2028 can happen in some cases if we have labels we can not delete.
2030 Similarly, do nothing if we can not delete the label at the start
2031 of the target block. */
2032 barrier = next_nonnote_insn (end);
2033 if (GET_CODE (barrier) != BARRIER
2034 || (GET_CODE (b->head) == CODE_LABEL
2035 && ! can_delete_label_p (b->head)))
2038 flow_delete_insn (barrier);
2040 /* Move block and loop notes out of the chain so that we do not
2041 disturb their order.
2043 ??? A better solution would be to squeeze out all the non-nested notes
2044 and adjust the block trees appropriately. Even better would be to have
2045 a tighter connection between block trees and rtl so that this is not
2047 start = squeeze_notes (start, end);
2049 /* Scramble the insn chain. */
2050 if (end != PREV_INSN (b->head))
2051 reorder_insns (start, end, PREV_INSN (b->head));
2055 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2056 a->index, b->index);
2059 /* Swap the records for the two blocks around. Although we are deleting B,
2060 A is now where B was and we want to compact the BB array from where
2062 BASIC_BLOCK(a->index) = b;
2063 BASIC_BLOCK(b->index) = a;
2065 a->index = b->index;
2068 /* Now blocks A and B are contiguous. Merge them. */
2069 merge_blocks_nomove (a, b);
2074 /* Blocks A and B are to be merged into a single block. B has no outgoing
2075 fallthru edge, so it can be moved after A without adding or modifying
2076 any jumps (aside from the jump from A to B). */
2079 merge_blocks_move_successor_nojumps (a, b)
2082 rtx start, end, barrier;
2087 /* We want to delete the BARRIER after the end of the insns we are
2088 going to move. If we don't find a BARRIER, then do nothing. This
2089 can happen in some cases if we have labels we can not delete.
2091 Similarly, do nothing if we can not delete the label at the start
2092 of the target block. */
2093 barrier = next_nonnote_insn (end);
2094 if (GET_CODE (barrier) != BARRIER
2095 || (GET_CODE (b->head) == CODE_LABEL
2096 && ! can_delete_label_p (b->head)))
2099 flow_delete_insn (barrier);
2101 /* Move block and loop notes out of the chain so that we do not
2102 disturb their order.
2104 ??? A better solution would be to squeeze out all the non-nested notes
2105 and adjust the block trees appropriately. Even better would be to have
2106 a tighter connection between block trees and rtl so that this is not
2108 start = squeeze_notes (start, end);
2110 /* Scramble the insn chain. */
2111 reorder_insns (start, end, a->end);
2113 /* Now blocks A and B are contiguous. Merge them. */
2114 merge_blocks_nomove (a, b);
2118 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2119 b->index, a->index);
2125 /* Blocks A and B are to be merged into a single block. The insns
2126 are already contiguous, hence `nomove'. */
2129 merge_blocks_nomove (a, b)
2133 rtx b_head, b_end, a_end;
2136 /* If there was a CODE_LABEL beginning B, delete it. */
2139 if (GET_CODE (b_head) == CODE_LABEL)
2141 /* Detect basic blocks with nothing but a label. This can happen
2142 in particular at the end of a function. */
2143 if (b_head == b_end)
2145 b_head = flow_delete_insn (b_head);
2148 /* Delete the basic block note. */
2149 if (GET_CODE (b_head) == NOTE
2150 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2152 if (b_head == b_end)
2154 b_head = flow_delete_insn (b_head);
2157 /* If there was a jump out of A, delete it. */
2159 if (GET_CODE (a_end) == JUMP_INSN)
2163 prev = prev_nonnote_insn (a_end);
2168 /* If this was a conditional jump, we need to also delete
2169 the insn that set cc0. */
2171 if (prev && sets_cc0_p (prev))
2174 prev = prev_nonnote_insn (prev);
2177 flow_delete_insn (tmp);
2181 /* Note that a->head != a->end, since we should have at least a
2182 bb note plus the jump, so prev != insn. */
2183 flow_delete_insn (a_end);
2187 /* By definition, there should only be one successor of A, and that is
2188 B. Free that edge struct. */
2192 /* Adjust the edges out of B for the new owner. */
2193 for (e = b->succ; e ; e = e->succ_next)
2197 /* Reassociate the insns of B with A. */
2200 BLOCK_FOR_INSN (b_head) = a;
2201 while (b_head != b_end)
2203 b_head = NEXT_INSN (b_head);
2204 BLOCK_FOR_INSN (b_head) = a;
2210 /* Compact the basic block array. */
2214 /* Attempt to merge basic blocks that are potentially non-adjacent.
2215 Return true iff the attempt succeeded. */
2218 merge_blocks (e, b, c)
2222 /* If B has a fallthru edge to C, no need to move anything. */
2223 if (e->flags & EDGE_FALLTHRU)
2225 /* If a label still appears somewhere and we cannot delete the label,
2226 then we cannot merge the blocks. The edge was tidied already. */
2228 rtx insn, stop = NEXT_INSN (c->head);
2229 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2230 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2233 merge_blocks_nomove (b, c);
2237 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2238 b->index, c->index);
2247 int c_has_outgoing_fallthru;
2248 int b_has_incoming_fallthru;
2250 /* We must make sure to not munge nesting of exception regions,
2251 lexical blocks, and loop notes.
2253 The first is taken care of by requiring that the active eh
2254 region at the end of one block always matches the active eh
2255 region at the beginning of the next block.
2257 The later two are taken care of by squeezing out all the notes. */
2259 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2260 executed and we may want to treat blocks which have two out
2261 edges, one normal, one abnormal as only having one edge for
2262 block merging purposes. */
2264 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2265 if (tmp_edge->flags & EDGE_FALLTHRU)
2267 c_has_outgoing_fallthru = (tmp_edge != NULL);
2269 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2270 if (tmp_edge->flags & EDGE_FALLTHRU)
2272 b_has_incoming_fallthru = (tmp_edge != NULL);
2274 /* If B does not have an incoming fallthru, and the exception regions
2275 match, then it can be moved immediately before C without introducing
2278 C can not be the first block, so we do not have to worry about
2279 accessing a non-existent block. */
2280 d = BASIC_BLOCK (c->index - 1);
2281 if (! b_has_incoming_fallthru
2282 && d->eh_end == b->eh_beg
2283 && b->eh_end == c->eh_beg)
2284 return merge_blocks_move_predecessor_nojumps (b, c);
2286 /* Otherwise, we're going to try to move C after B. Make sure the
2287 exception regions match.
2289 If B is the last basic block, then we must not try to access the
2290 block structure for block B + 1. Luckily in that case we do not
2291 need to worry about matching exception regions. */
2292 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2293 if (b->eh_end == c->eh_beg
2294 && (d == NULL || c->eh_end == d->eh_beg))
2296 /* If C does not have an outgoing fallthru, then it can be moved
2297 immediately after B without introducing or modifying jumps. */
2298 if (! c_has_outgoing_fallthru)
2299 return merge_blocks_move_successor_nojumps (b, c);
2301 /* Otherwise, we'll need to insert an extra jump, and possibly
2302 a new block to contain it. */
2303 /* ??? Not implemented yet. */
2310 /* Top level driver for merge_blocks. */
2317 /* Attempt to merge blocks as made possible by edge removal. If a block
2318 has only one successor, and the successor has only one predecessor,
2319 they may be combined. */
2321 for (i = 0; i < n_basic_blocks; )
2323 basic_block c, b = BASIC_BLOCK (i);
2326 /* A loop because chains of blocks might be combineable. */
2327 while ((s = b->succ) != NULL
2328 && s->succ_next == NULL
2329 && (s->flags & EDGE_EH) == 0
2330 && (c = s->dest) != EXIT_BLOCK_PTR
2331 && c->pred->pred_next == NULL
2332 /* If the jump insn has side effects, we can't kill the edge. */
2333 && (GET_CODE (b->end) != JUMP_INSN
2334 || onlyjump_p (b->end))
2335 && merge_blocks (s, b, c))
2338 /* Don't get confused by the index shift caused by deleting blocks. */
2343 /* The given edge should potentially be a fallthru edge. If that is in
2344 fact true, delete the jump and barriers that are in the way. */
2347 tidy_fallthru_edge (e, b, c)
2353 /* ??? In a late-running flow pass, other folks may have deleted basic
2354 blocks by nopping out blocks, leaving multiple BARRIERs between here
2355 and the target label. They ought to be chastized and fixed.
2357 We can also wind up with a sequence of undeletable labels between
2358 one block and the next.
2360 So search through a sequence of barriers, labels, and notes for
2361 the head of block C and assert that we really do fall through. */
2363 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2366 /* Remove what will soon cease being the jump insn from the source block.
2367 If block B consisted only of this single jump, turn it into a deleted
2370 if (GET_CODE (q) == JUMP_INSN)
2373 /* If this was a conditional jump, we need to also delete
2374 the insn that set cc0. */
2375 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2382 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2383 NOTE_SOURCE_FILE (q) = 0;
2386 b->end = q = PREV_INSN (q);
2389 /* Selectively unlink the sequence. */
2390 if (q != PREV_INSN (c->head))
2391 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2393 e->flags |= EDGE_FALLTHRU;
2396 /* Fix up edges that now fall through, or rather should now fall through
2397 but previously required a jump around now deleted blocks. Simplify
2398 the search by only examining blocks numerically adjacent, since this
2399 is how find_basic_blocks created them. */
2402 tidy_fallthru_edges ()
2406 for (i = 1; i < n_basic_blocks; ++i)
2408 basic_block b = BASIC_BLOCK (i - 1);
2409 basic_block c = BASIC_BLOCK (i);
2412 /* We care about simple conditional or unconditional jumps with
2415 If we had a conditional branch to the next instruction when
2416 find_basic_blocks was called, then there will only be one
2417 out edge for the block which ended with the conditional
2418 branch (since we do not create duplicate edges).
2420 Furthermore, the edge will be marked as a fallthru because we
2421 merge the flags for the duplicate edges. So we do not want to
2422 check that the edge is not a FALLTHRU edge. */
2423 if ((s = b->succ) != NULL
2424 && s->succ_next == NULL
2426 /* If the jump insn has side effects, we can't tidy the edge. */
2427 && (GET_CODE (b->end) != JUMP_INSN
2428 || onlyjump_p (b->end)))
2429 tidy_fallthru_edge (s, b, c);
2433 /* Discover and record the loop depth at the head of each basic block. */
2436 calculate_loop_depth (dump)
2441 /* The loop infrastructure does the real job for us. */
2442 flow_loops_find (&loops);
2445 flow_loops_dump (&loops, dump, 0);
2447 flow_loops_free (&loops);
2450 /* Perform data flow analysis.
2451 F is the first insn of the function and NREGS the number of register numbers
2455 life_analysis (f, nregs, file, remove_dead_code)
2459 int remove_dead_code;
2461 #ifdef ELIMINABLE_REGS
2463 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2468 /* Record which registers will be eliminated. We use this in
2471 CLEAR_HARD_REG_SET (elim_reg_set);
2473 #ifdef ELIMINABLE_REGS
2474 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2475 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2477 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2480 /* We want alias analysis information for local dead store elimination. */
2481 init_alias_analysis ();
2484 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2488 if (! remove_dead_code)
2489 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2492 /* The post-reload life analysis have (on a global basis) the same
2493 registers live as was computed by reload itself. elimination
2494 Otherwise offsets and such may be incorrect.
2496 Reload will make some registers as live even though they do not
2497 appear in the rtl. */
2498 if (reload_completed)
2499 flags &= ~PROP_REG_INFO;
2503 /* Always remove no-op moves. Do this before other processing so
2504 that we don't have to keep re-scanning them. */
2505 delete_noop_moves (f);
2507 /* Some targets can emit simpler epilogues if they know that sp was
2508 not ever modified during the function. After reload, of course,
2509 we've already emitted the epilogue so there's no sense searching. */
2510 if (! reload_completed)
2511 notice_stack_pointer_modification (f);
2513 /* Allocate and zero out data structures that will record the
2514 data from lifetime analysis. */
2515 allocate_reg_life_data ();
2516 allocate_bb_life_data ();
2517 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2518 all_blocks = sbitmap_alloc (n_basic_blocks);
2519 sbitmap_ones (all_blocks);
2521 /* Find the set of registers live on function exit. */
2522 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2524 /* "Update" life info from zero. It'd be nice to begin the
2525 relaxation with just the exit and noreturn blocks, but that set
2526 is not immediately handy. */
2528 if (flags & PROP_REG_INFO)
2529 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2530 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2533 sbitmap_free (all_blocks);
2534 free (reg_next_use);
2535 reg_next_use = NULL;
2536 end_alias_analysis ();
2539 dump_flow_info (file);
2541 free_basic_block_vars (1);
2544 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2545 Search for REGNO. If found, abort if it is not wider than word_mode. */
2548 verify_wide_reg_1 (px, pregno)
2553 int regno = *(int *) pregno;
2555 if (GET_CODE (x) == REG && REGNO (x) == regno)
2557 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2564 /* A subroutine of verify_local_live_at_start. Search through insns
2565 between HEAD and END looking for register REGNO. */
2568 verify_wide_reg (regno, head, end)
2574 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2575 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, ®no))
2579 head = NEXT_INSN (head);
2582 /* We didn't find the register at all. Something's way screwy. */
2586 /* A subroutine of update_life_info. Verify that there are no untoward
2587 changes in live_at_start during a local update. */
2590 verify_local_live_at_start (new_live_at_start, bb)
2591 regset new_live_at_start;
2594 if (reload_completed)
2596 /* After reload, there are no pseudos, nor subregs of multi-word
2597 registers. The regsets should exactly match. */
2598 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2605 /* Find the set of changed registers. */
2606 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2608 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2610 /* No registers should die. */
2611 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2613 /* Verify that the now-live register is wider than word_mode. */
2614 verify_wide_reg (i, bb->head, bb->end);
2619 /* Updates life information starting with the basic blocks set in BLOCKS.
2621 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2622 expecting local modifications to basic blocks. If we find extra
2623 registers live at the beginning of a block, then we either killed
2624 useful data, or we have a broken split that wants data not provided.
2625 If we find registers removed from live_at_start, that means we have
2626 a broken peephole that is killing a register it shouldn't.
2628 ??? This is not true in one situation -- when a pre-reload splitter
2629 generates subregs of a multi-word pseudo, current life analysis will
2630 lose the kill. So we _can_ have a pseudo go live. How irritating.
2632 BLOCK_FOR_INSN is assumed to be correct.
2634 PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
2635 up reg_next_use. Including PROP_REG_INFO does not properly refresh
2636 regs_ever_live unless the caller resets it to zero. */
2639 update_life_info (blocks, extent, prop_flags)
2641 enum update_life_extent extent;
2647 tmp = ALLOCA_REG_SET ();
2649 /* For a global update, we go through the relaxation process again. */
2650 if (extent != UPDATE_LIFE_LOCAL)
2652 calculate_global_regs_live (blocks, blocks,
2653 prop_flags & PROP_SCAN_DEAD_CODE);
2655 /* If asked, remove notes from the blocks we'll update. */
2656 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2657 count_or_remove_death_notes (blocks, 1);
2660 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2662 basic_block bb = BASIC_BLOCK (i);
2664 COPY_REG_SET (tmp, bb->global_live_at_end);
2665 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2667 if (extent == UPDATE_LIFE_LOCAL)
2668 verify_local_live_at_start (tmp, bb);
2673 if (prop_flags & PROP_REG_INFO)
2675 /* The only pseudos that are live at the beginning of the function
2676 are those that were not set anywhere in the function. local-alloc
2677 doesn't know how to handle these correctly, so mark them as not
2678 local to any one basic block. */
2679 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2680 FIRST_PSEUDO_REGISTER, i,
2681 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2683 /* We have a problem with any pseudoreg that lives across the setjmp.
2684 ANSI says that if a user variable does not change in value between
2685 the setjmp and the longjmp, then the longjmp preserves it. This
2686 includes longjmp from a place where the pseudo appears dead.
2687 (In principle, the value still exists if it is in scope.)
2688 If the pseudo goes in a hard reg, some other value may occupy
2689 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2690 Conclusion: such a pseudo must not go in a hard reg. */
2691 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2692 FIRST_PSEUDO_REGISTER, i,
2694 if (regno_reg_rtx[i] != 0)
2696 REG_LIVE_LENGTH (i) = -1;
2697 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2703 /* Free the variables allocated by find_basic_blocks.
2705 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2708 free_basic_block_vars (keep_head_end_p)
2709 int keep_head_end_p;
2711 if (basic_block_for_insn)
2713 VARRAY_FREE (basic_block_for_insn);
2714 basic_block_for_insn = NULL;
2717 if (! keep_head_end_p)
2720 VARRAY_FREE (basic_block_info);
2723 ENTRY_BLOCK_PTR->aux = NULL;
2724 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2725 EXIT_BLOCK_PTR->aux = NULL;
2726 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2730 /* Return nonzero if the destination of SET equals the source. */
2735 rtx src = SET_SRC (set);
2736 rtx dst = SET_DEST (set);
2738 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2740 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2742 src = SUBREG_REG (src);
2743 dst = SUBREG_REG (dst);
2746 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2747 && REGNO (src) == REGNO (dst));
2750 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2756 rtx pat = PATTERN (insn);
2758 /* Insns carrying these notes are useful later on. */
2759 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2762 if (GET_CODE (pat) == SET && set_noop_p (pat))
2765 if (GET_CODE (pat) == PARALLEL)
2768 /* If nothing but SETs of registers to themselves,
2769 this insn can also be deleted. */
2770 for (i = 0; i < XVECLEN (pat, 0); i++)
2772 rtx tem = XVECEXP (pat, 0, i);
2774 if (GET_CODE (tem) == USE
2775 || GET_CODE (tem) == CLOBBER)
2778 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2787 /* Delete any insns that copy a register to itself. */
2790 delete_noop_moves (f)
2794 for (insn = f; insn; insn = NEXT_INSN (insn))
2796 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2798 PUT_CODE (insn, NOTE);
2799 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2800 NOTE_SOURCE_FILE (insn) = 0;
2805 /* Determine if the stack pointer is constant over the life of the function.
2806 Only useful before prologues have been emitted. */
2809 notice_stack_pointer_modification_1 (x, pat, data)
2811 rtx pat ATTRIBUTE_UNUSED;
2812 void *data ATTRIBUTE_UNUSED;
2814 if (x == stack_pointer_rtx
2815 /* The stack pointer is only modified indirectly as the result
2816 of a push until later in flow. See the comments in rtl.texi
2817 regarding Embedded Side-Effects on Addresses. */
2818 || (GET_CODE (x) == MEM
2819 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2820 || GET_CODE (XEXP (x, 0)) == PRE_INC
2821 || GET_CODE (XEXP (x, 0)) == POST_DEC
2822 || GET_CODE (XEXP (x, 0)) == POST_INC)
2823 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2824 current_function_sp_is_unchanging = 0;
2828 notice_stack_pointer_modification (f)
2833 /* Assume that the stack pointer is unchanging if alloca hasn't
2835 current_function_sp_is_unchanging = !current_function_calls_alloca;
2836 if (! current_function_sp_is_unchanging)
2839 for (insn = f; insn; insn = NEXT_INSN (insn))
2841 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2843 /* Check if insn modifies the stack pointer. */
2844 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2846 if (! current_function_sp_is_unchanging)
2852 /* Mark a register in SET. Hard registers in large modes get all
2853 of their component registers set as well. */
2855 mark_reg (reg, xset)
2859 regset set = (regset) xset;
2860 int regno = REGNO (reg);
2862 if (GET_MODE (reg) == BLKmode)
2865 SET_REGNO_REG_SET (set, regno);
2866 if (regno < FIRST_PSEUDO_REGISTER)
2868 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2870 SET_REGNO_REG_SET (set, regno + n);
2874 /* Mark those regs which are needed at the end of the function as live
2875 at the end of the last basic block. */
2877 mark_regs_live_at_end (set)
2882 /* If exiting needs the right stack value, consider the stack pointer
2883 live at the end of the function. */
2884 if ((HAVE_epilogue && reload_completed)
2885 || ! EXIT_IGNORE_STACK
2886 || (! FRAME_POINTER_REQUIRED
2887 && ! current_function_calls_alloca
2888 && flag_omit_frame_pointer)
2889 || current_function_sp_is_unchanging)
2891 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2894 /* Mark the frame pointer if needed at the end of the function. If
2895 we end up eliminating it, it will be removed from the live list
2896 of each basic block by reload. */
2898 if (! reload_completed || frame_pointer_needed)
2900 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2901 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2902 /* If they are different, also mark the hard frame pointer as live */
2903 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2907 #ifdef PIC_OFFSET_TABLE_REGNUM
2908 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2909 /* Many architectures have a GP register even without flag_pic.
2910 Assume the pic register is not in use, or will be handled by
2911 other means, if it is not fixed. */
2912 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2913 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2917 /* Mark all global registers, and all registers used by the epilogue
2918 as being live at the end of the function since they may be
2919 referenced by our caller. */
2920 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2922 #ifdef EPILOGUE_USES
2923 || EPILOGUE_USES (i)
2926 SET_REGNO_REG_SET (set, i);
2928 /* Mark all call-saved registers that we actaully used. */
2929 if (HAVE_epilogue && reload_completed)
2931 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2932 if (! call_used_regs[i] && regs_ever_live[i])
2933 SET_REGNO_REG_SET (set, i);
2936 /* Mark function return value. */
2937 diddle_return_value (mark_reg, set);
2940 /* Propagate global life info around the graph of basic blocks. Begin
2941 considering blocks with their corresponding bit set in BLOCKS_IN.
2942 BLOCKS_OUT is set for every block that was changed. */
2945 calculate_global_regs_live (blocks_in, blocks_out, flags)
2946 sbitmap blocks_in, blocks_out;
2949 basic_block *queue, *qhead, *qtail, *qend;
2950 regset tmp, new_live_at_end;
2953 tmp = ALLOCA_REG_SET ();
2954 new_live_at_end = ALLOCA_REG_SET ();
2956 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
2957 because the `head == tail' style test for an empty queue doesn't
2958 work with a full queue. */
2959 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
2961 qhead = qend = queue + n_basic_blocks + 2;
2963 /* Clear out the garbage that might be hanging out in bb->aux. */
2964 for (i = n_basic_blocks - 1; i >= 0; --i)
2965 BASIC_BLOCK (i)->aux = NULL;
2967 /* Queue the blocks set in the initial mask. Do this in reverse block
2968 number order so that we are more likely for the first round to do
2969 useful work. We use AUX non-null to flag that the block is queued. */
2970 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
2972 basic_block bb = BASIC_BLOCK (i);
2977 sbitmap_zero (blocks_out);
2979 while (qhead != qtail)
2981 int rescan, changed;
2990 /* Begin by propogating live_at_start from the successor blocks. */
2991 CLEAR_REG_SET (new_live_at_end);
2992 for (e = bb->succ; e ; e = e->succ_next)
2994 basic_block sb = e->dest;
2995 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
2998 if (bb == ENTRY_BLOCK_PTR)
3000 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3004 /* On our first pass through this block, we'll go ahead and continue.
3005 Recognize first pass by local_set NULL. On subsequent passes, we
3006 get to skip out early if live_at_end wouldn't have changed. */
3008 if (bb->local_set == NULL)
3010 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3015 /* If any bits were removed from live_at_end, we'll have to
3016 rescan the block. This wouldn't be necessary if we had
3017 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3018 local_live is really dependant on live_at_end. */
3019 CLEAR_REG_SET (tmp);
3020 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3021 new_live_at_end, BITMAP_AND_COMPL);
3025 /* Find the set of changed bits. Take this opportunity
3026 to notice that this set is empty and early out. */
3027 CLEAR_REG_SET (tmp);
3028 changed = bitmap_operation (tmp, bb->global_live_at_end,
3029 new_live_at_end, BITMAP_XOR);
3033 /* If any of the changed bits overlap with local_set,
3034 we'll have to rescan the block. Detect overlap by
3035 the AND with ~local_set turning off bits. */
3036 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3041 /* Let our caller know that BB changed enough to require its
3042 death notes updated. */
3043 SET_BIT (blocks_out, bb->index);
3047 /* Add to live_at_start the set of all registers in
3048 new_live_at_end that aren't in the old live_at_end. */
3050 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3052 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3054 changed = bitmap_operation (bb->global_live_at_start,
3055 bb->global_live_at_start,
3062 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3064 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3065 into live_at_start. */
3066 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3068 /* If live_at start didn't change, no need to go farther. */
3069 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3072 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3075 /* Queue all predecessors of BB so that we may re-examine
3076 their live_at_end. */
3077 for (e = bb->pred; e ; e = e->pred_next)
3079 basic_block pb = e->src;
3080 if (pb->aux == NULL)
3091 FREE_REG_SET (new_live_at_end);
3093 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3095 basic_block bb = BASIC_BLOCK (i);
3096 FREE_REG_SET (bb->local_set);
3102 /* Subroutines of life analysis. */
3104 /* Allocate the permanent data structures that represent the results
3105 of life analysis. Not static since used also for stupid life analysis. */
3108 allocate_bb_life_data ()
3112 for (i = 0; i < n_basic_blocks; i++)
3114 basic_block bb = BASIC_BLOCK (i);
3116 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3117 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3120 ENTRY_BLOCK_PTR->global_live_at_end
3121 = OBSTACK_ALLOC_REG_SET (function_obstack);
3122 EXIT_BLOCK_PTR->global_live_at_start
3123 = OBSTACK_ALLOC_REG_SET (function_obstack);
3125 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3129 allocate_reg_life_data ()
3133 /* Recalculate the register space, in case it has grown. Old style
3134 vector oriented regsets would set regset_{size,bytes} here also. */
3135 allocate_reg_info (max_regno, FALSE, FALSE);
3137 /* Reset all the data we'll collect in propagate_block and its
3139 for (i = 0; i < max_regno; i++)
3143 REG_N_DEATHS (i) = 0;
3144 REG_N_CALLS_CROSSED (i) = 0;
3145 REG_LIVE_LENGTH (i) = 0;
3146 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3150 /* Compute the registers live at the beginning of a basic block
3151 from those live at the end.
3153 When called, OLD contains those live at the end.
3154 On return, it contains those live at the beginning.
3155 FIRST and LAST are the first and last insns of the basic block.
3157 FINAL is nonzero if we are doing the final pass which is not
3158 for computing the life info (since that has already been done)
3159 but for acting on it. On this pass, we delete dead stores,
3160 set up the logical links and dead-variables lists of instructions,
3161 and merge instructions for autoincrement and autodecrement addresses.
3163 SIGNIFICANT is nonzero only the first time for each basic block.
3164 If it is nonzero, it points to a regset in which we store
3165 a 1 for each register that is set within the block.
3167 BNUM is the number of the basic block. */
3170 propagate_block (bb, old, significant, flags)
3181 /* Find the loop depth for this block. Ignore loop level changes in the
3182 middle of the basic block -- for register allocation purposes, the
3183 important uses will be in the blocks wholely contained within the loop
3184 not in the loop pre-header or post-trailer. */
3185 loop_depth = bb->loop_depth;
3187 dead = ALLOCA_REG_SET ();
3188 live = ALLOCA_REG_SET ();
3192 if (flags & PROP_REG_INFO)
3196 /* Process the regs live at the end of the block.
3197 Mark them as not local to any one basic block. */
3198 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3200 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3204 /* Scan the block an insn at a time from end to beginning. */
3206 for (insn = bb->end; ; insn = prev)
3208 prev = PREV_INSN (insn);
3210 if (GET_CODE (insn) == NOTE)
3212 /* If this is a call to `setjmp' et al,
3213 warn if any non-volatile datum is live. */
3215 if ((flags & PROP_REG_INFO)
3216 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3217 IOR_REG_SET (regs_live_at_setjmp, old);
3220 /* Update the life-status of regs for this insn.
3221 First DEAD gets which regs are set in this insn
3222 then LIVE gets which regs are used in this insn.
3223 Then the regs live before the insn
3224 are those live after, with DEAD regs turned off,
3225 and then LIVE regs turned on. */
3227 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3230 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3231 int insn_is_dead = 0;
3232 int libcall_is_dead = 0;
3234 if (flags & PROP_SCAN_DEAD_CODE)
3236 insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
3238 libcall_is_dead = (insn_is_dead && note != 0
3239 && libcall_dead_p (PATTERN (insn), old,
3243 /* We almost certainly don't want to delete prologue or epilogue
3244 instructions. Warn about probable compiler losage. */
3247 && (HAVE_epilogue || HAVE_prologue)
3248 && prologue_epilogue_contains (insn))
3250 if (flags & PROP_KILL_DEAD_CODE)
3252 warning ("ICE: would have deleted prologue/epilogue insn");
3253 if (!inhibit_warnings)
3256 libcall_is_dead = insn_is_dead = 0;
3259 /* If an instruction consists of just dead store(s) on final pass,
3260 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3261 We could really delete it with delete_insn, but that
3262 can cause trouble for first or last insn in a basic block. */
3263 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3266 /* If the insn referred to a label, note that the label is
3268 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3270 if (REG_NOTE_KIND (inote) == REG_LABEL)
3272 rtx label = XEXP (inote, 0);
3274 LABEL_NUSES (label)--;
3276 /* If this label was attached to an ADDR_VEC, it's
3277 safe to delete the ADDR_VEC. In fact, it's pretty
3278 much mandatory to delete it, because the ADDR_VEC may
3279 be referencing labels that no longer exist. */
3280 if (LABEL_NUSES (label) == 0
3281 && (next = next_nonnote_insn (label)) != NULL
3282 && GET_CODE (next) == JUMP_INSN
3283 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3284 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3286 rtx pat = PATTERN (next);
3287 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3288 int len = XVECLEN (pat, diff_vec_p);
3290 for (i = 0; i < len; i++)
3291 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3292 PUT_CODE (next, NOTE);
3293 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3294 NOTE_SOURCE_FILE (next) = 0;
3299 PUT_CODE (insn, NOTE);
3300 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3301 NOTE_SOURCE_FILE (insn) = 0;
3303 /* CC0 is now known to be dead. Either this insn used it,
3304 in which case it doesn't anymore, or clobbered it,
3305 so the next insn can't use it. */
3308 /* If this insn is copying the return value from a library call,
3309 delete the entire library call. */
3310 if (libcall_is_dead)
3312 rtx first = XEXP (note, 0);
3314 while (INSN_DELETED_P (first))
3315 first = NEXT_INSN (first);
3320 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3321 NOTE_SOURCE_FILE (p) = 0;
3327 CLEAR_REG_SET (dead);
3328 CLEAR_REG_SET (live);
3330 /* See if this is an increment or decrement that can be
3331 merged into a following memory address. */
3334 register rtx x = single_set (insn);
3336 /* Does this instruction increment or decrement a register? */
3337 if (!reload_completed
3338 && (flags & PROP_AUTOINC)
3340 && GET_CODE (SET_DEST (x)) == REG
3341 && (GET_CODE (SET_SRC (x)) == PLUS
3342 || GET_CODE (SET_SRC (x)) == MINUS)
3343 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3344 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3345 /* Ok, look for a following memory ref we can combine with.
3346 If one is found, change the memory ref to a PRE_INC
3347 or PRE_DEC, cancel this insn, and return 1.
3348 Return 0 if nothing has been done. */
3349 && try_pre_increment_1 (insn))
3352 #endif /* AUTO_INC_DEC */
3354 /* If this is not the final pass, and this insn is copying the
3355 value of a library call and it's dead, don't scan the
3356 insns that perform the library call, so that the call's
3357 arguments are not marked live. */
3358 if (libcall_is_dead)
3360 /* Mark the dest reg as `significant'. */
3361 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3362 significant, flags);
3364 insn = XEXP (note, 0);
3365 prev = PREV_INSN (insn);
3367 else if (GET_CODE (PATTERN (insn)) == SET
3368 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3369 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3370 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3371 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3372 /* We have an insn to pop a constant amount off the stack.
3373 (Such insns use PLUS regardless of the direction of the stack,
3374 and any insn to adjust the stack by a constant is always a pop.)
3375 These insns, if not dead stores, have no effect on life. */
3379 /* Any regs live at the time of a call instruction
3380 must not go in a register clobbered by calls.
3381 Find all regs now live and record this for them. */
3383 if (GET_CODE (insn) == CALL_INSN
3384 && (flags & PROP_REG_INFO))
3385 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3387 REG_N_CALLS_CROSSED (i)++;
3390 /* LIVE gets the regs used in INSN;
3391 DEAD gets those set by it. Dead insns don't make anything
3394 mark_set_regs (old, dead, PATTERN (insn),
3395 insn, significant, flags);
3397 /* If an insn doesn't use CC0, it becomes dead since we
3398 assume that every insn clobbers it. So show it dead here;
3399 mark_used_regs will set it live if it is referenced. */
3403 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3405 /* Sometimes we may have inserted something before INSN (such as
3406 a move) when we make an auto-inc. So ensure we will scan
3409 prev = PREV_INSN (insn);
3412 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3418 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3420 note = XEXP (note, 1))
3421 if (GET_CODE (XEXP (note, 0)) == USE)
3422 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3425 /* Each call clobbers all call-clobbered regs that are not
3426 global or fixed. Note that the function-value reg is a
3427 call-clobbered reg, and mark_set_regs has already had
3428 a chance to handle it. */
3430 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3431 if (call_used_regs[i] && ! global_regs[i]
3434 SET_REGNO_REG_SET (dead, i);
3436 SET_REGNO_REG_SET (significant, i);
3439 /* The stack ptr is used (honorarily) by a CALL insn. */
3440 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3442 /* Calls may also reference any of the global registers,
3443 so they are made live. */
3444 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3446 mark_used_regs (old, live,
3447 gen_rtx_REG (reg_raw_mode[i], i),
3450 /* Calls also clobber memory. */
3451 free_EXPR_LIST_list (&mem_set_list);
3454 /* Update OLD for the registers used or set. */
3455 AND_COMPL_REG_SET (old, dead);
3456 IOR_REG_SET (old, live);
3460 /* On final pass, update counts of how many insns in which
3461 each reg is live. */
3462 if (flags & PROP_REG_INFO)
3463 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
3466 if (insn == bb->head)
3470 FREE_REG_SET (dead);
3471 FREE_REG_SET (live);
3472 free_EXPR_LIST_list (&mem_set_list);
3475 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3476 (SET expressions whose destinations are registers dead after the insn).
3477 NEEDED is the regset that says which regs are alive after the insn.
3479 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3481 If X is the entire body of an insn, NOTES contains the reg notes
3482 pertaining to the insn. */
3485 insn_dead_p (x, needed, call_ok, notes)
3489 rtx notes ATTRIBUTE_UNUSED;
3491 enum rtx_code code = GET_CODE (x);
3494 /* If flow is invoked after reload, we must take existing AUTO_INC
3495 expresions into account. */
3496 if (reload_completed)
3498 for ( ; notes; notes = XEXP (notes, 1))
3500 if (REG_NOTE_KIND (notes) == REG_INC)
3502 int regno = REGNO (XEXP (notes, 0));
3504 /* Don't delete insns to set global regs. */
3505 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3506 || REGNO_REG_SET_P (needed, regno))
3513 /* If setting something that's a reg or part of one,
3514 see if that register's altered value will be live. */
3518 rtx r = SET_DEST (x);
3521 if (GET_CODE (r) == CC0)
3525 /* A SET that is a subroutine call cannot be dead. */
3526 if (GET_CODE (SET_SRC (x)) == CALL)
3532 /* Don't eliminate loads from volatile memory or volatile asms. */
3533 else if (volatile_refs_p (SET_SRC (x)))
3536 if (GET_CODE (r) == MEM)
3540 if (MEM_VOLATILE_P (r))
3543 /* Walk the set of memory locations we are currently tracking
3544 and see if one is an identical match to this memory location.
3545 If so, this memory write is dead (remember, we're walking
3546 backwards from the end of the block to the start. */
3547 temp = mem_set_list;
3550 if (rtx_equal_p (XEXP (temp, 0), r))
3552 temp = XEXP (temp, 1);
3557 while (GET_CODE (r) == SUBREG
3558 || GET_CODE (r) == STRICT_LOW_PART
3559 || GET_CODE (r) == ZERO_EXTRACT)
3562 if (GET_CODE (r) == REG)
3564 int regno = REGNO (r);
3567 if (REGNO_REG_SET_P (needed, regno))
3570 /* If this is a hard register, verify that subsequent
3571 words are not needed. */
3572 if (regno < FIRST_PSEUDO_REGISTER)
3574 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3577 if (REGNO_REG_SET_P (needed, regno+n))
3581 /* Don't delete insns to set global regs. */
3582 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3585 /* Make sure insns to set the stack pointer aren't deleted. */
3586 if (regno == STACK_POINTER_REGNUM)
3589 /* Make sure insns to set the frame pointer aren't deleted. */
3590 if (regno == FRAME_POINTER_REGNUM
3591 && (! reload_completed || frame_pointer_needed))
3593 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3594 if (regno == HARD_FRAME_POINTER_REGNUM
3595 && (! reload_completed || frame_pointer_needed))
3599 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3600 /* Make sure insns to set arg pointer are never deleted
3601 (if the arg pointer isn't fixed, there will be a USE
3602 for it, so we can treat it normally). */
3603 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3607 /* Otherwise, the set is dead. */
3613 /* If performing several activities, insn is dead if each activity
3614 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3615 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3617 else if (code == PARALLEL)
3619 int i = XVECLEN (x, 0);
3621 for (i--; i >= 0; i--)
3622 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3623 && GET_CODE (XVECEXP (x, 0, i)) != USE
3624 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3630 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3631 is not necessarily true for hard registers. */
3632 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3633 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3634 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3637 /* We do not check other CLOBBER or USE here. An insn consisting of just
3638 a CLOBBER or just a USE should not be deleted. */
3642 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3643 return 1 if the entire library call is dead.
3644 This is true if X copies a register (hard or pseudo)
3645 and if the hard return reg of the call insn is dead.
3646 (The caller should have tested the destination of X already for death.)
3648 If this insn doesn't just copy a register, then we don't
3649 have an ordinary libcall. In that case, cse could not have
3650 managed to substitute the source for the dest later on,
3651 so we can assume the libcall is dead.
3653 NEEDED is the bit vector of pseudoregs live before this insn.
3654 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3657 libcall_dead_p (x, needed, note, insn)
3663 register RTX_CODE code = GET_CODE (x);
3667 register rtx r = SET_SRC (x);
3668 if (GET_CODE (r) == REG)
3670 rtx call = XEXP (note, 0);
3674 /* Find the call insn. */
3675 while (call != insn && GET_CODE (call) != CALL_INSN)
3676 call = NEXT_INSN (call);
3678 /* If there is none, do nothing special,
3679 since ordinary death handling can understand these insns. */
3683 /* See if the hard reg holding the value is dead.
3684 If this is a PARALLEL, find the call within it. */
3685 call_pat = PATTERN (call);
3686 if (GET_CODE (call_pat) == PARALLEL)
3688 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3689 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3690 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3693 /* This may be a library call that is returning a value
3694 via invisible pointer. Do nothing special, since
3695 ordinary death handling can understand these insns. */
3699 call_pat = XVECEXP (call_pat, 0, i);
3702 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3708 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3709 live at function entry. Don't count global register variables, variables
3710 in registers that can be used for function arg passing, or variables in
3711 fixed hard registers. */
3714 regno_uninitialized (regno)
3717 if (n_basic_blocks == 0
3718 || (regno < FIRST_PSEUDO_REGISTER
3719 && (global_regs[regno]
3720 || fixed_regs[regno]
3721 || FUNCTION_ARG_REGNO_P (regno))))
3724 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3727 /* 1 if register REGNO was alive at a place where `setjmp' was called
3728 and was set more than once or is an argument.
3729 Such regs may be clobbered by `longjmp'. */
3732 regno_clobbered_at_setjmp (regno)
3735 if (n_basic_blocks == 0)
3738 return ((REG_N_SETS (regno) > 1
3739 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3740 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3743 /* INSN references memory, possibly using autoincrement addressing modes.
3744 Find any entries on the mem_set_list that need to be invalidated due
3745 to an address change. */
3747 invalidate_mems_from_autoinc (insn)
3750 rtx note = REG_NOTES (insn);
3751 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3753 if (REG_NOTE_KIND (note) == REG_INC)
3755 rtx temp = mem_set_list;
3756 rtx prev = NULL_RTX;
3761 next = XEXP (temp, 1);
3762 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3764 /* Splice temp out of list. */
3766 XEXP (prev, 1) = next;
3768 mem_set_list = next;
3769 free_EXPR_LIST_node (temp);
3779 /* Process the registers that are set within X. Their bits are set to
3780 1 in the regset DEAD, because they are dead prior to this insn.
3782 If INSN is nonzero, it is the insn being processed.
3784 FLAGS is the set of operations to perform. */
3787 mark_set_regs (needed, dead, x, insn, significant, flags)
3795 register RTX_CODE code = GET_CODE (x);
3797 if (code == SET || code == CLOBBER)
3798 mark_set_1 (needed, dead, x, insn, significant, flags);
3799 else if (code == PARALLEL)
3802 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3804 code = GET_CODE (XVECEXP (x, 0, i));
3805 if (code == SET || code == CLOBBER)
3806 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3807 significant, flags);
3812 /* Process a single SET rtx, X. */
3815 mark_set_1 (needed, dead, x, insn, significant, flags)
3823 register int regno = -1;
3824 register rtx reg = SET_DEST (x);
3826 /* Some targets place small structures in registers for
3827 return values of functions. We have to detect this
3828 case specially here to get correct flow information. */
3829 if (GET_CODE (reg) == PARALLEL
3830 && GET_MODE (reg) == BLKmode)
3834 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3835 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3836 significant, flags);
3840 /* Modifying just one hardware register of a multi-reg value
3841 or just a byte field of a register
3842 does not mean the value from before this insn is now dead.
3843 But it does mean liveness of that register at the end of the block
3846 Within mark_set_1, however, we treat it as if the register is
3847 indeed modified. mark_used_regs will, however, also treat this
3848 register as being used. Thus, we treat these insns as setting a
3849 new value for the register as a function of its old value. This
3850 cases LOG_LINKS to be made appropriately and this will help combine. */
3852 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3853 || GET_CODE (reg) == SIGN_EXTRACT
3854 || GET_CODE (reg) == STRICT_LOW_PART)
3855 reg = XEXP (reg, 0);
3857 /* If this set is a MEM, then it kills any aliased writes.
3858 If this set is a REG, then it kills any MEMs which use the reg. */
3859 if (flags & PROP_SCAN_DEAD_CODE)
3861 if (GET_CODE (reg) == MEM
3862 || GET_CODE (reg) == REG)
3864 rtx temp = mem_set_list;
3865 rtx prev = NULL_RTX;
3870 next = XEXP (temp, 1);
3871 if ((GET_CODE (reg) == MEM
3872 && output_dependence (XEXP (temp, 0), reg))
3873 || (GET_CODE (reg) == REG
3874 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3876 /* Splice this entry out of the list. */
3878 XEXP (prev, 1) = next;
3880 mem_set_list = next;
3881 free_EXPR_LIST_node (temp);
3889 /* If the memory reference had embedded side effects (autoincrement
3890 address modes. Then we may need to kill some entries on the
3892 if (insn && GET_CODE (reg) == MEM)
3893 invalidate_mems_from_autoinc (insn);
3895 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3896 /* We do not know the size of a BLKmode store, so we do not track
3897 them for redundant store elimination. */
3898 && GET_MODE (reg) != BLKmode
3899 /* There are no REG_INC notes for SP, so we can't assume we'll see
3900 everything that invalidates it. To be safe, don't eliminate any
3901 stores though SP; none of them should be redundant anyway. */
3902 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3903 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3906 if (GET_CODE (reg) == REG
3907 && (regno = REGNO (reg),
3908 ! (regno == FRAME_POINTER_REGNUM
3909 && (! reload_completed || frame_pointer_needed)))
3910 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3911 && ! (regno == HARD_FRAME_POINTER_REGNUM
3912 && (! reload_completed || frame_pointer_needed))
3914 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3915 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3917 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3918 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3920 int some_needed = REGNO_REG_SET_P (needed, regno);
3921 int some_not_needed = ! some_needed;
3923 /* Mark it as a significant register for this basic block. */
3925 SET_REGNO_REG_SET (significant, regno);
3927 /* Mark it as dead before this insn. */
3928 SET_REGNO_REG_SET (dead, regno);
3930 /* A hard reg in a wide mode may really be multiple registers.
3931 If so, mark all of them just like the first. */
3932 if (regno < FIRST_PSEUDO_REGISTER)
3936 /* Nothing below is needed for the stack pointer; get out asap.
3937 Eg, log links aren't needed, since combine won't use them. */
3938 if (regno == STACK_POINTER_REGNUM)
3941 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3944 int regno_n = regno + n;
3945 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3947 SET_REGNO_REG_SET (significant, regno_n);
3949 SET_REGNO_REG_SET (dead, regno_n);
3950 some_needed |= needed_regno;
3951 some_not_needed |= ! needed_regno;
3955 /* Additional data to record if this is the final pass. */
3956 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3957 | PROP_DEATH_NOTES | PROP_AUTOINC))
3960 register int blocknum = BLOCK_NUM (insn);
3963 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3964 y = reg_next_use[regno];
3966 /* If this is a hard reg, record this function uses the reg. */
3968 if (regno < FIRST_PSEUDO_REGISTER)
3971 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3973 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3974 for (i = regno; i < endregno; i++)
3976 /* The next use is no longer "next", since a store
3978 reg_next_use[i] = 0;
3981 if (flags & PROP_REG_INFO)
3982 for (i = regno; i < endregno; i++)
3984 regs_ever_live[i] = 1;
3990 /* The next use is no longer "next", since a store
3992 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3993 reg_next_use[regno] = 0;
3995 /* Keep track of which basic blocks each reg appears in. */
3997 if (flags & PROP_REG_INFO)
3999 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4000 REG_BASIC_BLOCK (regno) = blocknum;
4001 else if (REG_BASIC_BLOCK (regno) != blocknum)
4002 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4004 /* Count (weighted) references, stores, etc. This counts a
4005 register twice if it is modified, but that is correct. */
4006 REG_N_SETS (regno)++;
4007 REG_N_REFS (regno) += loop_depth + 1;
4009 /* The insns where a reg is live are normally counted
4010 elsewhere, but we want the count to include the insn
4011 where the reg is set, and the normal counting mechanism
4012 would not count it. */
4013 REG_LIVE_LENGTH (regno)++;
4017 if (! some_not_needed)
4019 if (flags & PROP_LOG_LINKS)
4021 /* Make a logical link from the next following insn
4022 that uses this register, back to this insn.
4023 The following insns have already been processed.
4025 We don't build a LOG_LINK for hard registers containing
4026 in ASM_OPERANDs. If these registers get replaced,
4027 we might wind up changing the semantics of the insn,
4028 even if reload can make what appear to be valid
4029 assignments later. */
4030 if (y && (BLOCK_NUM (y) == blocknum)
4031 && (regno >= FIRST_PSEUDO_REGISTER
4032 || asm_noperands (PATTERN (y)) < 0))
4033 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4036 else if (! some_needed)
4038 if (flags & PROP_REG_INFO)
4039 REG_N_DEATHS (REGNO (reg))++;
4041 if (flags & PROP_DEATH_NOTES)
4043 /* Note that dead stores have already been deleted
4044 when possible. If we get here, we have found a
4045 dead store that cannot be eliminated (because the
4046 same insn does something useful). Indicate this
4047 by marking the reg being set as dying here. */
4049 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4054 if (flags & PROP_DEATH_NOTES)
4056 /* This is a case where we have a multi-word hard register
4057 and some, but not all, of the words of the register are
4058 needed in subsequent insns. Write REG_UNUSED notes
4059 for those parts that were not needed. This case should
4064 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4066 if (!REGNO_REG_SET_P (needed, regno + i))
4070 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4076 else if (GET_CODE (reg) == REG)
4078 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4079 reg_next_use[regno] = 0;
4082 /* If this is the last pass and this is a SCRATCH, show it will be dying
4083 here and count it. */
4084 else if (GET_CODE (reg) == SCRATCH)
4086 if (flags & PROP_DEATH_NOTES)
4088 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4094 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4098 find_auto_inc (needed, x, insn)
4103 rtx addr = XEXP (x, 0);
4104 HOST_WIDE_INT offset = 0;
4107 /* Here we detect use of an index register which might be good for
4108 postincrement, postdecrement, preincrement, or predecrement. */
4110 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4111 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4113 if (GET_CODE (addr) == REG)
4116 register int size = GET_MODE_SIZE (GET_MODE (x));
4119 int regno = REGNO (addr);
4121 /* Is the next use an increment that might make auto-increment? */
4122 if ((incr = reg_next_use[regno]) != 0
4123 && (set = single_set (incr)) != 0
4124 && GET_CODE (set) == SET
4125 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4126 /* Can't add side effects to jumps; if reg is spilled and
4127 reloaded, there's no way to store back the altered value. */
4128 && GET_CODE (insn) != JUMP_INSN
4129 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4130 && XEXP (y, 0) == addr
4131 && GET_CODE (XEXP (y, 1)) == CONST_INT
4132 && ((HAVE_POST_INCREMENT
4133 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4134 || (HAVE_POST_DECREMENT
4135 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4136 || (HAVE_PRE_INCREMENT
4137 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4138 || (HAVE_PRE_DECREMENT
4139 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4140 /* Make sure this reg appears only once in this insn. */
4141 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4142 use != 0 && use != (rtx) 1))
4144 rtx q = SET_DEST (set);
4145 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4146 ? (offset ? PRE_INC : POST_INC)
4147 : (offset ? PRE_DEC : POST_DEC));
4149 if (dead_or_set_p (incr, addr))
4151 /* This is the simple case. Try to make the auto-inc. If
4152 we can't, we are done. Otherwise, we will do any
4153 needed updates below. */
4154 if (! validate_change (insn, &XEXP (x, 0),
4155 gen_rtx_fmt_e (inc_code, Pmode, addr),
4159 else if (GET_CODE (q) == REG
4160 /* PREV_INSN used here to check the semi-open interval
4162 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4163 /* We must also check for sets of q as q may be
4164 a call clobbered hard register and there may
4165 be a call between PREV_INSN (insn) and incr. */
4166 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4168 /* We have *p followed sometime later by q = p+size.
4169 Both p and q must be live afterward,
4170 and q is not used between INSN and its assignment.
4171 Change it to q = p, ...*q..., q = q+size.
4172 Then fall into the usual case. */
4177 emit_move_insn (q, addr);
4178 insns = get_insns ();
4181 bb = BLOCK_FOR_INSN (insn);
4182 for (temp = insns; temp; temp = NEXT_INSN (temp))
4183 set_block_for_insn (temp, bb);
4185 /* If we can't make the auto-inc, or can't make the
4186 replacement into Y, exit. There's no point in making
4187 the change below if we can't do the auto-inc and doing
4188 so is not correct in the pre-inc case. */
4190 validate_change (insn, &XEXP (x, 0),
4191 gen_rtx_fmt_e (inc_code, Pmode, q),
4193 validate_change (incr, &XEXP (y, 0), q, 1);
4194 if (! apply_change_group ())
4197 /* We now know we'll be doing this change, so emit the
4198 new insn(s) and do the updates. */
4199 emit_insns_before (insns, insn);
4201 if (BLOCK_FOR_INSN (insn)->head == insn)
4202 BLOCK_FOR_INSN (insn)->head = insns;
4204 /* INCR will become a NOTE and INSN won't contain a
4205 use of ADDR. If a use of ADDR was just placed in
4206 the insn before INSN, make that the next use.
4207 Otherwise, invalidate it. */
4208 if (GET_CODE (PREV_INSN (insn)) == INSN
4209 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4210 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4211 reg_next_use[regno] = PREV_INSN (insn);
4213 reg_next_use[regno] = 0;
4218 /* REGNO is now used in INCR which is below INSN, but
4219 it previously wasn't live here. If we don't mark
4220 it as needed, we'll put a REG_DEAD note for it
4221 on this insn, which is incorrect. */
4222 SET_REGNO_REG_SET (needed, regno);
4224 /* If there are any calls between INSN and INCR, show
4225 that REGNO now crosses them. */
4226 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4227 if (GET_CODE (temp) == CALL_INSN)
4228 REG_N_CALLS_CROSSED (regno)++;
4233 /* If we haven't returned, it means we were able to make the
4234 auto-inc, so update the status. First, record that this insn
4235 has an implicit side effect. */
4238 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4240 /* Modify the old increment-insn to simply copy
4241 the already-incremented value of our register. */
4242 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4245 /* If that makes it a no-op (copying the register into itself) delete
4246 it so it won't appear to be a "use" and a "set" of this
4248 if (SET_DEST (set) == addr)
4250 PUT_CODE (incr, NOTE);
4251 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4252 NOTE_SOURCE_FILE (incr) = 0;
4255 if (regno >= FIRST_PSEUDO_REGISTER)
4257 /* Count an extra reference to the reg. When a reg is
4258 incremented, spilling it is worse, so we want to make
4259 that less likely. */
4260 REG_N_REFS (regno) += loop_depth + 1;
4262 /* Count the increment as a setting of the register,
4263 even though it isn't a SET in rtl. */
4264 REG_N_SETS (regno)++;
4269 #endif /* AUTO_INC_DEC */
4271 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4272 This is done assuming the registers needed from X
4273 are those that have 1-bits in NEEDED.
4275 FLAGS is the set of enabled operations.
4277 INSN is the containing instruction. If INSN is dead, this function is not
4281 mark_used_regs (needed, live, x, flags, insn)
4288 register RTX_CODE code;
4293 code = GET_CODE (x);
4313 /* If we are clobbering a MEM, mark any registers inside the address
4315 if (GET_CODE (XEXP (x, 0)) == MEM)
4316 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4320 /* Don't bother watching stores to mems if this is not the
4321 final pass. We'll not be deleting dead stores this round. */
4322 if (flags & PROP_SCAN_DEAD_CODE)
4324 /* Invalidate the data for the last MEM stored, but only if MEM is
4325 something that can be stored into. */
4326 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4327 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4328 ; /* needn't clear the memory set list */
4331 rtx temp = mem_set_list;
4332 rtx prev = NULL_RTX;
4337 next = XEXP (temp, 1);
4338 if (anti_dependence (XEXP (temp, 0), x))
4340 /* Splice temp out of the list. */
4342 XEXP (prev, 1) = next;
4344 mem_set_list = next;
4345 free_EXPR_LIST_node (temp);
4353 /* If the memory reference had embedded side effects (autoincrement
4354 address modes. Then we may need to kill some entries on the
4357 invalidate_mems_from_autoinc (insn);
4361 if (flags & PROP_AUTOINC)
4362 find_auto_inc (needed, x, insn);
4367 if (GET_CODE (SUBREG_REG (x)) == REG
4368 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4369 && (GET_MODE_SIZE (GET_MODE (x))
4370 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4371 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4373 /* While we're here, optimize this case. */
4376 /* In case the SUBREG is not of a register, don't optimize */
4377 if (GET_CODE (x) != REG)
4379 mark_used_regs (needed, live, x, flags, insn);
4383 /* ... fall through ... */
4386 /* See a register other than being set
4387 => mark it as needed. */
4391 int some_needed = REGNO_REG_SET_P (needed, regno);
4392 int some_not_needed = ! some_needed;
4394 SET_REGNO_REG_SET (live, regno);
4396 /* A hard reg in a wide mode may really be multiple registers.
4397 If so, mark all of them just like the first. */
4398 if (regno < FIRST_PSEUDO_REGISTER)
4402 /* For stack ptr or fixed arg pointer,
4403 nothing below can be necessary, so waste no more time. */
4404 if (regno == STACK_POINTER_REGNUM
4405 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4406 || (regno == HARD_FRAME_POINTER_REGNUM
4407 && (! reload_completed || frame_pointer_needed))
4409 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4410 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4412 || (regno == FRAME_POINTER_REGNUM
4413 && (! reload_completed || frame_pointer_needed)))
4415 /* If this is a register we are going to try to eliminate,
4416 don't mark it live here. If we are successful in
4417 eliminating it, it need not be live unless it is used for
4418 pseudos, in which case it will have been set live when
4419 it was allocated to the pseudos. If the register will not
4420 be eliminated, reload will set it live at that point. */
4422 if ((flags & PROP_REG_INFO)
4423 && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4424 regs_ever_live[regno] = 1;
4427 /* No death notes for global register variables;
4428 their values are live after this function exits. */
4429 if (global_regs[regno])
4431 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4432 reg_next_use[regno] = insn;
4436 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4439 int regno_n = regno + n;
4440 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4442 SET_REGNO_REG_SET (live, regno_n);
4443 some_needed |= needed_regno;
4444 some_not_needed |= ! needed_regno;
4448 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4450 /* Record where each reg is used, so when the reg
4451 is set we know the next insn that uses it. */
4453 reg_next_use[regno] = insn;
4455 if (flags & PROP_REG_INFO)
4457 if (regno < FIRST_PSEUDO_REGISTER)
4459 /* If a hard reg is being used,
4460 record that this function does use it. */
4462 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4466 regs_ever_live[regno + --i] = 1;
4471 /* Keep track of which basic block each reg appears in. */
4473 register int blocknum = BLOCK_NUM (insn);
4475 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4476 REG_BASIC_BLOCK (regno) = blocknum;
4477 else if (REG_BASIC_BLOCK (regno) != blocknum)
4478 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4480 /* Count (weighted) number of uses of each reg. */
4482 REG_N_REFS (regno) += loop_depth + 1;
4486 /* Record and count the insns in which a reg dies.
4487 If it is used in this insn and was dead below the insn
4488 then it dies in this insn. If it was set in this insn,
4489 we do not make a REG_DEAD note; likewise if we already
4490 made such a note. */
4492 if (flags & PROP_DEATH_NOTES)
4495 && ! dead_or_set_p (insn, x)
4497 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4501 /* Check for the case where the register dying partially
4502 overlaps the register set by this insn. */
4503 if (regno < FIRST_PSEUDO_REGISTER
4504 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4506 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4508 some_needed |= dead_or_set_regno_p (insn, regno + n);
4511 /* If none of the words in X is needed, make a REG_DEAD
4512 note. Otherwise, we must make partial REG_DEAD notes. */
4516 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4517 REG_N_DEATHS (regno)++;
4523 /* Don't make a REG_DEAD note for a part of a register
4524 that is set in the insn. */
4526 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4528 if (!REGNO_REG_SET_P (needed, regno + i)
4529 && ! dead_or_set_regno_p (insn, regno + i))
4532 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4543 register rtx testreg = SET_DEST (x);
4546 /* If storing into MEM, don't show it as being used. But do
4547 show the address as being used. */
4548 if (GET_CODE (testreg) == MEM)
4551 if (flags & PROP_AUTOINC)
4552 find_auto_inc (needed, testreg, insn);
4554 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4555 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4559 /* Storing in STRICT_LOW_PART is like storing in a reg
4560 in that this SET might be dead, so ignore it in TESTREG.
4561 but in some other ways it is like using the reg.
4563 Storing in a SUBREG or a bit field is like storing the entire
4564 register in that if the register's value is not used
4565 then this SET is not needed. */
4566 while (GET_CODE (testreg) == STRICT_LOW_PART
4567 || GET_CODE (testreg) == ZERO_EXTRACT
4568 || GET_CODE (testreg) == SIGN_EXTRACT
4569 || GET_CODE (testreg) == SUBREG)
4571 if (GET_CODE (testreg) == SUBREG
4572 && GET_CODE (SUBREG_REG (testreg)) == REG
4573 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4574 && (GET_MODE_SIZE (GET_MODE (testreg))
4575 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4576 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4578 /* Modifying a single register in an alternate mode
4579 does not use any of the old value. But these other
4580 ways of storing in a register do use the old value. */
4581 if (GET_CODE (testreg) == SUBREG
4582 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4587 testreg = XEXP (testreg, 0);
4590 /* If this is a store into a register,
4591 recursively scan the value being stored. */
4593 if ((GET_CODE (testreg) == PARALLEL
4594 && GET_MODE (testreg) == BLKmode)
4595 || (GET_CODE (testreg) == REG
4596 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4597 && (! reload_completed || frame_pointer_needed)))
4598 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4599 && ! (regno == HARD_FRAME_POINTER_REGNUM
4600 && (! reload_completed || frame_pointer_needed))
4602 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4603 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4606 /* We used to exclude global_regs here, but that seems wrong.
4607 Storing in them is like storing in mem. */
4609 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4611 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4618 case UNSPEC_VOLATILE:
4622 /* Traditional and volatile asm instructions must be considered to use
4623 and clobber all hard registers, all pseudo-registers and all of
4624 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4626 Consider for instance a volatile asm that changes the fpu rounding
4627 mode. An insn should not be moved across this even if it only uses
4628 pseudo-regs because it might give an incorrectly rounded result.
4630 ?!? Unfortunately, marking all hard registers as live causes massive
4631 problems for the register allocator and marking all pseudos as live
4632 creates mountains of uninitialized variable warnings.
4634 So for now, just clear the memory set list and mark any regs
4635 we can find in ASM_OPERANDS as used. */
4636 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4637 free_EXPR_LIST_list (&mem_set_list);
4639 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4640 We can not just fall through here since then we would be confused
4641 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4642 traditional asms unlike their normal usage. */
4643 if (code == ASM_OPERANDS)
4647 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4648 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4659 /* Recursively scan the operands of this expression. */
4662 register const char *fmt = GET_RTX_FORMAT (code);
4665 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4669 /* Tail recursive case: save a function call level. */
4675 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4677 else if (fmt[i] == 'E')
4680 for (j = 0; j < XVECLEN (x, i); j++)
4681 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4690 try_pre_increment_1 (insn)
4693 /* Find the next use of this reg. If in same basic block,
4694 make it do pre-increment or pre-decrement if appropriate. */
4695 rtx x = single_set (insn);
4696 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4697 * INTVAL (XEXP (SET_SRC (x), 1)));
4698 int regno = REGNO (SET_DEST (x));
4699 rtx y = reg_next_use[regno];
4701 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4702 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4703 mode would be better. */
4704 && ! dead_or_set_p (y, SET_DEST (x))
4705 && try_pre_increment (y, SET_DEST (x), amount))
4707 /* We have found a suitable auto-increment
4708 and already changed insn Y to do it.
4709 So flush this increment-instruction. */
4710 PUT_CODE (insn, NOTE);
4711 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4712 NOTE_SOURCE_FILE (insn) = 0;
4713 /* Count a reference to this reg for the increment
4714 insn we are deleting. When a reg is incremented.
4715 spilling it is worse, so we want to make that
4717 if (regno >= FIRST_PSEUDO_REGISTER)
4719 REG_N_REFS (regno) += loop_depth + 1;
4720 REG_N_SETS (regno)++;
4727 /* Try to change INSN so that it does pre-increment or pre-decrement
4728 addressing on register REG in order to add AMOUNT to REG.
4729 AMOUNT is negative for pre-decrement.
4730 Returns 1 if the change could be made.
4731 This checks all about the validity of the result of modifying INSN. */
4734 try_pre_increment (insn, reg, amount)
4736 HOST_WIDE_INT amount;
4740 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4741 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4743 /* Nonzero if we can try to make a post-increment or post-decrement.
4744 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4745 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4746 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4749 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4752 /* From the sign of increment, see which possibilities are conceivable
4753 on this target machine. */
4754 if (HAVE_PRE_INCREMENT && amount > 0)
4756 if (HAVE_POST_INCREMENT && amount > 0)
4759 if (HAVE_PRE_DECREMENT && amount < 0)
4761 if (HAVE_POST_DECREMENT && amount < 0)
4764 if (! (pre_ok || post_ok))
4767 /* It is not safe to add a side effect to a jump insn
4768 because if the incremented register is spilled and must be reloaded
4769 there would be no way to store the incremented value back in memory. */
4771 if (GET_CODE (insn) == JUMP_INSN)
4776 use = find_use_as_address (PATTERN (insn), reg, 0);
4777 if (post_ok && (use == 0 || use == (rtx) 1))
4779 use = find_use_as_address (PATTERN (insn), reg, -amount);
4783 if (use == 0 || use == (rtx) 1)
4786 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4789 /* See if this combination of instruction and addressing mode exists. */
4790 if (! validate_change (insn, &XEXP (use, 0),
4791 gen_rtx_fmt_e (amount > 0
4792 ? (do_post ? POST_INC : PRE_INC)
4793 : (do_post ? POST_DEC : PRE_DEC),
4797 /* Record that this insn now has an implicit side effect on X. */
4798 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4802 #endif /* AUTO_INC_DEC */
4804 /* Find the place in the rtx X where REG is used as a memory address.
4805 Return the MEM rtx that so uses it.
4806 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4807 (plus REG (const_int PLUSCONST)).
4809 If such an address does not appear, return 0.
4810 If REG appears more than once, or is used other than in such an address,
4814 find_use_as_address (x, reg, plusconst)
4817 HOST_WIDE_INT plusconst;
4819 enum rtx_code code = GET_CODE (x);
4820 const char *fmt = GET_RTX_FORMAT (code);
4822 register rtx value = 0;
4825 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4828 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4829 && XEXP (XEXP (x, 0), 0) == reg
4830 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4831 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4834 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4836 /* If REG occurs inside a MEM used in a bit-field reference,
4837 that is unacceptable. */
4838 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4839 return (rtx) (HOST_WIDE_INT) 1;
4843 return (rtx) (HOST_WIDE_INT) 1;
4845 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4849 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4853 return (rtx) (HOST_WIDE_INT) 1;
4855 else if (fmt[i] == 'E')
4858 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4860 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4864 return (rtx) (HOST_WIDE_INT) 1;
4872 /* Write information about registers and basic blocks into FILE.
4873 This is part of making a debugging dump. */
4876 dump_regset (r, outf)
4883 fputs (" (nil)", outf);
4887 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4889 fprintf (outf, " %d", i);
4890 if (i < FIRST_PSEUDO_REGISTER)
4891 fprintf (outf, " [%s]",
4900 dump_regset (r, stderr);
4901 putc ('\n', stderr);
4905 dump_flow_info (file)
4909 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4911 fprintf (file, "%d registers.\n", max_regno);
4912 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4915 enum reg_class class, altclass;
4916 fprintf (file, "\nRegister %d used %d times across %d insns",
4917 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4918 if (REG_BASIC_BLOCK (i) >= 0)
4919 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4921 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4922 (REG_N_SETS (i) == 1) ? "" : "s");
4923 if (REG_USERVAR_P (regno_reg_rtx[i]))
4924 fprintf (file, "; user var");
4925 if (REG_N_DEATHS (i) != 1)
4926 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4927 if (REG_N_CALLS_CROSSED (i) == 1)
4928 fprintf (file, "; crosses 1 call");
4929 else if (REG_N_CALLS_CROSSED (i))
4930 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4931 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4932 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4933 class = reg_preferred_class (i);
4934 altclass = reg_alternate_class (i);
4935 if (class != GENERAL_REGS || altclass != ALL_REGS)
4937 if (altclass == ALL_REGS || class == ALL_REGS)
4938 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4939 else if (altclass == NO_REGS)
4940 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4942 fprintf (file, "; pref %s, else %s",
4943 reg_class_names[(int) class],
4944 reg_class_names[(int) altclass]);
4946 if (REGNO_POINTER_FLAG (i))
4947 fprintf (file, "; pointer");
4948 fprintf (file, ".\n");
4951 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4952 for (i = 0; i < n_basic_blocks; i++)
4954 register basic_block bb = BASIC_BLOCK (i);
4957 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
4958 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
4960 fprintf (file, "Predecessors: ");
4961 for (e = bb->pred; e ; e = e->pred_next)
4962 dump_edge_info (file, e, 0);
4964 fprintf (file, "\nSuccessors: ");
4965 for (e = bb->succ; e ; e = e->succ_next)
4966 dump_edge_info (file, e, 1);
4968 fprintf (file, "\nRegisters live at start:");
4969 dump_regset (bb->global_live_at_start, file);
4971 fprintf (file, "\nRegisters live at end:");
4972 dump_regset (bb->global_live_at_end, file);
4983 dump_flow_info (stderr);
4987 dump_edge_info (file, e, do_succ)
4992 basic_block side = (do_succ ? e->dest : e->src);
4994 if (side == ENTRY_BLOCK_PTR)
4995 fputs (" ENTRY", file);
4996 else if (side == EXIT_BLOCK_PTR)
4997 fputs (" EXIT", file);
4999 fprintf (file, " %d", side->index);
5003 static const char * const bitnames[] = {
5004 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5007 int i, flags = e->flags;
5011 for (i = 0; flags; i++)
5012 if (flags & (1 << i))
5018 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5019 fputs (bitnames[i], file);
5021 fprintf (file, "%d", i);
5029 /* Print out one basic block with live information at start and end. */
5039 fprintf (outf, ";; Basic block %d, loop depth %d",
5040 bb->index, bb->loop_depth - 1);
5041 if (bb->eh_beg != -1 || bb->eh_end != -1)
5042 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5045 fputs (";; Predecessors: ", outf);
5046 for (e = bb->pred; e ; e = e->pred_next)
5047 dump_edge_info (outf, e, 0);
5050 fputs (";; Registers live at start:", outf);
5051 dump_regset (bb->global_live_at_start, outf);
5054 for (insn = bb->head, last = NEXT_INSN (bb->end);
5056 insn = NEXT_INSN (insn))
5057 print_rtl_single (outf, insn);
5059 fputs (";; Registers live at end:", outf);
5060 dump_regset (bb->global_live_at_end, outf);
5063 fputs (";; Successors: ", outf);
5064 for (e = bb->succ; e; e = e->succ_next)
5065 dump_edge_info (outf, e, 1);
5073 dump_bb (bb, stderr);
5080 dump_bb (BASIC_BLOCK(n), stderr);
5083 /* Like print_rtl, but also print out live information for the start of each
5087 print_rtl_with_bb (outf, rtx_first)
5091 register rtx tmp_rtx;
5094 fprintf (outf, "(nil)\n");
5098 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5099 int max_uid = get_max_uid ();
5100 basic_block *start = (basic_block *)
5101 xcalloc (max_uid, sizeof (basic_block));
5102 basic_block *end = (basic_block *)
5103 xcalloc (max_uid, sizeof (basic_block));
5104 enum bb_state *in_bb_p = (enum bb_state *)
5105 xcalloc (max_uid, sizeof (enum bb_state));
5107 for (i = n_basic_blocks - 1; i >= 0; i--)
5109 basic_block bb = BASIC_BLOCK (i);
5112 start[INSN_UID (bb->head)] = bb;
5113 end[INSN_UID (bb->end)] = bb;
5114 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5116 enum bb_state state = IN_MULTIPLE_BB;
5117 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5119 in_bb_p[INSN_UID(x)] = state;
5126 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5131 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5133 fprintf (outf, ";; Start of basic block %d, registers live:",
5135 dump_regset (bb->global_live_at_start, outf);
5139 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5140 && GET_CODE (tmp_rtx) != NOTE
5141 && GET_CODE (tmp_rtx) != BARRIER)
5142 fprintf (outf, ";; Insn is not within a basic block\n");
5143 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5144 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5146 did_output = print_rtl_single (outf, tmp_rtx);
5148 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5149 fprintf (outf, ";; End of basic block %d\n", bb->index);
5160 if (current_function_epilogue_delay_list != 0)
5162 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5163 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5164 tmp_rtx = XEXP (tmp_rtx, 1))
5165 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5169 /* Compute dominator relationships using new flow graph structures. */
5171 compute_flow_dominators (dominators, post_dominators)
5172 sbitmap *dominators;
5173 sbitmap *post_dominators;
5176 sbitmap *temp_bitmap;
5178 basic_block *worklist, *tos;
5180 /* Allocate a worklist array/queue. Entries are only added to the
5181 list if they were not already on the list. So the size is
5182 bounded by the number of basic blocks. */
5183 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5186 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5187 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5191 /* The optimistic setting of dominators requires us to put every
5192 block on the work list initially. */
5193 for (bb = 0; bb < n_basic_blocks; bb++)
5195 *tos++ = BASIC_BLOCK (bb);
5196 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5199 /* We want a maximal solution, so initially assume everything dominates
5201 sbitmap_vector_ones (dominators, n_basic_blocks);
5203 /* Mark successors of the entry block so we can identify them below. */
5204 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5205 e->dest->aux = ENTRY_BLOCK_PTR;
5207 /* Iterate until the worklist is empty. */
5208 while (tos != worklist)
5210 /* Take the first entry off the worklist. */
5211 basic_block b = *--tos;
5214 /* Compute the intersection of the dominators of all the
5217 If one of the predecessor blocks is the ENTRY block, then the
5218 intersection of the dominators of the predecessor blocks is
5219 defined as the null set. We can identify such blocks by the
5220 special value in the AUX field in the block structure. */
5221 if (b->aux == ENTRY_BLOCK_PTR)
5223 /* Do not clear the aux field for blocks which are
5224 successors of the ENTRY block. That way we we never
5225 add them to the worklist again.
5227 The intersect of dominators of the preds of this block is
5228 defined as the null set. */
5229 sbitmap_zero (temp_bitmap[bb]);
5233 /* Clear the aux field of this block so it can be added to
5234 the worklist again if necessary. */
5236 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5239 /* Make sure each block always dominates itself. */
5240 SET_BIT (temp_bitmap[bb], bb);
5242 /* If the out state of this block changed, then we need to
5243 add the successors of this block to the worklist if they
5244 are not already on the worklist. */
5245 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5247 for (e = b->succ; e; e = e->succ_next)
5249 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5259 if (post_dominators)
5261 /* The optimistic setting of dominators requires us to put every
5262 block on the work list initially. */
5263 for (bb = 0; bb < n_basic_blocks; bb++)
5265 *tos++ = BASIC_BLOCK (bb);
5266 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5269 /* We want a maximal solution, so initially assume everything post
5270 dominates everything else. */
5271 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5273 /* Mark predecessors of the exit block so we can identify them below. */
5274 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5275 e->src->aux = EXIT_BLOCK_PTR;
5277 /* Iterate until the worklist is empty. */
5278 while (tos != worklist)
5280 /* Take the first entry off the worklist. */
5281 basic_block b = *--tos;
5284 /* Compute the intersection of the post dominators of all the
5287 If one of the successor blocks is the EXIT block, then the
5288 intersection of the dominators of the successor blocks is
5289 defined as the null set. We can identify such blocks by the
5290 special value in the AUX field in the block structure. */
5291 if (b->aux == EXIT_BLOCK_PTR)
5293 /* Do not clear the aux field for blocks which are
5294 predecessors of the EXIT block. That way we we never
5295 add them to the worklist again.
5297 The intersect of dominators of the succs of this block is
5298 defined as the null set. */
5299 sbitmap_zero (temp_bitmap[bb]);
5303 /* Clear the aux field of this block so it can be added to
5304 the worklist again if necessary. */
5306 sbitmap_intersection_of_succs (temp_bitmap[bb],
5307 post_dominators, bb);
5310 /* Make sure each block always post dominates itself. */
5311 SET_BIT (temp_bitmap[bb], bb);
5313 /* If the out state of this block changed, then we need to
5314 add the successors of this block to the worklist if they
5315 are not already on the worklist. */
5316 if (sbitmap_a_and_b (post_dominators[bb],
5317 post_dominators[bb],
5320 for (e = b->pred; e; e = e->pred_next)
5322 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5334 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5337 compute_immediate_dominators (idom, dominators)
5339 sbitmap *dominators;
5344 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5346 /* Begin with tmp(n) = dom(n) - { n }. */
5347 for (b = n_basic_blocks; --b >= 0; )
5349 sbitmap_copy (tmp[b], dominators[b]);
5350 RESET_BIT (tmp[b], b);
5353 /* Subtract out all of our dominator's dominators. */
5354 for (b = n_basic_blocks; --b >= 0; )
5356 sbitmap tmp_b = tmp[b];
5359 for (s = n_basic_blocks; --s >= 0; )
5360 if (TEST_BIT (tmp_b, s))
5361 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5364 /* Find the one bit set in the bitmap and put it in the output array. */
5365 for (b = n_basic_blocks; --b >= 0; )
5368 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5371 sbitmap_vector_free (tmp);
5374 /* Count for a single SET rtx, X. */
5377 count_reg_sets_1 (x)
5381 register rtx reg = SET_DEST (x);
5383 /* Find the register that's set/clobbered. */
5384 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5385 || GET_CODE (reg) == SIGN_EXTRACT
5386 || GET_CODE (reg) == STRICT_LOW_PART)
5387 reg = XEXP (reg, 0);
5389 if (GET_CODE (reg) == PARALLEL
5390 && GET_MODE (reg) == BLKmode)
5393 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5394 count_reg_sets_1 (XVECEXP (reg, 0, i));
5398 if (GET_CODE (reg) == REG)
5400 regno = REGNO (reg);
5401 if (regno >= FIRST_PSEUDO_REGISTER)
5403 /* Count (weighted) references, stores, etc. This counts a
5404 register twice if it is modified, but that is correct. */
5405 REG_N_SETS (regno)++;
5406 REG_N_REFS (regno) += loop_depth + 1;
5411 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5412 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5418 register RTX_CODE code = GET_CODE (x);
5420 if (code == SET || code == CLOBBER)
5421 count_reg_sets_1 (x);
5422 else if (code == PARALLEL)
5425 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5427 code = GET_CODE (XVECEXP (x, 0, i));
5428 if (code == SET || code == CLOBBER)
5429 count_reg_sets_1 (XVECEXP (x, 0, i));
5434 /* Increment REG_N_REFS by the current loop depth each register reference
5438 count_reg_references (x)
5441 register RTX_CODE code;
5444 code = GET_CODE (x);
5464 /* If we are clobbering a MEM, mark any registers inside the address
5466 if (GET_CODE (XEXP (x, 0)) == MEM)
5467 count_reg_references (XEXP (XEXP (x, 0), 0));
5471 /* While we're here, optimize this case. */
5474 /* In case the SUBREG is not of a register, don't optimize */
5475 if (GET_CODE (x) != REG)
5477 count_reg_references (x);
5481 /* ... fall through ... */
5484 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5485 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5490 register rtx testreg = SET_DEST (x);
5493 /* If storing into MEM, don't show it as being used. But do
5494 show the address as being used. */
5495 if (GET_CODE (testreg) == MEM)
5497 count_reg_references (XEXP (testreg, 0));
5498 count_reg_references (SET_SRC (x));
5502 /* Storing in STRICT_LOW_PART is like storing in a reg
5503 in that this SET might be dead, so ignore it in TESTREG.
5504 but in some other ways it is like using the reg.
5506 Storing in a SUBREG or a bit field is like storing the entire
5507 register in that if the register's value is not used
5508 then this SET is not needed. */
5509 while (GET_CODE (testreg) == STRICT_LOW_PART
5510 || GET_CODE (testreg) == ZERO_EXTRACT
5511 || GET_CODE (testreg) == SIGN_EXTRACT
5512 || GET_CODE (testreg) == SUBREG)
5514 /* Modifying a single register in an alternate mode
5515 does not use any of the old value. But these other
5516 ways of storing in a register do use the old value. */
5517 if (GET_CODE (testreg) == SUBREG
5518 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5523 testreg = XEXP (testreg, 0);
5526 /* If this is a store into a register,
5527 recursively scan the value being stored. */
5529 if ((GET_CODE (testreg) == PARALLEL
5530 && GET_MODE (testreg) == BLKmode)
5531 || GET_CODE (testreg) == REG)
5533 count_reg_references (SET_SRC (x));
5535 count_reg_references (SET_DEST (x));
5545 /* Recursively scan the operands of this expression. */
5548 register const char *fmt = GET_RTX_FORMAT (code);
5551 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5555 /* Tail recursive case: save a function call level. */
5561 count_reg_references (XEXP (x, i));
5563 else if (fmt[i] == 'E')
5566 for (j = 0; j < XVECLEN (x, i); j++)
5567 count_reg_references (XVECEXP (x, i, j));
5573 /* Recompute register set/reference counts immediately prior to register
5576 This avoids problems with set/reference counts changing to/from values
5577 which have special meanings to the register allocators.
5579 Additionally, the reference counts are the primary component used by the
5580 register allocators to prioritize pseudos for allocation to hard regs.
5581 More accurate reference counts generally lead to better register allocation.
5583 F is the first insn to be scanned.
5585 LOOP_STEP denotes how much loop_depth should be incremented per
5586 loop nesting level in order to increase the ref count more for
5587 references in a loop.
5589 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5590 possibly other information which is used by the register allocators. */
5593 recompute_reg_usage (f, loop_step)
5594 rtx f ATTRIBUTE_UNUSED;
5595 int loop_step ATTRIBUTE_UNUSED;
5601 /* Clear out the old data. */
5602 max_reg = max_reg_num ();
5603 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5609 /* Scan each insn in the chain and count how many times each register is
5611 for (index = 0; index < n_basic_blocks; index++)
5613 basic_block bb = BASIC_BLOCK (index);
5614 loop_depth = bb->loop_depth;
5615 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5617 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5621 /* This call will increment REG_N_SETS for each SET or CLOBBER
5622 of a register in INSN. It will also increment REG_N_REFS
5623 by the loop depth for each set of a register in INSN. */
5624 count_reg_sets (PATTERN (insn));
5626 /* count_reg_sets does not detect autoincrement address modes, so
5627 detect them here by looking at the notes attached to INSN. */
5628 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5630 if (REG_NOTE_KIND (links) == REG_INC)
5631 /* Count (weighted) references, stores, etc. This counts a
5632 register twice if it is modified, but that is correct. */
5633 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5636 /* This call will increment REG_N_REFS by the current loop depth for
5637 each reference to a register in INSN. */
5638 count_reg_references (PATTERN (insn));
5640 /* count_reg_references will not include counts for arguments to
5641 function calls, so detect them here by examining the
5642 CALL_INSN_FUNCTION_USAGE data. */
5643 if (GET_CODE (insn) == CALL_INSN)
5647 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5649 note = XEXP (note, 1))
5650 if (GET_CODE (XEXP (note, 0)) == USE)
5651 count_reg_references (XEXP (XEXP (note, 0), 0));
5654 if (insn == bb->end)
5660 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5661 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5662 of the number of registers that died. */
5665 count_or_remove_death_notes (blocks, kill)
5671 for (i = n_basic_blocks - 1; i >= 0; --i)
5676 if (blocks && ! TEST_BIT (blocks, i))
5679 bb = BASIC_BLOCK (i);
5681 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5683 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5685 rtx *pprev = ®_NOTES (insn);
5690 switch (REG_NOTE_KIND (link))
5693 if (GET_CODE (XEXP (link, 0)) == REG)
5695 rtx reg = XEXP (link, 0);
5698 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5701 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5709 rtx next = XEXP (link, 1);
5710 free_EXPR_LIST_node (link);
5711 *pprev = link = next;
5717 pprev = &XEXP (link, 1);
5724 if (insn == bb->end)
5732 /* Record INSN's block as BB. */
5735 set_block_for_insn (insn, bb)
5739 size_t uid = INSN_UID (insn);
5740 if (uid >= basic_block_for_insn->num_elements)
5744 /* Add one-eighth the size so we don't keep calling xrealloc. */
5745 new_size = uid + (uid + 7) / 8;
5747 VARRAY_GROW (basic_block_for_insn, new_size);
5749 VARRAY_BB (basic_block_for_insn, uid) = bb;
5752 /* Record INSN's block number as BB. */
5753 /* ??? This has got to go. */
5756 set_block_num (insn, bb)
5760 set_block_for_insn (insn, BASIC_BLOCK (bb));
5763 /* Verify the CFG consistency. This function check some CFG invariants and
5764 aborts when something is wrong. Hope that this function will help to
5765 convert many optimization passes to preserve CFG consistent.
5767 Currently it does following checks:
5769 - test head/end pointers
5770 - overlapping of basic blocks
5771 - edge list corectness
5772 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5773 - tails of basic blocks (ensure that boundary is necesary)
5774 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5775 and NOTE_INSN_BASIC_BLOCK
5776 - check that all insns are in the basic blocks
5777 (except the switch handling code, barriers and notes)
5779 In future it can be extended check a lot of other stuff as well
5780 (reachability of basic blocks, life information, etc. etc.). */
5785 const int max_uid = get_max_uid ();
5786 const rtx rtx_first = get_insns ();
5787 basic_block *bb_info;
5791 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5793 /* First pass check head/end pointers and set bb_info array used by
5795 for (i = n_basic_blocks - 1; i >= 0; i--)
5797 basic_block bb = BASIC_BLOCK (i);
5799 /* Check the head pointer and make sure that it is pointing into
5801 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5806 error ("Head insn %d for block %d not found in the insn stream.",
5807 INSN_UID (bb->head), bb->index);
5811 /* Check the end pointer and make sure that it is pointing into
5813 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5815 if (bb_info[INSN_UID (x)] != NULL)
5817 error ("Insn %d is in multiple basic blocks (%d and %d)",
5818 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5821 bb_info[INSN_UID (x)] = bb;
5828 error ("End insn %d for block %d not found in the insn stream.",
5829 INSN_UID (bb->end), bb->index);
5834 /* Now check the basic blocks (boundaries etc.) */
5835 for (i = n_basic_blocks - 1; i >= 0; i--)
5837 basic_block bb = BASIC_BLOCK (i);
5838 /* Check corectness of edge lists */
5846 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5848 fprintf (stderr, "Predecessor: ");
5849 dump_edge_info (stderr, e, 0);
5850 fprintf (stderr, "\nSuccessor: ");
5851 dump_edge_info (stderr, e, 1);
5855 if (e->dest != EXIT_BLOCK_PTR)
5857 edge e2 = e->dest->pred;
5858 while (e2 && e2 != e)
5862 error ("Basic block %i edge lists are corrupted", bb->index);
5874 error ("Basic block %d pred edge is corrupted", bb->index);
5875 fputs ("Predecessor: ", stderr);
5876 dump_edge_info (stderr, e, 0);
5877 fputs ("\nSuccessor: ", stderr);
5878 dump_edge_info (stderr, e, 1);
5879 fputc ('\n', stderr);
5882 if (e->src != ENTRY_BLOCK_PTR)
5884 edge e2 = e->src->succ;
5885 while (e2 && e2 != e)
5889 error ("Basic block %i edge lists are corrupted", bb->index);
5896 /* OK pointers are correct. Now check the header of basic
5897 block. It ought to contain optional CODE_LABEL followed
5898 by NOTE_BASIC_BLOCK. */
5900 if (GET_CODE (x) == CODE_LABEL)
5904 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5910 if (GET_CODE (x) != NOTE
5911 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5912 || NOTE_BASIC_BLOCK (x) != bb)
5914 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5921 /* Do checks for empty blocks here */
5928 if (GET_CODE (x) == NOTE
5929 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5931 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5932 INSN_UID (x), bb->index);
5939 if (GET_CODE (x) == JUMP_INSN
5940 || GET_CODE (x) == CODE_LABEL
5941 || GET_CODE (x) == BARRIER)
5943 error ("In basic block %d:", bb->index);
5944 fatal_insn ("Flow control insn inside a basic block", x);
5955 if (!bb_info[INSN_UID (x)])
5957 switch (GET_CODE (x))
5964 /* An addr_vec is placed outside any block block. */
5966 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5967 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5968 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5973 /* But in any case, non-deletable labels can appear anywhere. */
5977 fatal_insn ("Insn outside basic block", x);
5991 /* Functions to access an edge list with a vector representation.
5992 Enough data is kept such that given an index number, the
5993 pred and succ that edge reprsents can be determined, or
5994 given a pred and a succ, it's index number can be returned.
5995 This allows algorithms which comsume a lot of memory to
5996 represent the normally full matrix of edge (pred,succ) with a
5997 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
5998 wasted space in the client code due to sparse flow graphs. */
6000 /* This functions initializes the edge list. Basically the entire
6001 flowgraph is processed, and all edges are assigned a number,
6002 and the data structure is filed in. */
6006 struct edge_list *elist;
6012 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6016 /* Determine the number of edges in the flow graph by counting successor
6017 edges on each basic block. */
6018 for (x = 0; x < n_basic_blocks; x++)
6020 basic_block bb = BASIC_BLOCK (x);
6022 for (e = bb->succ; e; e = e->succ_next)
6025 /* Don't forget successors of the entry block. */
6026 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6029 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6030 elist->num_blocks = block_count;
6031 elist->num_edges = num_edges;
6032 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6036 /* Follow successors of the entry block, and register these edges. */
6037 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6039 elist->index_to_edge[num_edges] = e;
6043 for (x = 0; x < n_basic_blocks; x++)
6045 basic_block bb = BASIC_BLOCK (x);
6047 /* Follow all successors of blocks, and register these edges. */
6048 for (e = bb->succ; e; e = e->succ_next)
6050 elist->index_to_edge[num_edges] = e;
6057 /* This function free's memory associated with an edge list. */
6059 free_edge_list (elist)
6060 struct edge_list *elist;
6064 free (elist->index_to_edge);
6069 /* This function provides debug output showing an edge list. */
6071 print_edge_list (f, elist)
6073 struct edge_list *elist;
6076 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6077 elist->num_blocks - 2, elist->num_edges);
6079 for (x = 0; x < elist->num_edges; x++)
6081 fprintf (f, " %-4d - edge(", x);
6082 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6083 fprintf (f,"entry,");
6085 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6087 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6088 fprintf (f,"exit)\n");
6090 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6094 /* This function provides an internal consistancy check of an edge list,
6095 verifying that all edges are present, and that there are no
6098 verify_edge_list (f, elist)
6100 struct edge_list *elist;
6102 int x, pred, succ, index;
6105 for (x = 0; x < n_basic_blocks; x++)
6107 basic_block bb = BASIC_BLOCK (x);
6109 for (e = bb->succ; e; e = e->succ_next)
6111 pred = e->src->index;
6112 succ = e->dest->index;
6113 index = EDGE_INDEX (elist, e->src, e->dest);
6114 if (index == EDGE_INDEX_NO_EDGE)
6116 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6119 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6120 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6121 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6122 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6123 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6124 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6127 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6129 pred = e->src->index;
6130 succ = e->dest->index;
6131 index = EDGE_INDEX (elist, e->src, e->dest);
6132 if (index == EDGE_INDEX_NO_EDGE)
6134 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6137 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6138 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6139 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6140 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6141 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6142 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6144 /* We've verified that all the edges are in the list, no lets make sure
6145 there are no spurious edges in the list. */
6147 for (pred = 0 ; pred < n_basic_blocks; pred++)
6148 for (succ = 0 ; succ < n_basic_blocks; succ++)
6150 basic_block p = BASIC_BLOCK (pred);
6151 basic_block s = BASIC_BLOCK (succ);
6155 for (e = p->succ; e; e = e->succ_next)
6161 for (e = s->pred; e; e = e->pred_next)
6167 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6168 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6169 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6171 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6172 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6173 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6174 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6175 BASIC_BLOCK (succ)));
6177 for (succ = 0 ; succ < n_basic_blocks; succ++)
6179 basic_block p = ENTRY_BLOCK_PTR;
6180 basic_block s = BASIC_BLOCK (succ);
6184 for (e = p->succ; e; e = e->succ_next)
6190 for (e = s->pred; e; e = e->pred_next)
6196 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6197 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6198 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6200 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6201 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6202 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6203 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6204 BASIC_BLOCK (succ)));
6206 for (pred = 0 ; pred < n_basic_blocks; pred++)
6208 basic_block p = BASIC_BLOCK (pred);
6209 basic_block s = EXIT_BLOCK_PTR;
6213 for (e = p->succ; e; e = e->succ_next)
6219 for (e = s->pred; e; e = e->pred_next)
6225 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6226 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6227 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6229 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6230 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6231 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6232 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6237 /* This routine will determine what, if any, edge there is between
6238 a specified predecessor and successor. */
6241 find_edge_index (edge_list, pred, succ)
6242 struct edge_list *edge_list;
6243 basic_block pred, succ;
6246 for (x = 0; x < NUM_EDGES (edge_list); x++)
6248 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6249 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6252 return (EDGE_INDEX_NO_EDGE);
6255 /* This function will remove an edge from the flow graph. */
6260 edge last_pred = NULL;
6261 edge last_succ = NULL;
6263 basic_block src, dest;
6266 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6272 last_succ->succ_next = e->succ_next;
6274 src->succ = e->succ_next;
6276 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6282 last_pred->pred_next = e->pred_next;
6284 dest->pred = e->pred_next;
6290 /* This routine will remove any fake successor edges for a basic block.
6291 When the edge is removed, it is also removed from whatever predecessor
6294 remove_fake_successors (bb)
6298 for (e = bb->succ; e ; )
6302 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6307 /* This routine will remove all fake edges from the flow graph. If
6308 we remove all fake successors, it will automatically remove all
6309 fake predecessors. */
6311 remove_fake_edges ()
6315 for (x = 0; x < n_basic_blocks; x++)
6316 remove_fake_successors (BASIC_BLOCK (x));
6318 /* We've handled all successors except the entry block's. */
6319 remove_fake_successors (ENTRY_BLOCK_PTR);
6322 /* This functions will add a fake edge between any block which has no
6323 successors, and the exit block. Some data flow equations require these
6326 add_noreturn_fake_exit_edges ()
6330 for (x = 0; x < n_basic_blocks; x++)
6331 if (BASIC_BLOCK (x)->succ == NULL)
6332 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6335 /* Dump the list of basic blocks in the bitmap NODES. */
6337 flow_nodes_print (str, nodes, file)
6339 const sbitmap nodes;
6344 fprintf (file, "%s { ", str);
6345 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6346 fputs ("}\n", file);
6350 /* Dump the list of exiting edges in the array EDGES. */
6352 flow_exits_print (str, edges, num_edges, file)
6360 fprintf (file, "%s { ", str);
6361 for (i = 0; i < num_edges; i++)
6362 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6363 fputs ("}\n", file);
6367 /* Dump loop related CFG information. */
6369 flow_loops_cfg_dump (loops, file)
6370 const struct loops *loops;
6375 if (! loops->num || ! file || ! loops->cfg.dom)
6378 for (i = 0; i < n_basic_blocks; i++)
6382 fprintf (file, ";; %d succs { ", i);
6383 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6384 fprintf (file, "%d ", succ->dest->index);
6385 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6389 /* Dump the DFS node order. */
6390 if (loops->cfg.dfs_order)
6392 fputs (";; DFS order: ", file);
6393 for (i = 0; i < n_basic_blocks; i++)
6394 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6400 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6402 flow_loop_nested_p (outer, loop)
6406 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6410 /* Dump the loop information specified by LOOPS to the stream FILE. */
6412 flow_loops_dump (loops, file, verbose)
6413 const struct loops *loops;
6420 num_loops = loops->num;
6421 if (! num_loops || ! file)
6424 fprintf (file, ";; %d loops found, %d levels\n",
6425 num_loops, loops->levels);
6427 for (i = 0; i < num_loops; i++)
6429 struct loop *loop = &loops->array[i];
6431 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6432 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6433 loop->header->index, loop->latch->index,
6434 loop->pre_header ? loop->pre_header->index : -1,
6435 loop->depth, loop->level,
6436 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6437 fprintf (file, ";; %d", loop->num_nodes);
6438 flow_nodes_print (" nodes", loop->nodes, file);
6439 fprintf (file, ";; %d", loop->num_exits);
6440 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6446 for (j = 0; j < i; j++)
6448 struct loop *oloop = &loops->array[j];
6450 if (loop->header == oloop->header)
6455 smaller = loop->num_nodes < oloop->num_nodes;
6457 /* If the union of LOOP and OLOOP is different than
6458 the larger of LOOP and OLOOP then LOOP and OLOOP
6459 must be disjoint. */
6460 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6461 smaller ? oloop : loop);
6462 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6463 loop->header->index, i, j,
6464 disjoint ? "disjoint" : "nested");
6471 /* Print diagnostics to compare our concept of a loop with
6472 what the loop notes say. */
6473 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6474 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6475 != NOTE_INSN_LOOP_BEG)
6476 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6477 INSN_UID (PREV_INSN (loop->first->head)));
6478 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6479 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6480 != NOTE_INSN_LOOP_END)
6481 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6482 INSN_UID (NEXT_INSN (loop->last->end)));
6487 flow_loops_cfg_dump (loops, file);
6491 /* Free all the memory allocated for LOOPS. */
6493 flow_loops_free (loops)
6494 struct loops *loops;
6503 /* Free the loop descriptors. */
6504 for (i = 0; i < loops->num; i++)
6506 struct loop *loop = &loops->array[i];
6509 sbitmap_free (loop->nodes);
6513 free (loops->array);
6514 loops->array = NULL;
6517 sbitmap_vector_free (loops->cfg.dom);
6518 if (loops->cfg.dfs_order)
6519 free (loops->cfg.dfs_order);
6521 sbitmap_free (loops->shared_headers);
6526 /* Find the exits from the loop using the bitmap of loop nodes NODES
6527 and store in EXITS array. Return the number of exits from the
6530 flow_loop_exits_find (nodes, exits)
6531 const sbitmap nodes;
6540 /* Check all nodes within the loop to see if there are any
6541 successors not in the loop. Note that a node may have multiple
6544 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6545 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6547 basic_block dest = e->dest;
6549 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6557 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6559 /* Store all exiting edges into an array. */
6561 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6562 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6564 basic_block dest = e->dest;
6566 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6567 (*exits)[num_exits++] = e;
6575 /* Find the nodes contained within the loop with header HEADER and
6576 latch LATCH and store in NODES. Return the number of nodes within
6579 flow_loop_nodes_find (header, latch, nodes)
6588 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6591 /* Start with only the loop header in the set of loop nodes. */
6592 sbitmap_zero (nodes);
6593 SET_BIT (nodes, header->index);
6595 header->loop_depth++;
6597 /* Push the loop latch on to the stack. */
6598 if (! TEST_BIT (nodes, latch->index))
6600 SET_BIT (nodes, latch->index);
6601 latch->loop_depth++;
6603 stack[sp++] = latch;
6612 for (e = node->pred; e; e = e->pred_next)
6614 basic_block ancestor = e->src;
6616 /* If each ancestor not marked as part of loop, add to set of
6617 loop nodes and push on to stack. */
6618 if (ancestor != ENTRY_BLOCK_PTR
6619 && ! TEST_BIT (nodes, ancestor->index))
6621 SET_BIT (nodes, ancestor->index);
6622 ancestor->loop_depth++;
6624 stack[sp++] = ancestor;
6633 /* Compute the depth first search order and store in the array
6634 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6635 number of nodes visited. */
6637 flow_depth_first_order_compute (dfs_order)
6646 /* Allocate stack for back-tracking up CFG. */
6647 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6650 /* Allocate bitmap to track nodes that have been visited. */
6651 visited = sbitmap_alloc (n_basic_blocks);
6653 /* None of the nodes in the CFG have been visited yet. */
6654 sbitmap_zero (visited);
6656 /* Start with the first successor edge from the entry block. */
6657 e = ENTRY_BLOCK_PTR->succ;
6660 basic_block src = e->src;
6661 basic_block dest = e->dest;
6663 /* Mark that we have visited this node. */
6664 if (src != ENTRY_BLOCK_PTR)
6665 SET_BIT (visited, src->index);
6667 /* If this node has not been visited before, push the current
6668 edge on to the stack and proceed with the first successor
6669 edge of this node. */
6670 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6678 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6681 /* DEST has no successors (for example, a non-returning
6682 function is called) so do not push the current edge
6683 but carry on with its next successor. */
6684 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6685 SET_BIT (visited, dest->index);
6688 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6690 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6692 /* Pop edge off stack. */
6700 sbitmap_free (visited);
6702 /* The number of nodes visited should not be greater than
6704 if (dfsnum > n_basic_blocks)
6707 /* There are some nodes left in the CFG that are unreachable. */
6708 if (dfsnum < n_basic_blocks)
6714 /* Return the block for the pre-header of the loop with header
6715 HEADER where DOM specifies the dominator information. Return NULL if
6716 there is no pre-header. */
6718 flow_loop_pre_header_find (header, dom)
6722 basic_block pre_header;
6725 /* If block p is a predecessor of the header and is the only block
6726 that the header does not dominate, then it is the pre-header. */
6728 for (e = header->pred; e; e = e->pred_next)
6730 basic_block node = e->src;
6732 if (node != ENTRY_BLOCK_PTR
6733 && ! TEST_BIT (dom[node->index], header->index))
6735 if (pre_header == NULL)
6739 /* There are multiple edges into the header from outside
6740 the loop so there is no pre-header block. */
6750 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6751 previously added. The insertion algorithm assumes that the loops
6752 are added in the order found by a depth first search of the CFG. */
6754 flow_loop_tree_node_add (prevloop, loop)
6755 struct loop *prevloop;
6759 if (flow_loop_nested_p (prevloop, loop))
6761 prevloop->inner = loop;
6762 loop->outer = prevloop;
6766 while (prevloop->outer)
6768 if (flow_loop_nested_p (prevloop->outer, loop))
6770 prevloop->next = loop;
6771 loop->outer = prevloop->outer;
6774 prevloop = prevloop->outer;
6777 prevloop->next = loop;
6782 /* Build the loop hierarchy tree for LOOPS. */
6784 flow_loops_tree_build (loops)
6785 struct loops *loops;
6790 num_loops = loops->num;
6794 /* Root the loop hierarchy tree with the first loop found.
6795 Since we used a depth first search this should be the
6797 loops->tree = &loops->array[0];
6798 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6800 /* Add the remaining loops to the tree. */
6801 for (i = 1; i < num_loops; i++)
6802 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6806 /* Helper function to compute loop nesting depth and enclosed loop level
6807 for the natural loop specified by LOOP at the loop depth DEPTH.
6808 Returns the loop level. */
6810 flow_loop_level_compute (loop, depth)
6820 /* Traverse loop tree assigning depth and computing level as the
6821 maximum level of all the inner loops of this loop. The loop
6822 level is equivalent to the height of the loop in the loop tree
6823 and corresponds to the number of enclosed loop levels (including
6825 for (inner = loop->inner; inner; inner = inner->next)
6829 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6834 loop->level = level;
6835 loop->depth = depth;
6840 /* Compute the loop nesting depth and enclosed loop level for the loop
6841 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
6845 flow_loops_level_compute (loops)
6846 struct loops *loops;
6852 /* Traverse all the outer level loops. */
6853 for (loop = loops->tree; loop; loop = loop->next)
6855 level = flow_loop_level_compute (loop, 1);
6863 /* Find all the natural loops in the function and save in LOOPS structure
6864 and recalculate loop_depth information in basic block structures.
6865 Return the number of natural loops found. */
6868 flow_loops_find (loops)
6869 struct loops *loops;
6880 loops->array = NULL;
6884 /* Taking care of this degenerate case makes the rest of
6885 this code simpler. */
6886 if (n_basic_blocks == 0)
6889 /* Compute the dominators. */
6890 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6891 compute_flow_dominators (dom, NULL);
6893 /* Count the number of loop edges (back edges). This should be the
6894 same as the number of natural loops. Also clear the loop_depth
6895 and as we work from inner->outer in a loop nest we call
6896 find_loop_nodes_find which will increment loop_depth for nodes
6897 within the current loop, which happens to enclose inner loops. */
6900 for (b = 0; b < n_basic_blocks; b++)
6902 BASIC_BLOCK (b)->loop_depth = 0;
6903 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6905 basic_block latch = e->src;
6907 /* Look for back edges where a predecessor is dominated
6908 by this block. A natural loop has a single entry
6909 node (header) that dominates all the nodes in the
6910 loop. It also has single back edge to the header
6911 from a latch node. Note that multiple natural loops
6912 may share the same header. */
6913 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6920 /* Compute depth first search order of the CFG so that outer
6921 natural loops will be found before inner natural loops. */
6922 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6923 flow_depth_first_order_compute (dfs_order);
6925 /* Allocate loop structures. */
6927 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6929 headers = sbitmap_alloc (n_basic_blocks);
6930 sbitmap_zero (headers);
6932 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6933 sbitmap_zero (loops->shared_headers);
6935 /* Find and record information about all the natural loops
6938 for (b = 0; b < n_basic_blocks; b++)
6942 /* Search the nodes of the CFG in DFS order that we can find
6943 outer loops first. */
6944 header = BASIC_BLOCK (dfs_order[b]);
6946 /* Look for all the possible latch blocks for this header. */
6947 for (e = header->pred; e; e = e->pred_next)
6949 basic_block latch = e->src;
6951 /* Look for back edges where a predecessor is dominated
6952 by this block. A natural loop has a single entry
6953 node (header) that dominates all the nodes in the
6954 loop. It also has single back edge to the header
6955 from a latch node. Note that multiple natural loops
6956 may share the same header. */
6957 if (latch != ENTRY_BLOCK_PTR
6958 && TEST_BIT (dom[latch->index], header->index))
6962 loop = loops->array + num_loops;
6964 loop->header = header;
6965 loop->latch = latch;
6967 /* Keep track of blocks that are loop headers so
6968 that we can tell which loops should be merged. */
6969 if (TEST_BIT (headers, header->index))
6970 SET_BIT (loops->shared_headers, header->index);
6971 SET_BIT (headers, header->index);
6973 /* Find nodes contained within the loop. */
6974 loop->nodes = sbitmap_alloc (n_basic_blocks);
6976 = flow_loop_nodes_find (header, latch, loop->nodes);
6978 /* Compute first and last blocks within the loop.
6979 These are often the same as the loop header and
6980 loop latch respectively, but this is not always
6983 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
6985 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
6987 /* Find edges which exit the loop. Note that a node
6988 may have several exit edges. */
6990 = flow_loop_exits_find (loop->nodes, &loop->exits);
6992 /* Look to see if the loop has a pre-header node. */
6994 = flow_loop_pre_header_find (header, dom);
7001 /* Natural loops with shared headers may either be disjoint or
7002 nested. Disjoint loops with shared headers cannot be inner
7003 loops and should be merged. For now just mark loops that share
7005 for (i = 0; i < num_loops; i++)
7006 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7007 loops->array[i].shared = 1;
7009 sbitmap_free (headers);
7012 loops->num = num_loops;
7014 /* Save CFG derived information to avoid recomputing it. */
7015 loops->cfg.dom = dom;
7016 loops->cfg.dfs_order = dfs_order;
7018 /* Build the loop hierarchy tree. */
7019 flow_loops_tree_build (loops);
7021 /* Assign the loop nesting depth and enclosed loop level for each
7023 loops->levels = flow_loops_level_compute (loops);
7029 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7031 flow_loop_outside_edge_p (loop, e)
7032 const struct loop *loop;
7035 if (e->dest != loop->header)
7037 return (e->src == ENTRY_BLOCK_PTR)
7038 || ! TEST_BIT (loop->nodes, e->src->index);
7043 typedef struct reorder_block_def {
7046 basic_block add_jump;
7051 } *reorder_block_def;
7053 #define REORDER_BLOCK_HEAD 0x1
7054 #define REORDER_BLOCK_VISITED 0x2
7055 #define REORDER_MOVED_BLOCK_END 0x3
7057 #define REORDER_BLOCK_FLAGS(bb) \
7058 ((reorder_block_def) (bb)->aux)->flags
7060 #define REORDER_BLOCK_INDEX(bb) \
7061 ((reorder_block_def) (bb)->aux)->index
7063 #define REORDER_BLOCK_ADD_JUMP(bb) \
7064 ((reorder_block_def) (bb)->aux)->add_jump
7066 #define REORDER_BLOCK_SUCC(bb) \
7067 ((reorder_block_def) (bb)->aux)->succ
7069 #define REORDER_BLOCK_OLD_END(bb) \
7070 ((reorder_block_def) (bb)->aux)->end
7072 #define REORDER_BLOCK_BEGIN(bb) \
7073 ((reorder_block_def) (bb)->aux)->block_begin
7075 #define REORDER_BLOCK_END(bb) \
7076 ((reorder_block_def) (bb)->aux)->block_end
7079 static int reorder_index;
7080 static basic_block reorder_last_visited;
7082 enum reorder_skip_type {REORDER_SKIP_BEFORE, REORDER_SKIP_AFTER,
7083 REORDER_SKIP_BLOCK_END};
7085 static rtx skip_insns_between_block PARAMS ((basic_block,
7086 enum reorder_skip_type));
7088 /* Skip over insns BEFORE or AFTER BB which are typically associated with
7092 skip_insns_between_block (bb, skip_type)
7094 enum reorder_skip_type skip_type;
7096 rtx insn, last_insn;
7098 if (skip_type == REORDER_SKIP_BEFORE)
7100 if (bb == ENTRY_BLOCK_PTR)
7103 last_insn = bb->head;
7104 for (insn = PREV_INSN (bb->head);
7105 insn && insn != BASIC_BLOCK (bb->index - 1)->end;
7106 last_insn = insn, insn = PREV_INSN (insn))
7108 if (NEXT_INSN (insn) != last_insn)
7111 if (GET_CODE (insn) == NOTE
7112 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_END
7113 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK
7114 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
7123 last_insn = bb->end;
7125 if (bb == EXIT_BLOCK_PTR)
7128 for (insn = NEXT_INSN (bb->end);
7130 last_insn = insn, insn = NEXT_INSN (insn))
7132 if (bb->index + 1 != n_basic_blocks
7133 && insn == BASIC_BLOCK (bb->index + 1)->head)
7136 if (GET_CODE (insn) == BARRIER
7137 || GET_CODE (insn) == JUMP_INSN
7138 || (GET_CODE (insn) == NOTE
7139 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
7140 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)))
7143 if (GET_CODE (insn) == CODE_LABEL
7144 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
7145 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
7146 || GET_CODE (PATTERN
7147 (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
7149 insn = NEXT_INSN (insn);
7155 if (skip_type == REORDER_SKIP_BLOCK_END)
7157 int found_block_end = 0;
7159 for (; insn; last_insn = insn, insn = NEXT_INSN (insn))
7161 if (bb->index + 1 != n_basic_blocks
7162 && insn == BASIC_BLOCK (bb->index + 1)->head)
7165 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
7167 found_block_end = 1;
7171 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
7174 if (GET_CODE (insn) == NOTE
7175 && NOTE_LINE_NUMBER (insn) >= 0
7177 && (NOTE_LINE_NUMBER (NEXT_INSN (insn))
7178 == NOTE_INSN_BLOCK_END))
7183 if (! found_block_end)
7192 /* Return common destination for blocks BB0 and BB1. */
7195 get_common_dest (bb0, bb1)
7196 basic_block bb0, bb1;
7200 for (e0 = bb0->succ; e0; e0 = e0->succ_next)
7202 for (e1 = bb1->succ; e1; e1 = e1->succ_next)
7204 if (e0->dest == e1->dest)
7214 /* Move the destination block for edge E after chain end block CEB
7215 Adding jumps and labels is deferred until fixup_reorder_chain. */
7218 chain_reorder_blocks (e, ceb)
7222 basic_block sb = e->src;
7223 basic_block db = e->dest;
7224 rtx cebe_insn, cebbe_insn, dbh_insn, dbe_insn;
7227 enum cond_types {NO_COND, PREDICT_THEN_WITH_ELSE, PREDICT_ELSE,
7228 PREDICT_THEN_NO_ELSE, PREDICT_NOT_THEN_NO_ELSE};
7229 enum cond_types cond_type;
7230 enum cond_block_types {NO_COND_BLOCK, THEN_BLOCK, ELSE_BLOCK,
7232 enum cond_block_types cond_block_type;
7235 fprintf (rtl_dump_file,
7236 "Edge from basic block %d to basic block %d last visited %d\n",
7237 sb->index, db->index, ceb->index);
7239 dbh_insn = skip_insns_between_block (db, REORDER_SKIP_BEFORE);
7240 cebe_insn = skip_insns_between_block (ceb, REORDER_SKIP_AFTER);
7241 cebbe_insn = skip_insns_between_block (ceb, REORDER_SKIP_BLOCK_END);
7244 int block_begins = 0;
7247 for (insn = dbh_insn; insn && insn != db->end; insn = NEXT_INSN (insn))
7249 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
7255 REORDER_BLOCK_BEGIN (sb) = block_begins;
7263 for (insn = cebe_insn; insn; insn = NEXT_INSN (insn))
7265 if (PREV_INSN (insn) == cebbe_insn)
7267 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
7273 REORDER_BLOCK_END (ceb) = block_ends;
7276 /* Blocks are in original order. */
7277 if (sb->index == ceb->index
7278 && ceb->index + 1 == db->index && NEXT_INSN (cebe_insn))
7281 /* Get the type of block and type of condition. */
7282 cond_type = NO_COND;
7283 cond_block_type = NO_COND_BLOCK;
7284 if (GET_CODE (sb->end) == JUMP_INSN && ! simplejump_p (sb->end)
7285 && condjump_p (sb->end))
7287 if (e->flags & EDGE_FALLTHRU)
7288 cond_block_type = THEN_BLOCK;
7289 else if (get_common_dest (sb->succ->dest, sb))
7290 cond_block_type = NO_ELSE_BLOCK;
7292 cond_block_type = ELSE_BLOCK;
7294 if (sb->succ->succ_next
7295 && get_common_dest (sb->succ->dest, sb))
7297 if (cond_block_type == THEN_BLOCK)
7299 if (! (REORDER_BLOCK_FLAGS (sb->succ->succ_next->dest)
7300 & REORDER_BLOCK_VISITED))
7301 cond_type = PREDICT_THEN_NO_ELSE;
7303 cond_type = PREDICT_NOT_THEN_NO_ELSE;
7305 else if (cond_block_type == NO_ELSE_BLOCK)
7307 if (! (REORDER_BLOCK_FLAGS (sb->succ->dest)
7308 & REORDER_BLOCK_VISITED))
7309 cond_type = PREDICT_NOT_THEN_NO_ELSE;
7311 cond_type = PREDICT_THEN_NO_ELSE;
7316 if (cond_block_type == THEN_BLOCK)
7318 if (! (REORDER_BLOCK_FLAGS (sb->succ->succ_next->dest)
7319 & REORDER_BLOCK_VISITED))
7320 cond_type = PREDICT_THEN_WITH_ELSE;
7322 cond_type = PREDICT_ELSE;
7324 else if (cond_block_type == ELSE_BLOCK
7325 && sb->succ->dest != EXIT_BLOCK_PTR)
7327 if (! (REORDER_BLOCK_FLAGS (sb->succ->dest)
7328 & REORDER_BLOCK_VISITED))
7329 cond_type = PREDICT_ELSE;
7331 cond_type = PREDICT_THEN_WITH_ELSE;
7338 static const char * cond_type_str [] = {"not cond jump", "predict then",
7340 "predict then w/o else",
7341 "predict not then w/o else"};
7342 static const char * cond_block_type_str [] = {"not then or else block",
7345 "then w/o else block"};
7347 fprintf (rtl_dump_file, " %s (looking at %s)\n",
7348 cond_type_str[(int)cond_type],
7349 cond_block_type_str[(int)cond_block_type]);
7352 /* Reflect that then block will move and we'll jump to it. */
7353 if (cond_block_type != THEN_BLOCK
7354 && (cond_type == PREDICT_ELSE
7355 || cond_type == PREDICT_NOT_THEN_NO_ELSE))
7358 fprintf (rtl_dump_file,
7359 " then jump from block %d to block %d\n",
7360 sb->index, sb->succ->dest->index);
7362 /* Jump to reordered then block. */
7363 REORDER_BLOCK_ADD_JUMP (sb) = sb->succ->dest;
7366 /* Reflect that then block will jump back when we have no else. */
7367 if (cond_block_type != THEN_BLOCK
7368 && cond_type == PREDICT_NOT_THEN_NO_ELSE)
7370 for (ee = sb->succ->dest->succ;
7371 ee && ! (ee->flags & EDGE_FALLTHRU);
7375 if (ee && ! (GET_CODE (sb->succ->dest->end) == JUMP_INSN
7376 && ! simplejump_p (sb->succ->dest->end)))
7378 REORDER_BLOCK_ADD_JUMP (sb->succ->dest) = ee->dest;
7382 /* Reflect that else block will jump back. */
7383 if (cond_block_type == ELSE_BLOCK
7384 && (cond_type == PREDICT_THEN_WITH_ELSE || cond_type == PREDICT_ELSE))
7389 && last_edge->dest != EXIT_BLOCK_PTR
7390 && GET_CODE (last_edge->dest->head) == CODE_LABEL
7391 && ! (GET_CODE (db->end) == JUMP_INSN))
7394 fprintf (rtl_dump_file,
7395 " else jump from block %d to block %d\n",
7396 db->index, last_edge->dest->index);
7398 REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
7402 /* This block's successor has already been reordered. This can happen
7403 when we reorder a chain starting at a then or else. */
7404 for (last_edge = db->succ;
7405 last_edge && ! (last_edge->flags & EDGE_FALLTHRU);
7406 last_edge = last_edge->succ_next)
7410 && last_edge->dest != EXIT_BLOCK_PTR
7411 && (REORDER_BLOCK_FLAGS (last_edge->dest)
7412 & REORDER_BLOCK_VISITED))
7415 fprintf (rtl_dump_file,
7416 " end of chain jump from block %d to block %d\n",
7417 db->index, last_edge->dest->index);
7419 REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
7422 dbh_insn = skip_insns_between_block (db, REORDER_SKIP_BEFORE);
7423 cebe_insn = skip_insns_between_block (ceb, REORDER_SKIP_AFTER);
7424 dbe_insn = skip_insns_between_block (db, REORDER_SKIP_AFTER);
7426 /* Leave behind any lexical block markers. */
7427 if (debug_info_level > DINFO_LEVEL_TERSE
7428 && ceb->index + 1 < db->index)
7430 rtx insn, last_insn = get_last_insn ();
7431 insn = NEXT_INSN (ceb->end);
7433 insn = REORDER_BLOCK_OLD_END (ceb);
7435 if (NEXT_INSN (cebe_insn) == 0)
7436 set_last_insn (cebe_insn);
7437 for (; insn && insn != db->head/*dbh_insn*/;
7438 insn = NEXT_INSN (insn))
7440 if (GET_CODE (insn) == NOTE
7441 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
7443 cebe_insn = emit_note_after (NOTE_INSN_BLOCK_BEG, cebe_insn);
7446 if (GET_CODE (insn) == NOTE
7447 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
7449 cebe_insn = emit_note_after (NOTE_INSN_BLOCK_END, cebe_insn);
7453 set_last_insn (last_insn);
7456 /* Rechain predicted block. */
7457 NEXT_INSN (cebe_insn) = dbh_insn;
7458 PREV_INSN (dbh_insn) = cebe_insn;
7460 REORDER_BLOCK_OLD_END (db) = NEXT_INSN (dbe_insn);
7461 if (db->index != n_basic_blocks - 1)
7462 NEXT_INSN (dbe_insn) = 0;
7468 /* Reorder blocks starting at block B. */
7471 make_reorder_chain (bb)
7475 basic_block visited_edge = NULL;
7479 if (bb == EXIT_BLOCK_PTR)
7482 /* Find the most probable block. */
7484 block_end = bb->end;
7485 if (GET_CODE (block_end) == JUMP_INSN && condjump_p (block_end))
7487 rtx note = find_reg_note (block_end, REG_BR_PROB, 0);
7490 probability = XINT (XEXP (note, 0), 0);
7494 if (probability >= REG_BR_PROB_BASE / 2)
7495 e = bb->succ->succ_next;
7498 /* Add chosen successor to chain and recurse on it. */
7499 if (e && e->dest != EXIT_BLOCK_PTR
7500 && e->dest != e->src
7501 && (! (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
7502 || (REORDER_BLOCK_FLAGS (e->dest) == REORDER_BLOCK_HEAD)))
7504 if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
7506 REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_HEAD;
7507 REORDER_BLOCK_INDEX (bb) = reorder_index++;
7508 REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
7511 if (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
7512 REORDER_BLOCK_FLAGS (e->dest) &= ~REORDER_BLOCK_HEAD;
7514 REORDER_BLOCK_SUCC (bb) = e;
7516 visited_edge = e->dest;
7518 reorder_last_visited = chain_reorder_blocks (e, bb);
7521 && ! (REORDER_BLOCK_FLAGS (e->dest)
7522 & REORDER_BLOCK_VISITED))
7523 make_reorder_chain (e->dest);
7527 if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
7529 REORDER_BLOCK_INDEX (bb) = reorder_index++;
7530 REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
7534 /* Recurse on the successors. */
7535 for (e = bb->succ; e; e = e->succ_next)
7537 if (e->dest && e->dest == EXIT_BLOCK_PTR)
7541 && e->dest != e->src
7542 && e->dest != visited_edge
7543 && ! (REORDER_BLOCK_FLAGS (e->dest)
7544 & REORDER_BLOCK_VISITED))
7546 reorder_last_visited
7547 = chain_reorder_blocks (e, reorder_last_visited);
7548 make_reorder_chain (e->dest);
7554 /* Fixup jumps and labels after reordering basic blocks. */
7557 fixup_reorder_chain ()
7562 /* Set the new last insn. */
7564 i < n_basic_blocks - 1
7565 && REORDER_BLOCK_INDEX (BASIC_BLOCK (i)) != n_basic_blocks;
7569 for (insn = BASIC_BLOCK (i)->head;
7570 NEXT_INSN (insn) != 0;
7571 insn = NEXT_INSN (insn))
7574 set_last_insn (insn);
7576 /* Add jumps and labels to fixup blocks. */
7577 for (i = 0; i < n_basic_blocks - 1; i++)
7579 if (REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i)))
7581 rtx new_label = gen_label_rtx ();
7582 rtx label_insn, jump_insn, barrier_insn;
7584 label_insn = emit_label_before (new_label,
7585 REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i))->head);
7586 REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i))->head = label_insn;
7588 jump_insn = emit_jump_insn_after (gen_jump (label_insn),
7589 BASIC_BLOCK (i)->end);
7590 JUMP_LABEL (jump_insn) = label_insn;
7591 ++LABEL_NUSES (label_insn);
7592 barrier_insn = emit_barrier_after (jump_insn);
7593 if (GET_CODE (BASIC_BLOCK (i)->end) != JUMP_INSN)
7594 BASIC_BLOCK (i)->end = barrier_insn;
7595 /* Add block for jump. Typically this is when a then is not
7596 predicted and we are jumping to the moved then block. */
7601 b = (basic_block) obstack_alloc (function_obstack, sizeof (*b));
7602 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
7603 BASIC_BLOCK (n_basic_blocks - 1) = b;
7604 b->index = n_basic_blocks - 1;
7605 b->head = emit_note_before (NOTE_INSN_BASIC_BLOCK, jump_insn);
7606 NOTE_BASIC_BLOCK (b->head) = b;
7607 b->end = barrier_insn;
7610 basic_block nb = BASIC_BLOCK (n_basic_blocks - 1);
7611 nb->global_live_at_start
7612 = OBSTACK_ALLOC_REG_SET (function_obstack);
7613 nb->global_live_at_end
7614 = OBSTACK_ALLOC_REG_SET (function_obstack);
7616 COPY_REG_SET (nb->global_live_at_start,
7617 BASIC_BLOCK (i)->global_live_at_start);
7618 COPY_REG_SET (nb->global_live_at_end,
7619 BASIC_BLOCK (i)->global_live_at_start);
7620 if (BASIC_BLOCK (i)->local_set)
7622 OBSTACK_ALLOC_REG_SET (function_obstack);
7623 COPY_REG_SET (nb->local_set, BASIC_BLOCK (i)->local_set);
7626 BASIC_BLOCK (nb->index)->local_set = 0;
7628 nb->aux = xcalloc (1, sizeof (struct reorder_block_def));
7629 REORDER_BLOCK_INDEX (BASIC_BLOCK (n_basic_blocks - 1))
7630 = REORDER_BLOCK_INDEX (BASIC_BLOCK (i)) + 1;
7631 /* Relink to new block. */
7632 nb->succ = BASIC_BLOCK (i)->succ;
7634 make_edge (0, BASIC_BLOCK (i), nb, 0);
7635 BASIC_BLOCK (i)->succ->succ_next
7636 = BASIC_BLOCK (i)->succ->succ_next->succ_next;
7637 nb->succ->succ_next = 0;
7638 /* Fix reorder block index to reflect new block. */
7639 for (j = 0; j < n_basic_blocks - 1; j++)
7641 basic_block bbj = BASIC_BLOCK (j);
7642 basic_block bbi = BASIC_BLOCK (i);
7643 if (REORDER_BLOCK_INDEX (bbj)
7644 >= REORDER_BLOCK_INDEX (bbi) + 1)
7645 REORDER_BLOCK_INDEX (bbj)++;
7654 /* Reorder basic blocks. */
7657 reorder_basic_blocks ()
7660 struct loops loops_info;
7664 if (profile_arc_flag)
7667 if (n_basic_blocks <= 1)
7670 /* Exception edges are not currently handled. */
7671 for (i = 0; i < n_basic_blocks; i++)
7675 for (e = BASIC_BLOCK (i)->succ; e && ! (e->flags & EDGE_EH);
7679 if (e && (e->flags & EDGE_EH))
7685 /* Find natural loops using the CFG. */
7686 num_loops = flow_loops_find (&loops_info);
7688 /* Dump loop information. */
7689 flow_loops_dump (&loops_info, rtl_dump_file, 0);
7691 /* Estimate using heuristics if no profiling info is available. */
7692 if (! flag_branch_probabilities)
7693 estimate_probability (&loops_info);
7695 reorder_last_visited = BASIC_BLOCK (0);
7697 for (i = 0; i < n_basic_blocks; i++)
7699 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
7703 = NEXT_INSN (skip_insns_between_block (BASIC_BLOCK (n_basic_blocks - 1),
7704 REORDER_SKIP_AFTER));
7706 make_reorder_chain (BASIC_BLOCK (0));
7708 fixup_reorder_chain ();
7710 #ifdef ENABLE_CHECKING
7712 rtx insn, last_insn;
7713 last_insn = get_insns ();
7714 for (insn = NEXT_INSN (get_insns ());
7715 insn && PREV_INSN (insn) == last_insn
7716 && NEXT_INSN (PREV_INSN (insn)) == insn;
7718 insn = NEXT_INSN (insn))
7724 fprintf (rtl_dump_file, "insn chaining error at %d\n",
7725 INSN_UID (last_insn));
7731 /* Put basic_block_info in new order. */
7732 for (i = 0; i < n_basic_blocks - 1; i++)
7734 for (j = i; i != REORDER_BLOCK_INDEX (BASIC_BLOCK (j)); j++)
7737 if (REORDER_BLOCK_INDEX (BASIC_BLOCK (j)) == i
7742 int rbi = REORDER_BLOCK_INDEX (BASIC_BLOCK (j));
7744 temprbi = BASIC_BLOCK (rbi)->index;
7745 BASIC_BLOCK (rbi)->index = BASIC_BLOCK (j)->index;
7746 BASIC_BLOCK (j)->index = temprbi;
7747 tempbb = BASIC_BLOCK (rbi);
7748 BASIC_BLOCK (rbi) = BASIC_BLOCK (j);
7749 BASIC_BLOCK (j) = tempbb;
7753 NEXT_INSN (BASIC_BLOCK (n_basic_blocks - 1)->end) = last_insn;
7755 for (i = 0; i < n_basic_blocks - 1; i++)
7757 free (BASIC_BLOCK (i)->aux);
7760 /* Free loop information. */
7761 flow_loops_free (&loops_info);