1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation
127 #include "basic-block.h"
128 #include "insn-config.h"
130 #include "hard-reg-set.h"
133 #include "function.h"
137 #include "insn-flags.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
164 /* The contents of the current function definition are allocated
165 in this obstack, and all are freed at the end of the function.
166 For top-level functions, this is temporary_obstack.
167 Separate obstacks are made for nested functions. */
169 extern struct obstack *function_obstack;
171 /* Number of basic blocks in the current function. */
175 /* Number of edges in the current function. */
179 /* The basic block array. */
181 varray_type basic_block_info;
183 /* The special entry and exit blocks. */
185 struct basic_block_def entry_exit_blocks[2]
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
194 ENTRY_BLOCK, /* index */
196 -1, -1 /* eh_beg, eh_end */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
207 EXIT_BLOCK, /* index */
209 -1, -1 /* eh_beg, eh_end */
213 /* Nonzero if the second flow pass has completed. */
216 /* Maximum register number used in this function, plus one. */
220 /* Indexed by n, giving various register information */
222 varray_type reg_n_info;
224 /* Size of the reg_n_info table. */
226 unsigned int reg_n_max;
228 /* Element N is the next insn that uses (hard or pseudo) register number N
229 within the current basic block; or zero, if there is no such insn.
230 This is valid only during the final backward scan in propagate_block. */
232 static rtx *reg_next_use;
234 /* Size of a regset for the current function,
235 in (1) bytes and (2) elements. */
240 /* Regset of regs live when calls to `setjmp'-like functions happen. */
241 /* ??? Does this exist only for the setjmp-clobbered warning message? */
243 regset regs_live_at_setjmp;
245 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
246 that have to go in the same hard reg.
247 The first two regs in the list are a pair, and the next two
248 are another pair, etc. */
251 /* Depth within loops of basic block being scanned for lifetime analysis,
252 plus one. This is the weight attached to references to registers. */
254 static int loop_depth;
256 /* During propagate_block, this is non-zero if the value of CC0 is live. */
260 /* During propagate_block, this contains a list of all the MEMs we are
261 tracking for dead store elimination. */
263 static rtx mem_set_list;
265 /* Set of registers that may be eliminable. These are handled specially
266 in updating regs_ever_live. */
268 static HARD_REG_SET elim_reg_set;
270 /* The basic block structure for every insn, indexed by uid. */
272 varray_type basic_block_for_insn;
274 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
275 /* ??? Should probably be using LABEL_NUSES instead. It would take a
276 bit of surgery to be able to use or co-opt the routines in jump. */
278 static rtx label_value_list;
280 /* Forward declarations */
281 static int count_basic_blocks PARAMS ((rtx));
282 static rtx find_basic_blocks_1 PARAMS ((rtx));
283 static void clear_edges PARAMS ((void));
284 static void make_edges PARAMS ((rtx));
285 static void make_label_edge PARAMS ((sbitmap *, basic_block,
287 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
288 basic_block, rtx, int));
289 static void mark_critical_edges PARAMS ((void));
290 static void move_stray_eh_region_notes PARAMS ((void));
291 static void record_active_eh_regions PARAMS ((rtx));
293 static void commit_one_edge_insertion PARAMS ((edge));
295 static void delete_unreachable_blocks PARAMS ((void));
296 static void delete_eh_regions PARAMS ((void));
297 static int can_delete_note_p PARAMS ((rtx));
298 static int delete_block PARAMS ((basic_block));
299 static void expunge_block PARAMS ((basic_block));
300 static int can_delete_label_p PARAMS ((rtx));
301 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
303 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
305 static void merge_blocks_nomove PARAMS ((basic_block, basic_block));
306 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
307 static void try_merge_blocks PARAMS ((void));
308 static void tidy_fallthru_edge PARAMS ((edge,basic_block,basic_block));
309 static void tidy_fallthru_edges PARAMS ((void));
310 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
311 static void verify_wide_reg PARAMS ((int, rtx, rtx));
312 static void verify_local_live_at_start PARAMS ((regset, basic_block));
313 static int set_noop_p PARAMS ((rtx));
314 static int noop_move_p PARAMS ((rtx));
315 static void delete_noop_moves PARAMS ((rtx));
316 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
317 static void notice_stack_pointer_modification PARAMS ((rtx));
318 static void mark_reg PARAMS ((rtx, void *));
319 static void mark_regs_live_at_end PARAMS ((regset));
320 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
321 static void propagate_block PARAMS ((basic_block, regset,
323 static int insn_dead_p PARAMS ((rtx, regset, int, rtx));
324 static int libcall_dead_p PARAMS ((rtx, regset, rtx, rtx));
325 static void mark_set_regs PARAMS ((regset, regset, rtx,
327 static void mark_set_1 PARAMS ((regset, regset, rtx,
330 static void find_auto_inc PARAMS ((regset, rtx, rtx));
331 static int try_pre_increment_1 PARAMS ((rtx));
332 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
334 static void mark_used_regs PARAMS ((regset, regset, rtx, int, rtx));
335 void dump_flow_info PARAMS ((FILE *));
336 void debug_flow_info PARAMS ((void));
337 static void dump_edge_info PARAMS ((FILE *, edge, int));
339 static void count_reg_sets_1 PARAMS ((rtx));
340 static void count_reg_sets PARAMS ((rtx));
341 static void count_reg_references PARAMS ((rtx));
342 static void invalidate_mems_from_autoinc PARAMS ((rtx));
343 static void remove_fake_successors PARAMS ((basic_block));
344 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
345 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
346 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
347 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
348 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
349 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
350 static int flow_depth_first_order_compute PARAMS ((int *));
351 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
352 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
353 static void flow_loops_tree_build PARAMS ((struct loops *));
354 static int flow_loop_level_compute PARAMS ((struct loop *, int));
355 static int flow_loops_level_compute PARAMS ((struct loops *));
357 /* This function is always defined so it can be called from the
358 debugger, and it is declared extern so we don't get warnings about
360 void verify_flow_info PARAMS ((void));
361 int flow_loop_outside_edge_p PARAMS ((const struct loop *, edge));
362 void clear_log_links PARAMS ((rtx));
364 /* Find basic blocks of the current function.
365 F is the first insn of the function and NREGS the number of register
369 find_basic_blocks (f, nregs, file)
371 int nregs ATTRIBUTE_UNUSED;
372 FILE *file ATTRIBUTE_UNUSED;
376 /* Flush out existing data. */
377 if (basic_block_info != NULL)
383 /* Clear bb->aux on all extant basic blocks. We'll use this as a
384 tag for reuse during create_basic_block, just in case some pass
385 copies around basic block notes improperly. */
386 for (i = 0; i < n_basic_blocks; ++i)
387 BASIC_BLOCK (i)->aux = NULL;
389 VARRAY_FREE (basic_block_info);
392 n_basic_blocks = count_basic_blocks (f);
394 /* Size the basic block table. The actual structures will be allocated
395 by find_basic_blocks_1, since we want to keep the structure pointers
396 stable across calls to find_basic_blocks. */
397 /* ??? This whole issue would be much simpler if we called find_basic_blocks
398 exactly once, and thereafter we don't have a single long chain of
399 instructions at all until close to the end of compilation when we
400 actually lay them out. */
402 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
404 label_value_list = find_basic_blocks_1 (f);
406 /* Record the block to which an insn belongs. */
407 /* ??? This should be done another way, by which (perhaps) a label is
408 tagged directly with the basic block that it starts. It is used for
409 more than that currently, but IMO that is the only valid use. */
411 max_uid = get_max_uid ();
413 /* Leave space for insns life_analysis makes in some cases for auto-inc.
414 These cases are rare, so we don't need too much space. */
415 max_uid += max_uid / 10;
418 compute_bb_for_insn (max_uid);
420 /* Discover the edges of our cfg. */
421 record_active_eh_regions (f);
422 make_edges (label_value_list);
424 /* Do very simple cleanup now, for the benefit of code that runs between
425 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
426 tidy_fallthru_edges ();
428 mark_critical_edges ();
430 #ifdef ENABLE_CHECKING
435 /* Count the basic blocks of the function. */
438 count_basic_blocks (f)
442 register RTX_CODE prev_code;
443 register int count = 0;
445 int call_had_abnormal_edge = 0;
446 rtx prev_call = NULL_RTX;
448 prev_code = JUMP_INSN;
449 for (insn = f; insn; insn = NEXT_INSN (insn))
451 register RTX_CODE code = GET_CODE (insn);
453 if (code == CODE_LABEL
454 || (GET_RTX_CLASS (code) == 'i'
455 && (prev_code == JUMP_INSN
456 || prev_code == BARRIER
457 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
462 /* Record whether this call created an edge. */
463 if (code == CALL_INSN)
465 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
466 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
468 call_had_abnormal_edge = 0;
470 /* If there is an EH region or rethrow, we have an edge. */
471 if ((eh_region && region > 0)
472 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
473 call_had_abnormal_edge = 1;
476 /* If there is a nonlocal goto label and the specified
477 region number isn't -1, we have an edge. (0 means
478 no throw, but might have a nonlocal goto). */
479 if (nonlocal_goto_handler_labels && region >= 0)
480 call_had_abnormal_edge = 1;
483 else if (code != NOTE)
484 prev_call = NULL_RTX;
488 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
490 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
495 /* The rest of the compiler works a bit smoother when we don't have to
496 check for the edge case of do-nothing functions with no basic blocks. */
499 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
506 /* Find all basic blocks of the function whose first insn is F.
508 Collect and return a list of labels whose addresses are taken. This
509 will be used in make_edges for use with computed gotos. */
512 find_basic_blocks_1 (f)
515 register rtx insn, next;
516 int call_has_abnormal_edge = 0;
518 rtx bb_note = NULL_RTX;
519 rtx eh_list = NULL_RTX;
520 rtx label_value_list = NULL_RTX;
524 /* We process the instructions in a slightly different way than we did
525 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
526 closed out the previous block, so that it gets attached at the proper
527 place. Since this form should be equivalent to the previous,
528 count_basic_blocks continues to use the old form as a check. */
530 for (insn = f; insn; insn = next)
532 enum rtx_code code = GET_CODE (insn);
534 next = NEXT_INSN (insn);
536 if (code == CALL_INSN)
538 /* Record whether this call created an edge. */
539 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
540 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
541 call_has_abnormal_edge = 0;
543 /* If there is an EH region or rethrow, we have an edge. */
544 if ((eh_list && region > 0)
545 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
546 call_has_abnormal_edge = 1;
549 /* If there is a nonlocal goto label and the specified
550 region number isn't -1, we have an edge. (0 means
551 no throw, but might have a nonlocal goto). */
552 if (nonlocal_goto_handler_labels && region >= 0)
553 call_has_abnormal_edge = 1;
561 int kind = NOTE_LINE_NUMBER (insn);
563 /* Keep a LIFO list of the currently active exception notes. */
564 if (kind == NOTE_INSN_EH_REGION_BEG)
565 eh_list = alloc_INSN_LIST (insn, eh_list);
566 else if (kind == NOTE_INSN_EH_REGION_END)
569 eh_list = XEXP (eh_list, 1);
570 free_INSN_LIST_node (t);
573 /* Look for basic block notes with which to keep the
574 basic_block_info pointers stable. Unthread the note now;
575 we'll put it back at the right place in create_basic_block.
576 Or not at all if we've already found a note in this block. */
577 else if (kind == NOTE_INSN_BASIC_BLOCK)
579 if (bb_note == NULL_RTX)
581 next = flow_delete_insn (insn);
588 /* A basic block starts at a label. If we've closed one off due
589 to a barrier or some such, no need to do it again. */
590 if (head != NULL_RTX)
592 /* While we now have edge lists with which other portions of
593 the compiler might determine a call ending a basic block
594 does not imply an abnormal edge, it will be a bit before
595 everything can be updated. So continue to emit a noop at
596 the end of such a block. */
597 if (GET_CODE (end) == CALL_INSN
598 && ! SIBLING_CALL_P (end))
600 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
601 end = emit_insn_after (nop, end);
604 create_basic_block (i++, head, end, bb_note);
611 /* A basic block ends at a jump. */
612 if (head == NULL_RTX)
616 /* ??? Make a special check for table jumps. The way this
617 happens is truly and amazingly gross. We are about to
618 create a basic block that contains just a code label and
619 an addr*vec jump insn. Worse, an addr_diff_vec creates
620 its own natural loop.
622 Prevent this bit of brain damage, pasting things together
623 correctly in make_edges.
625 The correct solution involves emitting the table directly
626 on the tablejump instruction as a note, or JUMP_LABEL. */
628 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
629 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
637 goto new_bb_inclusive;
640 /* A basic block ends at a barrier. It may be that an unconditional
641 jump already closed the basic block -- no need to do it again. */
642 if (head == NULL_RTX)
645 /* While we now have edge lists with which other portions of the
646 compiler might determine a call ending a basic block does not
647 imply an abnormal edge, it will be a bit before everything can
648 be updated. So continue to emit a noop at the end of such a
650 if (GET_CODE (end) == CALL_INSN
651 && ! SIBLING_CALL_P (end))
653 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
654 end = emit_insn_after (nop, end);
656 goto new_bb_exclusive;
659 /* A basic block ends at a call that can either throw or
660 do a non-local goto. */
661 if (call_has_abnormal_edge)
664 if (head == NULL_RTX)
669 create_basic_block (i++, head, end, bb_note);
670 head = end = NULL_RTX;
677 if (GET_RTX_CLASS (code) == 'i')
679 if (head == NULL_RTX)
686 if (GET_RTX_CLASS (code) == 'i')
690 /* Make a list of all labels referred to other than by jumps
691 (which just don't have the REG_LABEL notes).
693 Make a special exception for labels followed by an ADDR*VEC,
694 as this would be a part of the tablejump setup code.
696 Make a special exception for the eh_return_stub_label, which
697 we know isn't part of any otherwise visible control flow. */
699 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
700 if (REG_NOTE_KIND (note) == REG_LABEL)
702 rtx lab = XEXP (note, 0), next;
704 if (lab == eh_return_stub_label)
706 else if ((next = next_nonnote_insn (lab)) != NULL
707 && GET_CODE (next) == JUMP_INSN
708 && (GET_CODE (PATTERN (next)) == ADDR_VEC
709 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
713 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
718 if (head != NULL_RTX)
719 create_basic_block (i++, head, end, bb_note);
721 if (i != n_basic_blocks)
724 return label_value_list;
727 /* Tidy the CFG by deleting unreachable code and whatnot. */
733 delete_unreachable_blocks ();
734 move_stray_eh_region_notes ();
735 record_active_eh_regions (f);
737 mark_critical_edges ();
739 /* Kill the data we won't maintain. */
740 label_value_list = NULL_RTX;
743 /* Create a new basic block consisting of the instructions between
744 HEAD and END inclusive. Reuses the note and basic block struct
745 in BB_NOTE, if any. */
748 create_basic_block (index, head, end, bb_note)
750 rtx head, end, bb_note;
755 && ! RTX_INTEGRATED_P (bb_note)
756 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
759 /* If we found an existing note, thread it back onto the chain. */
761 if (GET_CODE (head) == CODE_LABEL)
762 add_insn_after (bb_note, head);
765 add_insn_before (bb_note, head);
771 /* Otherwise we must create a note and a basic block structure.
772 Since we allow basic block structs in rtl, give the struct
773 the same lifetime by allocating it off the function obstack
774 rather than using malloc. */
776 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
777 memset (bb, 0, sizeof (*bb));
779 if (GET_CODE (head) == CODE_LABEL)
780 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
783 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
786 NOTE_BASIC_BLOCK (bb_note) = bb;
789 /* Always include the bb note in the block. */
790 if (NEXT_INSN (end) == bb_note)
796 BASIC_BLOCK (index) = bb;
798 /* Tag the block so that we know it has been used when considering
799 other basic block notes. */
803 /* Records the basic block struct in BB_FOR_INSN, for every instruction
804 indexed by INSN_UID. MAX is the size of the array. */
807 compute_bb_for_insn (max)
812 if (basic_block_for_insn)
813 VARRAY_FREE (basic_block_for_insn);
814 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
816 for (i = 0; i < n_basic_blocks; ++i)
818 basic_block bb = BASIC_BLOCK (i);
825 int uid = INSN_UID (insn);
827 VARRAY_BB (basic_block_for_insn, uid) = bb;
830 insn = NEXT_INSN (insn);
835 /* Free the memory associated with the edge structures. */
843 for (i = 0; i < n_basic_blocks; ++i)
845 basic_block bb = BASIC_BLOCK (i);
847 for (e = bb->succ; e ; e = n)
857 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
863 ENTRY_BLOCK_PTR->succ = 0;
864 EXIT_BLOCK_PTR->pred = 0;
869 /* Identify the edges between basic blocks.
871 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
872 that are otherwise unreachable may be reachable with a non-local goto.
874 BB_EH_END is an array indexed by basic block number in which we record
875 the list of exception regions active at the end of the basic block. */
878 make_edges (label_value_list)
879 rtx label_value_list;
882 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
883 sbitmap *edge_cache = NULL;
885 /* Assume no computed jump; revise as we create edges. */
886 current_function_has_computed_jump = 0;
888 /* Heavy use of computed goto in machine-generated code can lead to
889 nearly fully-connected CFGs. In that case we spend a significant
890 amount of time searching the edge lists for duplicates. */
891 if (forced_labels || label_value_list)
893 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
894 sbitmap_vector_zero (edge_cache, n_basic_blocks);
897 /* By nature of the way these get numbered, block 0 is always the entry. */
898 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
900 for (i = 0; i < n_basic_blocks; ++i)
902 basic_block bb = BASIC_BLOCK (i);
905 int force_fallthru = 0;
907 /* Examine the last instruction of the block, and discover the
908 ways we can leave the block. */
911 code = GET_CODE (insn);
914 if (code == JUMP_INSN)
918 /* ??? Recognize a tablejump and do the right thing. */
919 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
920 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
921 && GET_CODE (tmp) == JUMP_INSN
922 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
923 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
928 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
929 vec = XVEC (PATTERN (tmp), 0);
931 vec = XVEC (PATTERN (tmp), 1);
933 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
934 make_label_edge (edge_cache, bb,
935 XEXP (RTVEC_ELT (vec, j), 0), 0);
937 /* Some targets (eg, ARM) emit a conditional jump that also
938 contains the out-of-range target. Scan for these and
939 add an edge if necessary. */
940 if ((tmp = single_set (insn)) != NULL
941 && SET_DEST (tmp) == pc_rtx
942 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
943 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
944 make_label_edge (edge_cache, bb,
945 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
947 #ifdef CASE_DROPS_THROUGH
948 /* Silly VAXen. The ADDR_VEC is going to be in the way of
949 us naturally detecting fallthru into the next block. */
954 /* If this is a computed jump, then mark it as reaching
955 everything on the label_value_list and forced_labels list. */
956 else if (computed_jump_p (insn))
958 current_function_has_computed_jump = 1;
960 for (x = label_value_list; x; x = XEXP (x, 1))
961 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
963 for (x = forced_labels; x; x = XEXP (x, 1))
964 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
967 /* Returns create an exit out. */
968 else if (returnjump_p (insn))
969 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
971 /* Otherwise, we have a plain conditional or unconditional jump. */
974 if (! JUMP_LABEL (insn))
976 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
980 /* If this is a sibling call insn, then this is in effect a
981 combined call and return, and so we need an edge to the
982 exit block. No need to worry about EH edges, since we
983 wouldn't have created the sibling call in the first place. */
985 if (code == CALL_INSN && SIBLING_CALL_P (insn))
986 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
989 /* If this is a CALL_INSN, then mark it as reaching the active EH
990 handler for this CALL_INSN. If we're handling asynchronous
991 exceptions then any insn can reach any of the active handlers.
993 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
995 if (code == CALL_INSN || asynchronous_exceptions)
997 /* Add any appropriate EH edges. We do this unconditionally
998 since there may be a REG_EH_REGION or REG_EH_RETHROW note
999 on the call, and this needn't be within an EH region. */
1000 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1002 /* If we have asynchronous exceptions, do the same for *all*
1003 exception regions active in the block. */
1004 if (asynchronous_exceptions
1005 && bb->eh_beg != bb->eh_end)
1007 if (bb->eh_beg >= 0)
1008 make_eh_edge (edge_cache, eh_nest_info, bb,
1009 NULL_RTX, bb->eh_beg);
1011 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1012 if (GET_CODE (x) == NOTE
1013 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1014 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1016 int region = NOTE_EH_HANDLER (x);
1017 make_eh_edge (edge_cache, eh_nest_info, bb,
1022 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1024 /* ??? This could be made smarter: in some cases it's possible
1025 to tell that certain calls will not do a nonlocal goto.
1027 For example, if the nested functions that do the nonlocal
1028 gotos do not have their addresses taken, then only calls to
1029 those functions or to other nested functions that use them
1030 could possibly do nonlocal gotos. */
1031 /* We do know that a REG_EH_REGION note with a value less
1032 than 0 is guaranteed not to perform a non-local goto. */
1033 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1034 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1035 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1036 make_label_edge (edge_cache, bb, XEXP (x, 0),
1037 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1041 /* We know something about the structure of the function __throw in
1042 libgcc2.c. It is the only function that ever contains eh_stub
1043 labels. It modifies its return address so that the last block
1044 returns to one of the eh_stub labels within it. So we have to
1045 make additional edges in the flow graph. */
1046 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1047 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1049 /* Find out if we can drop through to the next block. */
1050 insn = next_nonnote_insn (insn);
1051 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1052 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1053 else if (i + 1 < n_basic_blocks)
1055 rtx tmp = BLOCK_HEAD (i + 1);
1056 if (GET_CODE (tmp) == NOTE)
1057 tmp = next_nonnote_insn (tmp);
1058 if (force_fallthru || insn == tmp)
1059 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1063 free_eh_nesting_info (eh_nest_info);
1065 sbitmap_vector_free (edge_cache);
1068 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1069 about the edge that is accumulated between calls. */
1072 make_edge (edge_cache, src, dst, flags)
1073 sbitmap *edge_cache;
1074 basic_block src, dst;
1080 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1081 many edges to them, and we didn't allocate memory for it. */
1082 use_edge_cache = (edge_cache
1083 && src != ENTRY_BLOCK_PTR
1084 && dst != EXIT_BLOCK_PTR);
1086 /* Make sure we don't add duplicate edges. */
1087 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1088 for (e = src->succ; e ; e = e->succ_next)
1095 e = (edge) xcalloc (1, sizeof (*e));
1098 e->succ_next = src->succ;
1099 e->pred_next = dst->pred;
1108 SET_BIT (edge_cache[src->index], dst->index);
1111 /* Create an edge from a basic block to a label. */
1114 make_label_edge (edge_cache, src, label, flags)
1115 sbitmap *edge_cache;
1120 if (GET_CODE (label) != CODE_LABEL)
1123 /* If the label was never emitted, this insn is junk, but avoid a
1124 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1125 as a result of a syntax error and a diagnostic has already been
1128 if (INSN_UID (label) == 0)
1131 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1134 /* Create the edges generated by INSN in REGION. */
1137 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1138 sbitmap *edge_cache;
1139 eh_nesting_info *eh_nest_info;
1144 handler_info **handler_list;
1147 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1148 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1151 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1152 EDGE_ABNORMAL | EDGE_EH | is_call);
1156 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1157 dangerous if we intend to move basic blocks around. Move such notes
1158 into the following block. */
1161 move_stray_eh_region_notes ()
1166 if (n_basic_blocks < 2)
1169 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1170 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1172 rtx insn, next, list = NULL_RTX;
1174 b1 = BASIC_BLOCK (i);
1175 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1177 next = NEXT_INSN (insn);
1178 if (GET_CODE (insn) == NOTE
1179 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1180 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1182 /* Unlink from the insn chain. */
1183 NEXT_INSN (PREV_INSN (insn)) = next;
1184 PREV_INSN (next) = PREV_INSN (insn);
1187 NEXT_INSN (insn) = list;
1192 if (list == NULL_RTX)
1195 /* Find where to insert these things. */
1197 if (GET_CODE (insn) == CODE_LABEL)
1198 insn = NEXT_INSN (insn);
1202 next = NEXT_INSN (list);
1203 add_insn_after (list, insn);
1209 /* Recompute eh_beg/eh_end for each basic block. */
1212 record_active_eh_regions (f)
1215 rtx insn, eh_list = NULL_RTX;
1217 basic_block bb = BASIC_BLOCK (0);
1219 for (insn = f; insn ; insn = NEXT_INSN (insn))
1221 if (bb->head == insn)
1222 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1224 if (GET_CODE (insn) == NOTE)
1226 int kind = NOTE_LINE_NUMBER (insn);
1227 if (kind == NOTE_INSN_EH_REGION_BEG)
1228 eh_list = alloc_INSN_LIST (insn, eh_list);
1229 else if (kind == NOTE_INSN_EH_REGION_END)
1231 rtx t = XEXP (eh_list, 1);
1232 free_INSN_LIST_node (eh_list);
1237 if (bb->end == insn)
1239 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1241 if (i == n_basic_blocks)
1243 bb = BASIC_BLOCK (i);
1248 /* Identify critical edges and set the bits appropriately. */
1251 mark_critical_edges ()
1253 int i, n = n_basic_blocks;
1256 /* We begin with the entry block. This is not terribly important now,
1257 but could be if a front end (Fortran) implemented alternate entry
1259 bb = ENTRY_BLOCK_PTR;
1266 /* (1) Critical edges must have a source with multiple successors. */
1267 if (bb->succ && bb->succ->succ_next)
1269 for (e = bb->succ; e ; e = e->succ_next)
1271 /* (2) Critical edges must have a destination with multiple
1272 predecessors. Note that we know there is at least one
1273 predecessor -- the edge we followed to get here. */
1274 if (e->dest->pred->pred_next)
1275 e->flags |= EDGE_CRITICAL;
1277 e->flags &= ~EDGE_CRITICAL;
1282 for (e = bb->succ; e ; e = e->succ_next)
1283 e->flags &= ~EDGE_CRITICAL;
1288 bb = BASIC_BLOCK (i);
1292 /* Split a (typically critical) edge. Return the new block.
1293 Abort on abnormal edges.
1295 ??? The code generally expects to be called on critical edges.
1296 The case of a block ending in an unconditional jump to a
1297 block with multiple predecessors is not handled optimally. */
1300 split_edge (edge_in)
1303 basic_block old_pred, bb, old_succ;
1308 /* Abnormal edges cannot be split. */
1309 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1312 old_pred = edge_in->src;
1313 old_succ = edge_in->dest;
1315 /* Remove the existing edge from the destination's pred list. */
1318 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1320 *pp = edge_in->pred_next;
1321 edge_in->pred_next = NULL;
1324 /* Create the new structures. */
1325 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1326 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1329 memset (bb, 0, sizeof (*bb));
1330 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1331 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1333 /* ??? This info is likely going to be out of date very soon. */
1334 if (old_succ->global_live_at_start)
1336 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1337 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1341 CLEAR_REG_SET (bb->global_live_at_start);
1342 CLEAR_REG_SET (bb->global_live_at_end);
1347 bb->succ = edge_out;
1350 edge_in->flags &= ~EDGE_CRITICAL;
1352 edge_out->pred_next = old_succ->pred;
1353 edge_out->succ_next = NULL;
1355 edge_out->dest = old_succ;
1356 edge_out->flags = EDGE_FALLTHRU;
1357 edge_out->probability = REG_BR_PROB_BASE;
1359 old_succ->pred = edge_out;
1361 /* Tricky case -- if there existed a fallthru into the successor
1362 (and we're not it) we must add a new unconditional jump around
1363 the new block we're actually interested in.
1365 Further, if that edge is critical, this means a second new basic
1366 block must be created to hold it. In order to simplify correct
1367 insn placement, do this before we touch the existing basic block
1368 ordering for the block we were really wanting. */
1369 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1372 for (e = edge_out->pred_next; e ; e = e->pred_next)
1373 if (e->flags & EDGE_FALLTHRU)
1378 basic_block jump_block;
1381 if ((e->flags & EDGE_CRITICAL) == 0
1382 && e->src != ENTRY_BLOCK_PTR)
1384 /* Non critical -- we can simply add a jump to the end
1385 of the existing predecessor. */
1386 jump_block = e->src;
1390 /* We need a new block to hold the jump. The simplest
1391 way to do the bulk of the work here is to recursively
1393 jump_block = split_edge (e);
1394 e = jump_block->succ;
1397 /* Now add the jump insn ... */
1398 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1400 jump_block->end = pos;
1401 if (basic_block_for_insn)
1402 set_block_for_insn (pos, jump_block);
1403 emit_barrier_after (pos);
1405 /* ... let jump know that label is in use, ... */
1406 JUMP_LABEL (pos) = old_succ->head;
1407 ++LABEL_NUSES (old_succ->head);
1409 /* ... and clear fallthru on the outgoing edge. */
1410 e->flags &= ~EDGE_FALLTHRU;
1412 /* Continue splitting the interesting edge. */
1416 /* Place the new block just in front of the successor. */
1417 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1418 if (old_succ == EXIT_BLOCK_PTR)
1419 j = n_basic_blocks - 1;
1421 j = old_succ->index;
1422 for (i = n_basic_blocks - 1; i > j; --i)
1424 basic_block tmp = BASIC_BLOCK (i - 1);
1425 BASIC_BLOCK (i) = tmp;
1428 BASIC_BLOCK (i) = bb;
1431 /* Create the basic block note.
1433 Where we place the note can have a noticable impact on the generated
1434 code. Consider this cfg:
1445 If we need to insert an insn on the edge from block 0 to block 1,
1446 we want to ensure the instructions we insert are outside of any
1447 loop notes that physically sit between block 0 and block 1. Otherwise
1448 we confuse the loop optimizer into thinking the loop is a phony. */
1449 if (old_succ != EXIT_BLOCK_PTR
1450 && PREV_INSN (old_succ->head)
1451 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1452 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1453 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1454 PREV_INSN (old_succ->head));
1455 else if (old_succ != EXIT_BLOCK_PTR)
1456 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1458 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1459 NOTE_BASIC_BLOCK (bb_note) = bb;
1460 bb->head = bb->end = bb_note;
1462 /* Not quite simple -- for non-fallthru edges, we must adjust the
1463 predecessor's jump instruction to target our new block. */
1464 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1466 rtx tmp, insn = old_pred->end;
1467 rtx old_label = old_succ->head;
1468 rtx new_label = gen_label_rtx ();
1470 if (GET_CODE (insn) != JUMP_INSN)
1473 /* ??? Recognize a tablejump and adjust all matching cases. */
1474 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1475 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1476 && GET_CODE (tmp) == JUMP_INSN
1477 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1478 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1483 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1484 vec = XVEC (PATTERN (tmp), 0);
1486 vec = XVEC (PATTERN (tmp), 1);
1488 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1489 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1491 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1492 --LABEL_NUSES (old_label);
1493 ++LABEL_NUSES (new_label);
1496 /* Handle casesi dispatch insns */
1497 if ((tmp = single_set (insn)) != NULL
1498 && SET_DEST (tmp) == pc_rtx
1499 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1500 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1501 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1503 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1505 --LABEL_NUSES (old_label);
1506 ++LABEL_NUSES (new_label);
1511 /* This would have indicated an abnormal edge. */
1512 if (computed_jump_p (insn))
1515 /* A return instruction can't be redirected. */
1516 if (returnjump_p (insn))
1519 /* If the insn doesn't go where we think, we're confused. */
1520 if (JUMP_LABEL (insn) != old_label)
1523 redirect_jump (insn, new_label);
1526 emit_label_before (new_label, bb_note);
1527 bb->head = new_label;
1533 /* Queue instructions for insertion on an edge between two basic blocks.
1534 The new instructions and basic blocks (if any) will not appear in the
1535 CFG until commit_edge_insertions is called. */
1538 insert_insn_on_edge (pattern, e)
1542 /* We cannot insert instructions on an abnormal critical edge.
1543 It will be easier to find the culprit if we die now. */
1544 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1545 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1548 if (e->insns == NULL_RTX)
1551 push_to_sequence (e->insns);
1553 emit_insn (pattern);
1555 e->insns = get_insns ();
1559 /* Update the CFG for the instructions queued on edge E. */
1562 commit_one_edge_insertion (e)
1565 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1568 /* Pull the insns off the edge now since the edge might go away. */
1570 e->insns = NULL_RTX;
1572 /* Figure out where to put these things. If the destination has
1573 one predecessor, insert there. Except for the exit block. */
1574 if (e->dest->pred->pred_next == NULL
1575 && e->dest != EXIT_BLOCK_PTR)
1579 /* Get the location correct wrt a code label, and "nice" wrt
1580 a basic block note, and before everything else. */
1582 if (GET_CODE (tmp) == CODE_LABEL)
1583 tmp = NEXT_INSN (tmp);
1584 if (GET_CODE (tmp) == NOTE
1585 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1586 tmp = NEXT_INSN (tmp);
1587 if (tmp == bb->head)
1590 after = PREV_INSN (tmp);
1593 /* If the source has one successor and the edge is not abnormal,
1594 insert there. Except for the entry block. */
1595 else if ((e->flags & EDGE_ABNORMAL) == 0
1596 && e->src->succ->succ_next == NULL
1597 && e->src != ENTRY_BLOCK_PTR)
1600 /* It is possible to have a non-simple jump here. Consider a target
1601 where some forms of unconditional jumps clobber a register. This
1602 happens on the fr30 for example.
1604 We know this block has a single successor, so we can just emit
1605 the queued insns before the jump. */
1606 if (GET_CODE (bb->end) == JUMP_INSN)
1612 /* We'd better be fallthru, or we've lost track of what's what. */
1613 if ((e->flags & EDGE_FALLTHRU) == 0)
1620 /* Otherwise we must split the edge. */
1623 bb = split_edge (e);
1627 /* Now that we've found the spot, do the insertion. */
1629 /* Set the new block number for these insns, if structure is allocated. */
1630 if (basic_block_for_insn)
1633 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1634 set_block_for_insn (i, bb);
1639 emit_insns_before (insns, before);
1640 if (before == bb->head)
1645 rtx last = emit_insns_after (insns, after);
1646 if (after == bb->end)
1650 if (GET_CODE (last) == JUMP_INSN)
1652 if (returnjump_p (last))
1654 /* ??? Remove all outgoing edges from BB and add one
1655 for EXIT. This is not currently a problem because
1656 this only happens for the (single) epilogue, which
1657 already has a fallthru edge to EXIT. */
1660 if (e->dest != EXIT_BLOCK_PTR
1661 || e->succ_next != NULL
1662 || (e->flags & EDGE_FALLTHRU) == 0)
1664 e->flags &= ~EDGE_FALLTHRU;
1666 emit_barrier_after (last);
1675 /* Update the CFG for all queued instructions. */
1678 commit_edge_insertions ()
1683 #ifdef ENABLE_CHECKING
1684 verify_flow_info ();
1688 bb = ENTRY_BLOCK_PTR;
1693 for (e = bb->succ; e ; e = next)
1695 next = e->succ_next;
1697 commit_one_edge_insertion (e);
1700 if (++i >= n_basic_blocks)
1702 bb = BASIC_BLOCK (i);
1706 /* Delete all unreachable basic blocks. */
1709 delete_unreachable_blocks ()
1711 basic_block *worklist, *tos;
1712 int deleted_handler;
1717 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1719 /* Use basic_block->aux as a marker. Clear them all. */
1721 for (i = 0; i < n; ++i)
1722 BASIC_BLOCK (i)->aux = NULL;
1724 /* Add our starting points to the worklist. Almost always there will
1725 be only one. It isn't inconcievable that we might one day directly
1726 support Fortran alternate entry points. */
1728 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1732 /* Mark the block with a handy non-null value. */
1736 /* Iterate: find everything reachable from what we've already seen. */
1738 while (tos != worklist)
1740 basic_block b = *--tos;
1742 for (e = b->succ; e ; e = e->succ_next)
1750 /* Delete all unreachable basic blocks. Count down so that we don't
1751 interfere with the block renumbering that happens in delete_block. */
1753 deleted_handler = 0;
1755 for (i = n - 1; i >= 0; --i)
1757 basic_block b = BASIC_BLOCK (i);
1760 /* This block was found. Tidy up the mark. */
1763 deleted_handler |= delete_block (b);
1766 tidy_fallthru_edges ();
1768 /* If we deleted an exception handler, we may have EH region begin/end
1769 blocks to remove as well. */
1770 if (deleted_handler)
1771 delete_eh_regions ();
1776 /* Find EH regions for which there is no longer a handler, and delete them. */
1779 delete_eh_regions ()
1783 update_rethrow_references ();
1785 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1786 if (GET_CODE (insn) == NOTE)
1788 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1789 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1791 int num = NOTE_EH_HANDLER (insn);
1792 /* A NULL handler indicates a region is no longer needed,
1793 as long as its rethrow label isn't used. */
1794 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1796 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1797 NOTE_SOURCE_FILE (insn) = 0;
1803 /* Return true if NOTE is not one of the ones that must be kept paired,
1804 so that we may simply delete them. */
1807 can_delete_note_p (note)
1810 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1811 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1814 /* Unlink a chain of insns between START and FINISH, leaving notes
1815 that must be paired. */
1818 flow_delete_insn_chain (start, finish)
1821 /* Unchain the insns one by one. It would be quicker to delete all
1822 of these with a single unchaining, rather than one at a time, but
1823 we need to keep the NOTE's. */
1829 next = NEXT_INSN (start);
1830 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1832 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1835 next = flow_delete_insn (start);
1837 if (start == finish)
1843 /* Delete the insns in a (non-live) block. We physically delete every
1844 non-deleted-note insn, and update the flow graph appropriately.
1846 Return nonzero if we deleted an exception handler. */
1848 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1849 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1855 int deleted_handler = 0;
1858 /* If the head of this block is a CODE_LABEL, then it might be the
1859 label for an exception handler which can't be reached.
1861 We need to remove the label from the exception_handler_label list
1862 and remove the associated NOTE_INSN_EH_REGION_BEG and
1863 NOTE_INSN_EH_REGION_END notes. */
1867 never_reached_warning (insn);
1869 if (GET_CODE (insn) == CODE_LABEL)
1871 rtx x, *prev = &exception_handler_labels;
1873 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1875 if (XEXP (x, 0) == insn)
1877 /* Found a match, splice this label out of the EH label list. */
1878 *prev = XEXP (x, 1);
1879 XEXP (x, 1) = NULL_RTX;
1880 XEXP (x, 0) = NULL_RTX;
1882 /* Remove the handler from all regions */
1883 remove_handler (insn);
1884 deleted_handler = 1;
1887 prev = &XEXP (x, 1);
1890 /* This label may be referenced by code solely for its value, or
1891 referenced by static data, or something. We have determined
1892 that it is not reachable, but cannot delete the label itself.
1893 Save code space and continue to delete the balance of the block,
1894 along with properly updating the cfg. */
1895 if (!can_delete_label_p (insn))
1897 /* If we've only got one of these, skip the whole deleting
1900 goto no_delete_insns;
1901 insn = NEXT_INSN (insn);
1905 /* Include any jump table following the basic block. */
1907 if (GET_CODE (end) == JUMP_INSN
1908 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1909 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1910 && GET_CODE (tmp) == JUMP_INSN
1911 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1912 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1915 /* Include any barrier that may follow the basic block. */
1916 tmp = next_nonnote_insn (b->end);
1917 if (tmp && GET_CODE (tmp) == BARRIER)
1920 /* Selectively delete the entire chain. */
1921 flow_delete_insn_chain (insn, end);
1925 /* Remove the edges into and out of this block. Note that there may
1926 indeed be edges in, if we are removing an unreachable loop. */
1930 for (e = b->pred; e ; e = next)
1932 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1935 next = e->pred_next;
1939 for (e = b->succ; e ; e = next)
1941 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1944 next = e->succ_next;
1953 /* Remove the basic block from the array, and compact behind it. */
1956 return deleted_handler;
1959 /* Remove block B from the basic block array and compact behind it. */
1965 int i, n = n_basic_blocks;
1967 for (i = b->index; i + 1 < n; ++i)
1969 basic_block x = BASIC_BLOCK (i + 1);
1970 BASIC_BLOCK (i) = x;
1974 basic_block_info->num_elements--;
1978 /* Delete INSN by patching it out. Return the next insn. */
1981 flow_delete_insn (insn)
1984 rtx prev = PREV_INSN (insn);
1985 rtx next = NEXT_INSN (insn);
1988 PREV_INSN (insn) = NULL_RTX;
1989 NEXT_INSN (insn) = NULL_RTX;
1992 NEXT_INSN (prev) = next;
1994 PREV_INSN (next) = prev;
1996 set_last_insn (prev);
1998 if (GET_CODE (insn) == CODE_LABEL)
1999 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2001 /* If deleting a jump, decrement the use count of the label. Deleting
2002 the label itself should happen in the normal course of block merging. */
2003 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2004 LABEL_NUSES (JUMP_LABEL (insn))--;
2006 /* Also if deleting an insn that references a label. */
2007 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2008 LABEL_NUSES (XEXP (note, 0))--;
2013 /* True if a given label can be deleted. */
2016 can_delete_label_p (label)
2021 if (LABEL_PRESERVE_P (label))
2024 for (x = forced_labels; x ; x = XEXP (x, 1))
2025 if (label == XEXP (x, 0))
2027 for (x = label_value_list; x ; x = XEXP (x, 1))
2028 if (label == XEXP (x, 0))
2030 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2031 if (label == XEXP (x, 0))
2034 /* User declared labels must be preserved. */
2035 if (LABEL_NAME (label) != 0)
2041 /* Blocks A and B are to be merged into a single block. A has no incoming
2042 fallthru edge, so it can be moved before B without adding or modifying
2043 any jumps (aside from the jump from A to B). */
2046 merge_blocks_move_predecessor_nojumps (a, b)
2049 rtx start, end, barrier;
2055 /* We want to delete the BARRIER after the end of the insns we are
2056 going to move. If we don't find a BARRIER, then do nothing. This
2057 can happen in some cases if we have labels we can not delete.
2059 Similarly, do nothing if we can not delete the label at the start
2060 of the target block. */
2061 barrier = next_nonnote_insn (end);
2062 if (GET_CODE (barrier) != BARRIER
2063 || (GET_CODE (b->head) == CODE_LABEL
2064 && ! can_delete_label_p (b->head)))
2067 flow_delete_insn (barrier);
2069 /* Move block and loop notes out of the chain so that we do not
2070 disturb their order.
2072 ??? A better solution would be to squeeze out all the non-nested notes
2073 and adjust the block trees appropriately. Even better would be to have
2074 a tighter connection between block trees and rtl so that this is not
2076 start = squeeze_notes (start, end);
2078 /* Scramble the insn chain. */
2079 if (end != PREV_INSN (b->head))
2080 reorder_insns (start, end, PREV_INSN (b->head));
2084 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2085 a->index, b->index);
2088 /* Swap the records for the two blocks around. Although we are deleting B,
2089 A is now where B was and we want to compact the BB array from where
2091 BASIC_BLOCK(a->index) = b;
2092 BASIC_BLOCK(b->index) = a;
2094 a->index = b->index;
2097 /* Now blocks A and B are contiguous. Merge them. */
2098 merge_blocks_nomove (a, b);
2103 /* Blocks A and B are to be merged into a single block. B has no outgoing
2104 fallthru edge, so it can be moved after A without adding or modifying
2105 any jumps (aside from the jump from A to B). */
2108 merge_blocks_move_successor_nojumps (a, b)
2111 rtx start, end, barrier;
2116 /* We want to delete the BARRIER after the end of the insns we are
2117 going to move. If we don't find a BARRIER, then do nothing. This
2118 can happen in some cases if we have labels we can not delete.
2120 Similarly, do nothing if we can not delete the label at the start
2121 of the target block. */
2122 barrier = next_nonnote_insn (end);
2123 if (GET_CODE (barrier) != BARRIER
2124 || (GET_CODE (b->head) == CODE_LABEL
2125 && ! can_delete_label_p (b->head)))
2128 flow_delete_insn (barrier);
2130 /* Move block and loop notes out of the chain so that we do not
2131 disturb their order.
2133 ??? A better solution would be to squeeze out all the non-nested notes
2134 and adjust the block trees appropriately. Even better would be to have
2135 a tighter connection between block trees and rtl so that this is not
2137 start = squeeze_notes (start, end);
2139 /* Scramble the insn chain. */
2140 reorder_insns (start, end, a->end);
2142 /* Now blocks A and B are contiguous. Merge them. */
2143 merge_blocks_nomove (a, b);
2147 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2148 b->index, a->index);
2154 /* Blocks A and B are to be merged into a single block. The insns
2155 are already contiguous, hence `nomove'. */
2158 merge_blocks_nomove (a, b)
2162 rtx b_head, b_end, a_end;
2165 /* If there was a CODE_LABEL beginning B, delete it. */
2168 if (GET_CODE (b_head) == CODE_LABEL)
2170 /* Detect basic blocks with nothing but a label. This can happen
2171 in particular at the end of a function. */
2172 if (b_head == b_end)
2174 b_head = flow_delete_insn (b_head);
2177 /* Delete the basic block note. */
2178 if (GET_CODE (b_head) == NOTE
2179 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2181 if (b_head == b_end)
2183 b_head = flow_delete_insn (b_head);
2186 /* If there was a jump out of A, delete it. */
2188 if (GET_CODE (a_end) == JUMP_INSN)
2192 prev = prev_nonnote_insn (a_end);
2197 /* If this was a conditional jump, we need to also delete
2198 the insn that set cc0. */
2200 if (prev && sets_cc0_p (prev))
2203 prev = prev_nonnote_insn (prev);
2206 flow_delete_insn (tmp);
2210 /* Note that a->head != a->end, since we should have at least a
2211 bb note plus the jump, so prev != insn. */
2212 flow_delete_insn (a_end);
2216 /* By definition, there should only be one successor of A, and that is
2217 B. Free that edge struct. */
2221 /* Adjust the edges out of B for the new owner. */
2222 for (e = b->succ; e ; e = e->succ_next)
2226 /* Reassociate the insns of B with A. */
2229 BLOCK_FOR_INSN (b_head) = a;
2230 while (b_head != b_end)
2232 b_head = NEXT_INSN (b_head);
2233 BLOCK_FOR_INSN (b_head) = a;
2239 /* Compact the basic block array. */
2243 /* Attempt to merge basic blocks that are potentially non-adjacent.
2244 Return true iff the attempt succeeded. */
2247 merge_blocks (e, b, c)
2251 /* If B has a fallthru edge to C, no need to move anything. */
2252 if (e->flags & EDGE_FALLTHRU)
2254 /* If a label still appears somewhere and we cannot delete the label,
2255 then we cannot merge the blocks. The edge was tidied already. */
2257 rtx insn, stop = NEXT_INSN (c->head);
2258 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2259 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2262 merge_blocks_nomove (b, c);
2266 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2267 b->index, c->index);
2276 int c_has_outgoing_fallthru;
2277 int b_has_incoming_fallthru;
2279 /* We must make sure to not munge nesting of exception regions,
2280 lexical blocks, and loop notes.
2282 The first is taken care of by requiring that the active eh
2283 region at the end of one block always matches the active eh
2284 region at the beginning of the next block.
2286 The later two are taken care of by squeezing out all the notes. */
2288 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2289 executed and we may want to treat blocks which have two out
2290 edges, one normal, one abnormal as only having one edge for
2291 block merging purposes. */
2293 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2294 if (tmp_edge->flags & EDGE_FALLTHRU)
2296 c_has_outgoing_fallthru = (tmp_edge != NULL);
2298 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2299 if (tmp_edge->flags & EDGE_FALLTHRU)
2301 b_has_incoming_fallthru = (tmp_edge != NULL);
2303 /* If B does not have an incoming fallthru, and the exception regions
2304 match, then it can be moved immediately before C without introducing
2307 C can not be the first block, so we do not have to worry about
2308 accessing a non-existent block. */
2309 d = BASIC_BLOCK (c->index - 1);
2310 if (! b_has_incoming_fallthru
2311 && d->eh_end == b->eh_beg
2312 && b->eh_end == c->eh_beg)
2313 return merge_blocks_move_predecessor_nojumps (b, c);
2315 /* Otherwise, we're going to try to move C after B. Make sure the
2316 exception regions match.
2318 If B is the last basic block, then we must not try to access the
2319 block structure for block B + 1. Luckily in that case we do not
2320 need to worry about matching exception regions. */
2321 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2322 if (b->eh_end == c->eh_beg
2323 && (d == NULL || c->eh_end == d->eh_beg))
2325 /* If C does not have an outgoing fallthru, then it can be moved
2326 immediately after B without introducing or modifying jumps. */
2327 if (! c_has_outgoing_fallthru)
2328 return merge_blocks_move_successor_nojumps (b, c);
2330 /* Otherwise, we'll need to insert an extra jump, and possibly
2331 a new block to contain it. */
2332 /* ??? Not implemented yet. */
2339 /* Top level driver for merge_blocks. */
2346 /* Attempt to merge blocks as made possible by edge removal. If a block
2347 has only one successor, and the successor has only one predecessor,
2348 they may be combined. */
2350 for (i = 0; i < n_basic_blocks; )
2352 basic_block c, b = BASIC_BLOCK (i);
2355 /* A loop because chains of blocks might be combineable. */
2356 while ((s = b->succ) != NULL
2357 && s->succ_next == NULL
2358 && (s->flags & EDGE_EH) == 0
2359 && (c = s->dest) != EXIT_BLOCK_PTR
2360 && c->pred->pred_next == NULL
2361 /* If the jump insn has side effects, we can't kill the edge. */
2362 && (GET_CODE (b->end) != JUMP_INSN
2363 || onlyjump_p (b->end))
2364 && merge_blocks (s, b, c))
2367 /* Don't get confused by the index shift caused by deleting blocks. */
2372 /* The given edge should potentially be a fallthru edge. If that is in
2373 fact true, delete the jump and barriers that are in the way. */
2376 tidy_fallthru_edge (e, b, c)
2382 /* ??? In a late-running flow pass, other folks may have deleted basic
2383 blocks by nopping out blocks, leaving multiple BARRIERs between here
2384 and the target label. They ought to be chastized and fixed.
2386 We can also wind up with a sequence of undeletable labels between
2387 one block and the next.
2389 So search through a sequence of barriers, labels, and notes for
2390 the head of block C and assert that we really do fall through. */
2392 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2395 /* Remove what will soon cease being the jump insn from the source block.
2396 If block B consisted only of this single jump, turn it into a deleted
2399 if (GET_CODE (q) == JUMP_INSN)
2402 /* If this was a conditional jump, we need to also delete
2403 the insn that set cc0. */
2404 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2411 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2412 NOTE_SOURCE_FILE (q) = 0;
2415 b->end = q = PREV_INSN (q);
2418 /* Selectively unlink the sequence. */
2419 if (q != PREV_INSN (c->head))
2420 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2422 e->flags |= EDGE_FALLTHRU;
2425 /* Fix up edges that now fall through, or rather should now fall through
2426 but previously required a jump around now deleted blocks. Simplify
2427 the search by only examining blocks numerically adjacent, since this
2428 is how find_basic_blocks created them. */
2431 tidy_fallthru_edges ()
2435 for (i = 1; i < n_basic_blocks; ++i)
2437 basic_block b = BASIC_BLOCK (i - 1);
2438 basic_block c = BASIC_BLOCK (i);
2441 /* We care about simple conditional or unconditional jumps with
2444 If we had a conditional branch to the next instruction when
2445 find_basic_blocks was called, then there will only be one
2446 out edge for the block which ended with the conditional
2447 branch (since we do not create duplicate edges).
2449 Furthermore, the edge will be marked as a fallthru because we
2450 merge the flags for the duplicate edges. So we do not want to
2451 check that the edge is not a FALLTHRU edge. */
2452 if ((s = b->succ) != NULL
2453 && s->succ_next == NULL
2455 /* If the jump insn has side effects, we can't tidy the edge. */
2456 && (GET_CODE (b->end) != JUMP_INSN
2457 || onlyjump_p (b->end)))
2458 tidy_fallthru_edge (s, b, c);
2462 /* Discover and record the loop depth at the head of each basic block. */
2465 calculate_loop_depth (dump)
2470 /* The loop infrastructure does the real job for us. */
2471 flow_loops_find (&loops);
2474 flow_loops_dump (&loops, dump, 0);
2476 flow_loops_free (&loops);
2479 /* Perform data flow analysis.
2480 F is the first insn of the function and NREGS the number of register numbers
2484 life_analysis (f, nregs, file, remove_dead_code)
2488 int remove_dead_code;
2490 #ifdef ELIMINABLE_REGS
2492 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2497 /* Record which registers will be eliminated. We use this in
2500 CLEAR_HARD_REG_SET (elim_reg_set);
2502 #ifdef ELIMINABLE_REGS
2503 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2504 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2506 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2509 /* We want alias analysis information for local dead store elimination. */
2510 init_alias_analysis ();
2513 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2517 if (! remove_dead_code)
2518 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2521 /* The post-reload life analysis have (on a global basis) the same
2522 registers live as was computed by reload itself. elimination
2523 Otherwise offsets and such may be incorrect.
2525 Reload will make some registers as live even though they do not
2526 appear in the rtl. */
2527 if (reload_completed)
2528 flags &= ~PROP_REG_INFO;
2532 /* Always remove no-op moves. Do this before other processing so
2533 that we don't have to keep re-scanning them. */
2534 delete_noop_moves (f);
2536 /* Some targets can emit simpler epilogues if they know that sp was
2537 not ever modified during the function. After reload, of course,
2538 we've already emitted the epilogue so there's no sense searching. */
2539 if (! reload_completed)
2540 notice_stack_pointer_modification (f);
2542 /* Allocate and zero out data structures that will record the
2543 data from lifetime analysis. */
2544 allocate_reg_life_data ();
2545 allocate_bb_life_data ();
2546 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2547 all_blocks = sbitmap_alloc (n_basic_blocks);
2548 sbitmap_ones (all_blocks);
2550 /* Find the set of registers live on function exit. */
2551 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2553 /* "Update" life info from zero. It'd be nice to begin the
2554 relaxation with just the exit and noreturn blocks, but that set
2555 is not immediately handy. */
2557 if (flags & PROP_REG_INFO)
2558 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2559 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2562 sbitmap_free (all_blocks);
2563 free (reg_next_use);
2564 reg_next_use = NULL;
2565 end_alias_analysis ();
2568 dump_flow_info (file);
2570 free_basic_block_vars (1);
2573 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2574 Search for REGNO. If found, abort if it is not wider than word_mode. */
2577 verify_wide_reg_1 (px, pregno)
2582 int regno = *(int *) pregno;
2584 if (GET_CODE (x) == REG && REGNO (x) == regno)
2586 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2593 /* A subroutine of verify_local_live_at_start. Search through insns
2594 between HEAD and END looking for register REGNO. */
2597 verify_wide_reg (regno, head, end)
2603 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2604 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, ®no))
2608 head = NEXT_INSN (head);
2611 /* We didn't find the register at all. Something's way screwy. */
2615 /* A subroutine of update_life_info. Verify that there are no untoward
2616 changes in live_at_start during a local update. */
2619 verify_local_live_at_start (new_live_at_start, bb)
2620 regset new_live_at_start;
2623 if (reload_completed)
2625 /* After reload, there are no pseudos, nor subregs of multi-word
2626 registers. The regsets should exactly match. */
2627 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2634 /* Find the set of changed registers. */
2635 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2637 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2639 /* No registers should die. */
2640 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2642 /* Verify that the now-live register is wider than word_mode. */
2643 verify_wide_reg (i, bb->head, bb->end);
2648 /* Updates life information starting with the basic blocks set in BLOCKS.
2650 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2651 expecting local modifications to basic blocks. If we find extra
2652 registers live at the beginning of a block, then we either killed
2653 useful data, or we have a broken split that wants data not provided.
2654 If we find registers removed from live_at_start, that means we have
2655 a broken peephole that is killing a register it shouldn't.
2657 ??? This is not true in one situation -- when a pre-reload splitter
2658 generates subregs of a multi-word pseudo, current life analysis will
2659 lose the kill. So we _can_ have a pseudo go live. How irritating.
2661 BLOCK_FOR_INSN is assumed to be correct.
2663 PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
2664 up reg_next_use. Including PROP_REG_INFO does not properly refresh
2665 regs_ever_live unless the caller resets it to zero. */
2668 update_life_info (blocks, extent, prop_flags)
2670 enum update_life_extent extent;
2674 regset_head tmp_head;
2677 tmp = INITIALIZE_REG_SET (tmp_head);
2679 /* For a global update, we go through the relaxation process again. */
2680 if (extent != UPDATE_LIFE_LOCAL)
2682 calculate_global_regs_live (blocks, blocks,
2683 prop_flags & PROP_SCAN_DEAD_CODE);
2685 /* If asked, remove notes from the blocks we'll update. */
2686 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2687 count_or_remove_death_notes (blocks, 1);
2690 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2692 basic_block bb = BASIC_BLOCK (i);
2694 COPY_REG_SET (tmp, bb->global_live_at_end);
2695 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2697 if (extent == UPDATE_LIFE_LOCAL)
2698 verify_local_live_at_start (tmp, bb);
2703 if (prop_flags & PROP_REG_INFO)
2705 /* The only pseudos that are live at the beginning of the function
2706 are those that were not set anywhere in the function. local-alloc
2707 doesn't know how to handle these correctly, so mark them as not
2708 local to any one basic block. */
2709 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2710 FIRST_PSEUDO_REGISTER, i,
2711 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2713 /* We have a problem with any pseudoreg that lives across the setjmp.
2714 ANSI says that if a user variable does not change in value between
2715 the setjmp and the longjmp, then the longjmp preserves it. This
2716 includes longjmp from a place where the pseudo appears dead.
2717 (In principle, the value still exists if it is in scope.)
2718 If the pseudo goes in a hard reg, some other value may occupy
2719 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2720 Conclusion: such a pseudo must not go in a hard reg. */
2721 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2722 FIRST_PSEUDO_REGISTER, i,
2724 if (regno_reg_rtx[i] != 0)
2726 REG_LIVE_LENGTH (i) = -1;
2727 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2733 /* Free the variables allocated by find_basic_blocks.
2735 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2738 free_basic_block_vars (keep_head_end_p)
2739 int keep_head_end_p;
2741 if (basic_block_for_insn)
2743 VARRAY_FREE (basic_block_for_insn);
2744 basic_block_for_insn = NULL;
2747 if (! keep_head_end_p)
2750 VARRAY_FREE (basic_block_info);
2753 ENTRY_BLOCK_PTR->aux = NULL;
2754 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2755 EXIT_BLOCK_PTR->aux = NULL;
2756 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2760 /* Return nonzero if the destination of SET equals the source. */
2765 rtx src = SET_SRC (set);
2766 rtx dst = SET_DEST (set);
2768 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2770 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2772 src = SUBREG_REG (src);
2773 dst = SUBREG_REG (dst);
2776 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2777 && REGNO (src) == REGNO (dst));
2780 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2786 rtx pat = PATTERN (insn);
2788 /* Insns carrying these notes are useful later on. */
2789 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2792 if (GET_CODE (pat) == SET && set_noop_p (pat))
2795 if (GET_CODE (pat) == PARALLEL)
2798 /* If nothing but SETs of registers to themselves,
2799 this insn can also be deleted. */
2800 for (i = 0; i < XVECLEN (pat, 0); i++)
2802 rtx tem = XVECEXP (pat, 0, i);
2804 if (GET_CODE (tem) == USE
2805 || GET_CODE (tem) == CLOBBER)
2808 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2817 /* Delete any insns that copy a register to itself. */
2820 delete_noop_moves (f)
2824 for (insn = f; insn; insn = NEXT_INSN (insn))
2826 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2828 PUT_CODE (insn, NOTE);
2829 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2830 NOTE_SOURCE_FILE (insn) = 0;
2835 /* Determine if the stack pointer is constant over the life of the function.
2836 Only useful before prologues have been emitted. */
2839 notice_stack_pointer_modification_1 (x, pat, data)
2841 rtx pat ATTRIBUTE_UNUSED;
2842 void *data ATTRIBUTE_UNUSED;
2844 if (x == stack_pointer_rtx
2845 /* The stack pointer is only modified indirectly as the result
2846 of a push until later in flow. See the comments in rtl.texi
2847 regarding Embedded Side-Effects on Addresses. */
2848 || (GET_CODE (x) == MEM
2849 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2850 || GET_CODE (XEXP (x, 0)) == PRE_INC
2851 || GET_CODE (XEXP (x, 0)) == POST_DEC
2852 || GET_CODE (XEXP (x, 0)) == POST_INC)
2853 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2854 current_function_sp_is_unchanging = 0;
2858 notice_stack_pointer_modification (f)
2863 /* Assume that the stack pointer is unchanging if alloca hasn't
2865 current_function_sp_is_unchanging = !current_function_calls_alloca;
2866 if (! current_function_sp_is_unchanging)
2869 for (insn = f; insn; insn = NEXT_INSN (insn))
2871 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2873 /* Check if insn modifies the stack pointer. */
2874 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2876 if (! current_function_sp_is_unchanging)
2882 /* Mark a register in SET. Hard registers in large modes get all
2883 of their component registers set as well. */
2885 mark_reg (reg, xset)
2889 regset set = (regset) xset;
2890 int regno = REGNO (reg);
2892 if (GET_MODE (reg) == BLKmode)
2895 SET_REGNO_REG_SET (set, regno);
2896 if (regno < FIRST_PSEUDO_REGISTER)
2898 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2900 SET_REGNO_REG_SET (set, regno + n);
2904 /* Mark those regs which are needed at the end of the function as live
2905 at the end of the last basic block. */
2907 mark_regs_live_at_end (set)
2912 /* If exiting needs the right stack value, consider the stack pointer
2913 live at the end of the function. */
2914 if ((HAVE_epilogue && reload_completed)
2915 || ! EXIT_IGNORE_STACK
2916 || (! FRAME_POINTER_REQUIRED
2917 && ! current_function_calls_alloca
2918 && flag_omit_frame_pointer)
2919 || current_function_sp_is_unchanging)
2921 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2924 /* Mark the frame pointer if needed at the end of the function. If
2925 we end up eliminating it, it will be removed from the live list
2926 of each basic block by reload. */
2928 if (! reload_completed || frame_pointer_needed)
2930 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2931 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2932 /* If they are different, also mark the hard frame pointer as live */
2933 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2937 #ifdef PIC_OFFSET_TABLE_REGNUM
2938 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2939 /* Many architectures have a GP register even without flag_pic.
2940 Assume the pic register is not in use, or will be handled by
2941 other means, if it is not fixed. */
2942 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2943 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2947 /* Mark all global registers, and all registers used by the epilogue
2948 as being live at the end of the function since they may be
2949 referenced by our caller. */
2950 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2952 #ifdef EPILOGUE_USES
2953 || EPILOGUE_USES (i)
2956 SET_REGNO_REG_SET (set, i);
2958 /* Mark all call-saved registers that we actaully used. */
2959 if (HAVE_epilogue && reload_completed)
2961 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2962 if (! call_used_regs[i] && regs_ever_live[i])
2963 SET_REGNO_REG_SET (set, i);
2966 /* Mark function return value. */
2967 diddle_return_value (mark_reg, set);
2970 /* Propagate global life info around the graph of basic blocks. Begin
2971 considering blocks with their corresponding bit set in BLOCKS_IN.
2972 BLOCKS_OUT is set for every block that was changed. */
2975 calculate_global_regs_live (blocks_in, blocks_out, flags)
2976 sbitmap blocks_in, blocks_out;
2979 basic_block *queue, *qhead, *qtail, *qend;
2980 regset tmp, new_live_at_end;
2981 regset_head tmp_head;
2982 regset_head new_live_at_end_head;
2985 tmp = INITIALIZE_REG_SET (tmp_head);
2986 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
2988 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
2989 because the `head == tail' style test for an empty queue doesn't
2990 work with a full queue. */
2991 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
2993 qhead = qend = queue + n_basic_blocks + 2;
2995 /* Clear out the garbage that might be hanging out in bb->aux. */
2996 for (i = n_basic_blocks - 1; i >= 0; --i)
2997 BASIC_BLOCK (i)->aux = NULL;
2999 /* Queue the blocks set in the initial mask. Do this in reverse block
3000 number order so that we are more likely for the first round to do
3001 useful work. We use AUX non-null to flag that the block is queued. */
3002 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3004 basic_block bb = BASIC_BLOCK (i);
3009 sbitmap_zero (blocks_out);
3011 while (qhead != qtail)
3013 int rescan, changed;
3022 /* Begin by propogating live_at_start from the successor blocks. */
3023 CLEAR_REG_SET (new_live_at_end);
3024 for (e = bb->succ; e ; e = e->succ_next)
3026 basic_block sb = e->dest;
3027 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3030 if (bb == ENTRY_BLOCK_PTR)
3032 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3036 /* On our first pass through this block, we'll go ahead and continue.
3037 Recognize first pass by local_set NULL. On subsequent passes, we
3038 get to skip out early if live_at_end wouldn't have changed. */
3040 if (bb->local_set == NULL)
3042 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3047 /* If any bits were removed from live_at_end, we'll have to
3048 rescan the block. This wouldn't be necessary if we had
3049 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3050 local_live is really dependant on live_at_end. */
3051 CLEAR_REG_SET (tmp);
3052 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3053 new_live_at_end, BITMAP_AND_COMPL);
3057 /* Find the set of changed bits. Take this opportunity
3058 to notice that this set is empty and early out. */
3059 CLEAR_REG_SET (tmp);
3060 changed = bitmap_operation (tmp, bb->global_live_at_end,
3061 new_live_at_end, BITMAP_XOR);
3065 /* If any of the changed bits overlap with local_set,
3066 we'll have to rescan the block. Detect overlap by
3067 the AND with ~local_set turning off bits. */
3068 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3073 /* Let our caller know that BB changed enough to require its
3074 death notes updated. */
3075 SET_BIT (blocks_out, bb->index);
3079 /* Add to live_at_start the set of all registers in
3080 new_live_at_end that aren't in the old live_at_end. */
3082 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3084 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3086 changed = bitmap_operation (bb->global_live_at_start,
3087 bb->global_live_at_start,
3094 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3096 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3097 into live_at_start. */
3098 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3100 /* If live_at start didn't change, no need to go farther. */
3101 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3104 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3107 /* Queue all predecessors of BB so that we may re-examine
3108 their live_at_end. */
3109 for (e = bb->pred; e ; e = e->pred_next)
3111 basic_block pb = e->src;
3112 if (pb->aux == NULL)
3123 FREE_REG_SET (new_live_at_end);
3125 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3127 basic_block bb = BASIC_BLOCK (i);
3128 FREE_REG_SET (bb->local_set);
3134 /* Subroutines of life analysis. */
3136 /* Allocate the permanent data structures that represent the results
3137 of life analysis. Not static since used also for stupid life analysis. */
3140 allocate_bb_life_data ()
3144 for (i = 0; i < n_basic_blocks; i++)
3146 basic_block bb = BASIC_BLOCK (i);
3148 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3149 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3152 ENTRY_BLOCK_PTR->global_live_at_end
3153 = OBSTACK_ALLOC_REG_SET (function_obstack);
3154 EXIT_BLOCK_PTR->global_live_at_start
3155 = OBSTACK_ALLOC_REG_SET (function_obstack);
3157 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3161 allocate_reg_life_data ()
3165 /* Recalculate the register space, in case it has grown. Old style
3166 vector oriented regsets would set regset_{size,bytes} here also. */
3167 allocate_reg_info (max_regno, FALSE, FALSE);
3169 /* Reset all the data we'll collect in propagate_block and its
3171 for (i = 0; i < max_regno; i++)
3175 REG_N_DEATHS (i) = 0;
3176 REG_N_CALLS_CROSSED (i) = 0;
3177 REG_LIVE_LENGTH (i) = 0;
3178 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3182 /* Compute the registers live at the beginning of a basic block
3183 from those live at the end.
3185 When called, OLD contains those live at the end.
3186 On return, it contains those live at the beginning.
3187 FIRST and LAST are the first and last insns of the basic block.
3189 FINAL is nonzero if we are doing the final pass which is not
3190 for computing the life info (since that has already been done)
3191 but for acting on it. On this pass, we delete dead stores,
3192 set up the logical links and dead-variables lists of instructions,
3193 and merge instructions for autoincrement and autodecrement addresses.
3195 SIGNIFICANT is nonzero only the first time for each basic block.
3196 If it is nonzero, it points to a regset in which we store
3197 a 1 for each register that is set within the block.
3199 BNUM is the number of the basic block. */
3202 propagate_block (bb, old, significant, flags)
3211 regset_head live_head;
3213 regset_head dead_head;
3215 /* Find the loop depth for this block. Ignore loop level changes in the
3216 middle of the basic block -- for register allocation purposes, the
3217 important uses will be in the blocks wholely contained within the loop
3218 not in the loop pre-header or post-trailer. */
3219 loop_depth = bb->loop_depth;
3221 dead = INITIALIZE_REG_SET (live_head);
3222 live = INITIALIZE_REG_SET (dead_head);
3226 if (flags & PROP_REG_INFO)
3230 /* Process the regs live at the end of the block.
3231 Mark them as not local to any one basic block. */
3232 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3234 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3238 /* Scan the block an insn at a time from end to beginning. */
3240 for (insn = bb->end; ; insn = prev)
3242 prev = PREV_INSN (insn);
3244 if (GET_CODE (insn) == NOTE)
3246 /* If this is a call to `setjmp' et al,
3247 warn if any non-volatile datum is live. */
3249 if ((flags & PROP_REG_INFO)
3250 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3251 IOR_REG_SET (regs_live_at_setjmp, old);
3254 /* Update the life-status of regs for this insn.
3255 First DEAD gets which regs are set in this insn
3256 then LIVE gets which regs are used in this insn.
3257 Then the regs live before the insn
3258 are those live after, with DEAD regs turned off,
3259 and then LIVE regs turned on. */
3261 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3264 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3265 int insn_is_dead = 0;
3266 int libcall_is_dead = 0;
3268 if (flags & PROP_SCAN_DEAD_CODE)
3270 insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
3272 libcall_is_dead = (insn_is_dead && note != 0
3273 && libcall_dead_p (PATTERN (insn), old,
3277 /* We almost certainly don't want to delete prologue or epilogue
3278 instructions. Warn about probable compiler losage. */
3281 && (((HAVE_epilogue || HAVE_prologue)
3282 && prologue_epilogue_contains (insn))
3283 || (HAVE_sibcall_epilogue
3284 && sibcall_epilogue_contains (insn))))
3286 if (flags & PROP_KILL_DEAD_CODE)
3288 warning ("ICE: would have deleted prologue/epilogue insn");
3289 if (!inhibit_warnings)
3292 libcall_is_dead = insn_is_dead = 0;
3295 /* If an instruction consists of just dead store(s) on final pass,
3296 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3297 We could really delete it with delete_insn, but that
3298 can cause trouble for first or last insn in a basic block. */
3299 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3302 /* If the insn referred to a label, note that the label is
3304 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3306 if (REG_NOTE_KIND (inote) == REG_LABEL)
3308 rtx label = XEXP (inote, 0);
3310 LABEL_NUSES (label)--;
3312 /* If this label was attached to an ADDR_VEC, it's
3313 safe to delete the ADDR_VEC. In fact, it's pretty
3314 much mandatory to delete it, because the ADDR_VEC may
3315 be referencing labels that no longer exist. */
3316 if (LABEL_NUSES (label) == 0
3317 && (next = next_nonnote_insn (label)) != NULL
3318 && GET_CODE (next) == JUMP_INSN
3319 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3320 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3322 rtx pat = PATTERN (next);
3323 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3324 int len = XVECLEN (pat, diff_vec_p);
3326 for (i = 0; i < len; i++)
3327 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3328 PUT_CODE (next, NOTE);
3329 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3330 NOTE_SOURCE_FILE (next) = 0;
3335 PUT_CODE (insn, NOTE);
3336 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3337 NOTE_SOURCE_FILE (insn) = 0;
3339 /* CC0 is now known to be dead. Either this insn used it,
3340 in which case it doesn't anymore, or clobbered it,
3341 so the next insn can't use it. */
3344 /* If this insn is copying the return value from a library call,
3345 delete the entire library call. */
3346 if (libcall_is_dead)
3348 rtx first = XEXP (note, 0);
3350 while (INSN_DELETED_P (first))
3351 first = NEXT_INSN (first);
3356 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3357 NOTE_SOURCE_FILE (p) = 0;
3363 CLEAR_REG_SET (dead);
3364 CLEAR_REG_SET (live);
3366 /* See if this is an increment or decrement that can be
3367 merged into a following memory address. */
3370 register rtx x = single_set (insn);
3372 /* Does this instruction increment or decrement a register? */
3373 if (!reload_completed
3374 && (flags & PROP_AUTOINC)
3376 && GET_CODE (SET_DEST (x)) == REG
3377 && (GET_CODE (SET_SRC (x)) == PLUS
3378 || GET_CODE (SET_SRC (x)) == MINUS)
3379 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3380 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3381 /* Ok, look for a following memory ref we can combine with.
3382 If one is found, change the memory ref to a PRE_INC
3383 or PRE_DEC, cancel this insn, and return 1.
3384 Return 0 if nothing has been done. */
3385 && try_pre_increment_1 (insn))
3388 #endif /* AUTO_INC_DEC */
3390 /* If this is not the final pass, and this insn is copying the
3391 value of a library call and it's dead, don't scan the
3392 insns that perform the library call, so that the call's
3393 arguments are not marked live. */
3394 if (libcall_is_dead)
3396 /* Mark the dest reg as `significant'. */
3397 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3398 significant, flags);
3400 insn = XEXP (note, 0);
3401 prev = PREV_INSN (insn);
3403 else if (GET_CODE (PATTERN (insn)) == SET
3404 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3405 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3406 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3407 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3408 /* We have an insn to pop a constant amount off the stack.
3409 (Such insns use PLUS regardless of the direction of the stack,
3410 and any insn to adjust the stack by a constant is always a pop.)
3411 These insns, if not dead stores, have no effect on life. */
3415 /* Any regs live at the time of a call instruction
3416 must not go in a register clobbered by calls.
3417 Find all regs now live and record this for them. */
3419 if (GET_CODE (insn) == CALL_INSN
3420 && (flags & PROP_REG_INFO))
3421 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3423 REG_N_CALLS_CROSSED (i)++;
3426 /* LIVE gets the regs used in INSN;
3427 DEAD gets those set by it. Dead insns don't make anything
3430 mark_set_regs (old, dead, PATTERN (insn),
3431 insn, significant, flags);
3433 /* If an insn doesn't use CC0, it becomes dead since we
3434 assume that every insn clobbers it. So show it dead here;
3435 mark_used_regs will set it live if it is referenced. */
3439 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3441 /* Sometimes we may have inserted something before INSN (such as
3442 a move) when we make an auto-inc. So ensure we will scan
3445 prev = PREV_INSN (insn);
3448 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3454 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3456 note = XEXP (note, 1))
3457 if (GET_CODE (XEXP (note, 0)) == USE)
3458 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3461 /* Each call clobbers all call-clobbered regs that are not
3462 global or fixed. Note that the function-value reg is a
3463 call-clobbered reg, and mark_set_regs has already had
3464 a chance to handle it. */
3466 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3467 if (call_used_regs[i] && ! global_regs[i]
3470 SET_REGNO_REG_SET (dead, i);
3472 SET_REGNO_REG_SET (significant, i);
3475 /* The stack ptr is used (honorarily) by a CALL insn. */
3476 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3478 /* Calls may also reference any of the global registers,
3479 so they are made live. */
3480 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3482 mark_used_regs (old, live,
3483 gen_rtx_REG (reg_raw_mode[i], i),
3486 /* Calls also clobber memory. */
3487 free_EXPR_LIST_list (&mem_set_list);
3490 /* Update OLD for the registers used or set. */
3491 AND_COMPL_REG_SET (old, dead);
3492 IOR_REG_SET (old, live);
3496 /* On final pass, update counts of how many insns in which
3497 each reg is live. */
3498 if (flags & PROP_REG_INFO)
3499 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
3502 if (insn == bb->head)
3506 FREE_REG_SET (dead);
3507 FREE_REG_SET (live);
3508 free_EXPR_LIST_list (&mem_set_list);
3511 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3512 (SET expressions whose destinations are registers dead after the insn).
3513 NEEDED is the regset that says which regs are alive after the insn.
3515 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3517 If X is the entire body of an insn, NOTES contains the reg notes
3518 pertaining to the insn. */
3521 insn_dead_p (x, needed, call_ok, notes)
3525 rtx notes ATTRIBUTE_UNUSED;
3527 enum rtx_code code = GET_CODE (x);
3530 /* If flow is invoked after reload, we must take existing AUTO_INC
3531 expresions into account. */
3532 if (reload_completed)
3534 for ( ; notes; notes = XEXP (notes, 1))
3536 if (REG_NOTE_KIND (notes) == REG_INC)
3538 int regno = REGNO (XEXP (notes, 0));
3540 /* Don't delete insns to set global regs. */
3541 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3542 || REGNO_REG_SET_P (needed, regno))
3549 /* If setting something that's a reg or part of one,
3550 see if that register's altered value will be live. */
3554 rtx r = SET_DEST (x);
3557 if (GET_CODE (r) == CC0)
3561 /* A SET that is a subroutine call cannot be dead. */
3562 if (GET_CODE (SET_SRC (x)) == CALL)
3568 /* Don't eliminate loads from volatile memory or volatile asms. */
3569 else if (volatile_refs_p (SET_SRC (x)))
3572 if (GET_CODE (r) == MEM)
3576 if (MEM_VOLATILE_P (r))
3579 /* Walk the set of memory locations we are currently tracking
3580 and see if one is an identical match to this memory location.
3581 If so, this memory write is dead (remember, we're walking
3582 backwards from the end of the block to the start. */
3583 temp = mem_set_list;
3586 if (rtx_equal_p (XEXP (temp, 0), r))
3588 temp = XEXP (temp, 1);
3593 while (GET_CODE (r) == SUBREG
3594 || GET_CODE (r) == STRICT_LOW_PART
3595 || GET_CODE (r) == ZERO_EXTRACT)
3598 if (GET_CODE (r) == REG)
3600 int regno = REGNO (r);
3603 if (REGNO_REG_SET_P (needed, regno))
3606 /* If this is a hard register, verify that subsequent
3607 words are not needed. */
3608 if (regno < FIRST_PSEUDO_REGISTER)
3610 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3613 if (REGNO_REG_SET_P (needed, regno+n))
3617 /* Don't delete insns to set global regs. */
3618 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3621 /* Make sure insns to set the stack pointer aren't deleted. */
3622 if (regno == STACK_POINTER_REGNUM)
3625 /* Make sure insns to set the frame pointer aren't deleted. */
3626 if (regno == FRAME_POINTER_REGNUM
3627 && (! reload_completed || frame_pointer_needed))
3629 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3630 if (regno == HARD_FRAME_POINTER_REGNUM
3631 && (! reload_completed || frame_pointer_needed))
3635 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3636 /* Make sure insns to set arg pointer are never deleted
3637 (if the arg pointer isn't fixed, there will be a USE
3638 for it, so we can treat it normally). */
3639 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3643 /* Otherwise, the set is dead. */
3649 /* If performing several activities, insn is dead if each activity
3650 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3651 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3653 else if (code == PARALLEL)
3655 int i = XVECLEN (x, 0);
3657 for (i--; i >= 0; i--)
3658 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3659 && GET_CODE (XVECEXP (x, 0, i)) != USE
3660 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3666 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3667 is not necessarily true for hard registers. */
3668 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3669 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3670 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3673 /* We do not check other CLOBBER or USE here. An insn consisting of just
3674 a CLOBBER or just a USE should not be deleted. */
3678 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3679 return 1 if the entire library call is dead.
3680 This is true if X copies a register (hard or pseudo)
3681 and if the hard return reg of the call insn is dead.
3682 (The caller should have tested the destination of X already for death.)
3684 If this insn doesn't just copy a register, then we don't
3685 have an ordinary libcall. In that case, cse could not have
3686 managed to substitute the source for the dest later on,
3687 so we can assume the libcall is dead.
3689 NEEDED is the bit vector of pseudoregs live before this insn.
3690 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3693 libcall_dead_p (x, needed, note, insn)
3699 register RTX_CODE code = GET_CODE (x);
3703 register rtx r = SET_SRC (x);
3704 if (GET_CODE (r) == REG)
3706 rtx call = XEXP (note, 0);
3710 /* Find the call insn. */
3711 while (call != insn && GET_CODE (call) != CALL_INSN)
3712 call = NEXT_INSN (call);
3714 /* If there is none, do nothing special,
3715 since ordinary death handling can understand these insns. */
3719 /* See if the hard reg holding the value is dead.
3720 If this is a PARALLEL, find the call within it. */
3721 call_pat = PATTERN (call);
3722 if (GET_CODE (call_pat) == PARALLEL)
3724 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3725 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3726 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3729 /* This may be a library call that is returning a value
3730 via invisible pointer. Do nothing special, since
3731 ordinary death handling can understand these insns. */
3735 call_pat = XVECEXP (call_pat, 0, i);
3738 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3744 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3745 live at function entry. Don't count global register variables, variables
3746 in registers that can be used for function arg passing, or variables in
3747 fixed hard registers. */
3750 regno_uninitialized (regno)
3753 if (n_basic_blocks == 0
3754 || (regno < FIRST_PSEUDO_REGISTER
3755 && (global_regs[regno]
3756 || fixed_regs[regno]
3757 || FUNCTION_ARG_REGNO_P (regno))))
3760 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3763 /* 1 if register REGNO was alive at a place where `setjmp' was called
3764 and was set more than once or is an argument.
3765 Such regs may be clobbered by `longjmp'. */
3768 regno_clobbered_at_setjmp (regno)
3771 if (n_basic_blocks == 0)
3774 return ((REG_N_SETS (regno) > 1
3775 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3776 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3779 /* INSN references memory, possibly using autoincrement addressing modes.
3780 Find any entries on the mem_set_list that need to be invalidated due
3781 to an address change. */
3783 invalidate_mems_from_autoinc (insn)
3786 rtx note = REG_NOTES (insn);
3787 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3789 if (REG_NOTE_KIND (note) == REG_INC)
3791 rtx temp = mem_set_list;
3792 rtx prev = NULL_RTX;
3797 next = XEXP (temp, 1);
3798 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3800 /* Splice temp out of list. */
3802 XEXP (prev, 1) = next;
3804 mem_set_list = next;
3805 free_EXPR_LIST_node (temp);
3815 /* Process the registers that are set within X. Their bits are set to
3816 1 in the regset DEAD, because they are dead prior to this insn.
3818 If INSN is nonzero, it is the insn being processed.
3820 FLAGS is the set of operations to perform. */
3823 mark_set_regs (needed, dead, x, insn, significant, flags)
3831 register RTX_CODE code = GET_CODE (x);
3833 if (code == SET || code == CLOBBER)
3834 mark_set_1 (needed, dead, x, insn, significant, flags);
3835 else if (code == PARALLEL)
3838 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3840 code = GET_CODE (XVECEXP (x, 0, i));
3841 if (code == SET || code == CLOBBER)
3842 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3843 significant, flags);
3848 /* Process a single SET rtx, X. */
3851 mark_set_1 (needed, dead, x, insn, significant, flags)
3859 register int regno = -1;
3860 register rtx reg = SET_DEST (x);
3862 /* Some targets place small structures in registers for
3863 return values of functions. We have to detect this
3864 case specially here to get correct flow information. */
3865 if (GET_CODE (reg) == PARALLEL
3866 && GET_MODE (reg) == BLKmode)
3870 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3871 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3872 significant, flags);
3876 /* Modifying just one hardware register of a multi-reg value
3877 or just a byte field of a register
3878 does not mean the value from before this insn is now dead.
3879 But it does mean liveness of that register at the end of the block
3882 Within mark_set_1, however, we treat it as if the register is
3883 indeed modified. mark_used_regs will, however, also treat this
3884 register as being used. Thus, we treat these insns as setting a
3885 new value for the register as a function of its old value. This
3886 cases LOG_LINKS to be made appropriately and this will help combine. */
3888 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3889 || GET_CODE (reg) == SIGN_EXTRACT
3890 || GET_CODE (reg) == STRICT_LOW_PART)
3891 reg = XEXP (reg, 0);
3893 /* If this set is a MEM, then it kills any aliased writes.
3894 If this set is a REG, then it kills any MEMs which use the reg. */
3895 if (flags & PROP_SCAN_DEAD_CODE)
3897 if (GET_CODE (reg) == MEM
3898 || GET_CODE (reg) == REG)
3900 rtx temp = mem_set_list;
3901 rtx prev = NULL_RTX;
3906 next = XEXP (temp, 1);
3907 if ((GET_CODE (reg) == MEM
3908 && output_dependence (XEXP (temp, 0), reg))
3909 || (GET_CODE (reg) == REG
3910 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3912 /* Splice this entry out of the list. */
3914 XEXP (prev, 1) = next;
3916 mem_set_list = next;
3917 free_EXPR_LIST_node (temp);
3925 /* If the memory reference had embedded side effects (autoincrement
3926 address modes. Then we may need to kill some entries on the
3928 if (insn && GET_CODE (reg) == MEM)
3929 invalidate_mems_from_autoinc (insn);
3931 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3932 /* We do not know the size of a BLKmode store, so we do not track
3933 them for redundant store elimination. */
3934 && GET_MODE (reg) != BLKmode
3935 /* There are no REG_INC notes for SP, so we can't assume we'll see
3936 everything that invalidates it. To be safe, don't eliminate any
3937 stores though SP; none of them should be redundant anyway. */
3938 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3939 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3942 if (GET_CODE (reg) == REG
3943 && (regno = REGNO (reg),
3944 ! (regno == FRAME_POINTER_REGNUM
3945 && (! reload_completed || frame_pointer_needed)))
3946 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3947 && ! (regno == HARD_FRAME_POINTER_REGNUM
3948 && (! reload_completed || frame_pointer_needed))
3950 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3951 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3953 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3954 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3956 int some_needed = REGNO_REG_SET_P (needed, regno);
3957 int some_not_needed = ! some_needed;
3959 /* Mark it as a significant register for this basic block. */
3961 SET_REGNO_REG_SET (significant, regno);
3963 /* Mark it as dead before this insn. */
3964 SET_REGNO_REG_SET (dead, regno);
3966 /* A hard reg in a wide mode may really be multiple registers.
3967 If so, mark all of them just like the first. */
3968 if (regno < FIRST_PSEUDO_REGISTER)
3972 /* Nothing below is needed for the stack pointer; get out asap.
3973 Eg, log links aren't needed, since combine won't use them. */
3974 if (regno == STACK_POINTER_REGNUM)
3977 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3980 int regno_n = regno + n;
3981 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3983 SET_REGNO_REG_SET (significant, regno_n);
3985 SET_REGNO_REG_SET (dead, regno_n);
3986 some_needed |= needed_regno;
3987 some_not_needed |= ! needed_regno;
3991 /* Additional data to record if this is the final pass. */
3992 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3993 | PROP_DEATH_NOTES | PROP_AUTOINC))
3996 register int blocknum = BLOCK_NUM (insn);
3999 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4000 y = reg_next_use[regno];
4002 /* If this is a hard reg, record this function uses the reg. */
4004 if (regno < FIRST_PSEUDO_REGISTER)
4007 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4009 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4010 for (i = regno; i < endregno; i++)
4012 /* The next use is no longer "next", since a store
4014 reg_next_use[i] = 0;
4017 if (flags & PROP_REG_INFO)
4018 for (i = regno; i < endregno; i++)
4020 regs_ever_live[i] = 1;
4026 /* The next use is no longer "next", since a store
4028 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4029 reg_next_use[regno] = 0;
4031 /* Keep track of which basic blocks each reg appears in. */
4033 if (flags & PROP_REG_INFO)
4035 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4036 REG_BASIC_BLOCK (regno) = blocknum;
4037 else if (REG_BASIC_BLOCK (regno) != blocknum)
4038 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4040 /* Count (weighted) references, stores, etc. This counts a
4041 register twice if it is modified, but that is correct. */
4042 REG_N_SETS (regno)++;
4043 REG_N_REFS (regno) += loop_depth + 1;
4045 /* The insns where a reg is live are normally counted
4046 elsewhere, but we want the count to include the insn
4047 where the reg is set, and the normal counting mechanism
4048 would not count it. */
4049 REG_LIVE_LENGTH (regno)++;
4053 if (! some_not_needed)
4055 if (flags & PROP_LOG_LINKS)
4057 /* Make a logical link from the next following insn
4058 that uses this register, back to this insn.
4059 The following insns have already been processed.
4061 We don't build a LOG_LINK for hard registers containing
4062 in ASM_OPERANDs. If these registers get replaced,
4063 we might wind up changing the semantics of the insn,
4064 even if reload can make what appear to be valid
4065 assignments later. */
4066 if (y && (BLOCK_NUM (y) == blocknum)
4067 && (regno >= FIRST_PSEUDO_REGISTER
4068 || asm_noperands (PATTERN (y)) < 0))
4069 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4072 else if (! some_needed)
4074 if (flags & PROP_REG_INFO)
4075 REG_N_DEATHS (REGNO (reg))++;
4077 if (flags & PROP_DEATH_NOTES)
4079 /* Note that dead stores have already been deleted
4080 when possible. If we get here, we have found a
4081 dead store that cannot be eliminated (because the
4082 same insn does something useful). Indicate this
4083 by marking the reg being set as dying here. */
4085 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4090 if (flags & PROP_DEATH_NOTES)
4092 /* This is a case where we have a multi-word hard register
4093 and some, but not all, of the words of the register are
4094 needed in subsequent insns. Write REG_UNUSED notes
4095 for those parts that were not needed. This case should
4100 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4102 if (!REGNO_REG_SET_P (needed, regno + i))
4106 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4112 else if (GET_CODE (reg) == REG)
4114 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4115 reg_next_use[regno] = 0;
4118 /* If this is the last pass and this is a SCRATCH, show it will be dying
4119 here and count it. */
4120 else if (GET_CODE (reg) == SCRATCH)
4122 if (flags & PROP_DEATH_NOTES)
4124 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4130 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4134 find_auto_inc (needed, x, insn)
4139 rtx addr = XEXP (x, 0);
4140 HOST_WIDE_INT offset = 0;
4143 /* Here we detect use of an index register which might be good for
4144 postincrement, postdecrement, preincrement, or predecrement. */
4146 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4147 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4149 if (GET_CODE (addr) == REG)
4152 register int size = GET_MODE_SIZE (GET_MODE (x));
4155 int regno = REGNO (addr);
4157 /* Is the next use an increment that might make auto-increment? */
4158 if ((incr = reg_next_use[regno]) != 0
4159 && (set = single_set (incr)) != 0
4160 && GET_CODE (set) == SET
4161 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4162 /* Can't add side effects to jumps; if reg is spilled and
4163 reloaded, there's no way to store back the altered value. */
4164 && GET_CODE (insn) != JUMP_INSN
4165 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4166 && XEXP (y, 0) == addr
4167 && GET_CODE (XEXP (y, 1)) == CONST_INT
4168 && ((HAVE_POST_INCREMENT
4169 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4170 || (HAVE_POST_DECREMENT
4171 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4172 || (HAVE_PRE_INCREMENT
4173 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4174 || (HAVE_PRE_DECREMENT
4175 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4176 /* Make sure this reg appears only once in this insn. */
4177 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4178 use != 0 && use != (rtx) 1))
4180 rtx q = SET_DEST (set);
4181 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4182 ? (offset ? PRE_INC : POST_INC)
4183 : (offset ? PRE_DEC : POST_DEC));
4185 if (dead_or_set_p (incr, addr))
4187 /* This is the simple case. Try to make the auto-inc. If
4188 we can't, we are done. Otherwise, we will do any
4189 needed updates below. */
4190 if (! validate_change (insn, &XEXP (x, 0),
4191 gen_rtx_fmt_e (inc_code, Pmode, addr),
4195 else if (GET_CODE (q) == REG
4196 /* PREV_INSN used here to check the semi-open interval
4198 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4199 /* We must also check for sets of q as q may be
4200 a call clobbered hard register and there may
4201 be a call between PREV_INSN (insn) and incr. */
4202 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4204 /* We have *p followed sometime later by q = p+size.
4205 Both p and q must be live afterward,
4206 and q is not used between INSN and its assignment.
4207 Change it to q = p, ...*q..., q = q+size.
4208 Then fall into the usual case. */
4213 emit_move_insn (q, addr);
4214 insns = get_insns ();
4217 bb = BLOCK_FOR_INSN (insn);
4218 for (temp = insns; temp; temp = NEXT_INSN (temp))
4219 set_block_for_insn (temp, bb);
4221 /* If we can't make the auto-inc, or can't make the
4222 replacement into Y, exit. There's no point in making
4223 the change below if we can't do the auto-inc and doing
4224 so is not correct in the pre-inc case. */
4226 validate_change (insn, &XEXP (x, 0),
4227 gen_rtx_fmt_e (inc_code, Pmode, q),
4229 validate_change (incr, &XEXP (y, 0), q, 1);
4230 if (! apply_change_group ())
4233 /* We now know we'll be doing this change, so emit the
4234 new insn(s) and do the updates. */
4235 emit_insns_before (insns, insn);
4237 if (BLOCK_FOR_INSN (insn)->head == insn)
4238 BLOCK_FOR_INSN (insn)->head = insns;
4240 /* INCR will become a NOTE and INSN won't contain a
4241 use of ADDR. If a use of ADDR was just placed in
4242 the insn before INSN, make that the next use.
4243 Otherwise, invalidate it. */
4244 if (GET_CODE (PREV_INSN (insn)) == INSN
4245 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4246 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4247 reg_next_use[regno] = PREV_INSN (insn);
4249 reg_next_use[regno] = 0;
4254 /* REGNO is now used in INCR which is below INSN, but
4255 it previously wasn't live here. If we don't mark
4256 it as needed, we'll put a REG_DEAD note for it
4257 on this insn, which is incorrect. */
4258 SET_REGNO_REG_SET (needed, regno);
4260 /* If there are any calls between INSN and INCR, show
4261 that REGNO now crosses them. */
4262 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4263 if (GET_CODE (temp) == CALL_INSN)
4264 REG_N_CALLS_CROSSED (regno)++;
4269 /* If we haven't returned, it means we were able to make the
4270 auto-inc, so update the status. First, record that this insn
4271 has an implicit side effect. */
4274 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4276 /* Modify the old increment-insn to simply copy
4277 the already-incremented value of our register. */
4278 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4281 /* If that makes it a no-op (copying the register into itself) delete
4282 it so it won't appear to be a "use" and a "set" of this
4284 if (SET_DEST (set) == addr)
4286 PUT_CODE (incr, NOTE);
4287 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4288 NOTE_SOURCE_FILE (incr) = 0;
4291 if (regno >= FIRST_PSEUDO_REGISTER)
4293 /* Count an extra reference to the reg. When a reg is
4294 incremented, spilling it is worse, so we want to make
4295 that less likely. */
4296 REG_N_REFS (regno) += loop_depth + 1;
4298 /* Count the increment as a setting of the register,
4299 even though it isn't a SET in rtl. */
4300 REG_N_SETS (regno)++;
4305 #endif /* AUTO_INC_DEC */
4307 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4308 This is done assuming the registers needed from X
4309 are those that have 1-bits in NEEDED.
4311 FLAGS is the set of enabled operations.
4313 INSN is the containing instruction. If INSN is dead, this function is not
4317 mark_used_regs (needed, live, x, flags, insn)
4324 register RTX_CODE code;
4329 code = GET_CODE (x);
4349 /* If we are clobbering a MEM, mark any registers inside the address
4351 if (GET_CODE (XEXP (x, 0)) == MEM)
4352 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4356 /* Don't bother watching stores to mems if this is not the
4357 final pass. We'll not be deleting dead stores this round. */
4358 if (flags & PROP_SCAN_DEAD_CODE)
4360 /* Invalidate the data for the last MEM stored, but only if MEM is
4361 something that can be stored into. */
4362 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4363 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4364 ; /* needn't clear the memory set list */
4367 rtx temp = mem_set_list;
4368 rtx prev = NULL_RTX;
4373 next = XEXP (temp, 1);
4374 if (anti_dependence (XEXP (temp, 0), x))
4376 /* Splice temp out of the list. */
4378 XEXP (prev, 1) = next;
4380 mem_set_list = next;
4381 free_EXPR_LIST_node (temp);
4389 /* If the memory reference had embedded side effects (autoincrement
4390 address modes. Then we may need to kill some entries on the
4393 invalidate_mems_from_autoinc (insn);
4397 if (flags & PROP_AUTOINC)
4398 find_auto_inc (needed, x, insn);
4403 if (GET_CODE (SUBREG_REG (x)) == REG
4404 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4405 && (GET_MODE_SIZE (GET_MODE (x))
4406 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4407 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4409 /* While we're here, optimize this case. */
4412 /* In case the SUBREG is not of a register, don't optimize */
4413 if (GET_CODE (x) != REG)
4415 mark_used_regs (needed, live, x, flags, insn);
4419 /* ... fall through ... */
4422 /* See a register other than being set
4423 => mark it as needed. */
4427 int some_needed = REGNO_REG_SET_P (needed, regno);
4428 int some_not_needed = ! some_needed;
4430 SET_REGNO_REG_SET (live, regno);
4432 /* A hard reg in a wide mode may really be multiple registers.
4433 If so, mark all of them just like the first. */
4434 if (regno < FIRST_PSEUDO_REGISTER)
4438 /* For stack ptr or fixed arg pointer,
4439 nothing below can be necessary, so waste no more time. */
4440 if (regno == STACK_POINTER_REGNUM
4441 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4442 || (regno == HARD_FRAME_POINTER_REGNUM
4443 && (! reload_completed || frame_pointer_needed))
4445 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4446 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4448 || (regno == FRAME_POINTER_REGNUM
4449 && (! reload_completed || frame_pointer_needed)))
4451 /* If this is a register we are going to try to eliminate,
4452 don't mark it live here. If we are successful in
4453 eliminating it, it need not be live unless it is used for
4454 pseudos, in which case it will have been set live when
4455 it was allocated to the pseudos. If the register will not
4456 be eliminated, reload will set it live at that point. */
4458 if ((flags & PROP_REG_INFO)
4459 && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4460 regs_ever_live[regno] = 1;
4463 /* No death notes for global register variables;
4464 their values are live after this function exits. */
4465 if (global_regs[regno])
4467 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4468 reg_next_use[regno] = insn;
4472 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4475 int regno_n = regno + n;
4476 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4478 SET_REGNO_REG_SET (live, regno_n);
4479 some_needed |= needed_regno;
4480 some_not_needed |= ! needed_regno;
4484 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4486 /* Record where each reg is used, so when the reg
4487 is set we know the next insn that uses it. */
4489 reg_next_use[regno] = insn;
4491 if (flags & PROP_REG_INFO)
4493 if (regno < FIRST_PSEUDO_REGISTER)
4495 /* If a hard reg is being used,
4496 record that this function does use it. */
4498 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4502 regs_ever_live[regno + --i] = 1;
4507 /* Keep track of which basic block each reg appears in. */
4509 register int blocknum = BLOCK_NUM (insn);
4511 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4512 REG_BASIC_BLOCK (regno) = blocknum;
4513 else if (REG_BASIC_BLOCK (regno) != blocknum)
4514 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4516 /* Count (weighted) number of uses of each reg. */
4518 REG_N_REFS (regno) += loop_depth + 1;
4522 /* Record and count the insns in which a reg dies.
4523 If it is used in this insn and was dead below the insn
4524 then it dies in this insn. If it was set in this insn,
4525 we do not make a REG_DEAD note; likewise if we already
4526 made such a note. */
4528 if (flags & PROP_DEATH_NOTES)
4531 && ! dead_or_set_p (insn, x)
4533 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4537 /* Check for the case where the register dying partially
4538 overlaps the register set by this insn. */
4539 if (regno < FIRST_PSEUDO_REGISTER
4540 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4542 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4544 some_needed |= dead_or_set_regno_p (insn, regno + n);
4547 /* If none of the words in X is needed, make a REG_DEAD
4548 note. Otherwise, we must make partial REG_DEAD notes. */
4552 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4553 REG_N_DEATHS (regno)++;
4559 /* Don't make a REG_DEAD note for a part of a register
4560 that is set in the insn. */
4562 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4564 if (!REGNO_REG_SET_P (needed, regno + i)
4565 && ! dead_or_set_regno_p (insn, regno + i))
4568 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4579 register rtx testreg = SET_DEST (x);
4582 /* If storing into MEM, don't show it as being used. But do
4583 show the address as being used. */
4584 if (GET_CODE (testreg) == MEM)
4587 if (flags & PROP_AUTOINC)
4588 find_auto_inc (needed, testreg, insn);
4590 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4591 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4595 /* Storing in STRICT_LOW_PART is like storing in a reg
4596 in that this SET might be dead, so ignore it in TESTREG.
4597 but in some other ways it is like using the reg.
4599 Storing in a SUBREG or a bit field is like storing the entire
4600 register in that if the register's value is not used
4601 then this SET is not needed. */
4602 while (GET_CODE (testreg) == STRICT_LOW_PART
4603 || GET_CODE (testreg) == ZERO_EXTRACT
4604 || GET_CODE (testreg) == SIGN_EXTRACT
4605 || GET_CODE (testreg) == SUBREG)
4607 if (GET_CODE (testreg) == SUBREG
4608 && GET_CODE (SUBREG_REG (testreg)) == REG
4609 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4610 && (GET_MODE_SIZE (GET_MODE (testreg))
4611 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4612 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4614 /* Modifying a single register in an alternate mode
4615 does not use any of the old value. But these other
4616 ways of storing in a register do use the old value. */
4617 if (GET_CODE (testreg) == SUBREG
4618 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4623 testreg = XEXP (testreg, 0);
4626 /* If this is a store into a register,
4627 recursively scan the value being stored. */
4629 if ((GET_CODE (testreg) == PARALLEL
4630 && GET_MODE (testreg) == BLKmode)
4631 || (GET_CODE (testreg) == REG
4632 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4633 && (! reload_completed || frame_pointer_needed)))
4634 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4635 && ! (regno == HARD_FRAME_POINTER_REGNUM
4636 && (! reload_completed || frame_pointer_needed))
4638 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4639 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4642 /* We used to exclude global_regs here, but that seems wrong.
4643 Storing in them is like storing in mem. */
4645 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4647 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4654 case UNSPEC_VOLATILE:
4658 /* Traditional and volatile asm instructions must be considered to use
4659 and clobber all hard registers, all pseudo-registers and all of
4660 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4662 Consider for instance a volatile asm that changes the fpu rounding
4663 mode. An insn should not be moved across this even if it only uses
4664 pseudo-regs because it might give an incorrectly rounded result.
4666 ?!? Unfortunately, marking all hard registers as live causes massive
4667 problems for the register allocator and marking all pseudos as live
4668 creates mountains of uninitialized variable warnings.
4670 So for now, just clear the memory set list and mark any regs
4671 we can find in ASM_OPERANDS as used. */
4672 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4673 free_EXPR_LIST_list (&mem_set_list);
4675 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4676 We can not just fall through here since then we would be confused
4677 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4678 traditional asms unlike their normal usage. */
4679 if (code == ASM_OPERANDS)
4683 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4684 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4695 /* Recursively scan the operands of this expression. */
4698 register const char *fmt = GET_RTX_FORMAT (code);
4701 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4705 /* Tail recursive case: save a function call level. */
4711 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4713 else if (fmt[i] == 'E')
4716 for (j = 0; j < XVECLEN (x, i); j++)
4717 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4726 try_pre_increment_1 (insn)
4729 /* Find the next use of this reg. If in same basic block,
4730 make it do pre-increment or pre-decrement if appropriate. */
4731 rtx x = single_set (insn);
4732 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4733 * INTVAL (XEXP (SET_SRC (x), 1)));
4734 int regno = REGNO (SET_DEST (x));
4735 rtx y = reg_next_use[regno];
4737 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4738 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4739 mode would be better. */
4740 && ! dead_or_set_p (y, SET_DEST (x))
4741 && try_pre_increment (y, SET_DEST (x), amount))
4743 /* We have found a suitable auto-increment
4744 and already changed insn Y to do it.
4745 So flush this increment-instruction. */
4746 PUT_CODE (insn, NOTE);
4747 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4748 NOTE_SOURCE_FILE (insn) = 0;
4749 /* Count a reference to this reg for the increment
4750 insn we are deleting. When a reg is incremented.
4751 spilling it is worse, so we want to make that
4753 if (regno >= FIRST_PSEUDO_REGISTER)
4755 REG_N_REFS (regno) += loop_depth + 1;
4756 REG_N_SETS (regno)++;
4763 /* Try to change INSN so that it does pre-increment or pre-decrement
4764 addressing on register REG in order to add AMOUNT to REG.
4765 AMOUNT is negative for pre-decrement.
4766 Returns 1 if the change could be made.
4767 This checks all about the validity of the result of modifying INSN. */
4770 try_pre_increment (insn, reg, amount)
4772 HOST_WIDE_INT amount;
4776 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4777 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4779 /* Nonzero if we can try to make a post-increment or post-decrement.
4780 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4781 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4782 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4785 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4788 /* From the sign of increment, see which possibilities are conceivable
4789 on this target machine. */
4790 if (HAVE_PRE_INCREMENT && amount > 0)
4792 if (HAVE_POST_INCREMENT && amount > 0)
4795 if (HAVE_PRE_DECREMENT && amount < 0)
4797 if (HAVE_POST_DECREMENT && amount < 0)
4800 if (! (pre_ok || post_ok))
4803 /* It is not safe to add a side effect to a jump insn
4804 because if the incremented register is spilled and must be reloaded
4805 there would be no way to store the incremented value back in memory. */
4807 if (GET_CODE (insn) == JUMP_INSN)
4812 use = find_use_as_address (PATTERN (insn), reg, 0);
4813 if (post_ok && (use == 0 || use == (rtx) 1))
4815 use = find_use_as_address (PATTERN (insn), reg, -amount);
4819 if (use == 0 || use == (rtx) 1)
4822 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4825 /* See if this combination of instruction and addressing mode exists. */
4826 if (! validate_change (insn, &XEXP (use, 0),
4827 gen_rtx_fmt_e (amount > 0
4828 ? (do_post ? POST_INC : PRE_INC)
4829 : (do_post ? POST_DEC : PRE_DEC),
4833 /* Record that this insn now has an implicit side effect on X. */
4834 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4838 #endif /* AUTO_INC_DEC */
4840 /* Find the place in the rtx X where REG is used as a memory address.
4841 Return the MEM rtx that so uses it.
4842 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4843 (plus REG (const_int PLUSCONST)).
4845 If such an address does not appear, return 0.
4846 If REG appears more than once, or is used other than in such an address,
4850 find_use_as_address (x, reg, plusconst)
4853 HOST_WIDE_INT plusconst;
4855 enum rtx_code code = GET_CODE (x);
4856 const char *fmt = GET_RTX_FORMAT (code);
4858 register rtx value = 0;
4861 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4864 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4865 && XEXP (XEXP (x, 0), 0) == reg
4866 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4867 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4870 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4872 /* If REG occurs inside a MEM used in a bit-field reference,
4873 that is unacceptable. */
4874 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4875 return (rtx) (HOST_WIDE_INT) 1;
4879 return (rtx) (HOST_WIDE_INT) 1;
4881 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4885 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4889 return (rtx) (HOST_WIDE_INT) 1;
4891 else if (fmt[i] == 'E')
4894 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4896 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4900 return (rtx) (HOST_WIDE_INT) 1;
4908 /* Write information about registers and basic blocks into FILE.
4909 This is part of making a debugging dump. */
4912 dump_regset (r, outf)
4919 fputs (" (nil)", outf);
4923 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4925 fprintf (outf, " %d", i);
4926 if (i < FIRST_PSEUDO_REGISTER)
4927 fprintf (outf, " [%s]",
4936 dump_regset (r, stderr);
4937 putc ('\n', stderr);
4941 dump_flow_info (file)
4945 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4947 fprintf (file, "%d registers.\n", max_regno);
4948 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4951 enum reg_class class, altclass;
4952 fprintf (file, "\nRegister %d used %d times across %d insns",
4953 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4954 if (REG_BASIC_BLOCK (i) >= 0)
4955 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4957 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4958 (REG_N_SETS (i) == 1) ? "" : "s");
4959 if (REG_USERVAR_P (regno_reg_rtx[i]))
4960 fprintf (file, "; user var");
4961 if (REG_N_DEATHS (i) != 1)
4962 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4963 if (REG_N_CALLS_CROSSED (i) == 1)
4964 fprintf (file, "; crosses 1 call");
4965 else if (REG_N_CALLS_CROSSED (i))
4966 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4967 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4968 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4969 class = reg_preferred_class (i);
4970 altclass = reg_alternate_class (i);
4971 if (class != GENERAL_REGS || altclass != ALL_REGS)
4973 if (altclass == ALL_REGS || class == ALL_REGS)
4974 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4975 else if (altclass == NO_REGS)
4976 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4978 fprintf (file, "; pref %s, else %s",
4979 reg_class_names[(int) class],
4980 reg_class_names[(int) altclass]);
4982 if (REGNO_POINTER_FLAG (i))
4983 fprintf (file, "; pointer");
4984 fprintf (file, ".\n");
4987 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4988 for (i = 0; i < n_basic_blocks; i++)
4990 register basic_block bb = BASIC_BLOCK (i);
4993 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
4994 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
4996 fprintf (file, "Predecessors: ");
4997 for (e = bb->pred; e ; e = e->pred_next)
4998 dump_edge_info (file, e, 0);
5000 fprintf (file, "\nSuccessors: ");
5001 for (e = bb->succ; e ; e = e->succ_next)
5002 dump_edge_info (file, e, 1);
5004 fprintf (file, "\nRegisters live at start:");
5005 dump_regset (bb->global_live_at_start, file);
5007 fprintf (file, "\nRegisters live at end:");
5008 dump_regset (bb->global_live_at_end, file);
5019 dump_flow_info (stderr);
5023 dump_edge_info (file, e, do_succ)
5028 basic_block side = (do_succ ? e->dest : e->src);
5030 if (side == ENTRY_BLOCK_PTR)
5031 fputs (" ENTRY", file);
5032 else if (side == EXIT_BLOCK_PTR)
5033 fputs (" EXIT", file);
5035 fprintf (file, " %d", side->index);
5039 static const char * const bitnames[] = {
5040 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5043 int i, flags = e->flags;
5047 for (i = 0; flags; i++)
5048 if (flags & (1 << i))
5054 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5055 fputs (bitnames[i], file);
5057 fprintf (file, "%d", i);
5065 /* Print out one basic block with live information at start and end. */
5075 fprintf (outf, ";; Basic block %d, loop depth %d",
5076 bb->index, bb->loop_depth - 1);
5077 if (bb->eh_beg != -1 || bb->eh_end != -1)
5078 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5081 fputs (";; Predecessors: ", outf);
5082 for (e = bb->pred; e ; e = e->pred_next)
5083 dump_edge_info (outf, e, 0);
5086 fputs (";; Registers live at start:", outf);
5087 dump_regset (bb->global_live_at_start, outf);
5090 for (insn = bb->head, last = NEXT_INSN (bb->end);
5092 insn = NEXT_INSN (insn))
5093 print_rtl_single (outf, insn);
5095 fputs (";; Registers live at end:", outf);
5096 dump_regset (bb->global_live_at_end, outf);
5099 fputs (";; Successors: ", outf);
5100 for (e = bb->succ; e; e = e->succ_next)
5101 dump_edge_info (outf, e, 1);
5109 dump_bb (bb, stderr);
5116 dump_bb (BASIC_BLOCK(n), stderr);
5119 /* Like print_rtl, but also print out live information for the start of each
5123 print_rtl_with_bb (outf, rtx_first)
5127 register rtx tmp_rtx;
5130 fprintf (outf, "(nil)\n");
5134 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5135 int max_uid = get_max_uid ();
5136 basic_block *start = (basic_block *)
5137 xcalloc (max_uid, sizeof (basic_block));
5138 basic_block *end = (basic_block *)
5139 xcalloc (max_uid, sizeof (basic_block));
5140 enum bb_state *in_bb_p = (enum bb_state *)
5141 xcalloc (max_uid, sizeof (enum bb_state));
5143 for (i = n_basic_blocks - 1; i >= 0; i--)
5145 basic_block bb = BASIC_BLOCK (i);
5148 start[INSN_UID (bb->head)] = bb;
5149 end[INSN_UID (bb->end)] = bb;
5150 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5152 enum bb_state state = IN_MULTIPLE_BB;
5153 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5155 in_bb_p[INSN_UID(x)] = state;
5162 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5167 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5169 fprintf (outf, ";; Start of basic block %d, registers live:",
5171 dump_regset (bb->global_live_at_start, outf);
5175 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5176 && GET_CODE (tmp_rtx) != NOTE
5177 && GET_CODE (tmp_rtx) != BARRIER)
5178 fprintf (outf, ";; Insn is not within a basic block\n");
5179 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5180 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5182 did_output = print_rtl_single (outf, tmp_rtx);
5184 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5185 fprintf (outf, ";; End of basic block %d\n", bb->index);
5196 if (current_function_epilogue_delay_list != 0)
5198 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5199 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5200 tmp_rtx = XEXP (tmp_rtx, 1))
5201 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5205 /* Compute dominator relationships using new flow graph structures. */
5207 compute_flow_dominators (dominators, post_dominators)
5208 sbitmap *dominators;
5209 sbitmap *post_dominators;
5212 sbitmap *temp_bitmap;
5214 basic_block *worklist, *tos;
5216 /* Allocate a worklist array/queue. Entries are only added to the
5217 list if they were not already on the list. So the size is
5218 bounded by the number of basic blocks. */
5219 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5222 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5223 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5227 /* The optimistic setting of dominators requires us to put every
5228 block on the work list initially. */
5229 for (bb = 0; bb < n_basic_blocks; bb++)
5231 *tos++ = BASIC_BLOCK (bb);
5232 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5235 /* We want a maximal solution, so initially assume everything dominates
5237 sbitmap_vector_ones (dominators, n_basic_blocks);
5239 /* Mark successors of the entry block so we can identify them below. */
5240 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5241 e->dest->aux = ENTRY_BLOCK_PTR;
5243 /* Iterate until the worklist is empty. */
5244 while (tos != worklist)
5246 /* Take the first entry off the worklist. */
5247 basic_block b = *--tos;
5250 /* Compute the intersection of the dominators of all the
5253 If one of the predecessor blocks is the ENTRY block, then the
5254 intersection of the dominators of the predecessor blocks is
5255 defined as the null set. We can identify such blocks by the
5256 special value in the AUX field in the block structure. */
5257 if (b->aux == ENTRY_BLOCK_PTR)
5259 /* Do not clear the aux field for blocks which are
5260 successors of the ENTRY block. That way we we never
5261 add them to the worklist again.
5263 The intersect of dominators of the preds of this block is
5264 defined as the null set. */
5265 sbitmap_zero (temp_bitmap[bb]);
5269 /* Clear the aux field of this block so it can be added to
5270 the worklist again if necessary. */
5272 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5275 /* Make sure each block always dominates itself. */
5276 SET_BIT (temp_bitmap[bb], bb);
5278 /* If the out state of this block changed, then we need to
5279 add the successors of this block to the worklist if they
5280 are not already on the worklist. */
5281 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5283 for (e = b->succ; e; e = e->succ_next)
5285 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5295 if (post_dominators)
5297 /* The optimistic setting of dominators requires us to put every
5298 block on the work list initially. */
5299 for (bb = 0; bb < n_basic_blocks; bb++)
5301 *tos++ = BASIC_BLOCK (bb);
5302 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5305 /* We want a maximal solution, so initially assume everything post
5306 dominates everything else. */
5307 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5309 /* Mark predecessors of the exit block so we can identify them below. */
5310 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5311 e->src->aux = EXIT_BLOCK_PTR;
5313 /* Iterate until the worklist is empty. */
5314 while (tos != worklist)
5316 /* Take the first entry off the worklist. */
5317 basic_block b = *--tos;
5320 /* Compute the intersection of the post dominators of all the
5323 If one of the successor blocks is the EXIT block, then the
5324 intersection of the dominators of the successor blocks is
5325 defined as the null set. We can identify such blocks by the
5326 special value in the AUX field in the block structure. */
5327 if (b->aux == EXIT_BLOCK_PTR)
5329 /* Do not clear the aux field for blocks which are
5330 predecessors of the EXIT block. That way we we never
5331 add them to the worklist again.
5333 The intersect of dominators of the succs of this block is
5334 defined as the null set. */
5335 sbitmap_zero (temp_bitmap[bb]);
5339 /* Clear the aux field of this block so it can be added to
5340 the worklist again if necessary. */
5342 sbitmap_intersection_of_succs (temp_bitmap[bb],
5343 post_dominators, bb);
5346 /* Make sure each block always post dominates itself. */
5347 SET_BIT (temp_bitmap[bb], bb);
5349 /* If the out state of this block changed, then we need to
5350 add the successors of this block to the worklist if they
5351 are not already on the worklist. */
5352 if (sbitmap_a_and_b (post_dominators[bb],
5353 post_dominators[bb],
5356 for (e = b->pred; e; e = e->pred_next)
5358 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5370 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5373 compute_immediate_dominators (idom, dominators)
5375 sbitmap *dominators;
5380 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5382 /* Begin with tmp(n) = dom(n) - { n }. */
5383 for (b = n_basic_blocks; --b >= 0; )
5385 sbitmap_copy (tmp[b], dominators[b]);
5386 RESET_BIT (tmp[b], b);
5389 /* Subtract out all of our dominator's dominators. */
5390 for (b = n_basic_blocks; --b >= 0; )
5392 sbitmap tmp_b = tmp[b];
5395 for (s = n_basic_blocks; --s >= 0; )
5396 if (TEST_BIT (tmp_b, s))
5397 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5400 /* Find the one bit set in the bitmap and put it in the output array. */
5401 for (b = n_basic_blocks; --b >= 0; )
5404 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5407 sbitmap_vector_free (tmp);
5410 /* Count for a single SET rtx, X. */
5413 count_reg_sets_1 (x)
5417 register rtx reg = SET_DEST (x);
5419 /* Find the register that's set/clobbered. */
5420 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5421 || GET_CODE (reg) == SIGN_EXTRACT
5422 || GET_CODE (reg) == STRICT_LOW_PART)
5423 reg = XEXP (reg, 0);
5425 if (GET_CODE (reg) == PARALLEL
5426 && GET_MODE (reg) == BLKmode)
5429 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5430 count_reg_sets_1 (XVECEXP (reg, 0, i));
5434 if (GET_CODE (reg) == REG)
5436 regno = REGNO (reg);
5437 if (regno >= FIRST_PSEUDO_REGISTER)
5439 /* Count (weighted) references, stores, etc. This counts a
5440 register twice if it is modified, but that is correct. */
5441 REG_N_SETS (regno)++;
5442 REG_N_REFS (regno) += loop_depth + 1;
5447 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5448 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5454 register RTX_CODE code = GET_CODE (x);
5456 if (code == SET || code == CLOBBER)
5457 count_reg_sets_1 (x);
5458 else if (code == PARALLEL)
5461 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5463 code = GET_CODE (XVECEXP (x, 0, i));
5464 if (code == SET || code == CLOBBER)
5465 count_reg_sets_1 (XVECEXP (x, 0, i));
5470 /* Increment REG_N_REFS by the current loop depth each register reference
5474 count_reg_references (x)
5477 register RTX_CODE code;
5480 code = GET_CODE (x);
5500 /* If we are clobbering a MEM, mark any registers inside the address
5502 if (GET_CODE (XEXP (x, 0)) == MEM)
5503 count_reg_references (XEXP (XEXP (x, 0), 0));
5507 /* While we're here, optimize this case. */
5510 /* In case the SUBREG is not of a register, don't optimize */
5511 if (GET_CODE (x) != REG)
5513 count_reg_references (x);
5517 /* ... fall through ... */
5520 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5521 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5526 register rtx testreg = SET_DEST (x);
5529 /* If storing into MEM, don't show it as being used. But do
5530 show the address as being used. */
5531 if (GET_CODE (testreg) == MEM)
5533 count_reg_references (XEXP (testreg, 0));
5534 count_reg_references (SET_SRC (x));
5538 /* Storing in STRICT_LOW_PART is like storing in a reg
5539 in that this SET might be dead, so ignore it in TESTREG.
5540 but in some other ways it is like using the reg.
5542 Storing in a SUBREG or a bit field is like storing the entire
5543 register in that if the register's value is not used
5544 then this SET is not needed. */
5545 while (GET_CODE (testreg) == STRICT_LOW_PART
5546 || GET_CODE (testreg) == ZERO_EXTRACT
5547 || GET_CODE (testreg) == SIGN_EXTRACT
5548 || GET_CODE (testreg) == SUBREG)
5550 /* Modifying a single register in an alternate mode
5551 does not use any of the old value. But these other
5552 ways of storing in a register do use the old value. */
5553 if (GET_CODE (testreg) == SUBREG
5554 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5559 testreg = XEXP (testreg, 0);
5562 /* If this is a store into a register,
5563 recursively scan the value being stored. */
5565 if ((GET_CODE (testreg) == PARALLEL
5566 && GET_MODE (testreg) == BLKmode)
5567 || GET_CODE (testreg) == REG)
5569 count_reg_references (SET_SRC (x));
5571 count_reg_references (SET_DEST (x));
5581 /* Recursively scan the operands of this expression. */
5584 register const char *fmt = GET_RTX_FORMAT (code);
5587 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5591 /* Tail recursive case: save a function call level. */
5597 count_reg_references (XEXP (x, i));
5599 else if (fmt[i] == 'E')
5602 for (j = 0; j < XVECLEN (x, i); j++)
5603 count_reg_references (XVECEXP (x, i, j));
5609 /* Recompute register set/reference counts immediately prior to register
5612 This avoids problems with set/reference counts changing to/from values
5613 which have special meanings to the register allocators.
5615 Additionally, the reference counts are the primary component used by the
5616 register allocators to prioritize pseudos for allocation to hard regs.
5617 More accurate reference counts generally lead to better register allocation.
5619 F is the first insn to be scanned.
5621 LOOP_STEP denotes how much loop_depth should be incremented per
5622 loop nesting level in order to increase the ref count more for
5623 references in a loop.
5625 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5626 possibly other information which is used by the register allocators. */
5629 recompute_reg_usage (f, loop_step)
5630 rtx f ATTRIBUTE_UNUSED;
5631 int loop_step ATTRIBUTE_UNUSED;
5637 /* Clear out the old data. */
5638 max_reg = max_reg_num ();
5639 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5645 /* Scan each insn in the chain and count how many times each register is
5647 for (index = 0; index < n_basic_blocks; index++)
5649 basic_block bb = BASIC_BLOCK (index);
5650 loop_depth = bb->loop_depth;
5651 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5653 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5657 /* This call will increment REG_N_SETS for each SET or CLOBBER
5658 of a register in INSN. It will also increment REG_N_REFS
5659 by the loop depth for each set of a register in INSN. */
5660 count_reg_sets (PATTERN (insn));
5662 /* count_reg_sets does not detect autoincrement address modes, so
5663 detect them here by looking at the notes attached to INSN. */
5664 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5666 if (REG_NOTE_KIND (links) == REG_INC)
5667 /* Count (weighted) references, stores, etc. This counts a
5668 register twice if it is modified, but that is correct. */
5669 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5672 /* This call will increment REG_N_REFS by the current loop depth for
5673 each reference to a register in INSN. */
5674 count_reg_references (PATTERN (insn));
5676 /* count_reg_references will not include counts for arguments to
5677 function calls, so detect them here by examining the
5678 CALL_INSN_FUNCTION_USAGE data. */
5679 if (GET_CODE (insn) == CALL_INSN)
5683 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5685 note = XEXP (note, 1))
5686 if (GET_CODE (XEXP (note, 0)) == USE)
5687 count_reg_references (XEXP (XEXP (note, 0), 0));
5690 if (insn == bb->end)
5696 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5697 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5698 of the number of registers that died. */
5701 count_or_remove_death_notes (blocks, kill)
5707 for (i = n_basic_blocks - 1; i >= 0; --i)
5712 if (blocks && ! TEST_BIT (blocks, i))
5715 bb = BASIC_BLOCK (i);
5717 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5719 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5721 rtx *pprev = ®_NOTES (insn);
5726 switch (REG_NOTE_KIND (link))
5729 if (GET_CODE (XEXP (link, 0)) == REG)
5731 rtx reg = XEXP (link, 0);
5734 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5737 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5745 rtx next = XEXP (link, 1);
5746 free_EXPR_LIST_node (link);
5747 *pprev = link = next;
5753 pprev = &XEXP (link, 1);
5760 if (insn == bb->end)
5768 /* Record INSN's block as BB. */
5771 set_block_for_insn (insn, bb)
5775 size_t uid = INSN_UID (insn);
5776 if (uid >= basic_block_for_insn->num_elements)
5780 /* Add one-eighth the size so we don't keep calling xrealloc. */
5781 new_size = uid + (uid + 7) / 8;
5783 VARRAY_GROW (basic_block_for_insn, new_size);
5785 VARRAY_BB (basic_block_for_insn, uid) = bb;
5788 /* Record INSN's block number as BB. */
5789 /* ??? This has got to go. */
5792 set_block_num (insn, bb)
5796 set_block_for_insn (insn, BASIC_BLOCK (bb));
5799 /* Verify the CFG consistency. This function check some CFG invariants and
5800 aborts when something is wrong. Hope that this function will help to
5801 convert many optimization passes to preserve CFG consistent.
5803 Currently it does following checks:
5805 - test head/end pointers
5806 - overlapping of basic blocks
5807 - edge list corectness
5808 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5809 - tails of basic blocks (ensure that boundary is necesary)
5810 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5811 and NOTE_INSN_BASIC_BLOCK
5812 - check that all insns are in the basic blocks
5813 (except the switch handling code, barriers and notes)
5814 - check that all returns are followed by barriers
5816 In future it can be extended check a lot of other stuff as well
5817 (reachability of basic blocks, life information, etc. etc.). */
5822 const int max_uid = get_max_uid ();
5823 const rtx rtx_first = get_insns ();
5824 basic_block *bb_info;
5828 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5830 /* First pass check head/end pointers and set bb_info array used by
5832 for (i = n_basic_blocks - 1; i >= 0; i--)
5834 basic_block bb = BASIC_BLOCK (i);
5836 /* Check the head pointer and make sure that it is pointing into
5838 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5843 error ("Head insn %d for block %d not found in the insn stream.",
5844 INSN_UID (bb->head), bb->index);
5848 /* Check the end pointer and make sure that it is pointing into
5850 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5852 if (bb_info[INSN_UID (x)] != NULL)
5854 error ("Insn %d is in multiple basic blocks (%d and %d)",
5855 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5858 bb_info[INSN_UID (x)] = bb;
5865 error ("End insn %d for block %d not found in the insn stream.",
5866 INSN_UID (bb->end), bb->index);
5871 /* Now check the basic blocks (boundaries etc.) */
5872 for (i = n_basic_blocks - 1; i >= 0; i--)
5874 basic_block bb = BASIC_BLOCK (i);
5875 /* Check corectness of edge lists */
5883 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5885 fprintf (stderr, "Predecessor: ");
5886 dump_edge_info (stderr, e, 0);
5887 fprintf (stderr, "\nSuccessor: ");
5888 dump_edge_info (stderr, e, 1);
5892 if (e->dest != EXIT_BLOCK_PTR)
5894 edge e2 = e->dest->pred;
5895 while (e2 && e2 != e)
5899 error ("Basic block %i edge lists are corrupted", bb->index);
5911 error ("Basic block %d pred edge is corrupted", bb->index);
5912 fputs ("Predecessor: ", stderr);
5913 dump_edge_info (stderr, e, 0);
5914 fputs ("\nSuccessor: ", stderr);
5915 dump_edge_info (stderr, e, 1);
5916 fputc ('\n', stderr);
5919 if (e->src != ENTRY_BLOCK_PTR)
5921 edge e2 = e->src->succ;
5922 while (e2 && e2 != e)
5926 error ("Basic block %i edge lists are corrupted", bb->index);
5933 /* OK pointers are correct. Now check the header of basic
5934 block. It ought to contain optional CODE_LABEL followed
5935 by NOTE_BASIC_BLOCK. */
5937 if (GET_CODE (x) == CODE_LABEL)
5941 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5947 if (GET_CODE (x) != NOTE
5948 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5949 || NOTE_BASIC_BLOCK (x) != bb)
5951 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5958 /* Do checks for empty blocks here */
5965 if (GET_CODE (x) == NOTE
5966 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5968 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5969 INSN_UID (x), bb->index);
5976 if (GET_CODE (x) == JUMP_INSN
5977 || GET_CODE (x) == CODE_LABEL
5978 || GET_CODE (x) == BARRIER)
5980 error ("In basic block %d:", bb->index);
5981 fatal_insn ("Flow control insn inside a basic block", x);
5992 if (!bb_info[INSN_UID (x)])
5994 switch (GET_CODE (x))
6001 /* An addr_vec is placed outside any block block. */
6003 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6004 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6005 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6010 /* But in any case, non-deletable labels can appear anywhere. */
6014 fatal_insn ("Insn outside basic block", x);
6018 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6019 && GET_CODE (x) == JUMP_INSN
6020 && returnjump_p (x) && ! condjump_p (x)
6021 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6022 fatal_insn ("Return not followed by barrier", x);
6034 /* Functions to access an edge list with a vector representation.
6035 Enough data is kept such that given an index number, the
6036 pred and succ that edge reprsents can be determined, or
6037 given a pred and a succ, it's index number can be returned.
6038 This allows algorithms which comsume a lot of memory to
6039 represent the normally full matrix of edge (pred,succ) with a
6040 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6041 wasted space in the client code due to sparse flow graphs. */
6043 /* This functions initializes the edge list. Basically the entire
6044 flowgraph is processed, and all edges are assigned a number,
6045 and the data structure is filed in. */
6049 struct edge_list *elist;
6055 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6059 /* Determine the number of edges in the flow graph by counting successor
6060 edges on each basic block. */
6061 for (x = 0; x < n_basic_blocks; x++)
6063 basic_block bb = BASIC_BLOCK (x);
6065 for (e = bb->succ; e; e = e->succ_next)
6068 /* Don't forget successors of the entry block. */
6069 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6072 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6073 elist->num_blocks = block_count;
6074 elist->num_edges = num_edges;
6075 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6079 /* Follow successors of the entry block, and register these edges. */
6080 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6082 elist->index_to_edge[num_edges] = e;
6086 for (x = 0; x < n_basic_blocks; x++)
6088 basic_block bb = BASIC_BLOCK (x);
6090 /* Follow all successors of blocks, and register these edges. */
6091 for (e = bb->succ; e; e = e->succ_next)
6093 elist->index_to_edge[num_edges] = e;
6100 /* This function free's memory associated with an edge list. */
6102 free_edge_list (elist)
6103 struct edge_list *elist;
6107 free (elist->index_to_edge);
6112 /* This function provides debug output showing an edge list. */
6114 print_edge_list (f, elist)
6116 struct edge_list *elist;
6119 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6120 elist->num_blocks - 2, elist->num_edges);
6122 for (x = 0; x < elist->num_edges; x++)
6124 fprintf (f, " %-4d - edge(", x);
6125 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6126 fprintf (f,"entry,");
6128 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6130 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6131 fprintf (f,"exit)\n");
6133 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6137 /* This function provides an internal consistancy check of an edge list,
6138 verifying that all edges are present, and that there are no
6141 verify_edge_list (f, elist)
6143 struct edge_list *elist;
6145 int x, pred, succ, index;
6148 for (x = 0; x < n_basic_blocks; x++)
6150 basic_block bb = BASIC_BLOCK (x);
6152 for (e = bb->succ; e; e = e->succ_next)
6154 pred = e->src->index;
6155 succ = e->dest->index;
6156 index = EDGE_INDEX (elist, e->src, e->dest);
6157 if (index == EDGE_INDEX_NO_EDGE)
6159 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6162 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6163 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6164 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6165 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6166 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6167 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6170 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6172 pred = e->src->index;
6173 succ = e->dest->index;
6174 index = EDGE_INDEX (elist, e->src, e->dest);
6175 if (index == EDGE_INDEX_NO_EDGE)
6177 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6180 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6181 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6182 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6183 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6184 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6185 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6187 /* We've verified that all the edges are in the list, no lets make sure
6188 there are no spurious edges in the list. */
6190 for (pred = 0 ; pred < n_basic_blocks; pred++)
6191 for (succ = 0 ; succ < n_basic_blocks; succ++)
6193 basic_block p = BASIC_BLOCK (pred);
6194 basic_block s = BASIC_BLOCK (succ);
6198 for (e = p->succ; e; e = e->succ_next)
6204 for (e = s->pred; e; e = e->pred_next)
6210 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6211 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6212 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6214 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6215 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6216 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6217 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6218 BASIC_BLOCK (succ)));
6220 for (succ = 0 ; succ < n_basic_blocks; succ++)
6222 basic_block p = ENTRY_BLOCK_PTR;
6223 basic_block s = BASIC_BLOCK (succ);
6227 for (e = p->succ; e; e = e->succ_next)
6233 for (e = s->pred; e; e = e->pred_next)
6239 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6240 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6241 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6243 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6244 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6245 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6246 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6247 BASIC_BLOCK (succ)));
6249 for (pred = 0 ; pred < n_basic_blocks; pred++)
6251 basic_block p = BASIC_BLOCK (pred);
6252 basic_block s = EXIT_BLOCK_PTR;
6256 for (e = p->succ; e; e = e->succ_next)
6262 for (e = s->pred; e; e = e->pred_next)
6268 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6269 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6270 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6272 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6273 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6274 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6275 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6280 /* This routine will determine what, if any, edge there is between
6281 a specified predecessor and successor. */
6284 find_edge_index (edge_list, pred, succ)
6285 struct edge_list *edge_list;
6286 basic_block pred, succ;
6289 for (x = 0; x < NUM_EDGES (edge_list); x++)
6291 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6292 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6295 return (EDGE_INDEX_NO_EDGE);
6298 /* This function will remove an edge from the flow graph. */
6303 edge last_pred = NULL;
6304 edge last_succ = NULL;
6306 basic_block src, dest;
6309 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6315 last_succ->succ_next = e->succ_next;
6317 src->succ = e->succ_next;
6319 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6325 last_pred->pred_next = e->pred_next;
6327 dest->pred = e->pred_next;
6333 /* This routine will remove any fake successor edges for a basic block.
6334 When the edge is removed, it is also removed from whatever predecessor
6337 remove_fake_successors (bb)
6341 for (e = bb->succ; e ; )
6345 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6350 /* This routine will remove all fake edges from the flow graph. If
6351 we remove all fake successors, it will automatically remove all
6352 fake predecessors. */
6354 remove_fake_edges ()
6358 for (x = 0; x < n_basic_blocks; x++)
6359 remove_fake_successors (BASIC_BLOCK (x));
6361 /* We've handled all successors except the entry block's. */
6362 remove_fake_successors (ENTRY_BLOCK_PTR);
6365 /* This functions will add a fake edge between any block which has no
6366 successors, and the exit block. Some data flow equations require these
6369 add_noreturn_fake_exit_edges ()
6373 for (x = 0; x < n_basic_blocks; x++)
6374 if (BASIC_BLOCK (x)->succ == NULL)
6375 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6378 /* Dump the list of basic blocks in the bitmap NODES. */
6380 flow_nodes_print (str, nodes, file)
6382 const sbitmap nodes;
6387 fprintf (file, "%s { ", str);
6388 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6389 fputs ("}\n", file);
6393 /* Dump the list of exiting edges in the array EDGES. */
6395 flow_exits_print (str, edges, num_edges, file)
6403 fprintf (file, "%s { ", str);
6404 for (i = 0; i < num_edges; i++)
6405 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6406 fputs ("}\n", file);
6410 /* Dump loop related CFG information. */
6412 flow_loops_cfg_dump (loops, file)
6413 const struct loops *loops;
6418 if (! loops->num || ! file || ! loops->cfg.dom)
6421 for (i = 0; i < n_basic_blocks; i++)
6425 fprintf (file, ";; %d succs { ", i);
6426 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6427 fprintf (file, "%d ", succ->dest->index);
6428 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6432 /* Dump the DFS node order. */
6433 if (loops->cfg.dfs_order)
6435 fputs (";; DFS order: ", file);
6436 for (i = 0; i < n_basic_blocks; i++)
6437 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6443 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6445 flow_loop_nested_p (outer, loop)
6449 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6453 /* Dump the loop information specified by LOOPS to the stream FILE. */
6455 flow_loops_dump (loops, file, verbose)
6456 const struct loops *loops;
6463 num_loops = loops->num;
6464 if (! num_loops || ! file)
6467 fprintf (file, ";; %d loops found, %d levels\n",
6468 num_loops, loops->levels);
6470 for (i = 0; i < num_loops; i++)
6472 struct loop *loop = &loops->array[i];
6474 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6475 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6476 loop->header->index, loop->latch->index,
6477 loop->pre_header ? loop->pre_header->index : -1,
6478 loop->depth, loop->level,
6479 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6480 fprintf (file, ";; %d", loop->num_nodes);
6481 flow_nodes_print (" nodes", loop->nodes, file);
6482 fprintf (file, ";; %d", loop->num_exits);
6483 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6489 for (j = 0; j < i; j++)
6491 struct loop *oloop = &loops->array[j];
6493 if (loop->header == oloop->header)
6498 smaller = loop->num_nodes < oloop->num_nodes;
6500 /* If the union of LOOP and OLOOP is different than
6501 the larger of LOOP and OLOOP then LOOP and OLOOP
6502 must be disjoint. */
6503 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6504 smaller ? oloop : loop);
6505 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6506 loop->header->index, i, j,
6507 disjoint ? "disjoint" : "nested");
6514 /* Print diagnostics to compare our concept of a loop with
6515 what the loop notes say. */
6516 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6517 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6518 != NOTE_INSN_LOOP_BEG)
6519 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6520 INSN_UID (PREV_INSN (loop->first->head)));
6521 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6522 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6523 != NOTE_INSN_LOOP_END)
6524 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6525 INSN_UID (NEXT_INSN (loop->last->end)));
6530 flow_loops_cfg_dump (loops, file);
6534 /* Free all the memory allocated for LOOPS. */
6536 flow_loops_free (loops)
6537 struct loops *loops;
6546 /* Free the loop descriptors. */
6547 for (i = 0; i < loops->num; i++)
6549 struct loop *loop = &loops->array[i];
6552 sbitmap_free (loop->nodes);
6556 free (loops->array);
6557 loops->array = NULL;
6560 sbitmap_vector_free (loops->cfg.dom);
6561 if (loops->cfg.dfs_order)
6562 free (loops->cfg.dfs_order);
6564 sbitmap_free (loops->shared_headers);
6569 /* Find the exits from the loop using the bitmap of loop nodes NODES
6570 and store in EXITS array. Return the number of exits from the
6573 flow_loop_exits_find (nodes, exits)
6574 const sbitmap nodes;
6583 /* Check all nodes within the loop to see if there are any
6584 successors not in the loop. Note that a node may have multiple
6587 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6588 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6590 basic_block dest = e->dest;
6592 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6600 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6602 /* Store all exiting edges into an array. */
6604 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6605 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6607 basic_block dest = e->dest;
6609 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6610 (*exits)[num_exits++] = e;
6618 /* Find the nodes contained within the loop with header HEADER and
6619 latch LATCH and store in NODES. Return the number of nodes within
6622 flow_loop_nodes_find (header, latch, nodes)
6631 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6634 /* Start with only the loop header in the set of loop nodes. */
6635 sbitmap_zero (nodes);
6636 SET_BIT (nodes, header->index);
6638 header->loop_depth++;
6640 /* Push the loop latch on to the stack. */
6641 if (! TEST_BIT (nodes, latch->index))
6643 SET_BIT (nodes, latch->index);
6644 latch->loop_depth++;
6646 stack[sp++] = latch;
6655 for (e = node->pred; e; e = e->pred_next)
6657 basic_block ancestor = e->src;
6659 /* If each ancestor not marked as part of loop, add to set of
6660 loop nodes and push on to stack. */
6661 if (ancestor != ENTRY_BLOCK_PTR
6662 && ! TEST_BIT (nodes, ancestor->index))
6664 SET_BIT (nodes, ancestor->index);
6665 ancestor->loop_depth++;
6667 stack[sp++] = ancestor;
6676 /* Compute the depth first search order and store in the array
6677 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6678 number of nodes visited. */
6680 flow_depth_first_order_compute (dfs_order)
6689 /* Allocate stack for back-tracking up CFG. */
6690 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6693 /* Allocate bitmap to track nodes that have been visited. */
6694 visited = sbitmap_alloc (n_basic_blocks);
6696 /* None of the nodes in the CFG have been visited yet. */
6697 sbitmap_zero (visited);
6699 /* Start with the first successor edge from the entry block. */
6700 e = ENTRY_BLOCK_PTR->succ;
6703 basic_block src = e->src;
6704 basic_block dest = e->dest;
6706 /* Mark that we have visited this node. */
6707 if (src != ENTRY_BLOCK_PTR)
6708 SET_BIT (visited, src->index);
6710 /* If this node has not been visited before, push the current
6711 edge on to the stack and proceed with the first successor
6712 edge of this node. */
6713 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6721 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6724 /* DEST has no successors (for example, a non-returning
6725 function is called) so do not push the current edge
6726 but carry on with its next successor. */
6727 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6728 SET_BIT (visited, dest->index);
6731 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6733 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6735 /* Pop edge off stack. */
6743 sbitmap_free (visited);
6745 /* The number of nodes visited should not be greater than
6747 if (dfsnum > n_basic_blocks)
6750 /* There are some nodes left in the CFG that are unreachable. */
6751 if (dfsnum < n_basic_blocks)
6757 /* Return the block for the pre-header of the loop with header
6758 HEADER where DOM specifies the dominator information. Return NULL if
6759 there is no pre-header. */
6761 flow_loop_pre_header_find (header, dom)
6765 basic_block pre_header;
6768 /* If block p is a predecessor of the header and is the only block
6769 that the header does not dominate, then it is the pre-header. */
6771 for (e = header->pred; e; e = e->pred_next)
6773 basic_block node = e->src;
6775 if (node != ENTRY_BLOCK_PTR
6776 && ! TEST_BIT (dom[node->index], header->index))
6778 if (pre_header == NULL)
6782 /* There are multiple edges into the header from outside
6783 the loop so there is no pre-header block. */
6793 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6794 previously added. The insertion algorithm assumes that the loops
6795 are added in the order found by a depth first search of the CFG. */
6797 flow_loop_tree_node_add (prevloop, loop)
6798 struct loop *prevloop;
6802 if (flow_loop_nested_p (prevloop, loop))
6804 prevloop->inner = loop;
6805 loop->outer = prevloop;
6809 while (prevloop->outer)
6811 if (flow_loop_nested_p (prevloop->outer, loop))
6813 prevloop->next = loop;
6814 loop->outer = prevloop->outer;
6817 prevloop = prevloop->outer;
6820 prevloop->next = loop;
6825 /* Build the loop hierarchy tree for LOOPS. */
6827 flow_loops_tree_build (loops)
6828 struct loops *loops;
6833 num_loops = loops->num;
6837 /* Root the loop hierarchy tree with the first loop found.
6838 Since we used a depth first search this should be the
6840 loops->tree = &loops->array[0];
6841 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6843 /* Add the remaining loops to the tree. */
6844 for (i = 1; i < num_loops; i++)
6845 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6849 /* Helper function to compute loop nesting depth and enclosed loop level
6850 for the natural loop specified by LOOP at the loop depth DEPTH.
6851 Returns the loop level. */
6853 flow_loop_level_compute (loop, depth)
6863 /* Traverse loop tree assigning depth and computing level as the
6864 maximum level of all the inner loops of this loop. The loop
6865 level is equivalent to the height of the loop in the loop tree
6866 and corresponds to the number of enclosed loop levels (including
6868 for (inner = loop->inner; inner; inner = inner->next)
6872 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6877 loop->level = level;
6878 loop->depth = depth;
6883 /* Compute the loop nesting depth and enclosed loop level for the loop
6884 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
6888 flow_loops_level_compute (loops)
6889 struct loops *loops;
6895 /* Traverse all the outer level loops. */
6896 for (loop = loops->tree; loop; loop = loop->next)
6898 level = flow_loop_level_compute (loop, 1);
6906 /* Find all the natural loops in the function and save in LOOPS structure
6907 and recalculate loop_depth information in basic block structures.
6908 Return the number of natural loops found. */
6911 flow_loops_find (loops)
6912 struct loops *loops;
6923 loops->array = NULL;
6927 /* Taking care of this degenerate case makes the rest of
6928 this code simpler. */
6929 if (n_basic_blocks == 0)
6932 /* Compute the dominators. */
6933 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6934 compute_flow_dominators (dom, NULL);
6936 /* Count the number of loop edges (back edges). This should be the
6937 same as the number of natural loops. Also clear the loop_depth
6938 and as we work from inner->outer in a loop nest we call
6939 find_loop_nodes_find which will increment loop_depth for nodes
6940 within the current loop, which happens to enclose inner loops. */
6943 for (b = 0; b < n_basic_blocks; b++)
6945 BASIC_BLOCK (b)->loop_depth = 0;
6946 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6948 basic_block latch = e->src;
6950 /* Look for back edges where a predecessor is dominated
6951 by this block. A natural loop has a single entry
6952 node (header) that dominates all the nodes in the
6953 loop. It also has single back edge to the header
6954 from a latch node. Note that multiple natural loops
6955 may share the same header. */
6956 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6963 /* Compute depth first search order of the CFG so that outer
6964 natural loops will be found before inner natural loops. */
6965 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6966 flow_depth_first_order_compute (dfs_order);
6968 /* Allocate loop structures. */
6970 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6972 headers = sbitmap_alloc (n_basic_blocks);
6973 sbitmap_zero (headers);
6975 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6976 sbitmap_zero (loops->shared_headers);
6978 /* Find and record information about all the natural loops
6981 for (b = 0; b < n_basic_blocks; b++)
6985 /* Search the nodes of the CFG in DFS order that we can find
6986 outer loops first. */
6987 header = BASIC_BLOCK (dfs_order[b]);
6989 /* Look for all the possible latch blocks for this header. */
6990 for (e = header->pred; e; e = e->pred_next)
6992 basic_block latch = e->src;
6994 /* Look for back edges where a predecessor is dominated
6995 by this block. A natural loop has a single entry
6996 node (header) that dominates all the nodes in the
6997 loop. It also has single back edge to the header
6998 from a latch node. Note that multiple natural loops
6999 may share the same header. */
7000 if (latch != ENTRY_BLOCK_PTR
7001 && TEST_BIT (dom[latch->index], header->index))
7005 loop = loops->array + num_loops;
7007 loop->header = header;
7008 loop->latch = latch;
7010 /* Keep track of blocks that are loop headers so
7011 that we can tell which loops should be merged. */
7012 if (TEST_BIT (headers, header->index))
7013 SET_BIT (loops->shared_headers, header->index);
7014 SET_BIT (headers, header->index);
7016 /* Find nodes contained within the loop. */
7017 loop->nodes = sbitmap_alloc (n_basic_blocks);
7019 = flow_loop_nodes_find (header, latch, loop->nodes);
7021 /* Compute first and last blocks within the loop.
7022 These are often the same as the loop header and
7023 loop latch respectively, but this is not always
7026 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7028 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7030 /* Find edges which exit the loop. Note that a node
7031 may have several exit edges. */
7033 = flow_loop_exits_find (loop->nodes, &loop->exits);
7035 /* Look to see if the loop has a pre-header node. */
7037 = flow_loop_pre_header_find (header, dom);
7044 /* Natural loops with shared headers may either be disjoint or
7045 nested. Disjoint loops with shared headers cannot be inner
7046 loops and should be merged. For now just mark loops that share
7048 for (i = 0; i < num_loops; i++)
7049 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7050 loops->array[i].shared = 1;
7052 sbitmap_free (headers);
7055 loops->num = num_loops;
7057 /* Save CFG derived information to avoid recomputing it. */
7058 loops->cfg.dom = dom;
7059 loops->cfg.dfs_order = dfs_order;
7061 /* Build the loop hierarchy tree. */
7062 flow_loops_tree_build (loops);
7064 /* Assign the loop nesting depth and enclosed loop level for each
7066 loops->levels = flow_loops_level_compute (loops);
7072 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7074 flow_loop_outside_edge_p (loop, e)
7075 const struct loop *loop;
7078 if (e->dest != loop->header)
7080 return (e->src == ENTRY_BLOCK_PTR)
7081 || ! TEST_BIT (loop->nodes, e->src->index);
7085 /* Clear LOG_LINKS fields of insns in a chain. */
7087 clear_log_links (insns)
7091 for (i = insns; i; i = NEXT_INSN (i))
7092 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')