1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation
127 #include "basic-block.h"
128 #include "insn-config.h"
130 #include "hard-reg-set.h"
133 #include "function.h"
137 #include "insn-flags.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
164 /* The contents of the current function definition are allocated
165 in this obstack, and all are freed at the end of the function.
166 For top-level functions, this is temporary_obstack.
167 Separate obstacks are made for nested functions. */
169 extern struct obstack *function_obstack;
171 /* Number of basic blocks in the current function. */
175 /* Number of edges in the current function. */
179 /* The basic block array. */
181 varray_type basic_block_info;
183 /* The special entry and exit blocks. */
185 struct basic_block_def entry_exit_blocks[2]
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
194 ENTRY_BLOCK, /* index */
196 -1, -1 /* eh_beg, eh_end */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
207 EXIT_BLOCK, /* index */
209 -1, -1 /* eh_beg, eh_end */
213 /* Nonzero if the second flow pass has completed. */
216 /* Maximum register number used in this function, plus one. */
220 /* Indexed by n, giving various register information */
222 varray_type reg_n_info;
224 /* Size of the reg_n_info table. */
226 unsigned int reg_n_max;
228 /* Element N is the next insn that uses (hard or pseudo) register number N
229 within the current basic block; or zero, if there is no such insn.
230 This is valid only during the final backward scan in propagate_block. */
232 static rtx *reg_next_use;
234 /* Size of a regset for the current function,
235 in (1) bytes and (2) elements. */
240 /* Regset of regs live when calls to `setjmp'-like functions happen. */
241 /* ??? Does this exist only for the setjmp-clobbered warning message? */
243 regset regs_live_at_setjmp;
245 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
246 that have to go in the same hard reg.
247 The first two regs in the list are a pair, and the next two
248 are another pair, etc. */
251 /* Depth within loops of basic block being scanned for lifetime analysis,
252 plus one. This is the weight attached to references to registers. */
254 static int loop_depth;
256 /* During propagate_block, this is non-zero if the value of CC0 is live. */
260 /* During propagate_block, this contains a list of all the MEMs we are
261 tracking for dead store elimination. */
263 static rtx mem_set_list;
265 /* Set of registers that may be eliminable. These are handled specially
266 in updating regs_ever_live. */
268 static HARD_REG_SET elim_reg_set;
270 /* The basic block structure for every insn, indexed by uid. */
272 varray_type basic_block_for_insn;
274 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
275 /* ??? Should probably be using LABEL_NUSES instead. It would take a
276 bit of surgery to be able to use or co-opt the routines in jump. */
278 static rtx label_value_list;
280 /* Forward declarations */
281 static int count_basic_blocks PARAMS ((rtx));
282 static rtx find_basic_blocks_1 PARAMS ((rtx));
283 static void clear_edges PARAMS ((void));
284 static void make_edges PARAMS ((rtx));
285 static void make_label_edge PARAMS ((sbitmap *, basic_block,
287 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
288 basic_block, rtx, int));
289 static void mark_critical_edges PARAMS ((void));
290 static void move_stray_eh_region_notes PARAMS ((void));
291 static void record_active_eh_regions PARAMS ((rtx));
293 static void commit_one_edge_insertion PARAMS ((edge));
295 static void delete_unreachable_blocks PARAMS ((void));
296 static void delete_eh_regions PARAMS ((void));
297 static int can_delete_note_p PARAMS ((rtx));
298 static int delete_block PARAMS ((basic_block));
299 static void expunge_block PARAMS ((basic_block));
300 static int can_delete_label_p PARAMS ((rtx));
301 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
303 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
305 static void merge_blocks_nomove PARAMS ((basic_block, basic_block));
306 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
307 static void try_merge_blocks PARAMS ((void));
308 static void tidy_fallthru_edge PARAMS ((edge,basic_block,basic_block));
309 static void tidy_fallthru_edges PARAMS ((void));
310 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
311 static void verify_wide_reg PARAMS ((int, rtx, rtx));
312 static void verify_local_live_at_start PARAMS ((regset, basic_block));
313 static int set_noop_p PARAMS ((rtx));
314 static int noop_move_p PARAMS ((rtx));
315 static void delete_noop_moves PARAMS ((rtx));
316 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
317 static void notice_stack_pointer_modification PARAMS ((rtx));
318 static void mark_reg PARAMS ((rtx, void *));
319 static void mark_regs_live_at_end PARAMS ((regset));
320 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
321 static void propagate_block PARAMS ((basic_block, regset,
323 static int insn_dead_p PARAMS ((rtx, regset, int, rtx));
324 static int libcall_dead_p PARAMS ((rtx, regset, rtx, rtx));
325 static void mark_set_regs PARAMS ((regset, regset, rtx,
327 static void mark_set_1 PARAMS ((regset, regset, rtx,
330 static void find_auto_inc PARAMS ((regset, rtx, rtx));
331 static int try_pre_increment_1 PARAMS ((rtx));
332 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
334 static void mark_used_regs PARAMS ((regset, regset, rtx, int, rtx));
335 void dump_flow_info PARAMS ((FILE *));
336 void debug_flow_info PARAMS ((void));
337 static void dump_edge_info PARAMS ((FILE *, edge, int));
339 static void count_reg_sets_1 PARAMS ((rtx));
340 static void count_reg_sets PARAMS ((rtx));
341 static void count_reg_references PARAMS ((rtx));
342 static void invalidate_mems_from_autoinc PARAMS ((rtx));
343 static void remove_fake_successors PARAMS ((basic_block));
344 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
345 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
346 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
347 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
348 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
349 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
350 static int flow_depth_first_order_compute PARAMS ((int *));
351 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
352 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
353 static void flow_loops_tree_build PARAMS ((struct loops *));
354 static int flow_loop_level_compute PARAMS ((struct loop *, int));
355 static int flow_loops_level_compute PARAMS ((struct loops *));
357 /* This function is always defined so it can be called from the
358 debugger, and it is declared extern so we don't get warnings about
360 void verify_flow_info PARAMS ((void));
361 int flow_loop_outside_edge_p PARAMS ((const struct loop *, edge));
362 void clear_log_links PARAMS ((rtx));
364 /* Find basic blocks of the current function.
365 F is the first insn of the function and NREGS the number of register
369 find_basic_blocks (f, nregs, file)
371 int nregs ATTRIBUTE_UNUSED;
372 FILE *file ATTRIBUTE_UNUSED;
376 /* Flush out existing data. */
377 if (basic_block_info != NULL)
383 /* Clear bb->aux on all extant basic blocks. We'll use this as a
384 tag for reuse during create_basic_block, just in case some pass
385 copies around basic block notes improperly. */
386 for (i = 0; i < n_basic_blocks; ++i)
387 BASIC_BLOCK (i)->aux = NULL;
389 VARRAY_FREE (basic_block_info);
392 n_basic_blocks = count_basic_blocks (f);
394 /* Size the basic block table. The actual structures will be allocated
395 by find_basic_blocks_1, since we want to keep the structure pointers
396 stable across calls to find_basic_blocks. */
397 /* ??? This whole issue would be much simpler if we called find_basic_blocks
398 exactly once, and thereafter we don't have a single long chain of
399 instructions at all until close to the end of compilation when we
400 actually lay them out. */
402 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
404 label_value_list = find_basic_blocks_1 (f);
406 /* Record the block to which an insn belongs. */
407 /* ??? This should be done another way, by which (perhaps) a label is
408 tagged directly with the basic block that it starts. It is used for
409 more than that currently, but IMO that is the only valid use. */
411 max_uid = get_max_uid ();
413 /* Leave space for insns life_analysis makes in some cases for auto-inc.
414 These cases are rare, so we don't need too much space. */
415 max_uid += max_uid / 10;
418 compute_bb_for_insn (max_uid);
420 /* Discover the edges of our cfg. */
421 record_active_eh_regions (f);
422 make_edges (label_value_list);
424 /* Do very simple cleanup now, for the benefit of code that runs between
425 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
426 tidy_fallthru_edges ();
428 mark_critical_edges ();
430 #ifdef ENABLE_CHECKING
435 /* Count the basic blocks of the function. */
438 count_basic_blocks (f)
442 register RTX_CODE prev_code;
443 register int count = 0;
445 int call_had_abnormal_edge = 0;
446 rtx prev_call = NULL_RTX;
448 prev_code = JUMP_INSN;
449 for (insn = f; insn; insn = NEXT_INSN (insn))
451 register RTX_CODE code = GET_CODE (insn);
453 if (code == CODE_LABEL
454 || (GET_RTX_CLASS (code) == 'i'
455 && (prev_code == JUMP_INSN
456 || prev_code == BARRIER
457 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
462 /* Record whether this call created an edge. */
463 if (code == CALL_INSN)
465 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
466 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
468 call_had_abnormal_edge = 0;
470 /* If there is an EH region or rethrow, we have an edge. */
471 if ((eh_region && region > 0)
472 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
473 call_had_abnormal_edge = 1;
476 /* If there is a nonlocal goto label and the specified
477 region number isn't -1, we have an edge. (0 means
478 no throw, but might have a nonlocal goto). */
479 if (nonlocal_goto_handler_labels && region >= 0)
480 call_had_abnormal_edge = 1;
483 else if (code != NOTE)
484 prev_call = NULL_RTX;
488 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
490 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
495 /* The rest of the compiler works a bit smoother when we don't have to
496 check for the edge case of do-nothing functions with no basic blocks. */
499 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
506 /* Find all basic blocks of the function whose first insn is F.
508 Collect and return a list of labels whose addresses are taken. This
509 will be used in make_edges for use with computed gotos. */
512 find_basic_blocks_1 (f)
515 register rtx insn, next;
516 int call_has_abnormal_edge = 0;
518 rtx bb_note = NULL_RTX;
519 rtx eh_list = NULL_RTX;
520 rtx label_value_list = NULL_RTX;
524 /* We process the instructions in a slightly different way than we did
525 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
526 closed out the previous block, so that it gets attached at the proper
527 place. Since this form should be equivalent to the previous,
528 count_basic_blocks continues to use the old form as a check. */
530 for (insn = f; insn; insn = next)
532 enum rtx_code code = GET_CODE (insn);
534 next = NEXT_INSN (insn);
536 if (code == CALL_INSN)
538 /* Record whether this call created an edge. */
539 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
540 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
541 call_has_abnormal_edge = 0;
543 /* If there is an EH region or rethrow, we have an edge. */
544 if ((eh_list && region > 0)
545 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
546 call_has_abnormal_edge = 1;
549 /* If there is a nonlocal goto label and the specified
550 region number isn't -1, we have an edge. (0 means
551 no throw, but might have a nonlocal goto). */
552 if (nonlocal_goto_handler_labels && region >= 0)
553 call_has_abnormal_edge = 1;
561 int kind = NOTE_LINE_NUMBER (insn);
563 /* Keep a LIFO list of the currently active exception notes. */
564 if (kind == NOTE_INSN_EH_REGION_BEG)
565 eh_list = alloc_INSN_LIST (insn, eh_list);
566 else if (kind == NOTE_INSN_EH_REGION_END)
569 eh_list = XEXP (eh_list, 1);
570 free_INSN_LIST_node (t);
573 /* Look for basic block notes with which to keep the
574 basic_block_info pointers stable. Unthread the note now;
575 we'll put it back at the right place in create_basic_block.
576 Or not at all if we've already found a note in this block. */
577 else if (kind == NOTE_INSN_BASIC_BLOCK)
579 if (bb_note == NULL_RTX)
581 next = flow_delete_insn (insn);
588 /* A basic block starts at a label. If we've closed one off due
589 to a barrier or some such, no need to do it again. */
590 if (head != NULL_RTX)
592 /* While we now have edge lists with which other portions of
593 the compiler might determine a call ending a basic block
594 does not imply an abnormal edge, it will be a bit before
595 everything can be updated. So continue to emit a noop at
596 the end of such a block. */
597 if (GET_CODE (end) == CALL_INSN
598 && ! SIBLING_CALL_P (end))
600 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
601 end = emit_insn_after (nop, end);
604 create_basic_block (i++, head, end, bb_note);
611 /* A basic block ends at a jump. */
612 if (head == NULL_RTX)
616 /* ??? Make a special check for table jumps. The way this
617 happens is truly and amazingly gross. We are about to
618 create a basic block that contains just a code label and
619 an addr*vec jump insn. Worse, an addr_diff_vec creates
620 its own natural loop.
622 Prevent this bit of brain damage, pasting things together
623 correctly in make_edges.
625 The correct solution involves emitting the table directly
626 on the tablejump instruction as a note, or JUMP_LABEL. */
628 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
629 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
637 goto new_bb_inclusive;
640 /* A basic block ends at a barrier. It may be that an unconditional
641 jump already closed the basic block -- no need to do it again. */
642 if (head == NULL_RTX)
645 /* While we now have edge lists with which other portions of the
646 compiler might determine a call ending a basic block does not
647 imply an abnormal edge, it will be a bit before everything can
648 be updated. So continue to emit a noop at the end of such a
650 if (GET_CODE (end) == CALL_INSN
651 && ! SIBLING_CALL_P (end))
653 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
654 end = emit_insn_after (nop, end);
656 goto new_bb_exclusive;
659 /* A basic block ends at a call that can either throw or
660 do a non-local goto. */
661 if (call_has_abnormal_edge)
664 if (head == NULL_RTX)
669 create_basic_block (i++, head, end, bb_note);
670 head = end = NULL_RTX;
677 if (GET_RTX_CLASS (code) == 'i')
679 if (head == NULL_RTX)
686 if (GET_RTX_CLASS (code) == 'i')
690 /* Make a list of all labels referred to other than by jumps
691 (which just don't have the REG_LABEL notes).
693 Make a special exception for labels followed by an ADDR*VEC,
694 as this would be a part of the tablejump setup code.
696 Make a special exception for the eh_return_stub_label, which
697 we know isn't part of any otherwise visible control flow. */
699 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
700 if (REG_NOTE_KIND (note) == REG_LABEL)
702 rtx lab = XEXP (note, 0), next;
704 if (lab == eh_return_stub_label)
706 else if ((next = next_nonnote_insn (lab)) != NULL
707 && GET_CODE (next) == JUMP_INSN
708 && (GET_CODE (PATTERN (next)) == ADDR_VEC
709 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
713 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
718 if (head != NULL_RTX)
719 create_basic_block (i++, head, end, bb_note);
721 if (i != n_basic_blocks)
724 return label_value_list;
727 /* Tidy the CFG by deleting unreachable code and whatnot. */
733 delete_unreachable_blocks ();
734 move_stray_eh_region_notes ();
735 record_active_eh_regions (f);
737 mark_critical_edges ();
739 /* Kill the data we won't maintain. */
740 label_value_list = NULL_RTX;
743 /* Create a new basic block consisting of the instructions between
744 HEAD and END inclusive. Reuses the note and basic block struct
745 in BB_NOTE, if any. */
748 create_basic_block (index, head, end, bb_note)
750 rtx head, end, bb_note;
755 && ! RTX_INTEGRATED_P (bb_note)
756 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
759 /* If we found an existing note, thread it back onto the chain. */
761 if (GET_CODE (head) == CODE_LABEL)
762 add_insn_after (bb_note, head);
765 add_insn_before (bb_note, head);
771 /* Otherwise we must create a note and a basic block structure.
772 Since we allow basic block structs in rtl, give the struct
773 the same lifetime by allocating it off the function obstack
774 rather than using malloc. */
776 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
777 memset (bb, 0, sizeof (*bb));
779 if (GET_CODE (head) == CODE_LABEL)
780 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
783 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
786 NOTE_BASIC_BLOCK (bb_note) = bb;
789 /* Always include the bb note in the block. */
790 if (NEXT_INSN (end) == bb_note)
796 BASIC_BLOCK (index) = bb;
798 /* Tag the block so that we know it has been used when considering
799 other basic block notes. */
803 /* Records the basic block struct in BB_FOR_INSN, for every instruction
804 indexed by INSN_UID. MAX is the size of the array. */
807 compute_bb_for_insn (max)
812 if (basic_block_for_insn)
813 VARRAY_FREE (basic_block_for_insn);
814 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
816 for (i = 0; i < n_basic_blocks; ++i)
818 basic_block bb = BASIC_BLOCK (i);
825 int uid = INSN_UID (insn);
827 VARRAY_BB (basic_block_for_insn, uid) = bb;
830 insn = NEXT_INSN (insn);
835 /* Free the memory associated with the edge structures. */
843 for (i = 0; i < n_basic_blocks; ++i)
845 basic_block bb = BASIC_BLOCK (i);
847 for (e = bb->succ; e ; e = n)
857 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
863 ENTRY_BLOCK_PTR->succ = 0;
864 EXIT_BLOCK_PTR->pred = 0;
869 /* Identify the edges between basic blocks.
871 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
872 that are otherwise unreachable may be reachable with a non-local goto.
874 BB_EH_END is an array indexed by basic block number in which we record
875 the list of exception regions active at the end of the basic block. */
878 make_edges (label_value_list)
879 rtx label_value_list;
882 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
883 sbitmap *edge_cache = NULL;
885 /* Assume no computed jump; revise as we create edges. */
886 current_function_has_computed_jump = 0;
888 /* Heavy use of computed goto in machine-generated code can lead to
889 nearly fully-connected CFGs. In that case we spend a significant
890 amount of time searching the edge lists for duplicates. */
891 if (forced_labels || label_value_list)
893 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
894 sbitmap_vector_zero (edge_cache, n_basic_blocks);
897 /* By nature of the way these get numbered, block 0 is always the entry. */
898 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
900 for (i = 0; i < n_basic_blocks; ++i)
902 basic_block bb = BASIC_BLOCK (i);
905 int force_fallthru = 0;
907 /* Examine the last instruction of the block, and discover the
908 ways we can leave the block. */
911 code = GET_CODE (insn);
914 if (code == JUMP_INSN)
918 /* ??? Recognize a tablejump and do the right thing. */
919 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
920 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
921 && GET_CODE (tmp) == JUMP_INSN
922 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
923 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
928 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
929 vec = XVEC (PATTERN (tmp), 0);
931 vec = XVEC (PATTERN (tmp), 1);
933 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
934 make_label_edge (edge_cache, bb,
935 XEXP (RTVEC_ELT (vec, j), 0), 0);
937 /* Some targets (eg, ARM) emit a conditional jump that also
938 contains the out-of-range target. Scan for these and
939 add an edge if necessary. */
940 if ((tmp = single_set (insn)) != NULL
941 && SET_DEST (tmp) == pc_rtx
942 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
943 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
944 make_label_edge (edge_cache, bb,
945 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
947 #ifdef CASE_DROPS_THROUGH
948 /* Silly VAXen. The ADDR_VEC is going to be in the way of
949 us naturally detecting fallthru into the next block. */
954 /* If this is a computed jump, then mark it as reaching
955 everything on the label_value_list and forced_labels list. */
956 else if (computed_jump_p (insn))
958 current_function_has_computed_jump = 1;
960 for (x = label_value_list; x; x = XEXP (x, 1))
961 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
963 for (x = forced_labels; x; x = XEXP (x, 1))
964 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
967 /* Returns create an exit out. */
968 else if (returnjump_p (insn))
969 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
971 /* Otherwise, we have a plain conditional or unconditional jump. */
974 if (! JUMP_LABEL (insn))
976 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
980 /* If this is a sibling call insn, then this is in effect a
981 combined call and return, and so we need an edge to the
982 exit block. No need to worry about EH edges, since we
983 wouldn't have created the sibling call in the first place. */
985 if (code == CALL_INSN && SIBLING_CALL_P (insn))
986 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
989 /* If this is a CALL_INSN, then mark it as reaching the active EH
990 handler for this CALL_INSN. If we're handling asynchronous
991 exceptions then any insn can reach any of the active handlers.
993 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
995 if (code == CALL_INSN || asynchronous_exceptions)
997 /* Add any appropriate EH edges. We do this unconditionally
998 since there may be a REG_EH_REGION or REG_EH_RETHROW note
999 on the call, and this needn't be within an EH region. */
1000 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1002 /* If we have asynchronous exceptions, do the same for *all*
1003 exception regions active in the block. */
1004 if (asynchronous_exceptions
1005 && bb->eh_beg != bb->eh_end)
1007 if (bb->eh_beg >= 0)
1008 make_eh_edge (edge_cache, eh_nest_info, bb,
1009 NULL_RTX, bb->eh_beg);
1011 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1012 if (GET_CODE (x) == NOTE
1013 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1014 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1016 int region = NOTE_EH_HANDLER (x);
1017 make_eh_edge (edge_cache, eh_nest_info, bb,
1022 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1024 /* ??? This could be made smarter: in some cases it's possible
1025 to tell that certain calls will not do a nonlocal goto.
1027 For example, if the nested functions that do the nonlocal
1028 gotos do not have their addresses taken, then only calls to
1029 those functions or to other nested functions that use them
1030 could possibly do nonlocal gotos. */
1031 /* We do know that a REG_EH_REGION note with a value less
1032 than 0 is guaranteed not to perform a non-local goto. */
1033 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1034 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1035 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1036 make_label_edge (edge_cache, bb, XEXP (x, 0),
1037 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1041 /* We know something about the structure of the function __throw in
1042 libgcc2.c. It is the only function that ever contains eh_stub
1043 labels. It modifies its return address so that the last block
1044 returns to one of the eh_stub labels within it. So we have to
1045 make additional edges in the flow graph. */
1046 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1047 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1049 /* Find out if we can drop through to the next block. */
1050 insn = next_nonnote_insn (insn);
1051 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1052 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1053 else if (i + 1 < n_basic_blocks)
1055 rtx tmp = BLOCK_HEAD (i + 1);
1056 if (GET_CODE (tmp) == NOTE)
1057 tmp = next_nonnote_insn (tmp);
1058 if (force_fallthru || insn == tmp)
1059 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1063 free_eh_nesting_info (eh_nest_info);
1065 sbitmap_vector_free (edge_cache);
1068 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1069 about the edge that is accumulated between calls. */
1072 make_edge (edge_cache, src, dst, flags)
1073 sbitmap *edge_cache;
1074 basic_block src, dst;
1080 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1081 many edges to them, and we didn't allocate memory for it. */
1082 use_edge_cache = (edge_cache
1083 && src != ENTRY_BLOCK_PTR
1084 && dst != EXIT_BLOCK_PTR);
1086 /* Make sure we don't add duplicate edges. */
1087 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1088 for (e = src->succ; e ; e = e->succ_next)
1095 e = (edge) xcalloc (1, sizeof (*e));
1098 e->succ_next = src->succ;
1099 e->pred_next = dst->pred;
1108 SET_BIT (edge_cache[src->index], dst->index);
1111 /* Create an edge from a basic block to a label. */
1114 make_label_edge (edge_cache, src, label, flags)
1115 sbitmap *edge_cache;
1120 if (GET_CODE (label) != CODE_LABEL)
1123 /* If the label was never emitted, this insn is junk, but avoid a
1124 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1125 as a result of a syntax error and a diagnostic has already been
1128 if (INSN_UID (label) == 0)
1131 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1134 /* Create the edges generated by INSN in REGION. */
1137 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1138 sbitmap *edge_cache;
1139 eh_nesting_info *eh_nest_info;
1144 handler_info **handler_list;
1147 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1148 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1151 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1152 EDGE_ABNORMAL | EDGE_EH | is_call);
1156 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1157 dangerous if we intend to move basic blocks around. Move such notes
1158 into the following block. */
1161 move_stray_eh_region_notes ()
1166 if (n_basic_blocks < 2)
1169 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1170 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1172 rtx insn, next, list = NULL_RTX;
1174 b1 = BASIC_BLOCK (i);
1175 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1177 next = NEXT_INSN (insn);
1178 if (GET_CODE (insn) == NOTE
1179 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1180 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1182 /* Unlink from the insn chain. */
1183 NEXT_INSN (PREV_INSN (insn)) = next;
1184 PREV_INSN (next) = PREV_INSN (insn);
1187 NEXT_INSN (insn) = list;
1192 if (list == NULL_RTX)
1195 /* Find where to insert these things. */
1197 if (GET_CODE (insn) == CODE_LABEL)
1198 insn = NEXT_INSN (insn);
1202 next = NEXT_INSN (list);
1203 add_insn_after (list, insn);
1209 /* Recompute eh_beg/eh_end for each basic block. */
1212 record_active_eh_regions (f)
1215 rtx insn, eh_list = NULL_RTX;
1217 basic_block bb = BASIC_BLOCK (0);
1219 for (insn = f; insn ; insn = NEXT_INSN (insn))
1221 if (bb->head == insn)
1222 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1224 if (GET_CODE (insn) == NOTE)
1226 int kind = NOTE_LINE_NUMBER (insn);
1227 if (kind == NOTE_INSN_EH_REGION_BEG)
1228 eh_list = alloc_INSN_LIST (insn, eh_list);
1229 else if (kind == NOTE_INSN_EH_REGION_END)
1231 rtx t = XEXP (eh_list, 1);
1232 free_INSN_LIST_node (eh_list);
1237 if (bb->end == insn)
1239 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1241 if (i == n_basic_blocks)
1243 bb = BASIC_BLOCK (i);
1248 /* Identify critical edges and set the bits appropriately. */
1251 mark_critical_edges ()
1253 int i, n = n_basic_blocks;
1256 /* We begin with the entry block. This is not terribly important now,
1257 but could be if a front end (Fortran) implemented alternate entry
1259 bb = ENTRY_BLOCK_PTR;
1266 /* (1) Critical edges must have a source with multiple successors. */
1267 if (bb->succ && bb->succ->succ_next)
1269 for (e = bb->succ; e ; e = e->succ_next)
1271 /* (2) Critical edges must have a destination with multiple
1272 predecessors. Note that we know there is at least one
1273 predecessor -- the edge we followed to get here. */
1274 if (e->dest->pred->pred_next)
1275 e->flags |= EDGE_CRITICAL;
1277 e->flags &= ~EDGE_CRITICAL;
1282 for (e = bb->succ; e ; e = e->succ_next)
1283 e->flags &= ~EDGE_CRITICAL;
1288 bb = BASIC_BLOCK (i);
1292 /* Split a (typically critical) edge. Return the new block.
1293 Abort on abnormal edges.
1295 ??? The code generally expects to be called on critical edges.
1296 The case of a block ending in an unconditional jump to a
1297 block with multiple predecessors is not handled optimally. */
1300 split_edge (edge_in)
1303 basic_block old_pred, bb, old_succ;
1308 /* Abnormal edges cannot be split. */
1309 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1312 old_pred = edge_in->src;
1313 old_succ = edge_in->dest;
1315 /* Remove the existing edge from the destination's pred list. */
1318 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1320 *pp = edge_in->pred_next;
1321 edge_in->pred_next = NULL;
1324 /* Create the new structures. */
1325 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1326 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1329 memset (bb, 0, sizeof (*bb));
1330 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1331 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1333 /* ??? This info is likely going to be out of date very soon. */
1334 if (old_succ->global_live_at_start)
1336 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1337 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1341 CLEAR_REG_SET (bb->global_live_at_start);
1342 CLEAR_REG_SET (bb->global_live_at_end);
1347 bb->succ = edge_out;
1350 edge_in->flags &= ~EDGE_CRITICAL;
1352 edge_out->pred_next = old_succ->pred;
1353 edge_out->succ_next = NULL;
1355 edge_out->dest = old_succ;
1356 edge_out->flags = EDGE_FALLTHRU;
1357 edge_out->probability = REG_BR_PROB_BASE;
1359 old_succ->pred = edge_out;
1361 /* Tricky case -- if there existed a fallthru into the successor
1362 (and we're not it) we must add a new unconditional jump around
1363 the new block we're actually interested in.
1365 Further, if that edge is critical, this means a second new basic
1366 block must be created to hold it. In order to simplify correct
1367 insn placement, do this before we touch the existing basic block
1368 ordering for the block we were really wanting. */
1369 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1372 for (e = edge_out->pred_next; e ; e = e->pred_next)
1373 if (e->flags & EDGE_FALLTHRU)
1378 basic_block jump_block;
1381 if ((e->flags & EDGE_CRITICAL) == 0
1382 && e->src != ENTRY_BLOCK_PTR)
1384 /* Non critical -- we can simply add a jump to the end
1385 of the existing predecessor. */
1386 jump_block = e->src;
1390 /* We need a new block to hold the jump. The simplest
1391 way to do the bulk of the work here is to recursively
1393 jump_block = split_edge (e);
1394 e = jump_block->succ;
1397 /* Now add the jump insn ... */
1398 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1400 jump_block->end = pos;
1401 if (basic_block_for_insn)
1402 set_block_for_insn (pos, jump_block);
1403 emit_barrier_after (pos);
1405 /* ... let jump know that label is in use, ... */
1406 JUMP_LABEL (pos) = old_succ->head;
1407 ++LABEL_NUSES (old_succ->head);
1409 /* ... and clear fallthru on the outgoing edge. */
1410 e->flags &= ~EDGE_FALLTHRU;
1412 /* Continue splitting the interesting edge. */
1416 /* Place the new block just in front of the successor. */
1417 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1418 if (old_succ == EXIT_BLOCK_PTR)
1419 j = n_basic_blocks - 1;
1421 j = old_succ->index;
1422 for (i = n_basic_blocks - 1; i > j; --i)
1424 basic_block tmp = BASIC_BLOCK (i - 1);
1425 BASIC_BLOCK (i) = tmp;
1428 BASIC_BLOCK (i) = bb;
1431 /* Create the basic block note.
1433 Where we place the note can have a noticable impact on the generated
1434 code. Consider this cfg:
1445 If we need to insert an insn on the edge from block 0 to block 1,
1446 we want to ensure the instructions we insert are outside of any
1447 loop notes that physically sit between block 0 and block 1. Otherwise
1448 we confuse the loop optimizer into thinking the loop is a phony. */
1449 if (old_succ != EXIT_BLOCK_PTR
1450 && PREV_INSN (old_succ->head)
1451 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1452 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1453 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1454 PREV_INSN (old_succ->head));
1455 else if (old_succ != EXIT_BLOCK_PTR)
1456 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1458 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1459 NOTE_BASIC_BLOCK (bb_note) = bb;
1460 bb->head = bb->end = bb_note;
1462 /* Not quite simple -- for non-fallthru edges, we must adjust the
1463 predecessor's jump instruction to target our new block. */
1464 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1466 rtx tmp, insn = old_pred->end;
1467 rtx old_label = old_succ->head;
1468 rtx new_label = gen_label_rtx ();
1470 if (GET_CODE (insn) != JUMP_INSN)
1473 /* ??? Recognize a tablejump and adjust all matching cases. */
1474 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1475 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1476 && GET_CODE (tmp) == JUMP_INSN
1477 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1478 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1483 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1484 vec = XVEC (PATTERN (tmp), 0);
1486 vec = XVEC (PATTERN (tmp), 1);
1488 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1489 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1491 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1492 --LABEL_NUSES (old_label);
1493 ++LABEL_NUSES (new_label);
1496 /* Handle casesi dispatch insns */
1497 if ((tmp = single_set (insn)) != NULL
1498 && SET_DEST (tmp) == pc_rtx
1499 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1500 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1501 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1503 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1505 --LABEL_NUSES (old_label);
1506 ++LABEL_NUSES (new_label);
1511 /* This would have indicated an abnormal edge. */
1512 if (computed_jump_p (insn))
1515 /* A return instruction can't be redirected. */
1516 if (returnjump_p (insn))
1519 /* If the insn doesn't go where we think, we're confused. */
1520 if (JUMP_LABEL (insn) != old_label)
1523 redirect_jump (insn, new_label);
1526 emit_label_before (new_label, bb_note);
1527 bb->head = new_label;
1533 /* Queue instructions for insertion on an edge between two basic blocks.
1534 The new instructions and basic blocks (if any) will not appear in the
1535 CFG until commit_edge_insertions is called. */
1538 insert_insn_on_edge (pattern, e)
1542 /* We cannot insert instructions on an abnormal critical edge.
1543 It will be easier to find the culprit if we die now. */
1544 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1545 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1548 if (e->insns == NULL_RTX)
1551 push_to_sequence (e->insns);
1553 emit_insn (pattern);
1555 e->insns = get_insns ();
1559 /* Update the CFG for the instructions queued on edge E. */
1562 commit_one_edge_insertion (e)
1565 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1568 /* Pull the insns off the edge now since the edge might go away. */
1570 e->insns = NULL_RTX;
1572 /* Figure out where to put these things. If the destination has
1573 one predecessor, insert there. Except for the exit block. */
1574 if (e->dest->pred->pred_next == NULL
1575 && e->dest != EXIT_BLOCK_PTR)
1579 /* Get the location correct wrt a code label, and "nice" wrt
1580 a basic block note, and before everything else. */
1582 if (GET_CODE (tmp) == CODE_LABEL)
1583 tmp = NEXT_INSN (tmp);
1584 if (GET_CODE (tmp) == NOTE
1585 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1586 tmp = NEXT_INSN (tmp);
1587 if (tmp == bb->head)
1590 after = PREV_INSN (tmp);
1593 /* If the source has one successor and the edge is not abnormal,
1594 insert there. Except for the entry block. */
1595 else if ((e->flags & EDGE_ABNORMAL) == 0
1596 && e->src->succ->succ_next == NULL
1597 && e->src != ENTRY_BLOCK_PTR)
1600 /* It is possible to have a non-simple jump here. Consider a target
1601 where some forms of unconditional jumps clobber a register. This
1602 happens on the fr30 for example.
1604 We know this block has a single successor, so we can just emit
1605 the queued insns before the jump. */
1606 if (GET_CODE (bb->end) == JUMP_INSN)
1612 /* We'd better be fallthru, or we've lost track of what's what. */
1613 if ((e->flags & EDGE_FALLTHRU) == 0)
1620 /* Otherwise we must split the edge. */
1623 bb = split_edge (e);
1627 /* Now that we've found the spot, do the insertion. */
1629 /* Set the new block number for these insns, if structure is allocated. */
1630 if (basic_block_for_insn)
1633 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1634 set_block_for_insn (i, bb);
1639 emit_insns_before (insns, before);
1640 if (before == bb->head)
1645 rtx last = emit_insns_after (insns, after);
1646 if (after == bb->end)
1650 if (GET_CODE (last) == JUMP_INSN)
1652 if (returnjump_p (last))
1654 /* ??? Remove all outgoing edges from BB and add one
1655 for EXIT. This is not currently a problem because
1656 this only happens for the (single) epilogue, which
1657 already has a fallthru edge to EXIT. */
1660 if (e->dest != EXIT_BLOCK_PTR
1661 || e->succ_next != NULL
1662 || (e->flags & EDGE_FALLTHRU) == 0)
1664 e->flags &= ~EDGE_FALLTHRU;
1666 emit_barrier_after (last);
1675 /* Update the CFG for all queued instructions. */
1678 commit_edge_insertions ()
1683 #ifdef ENABLE_CHECKING
1684 verify_flow_info ();
1688 bb = ENTRY_BLOCK_PTR;
1693 for (e = bb->succ; e ; e = next)
1695 next = e->succ_next;
1697 commit_one_edge_insertion (e);
1700 if (++i >= n_basic_blocks)
1702 bb = BASIC_BLOCK (i);
1706 /* Delete all unreachable basic blocks. */
1709 delete_unreachable_blocks ()
1711 basic_block *worklist, *tos;
1712 int deleted_handler;
1717 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1719 /* Use basic_block->aux as a marker. Clear them all. */
1721 for (i = 0; i < n; ++i)
1722 BASIC_BLOCK (i)->aux = NULL;
1724 /* Add our starting points to the worklist. Almost always there will
1725 be only one. It isn't inconcievable that we might one day directly
1726 support Fortran alternate entry points. */
1728 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1732 /* Mark the block with a handy non-null value. */
1736 /* Iterate: find everything reachable from what we've already seen. */
1738 while (tos != worklist)
1740 basic_block b = *--tos;
1742 for (e = b->succ; e ; e = e->succ_next)
1750 /* Delete all unreachable basic blocks. Count down so that we don't
1751 interfere with the block renumbering that happens in delete_block. */
1753 deleted_handler = 0;
1755 for (i = n - 1; i >= 0; --i)
1757 basic_block b = BASIC_BLOCK (i);
1760 /* This block was found. Tidy up the mark. */
1763 deleted_handler |= delete_block (b);
1766 tidy_fallthru_edges ();
1768 /* If we deleted an exception handler, we may have EH region begin/end
1769 blocks to remove as well. */
1770 if (deleted_handler)
1771 delete_eh_regions ();
1776 /* Find EH regions for which there is no longer a handler, and delete them. */
1779 delete_eh_regions ()
1783 update_rethrow_references ();
1785 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1786 if (GET_CODE (insn) == NOTE)
1788 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1789 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1791 int num = NOTE_EH_HANDLER (insn);
1792 /* A NULL handler indicates a region is no longer needed,
1793 as long as its rethrow label isn't used. */
1794 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1796 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1797 NOTE_SOURCE_FILE (insn) = 0;
1803 /* Return true if NOTE is not one of the ones that must be kept paired,
1804 so that we may simply delete them. */
1807 can_delete_note_p (note)
1810 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1811 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1814 /* Unlink a chain of insns between START and FINISH, leaving notes
1815 that must be paired. */
1818 flow_delete_insn_chain (start, finish)
1821 /* Unchain the insns one by one. It would be quicker to delete all
1822 of these with a single unchaining, rather than one at a time, but
1823 we need to keep the NOTE's. */
1829 next = NEXT_INSN (start);
1830 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1832 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1835 next = flow_delete_insn (start);
1837 if (start == finish)
1843 /* Delete the insns in a (non-live) block. We physically delete every
1844 non-deleted-note insn, and update the flow graph appropriately.
1846 Return nonzero if we deleted an exception handler. */
1848 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1849 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1855 int deleted_handler = 0;
1858 /* If the head of this block is a CODE_LABEL, then it might be the
1859 label for an exception handler which can't be reached.
1861 We need to remove the label from the exception_handler_label list
1862 and remove the associated NOTE_INSN_EH_REGION_BEG and
1863 NOTE_INSN_EH_REGION_END notes. */
1867 never_reached_warning (insn);
1869 if (GET_CODE (insn) == CODE_LABEL)
1871 rtx x, *prev = &exception_handler_labels;
1873 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1875 if (XEXP (x, 0) == insn)
1877 /* Found a match, splice this label out of the EH label list. */
1878 *prev = XEXP (x, 1);
1879 XEXP (x, 1) = NULL_RTX;
1880 XEXP (x, 0) = NULL_RTX;
1882 /* Remove the handler from all regions */
1883 remove_handler (insn);
1884 deleted_handler = 1;
1887 prev = &XEXP (x, 1);
1890 /* This label may be referenced by code solely for its value, or
1891 referenced by static data, or something. We have determined
1892 that it is not reachable, but cannot delete the label itself.
1893 Save code space and continue to delete the balance of the block,
1894 along with properly updating the cfg. */
1895 if (!can_delete_label_p (insn))
1897 /* If we've only got one of these, skip the whole deleting
1900 goto no_delete_insns;
1901 insn = NEXT_INSN (insn);
1905 /* Include any jump table following the basic block. */
1907 if (GET_CODE (end) == JUMP_INSN
1908 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1909 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1910 && GET_CODE (tmp) == JUMP_INSN
1911 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1912 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1915 /* Include any barrier that may follow the basic block. */
1916 tmp = next_nonnote_insn (end);
1917 if (tmp && GET_CODE (tmp) == BARRIER)
1920 /* Selectively delete the entire chain. */
1921 flow_delete_insn_chain (insn, end);
1925 /* Remove the edges into and out of this block. Note that there may
1926 indeed be edges in, if we are removing an unreachable loop. */
1930 for (e = b->pred; e ; e = next)
1932 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1935 next = e->pred_next;
1939 for (e = b->succ; e ; e = next)
1941 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1944 next = e->succ_next;
1953 /* Remove the basic block from the array, and compact behind it. */
1956 return deleted_handler;
1959 /* Remove block B from the basic block array and compact behind it. */
1965 int i, n = n_basic_blocks;
1967 for (i = b->index; i + 1 < n; ++i)
1969 basic_block x = BASIC_BLOCK (i + 1);
1970 BASIC_BLOCK (i) = x;
1974 basic_block_info->num_elements--;
1978 /* Delete INSN by patching it out. Return the next insn. */
1981 flow_delete_insn (insn)
1984 rtx prev = PREV_INSN (insn);
1985 rtx next = NEXT_INSN (insn);
1988 PREV_INSN (insn) = NULL_RTX;
1989 NEXT_INSN (insn) = NULL_RTX;
1992 NEXT_INSN (prev) = next;
1994 PREV_INSN (next) = prev;
1996 set_last_insn (prev);
1998 if (GET_CODE (insn) == CODE_LABEL)
1999 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2001 /* If deleting a jump, decrement the use count of the label. Deleting
2002 the label itself should happen in the normal course of block merging. */
2003 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2004 LABEL_NUSES (JUMP_LABEL (insn))--;
2006 /* Also if deleting an insn that references a label. */
2007 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2008 LABEL_NUSES (XEXP (note, 0))--;
2013 /* True if a given label can be deleted. */
2016 can_delete_label_p (label)
2021 if (LABEL_PRESERVE_P (label))
2024 for (x = forced_labels; x ; x = XEXP (x, 1))
2025 if (label == XEXP (x, 0))
2027 for (x = label_value_list; x ; x = XEXP (x, 1))
2028 if (label == XEXP (x, 0))
2030 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2031 if (label == XEXP (x, 0))
2034 /* User declared labels must be preserved. */
2035 if (LABEL_NAME (label) != 0)
2041 /* Blocks A and B are to be merged into a single block. A has no incoming
2042 fallthru edge, so it can be moved before B without adding or modifying
2043 any jumps (aside from the jump from A to B). */
2046 merge_blocks_move_predecessor_nojumps (a, b)
2049 rtx start, end, barrier;
2055 /* We want to delete the BARRIER after the end of the insns we are
2056 going to move. If we don't find a BARRIER, then do nothing. This
2057 can happen in some cases if we have labels we can not delete.
2059 Similarly, do nothing if we can not delete the label at the start
2060 of the target block. */
2061 barrier = next_nonnote_insn (end);
2062 if (GET_CODE (barrier) != BARRIER
2063 || (GET_CODE (b->head) == CODE_LABEL
2064 && ! can_delete_label_p (b->head)))
2067 flow_delete_insn (barrier);
2069 /* Move block and loop notes out of the chain so that we do not
2070 disturb their order.
2072 ??? A better solution would be to squeeze out all the non-nested notes
2073 and adjust the block trees appropriately. Even better would be to have
2074 a tighter connection between block trees and rtl so that this is not
2076 start = squeeze_notes (start, end);
2078 /* Scramble the insn chain. */
2079 if (end != PREV_INSN (b->head))
2080 reorder_insns (start, end, PREV_INSN (b->head));
2084 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2085 a->index, b->index);
2088 /* Swap the records for the two blocks around. Although we are deleting B,
2089 A is now where B was and we want to compact the BB array from where
2091 BASIC_BLOCK(a->index) = b;
2092 BASIC_BLOCK(b->index) = a;
2094 a->index = b->index;
2097 /* Now blocks A and B are contiguous. Merge them. */
2098 merge_blocks_nomove (a, b);
2103 /* Blocks A and B are to be merged into a single block. B has no outgoing
2104 fallthru edge, so it can be moved after A without adding or modifying
2105 any jumps (aside from the jump from A to B). */
2108 merge_blocks_move_successor_nojumps (a, b)
2111 rtx start, end, barrier;
2116 /* We want to delete the BARRIER after the end of the insns we are
2117 going to move. If we don't find a BARRIER, then do nothing. This
2118 can happen in some cases if we have labels we can not delete.
2120 Similarly, do nothing if we can not delete the label at the start
2121 of the target block. */
2122 barrier = next_nonnote_insn (end);
2123 if (GET_CODE (barrier) != BARRIER
2124 || (GET_CODE (b->head) == CODE_LABEL
2125 && ! can_delete_label_p (b->head)))
2128 flow_delete_insn (barrier);
2130 /* Move block and loop notes out of the chain so that we do not
2131 disturb their order.
2133 ??? A better solution would be to squeeze out all the non-nested notes
2134 and adjust the block trees appropriately. Even better would be to have
2135 a tighter connection between block trees and rtl so that this is not
2137 start = squeeze_notes (start, end);
2139 /* Scramble the insn chain. */
2140 reorder_insns (start, end, a->end);
2142 /* Now blocks A and B are contiguous. Merge them. */
2143 merge_blocks_nomove (a, b);
2147 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2148 b->index, a->index);
2154 /* Blocks A and B are to be merged into a single block. The insns
2155 are already contiguous, hence `nomove'. */
2158 merge_blocks_nomove (a, b)
2162 rtx b_head, b_end, a_end;
2165 /* If there was a CODE_LABEL beginning B, delete it. */
2168 if (GET_CODE (b_head) == CODE_LABEL)
2170 /* Detect basic blocks with nothing but a label. This can happen
2171 in particular at the end of a function. */
2172 if (b_head == b_end)
2174 b_head = flow_delete_insn (b_head);
2177 /* Delete the basic block note. */
2178 if (GET_CODE (b_head) == NOTE
2179 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2181 if (b_head == b_end)
2183 b_head = flow_delete_insn (b_head);
2186 /* If there was a jump out of A, delete it. */
2188 if (GET_CODE (a_end) == JUMP_INSN)
2192 prev = prev_nonnote_insn (a_end);
2197 /* If this was a conditional jump, we need to also delete
2198 the insn that set cc0. */
2200 if (prev && sets_cc0_p (prev))
2203 prev = prev_nonnote_insn (prev);
2206 flow_delete_insn (tmp);
2210 /* Note that a->head != a->end, since we should have at least a
2211 bb note plus the jump, so prev != insn. */
2212 flow_delete_insn (a_end);
2216 /* By definition, there should only be one successor of A, and that is
2217 B. Free that edge struct. */
2221 /* Adjust the edges out of B for the new owner. */
2222 for (e = b->succ; e ; e = e->succ_next)
2226 /* Reassociate the insns of B with A. */
2229 BLOCK_FOR_INSN (b_head) = a;
2230 while (b_head != b_end)
2232 b_head = NEXT_INSN (b_head);
2233 BLOCK_FOR_INSN (b_head) = a;
2239 /* Compact the basic block array. */
2243 /* Attempt to merge basic blocks that are potentially non-adjacent.
2244 Return true iff the attempt succeeded. */
2247 merge_blocks (e, b, c)
2251 /* If B has a fallthru edge to C, no need to move anything. */
2252 if (e->flags & EDGE_FALLTHRU)
2254 /* If a label still appears somewhere and we cannot delete the label,
2255 then we cannot merge the blocks. The edge was tidied already. */
2257 rtx insn, stop = NEXT_INSN (c->head);
2258 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2259 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2262 merge_blocks_nomove (b, c);
2266 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2267 b->index, c->index);
2276 int c_has_outgoing_fallthru;
2277 int b_has_incoming_fallthru;
2279 /* We must make sure to not munge nesting of exception regions,
2280 lexical blocks, and loop notes.
2282 The first is taken care of by requiring that the active eh
2283 region at the end of one block always matches the active eh
2284 region at the beginning of the next block.
2286 The later two are taken care of by squeezing out all the notes. */
2288 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2289 executed and we may want to treat blocks which have two out
2290 edges, one normal, one abnormal as only having one edge for
2291 block merging purposes. */
2293 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2294 if (tmp_edge->flags & EDGE_FALLTHRU)
2296 c_has_outgoing_fallthru = (tmp_edge != NULL);
2298 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2299 if (tmp_edge->flags & EDGE_FALLTHRU)
2301 b_has_incoming_fallthru = (tmp_edge != NULL);
2303 /* If B does not have an incoming fallthru, and the exception regions
2304 match, then it can be moved immediately before C without introducing
2307 C can not be the first block, so we do not have to worry about
2308 accessing a non-existent block. */
2309 d = BASIC_BLOCK (c->index - 1);
2310 if (! b_has_incoming_fallthru
2311 && d->eh_end == b->eh_beg
2312 && b->eh_end == c->eh_beg)
2313 return merge_blocks_move_predecessor_nojumps (b, c);
2315 /* Otherwise, we're going to try to move C after B. Make sure the
2316 exception regions match.
2318 If B is the last basic block, then we must not try to access the
2319 block structure for block B + 1. Luckily in that case we do not
2320 need to worry about matching exception regions. */
2321 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2322 if (b->eh_end == c->eh_beg
2323 && (d == NULL || c->eh_end == d->eh_beg))
2325 /* If C does not have an outgoing fallthru, then it can be moved
2326 immediately after B without introducing or modifying jumps. */
2327 if (! c_has_outgoing_fallthru)
2328 return merge_blocks_move_successor_nojumps (b, c);
2330 /* Otherwise, we'll need to insert an extra jump, and possibly
2331 a new block to contain it. */
2332 /* ??? Not implemented yet. */
2339 /* Top level driver for merge_blocks. */
2346 /* Attempt to merge blocks as made possible by edge removal. If a block
2347 has only one successor, and the successor has only one predecessor,
2348 they may be combined. */
2350 for (i = 0; i < n_basic_blocks; )
2352 basic_block c, b = BASIC_BLOCK (i);
2355 /* A loop because chains of blocks might be combineable. */
2356 while ((s = b->succ) != NULL
2357 && s->succ_next == NULL
2358 && (s->flags & EDGE_EH) == 0
2359 && (c = s->dest) != EXIT_BLOCK_PTR
2360 && c->pred->pred_next == NULL
2361 /* If the jump insn has side effects, we can't kill the edge. */
2362 && (GET_CODE (b->end) != JUMP_INSN
2363 || onlyjump_p (b->end))
2364 && merge_blocks (s, b, c))
2367 /* Don't get confused by the index shift caused by deleting blocks. */
2372 /* The given edge should potentially be a fallthru edge. If that is in
2373 fact true, delete the jump and barriers that are in the way. */
2376 tidy_fallthru_edge (e, b, c)
2382 /* ??? In a late-running flow pass, other folks may have deleted basic
2383 blocks by nopping out blocks, leaving multiple BARRIERs between here
2384 and the target label. They ought to be chastized and fixed.
2386 We can also wind up with a sequence of undeletable labels between
2387 one block and the next.
2389 So search through a sequence of barriers, labels, and notes for
2390 the head of block C and assert that we really do fall through. */
2392 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2395 /* Remove what will soon cease being the jump insn from the source block.
2396 If block B consisted only of this single jump, turn it into a deleted
2399 if (GET_CODE (q) == JUMP_INSN)
2402 /* If this was a conditional jump, we need to also delete
2403 the insn that set cc0. */
2404 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2411 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2412 NOTE_SOURCE_FILE (q) = 0;
2415 b->end = q = PREV_INSN (q);
2418 /* Selectively unlink the sequence. */
2419 if (q != PREV_INSN (c->head))
2420 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2422 e->flags |= EDGE_FALLTHRU;
2425 /* Fix up edges that now fall through, or rather should now fall through
2426 but previously required a jump around now deleted blocks. Simplify
2427 the search by only examining blocks numerically adjacent, since this
2428 is how find_basic_blocks created them. */
2431 tidy_fallthru_edges ()
2435 for (i = 1; i < n_basic_blocks; ++i)
2437 basic_block b = BASIC_BLOCK (i - 1);
2438 basic_block c = BASIC_BLOCK (i);
2441 /* We care about simple conditional or unconditional jumps with
2444 If we had a conditional branch to the next instruction when
2445 find_basic_blocks was called, then there will only be one
2446 out edge for the block which ended with the conditional
2447 branch (since we do not create duplicate edges).
2449 Furthermore, the edge will be marked as a fallthru because we
2450 merge the flags for the duplicate edges. So we do not want to
2451 check that the edge is not a FALLTHRU edge. */
2452 if ((s = b->succ) != NULL
2453 && s->succ_next == NULL
2455 /* If the jump insn has side effects, we can't tidy the edge. */
2456 && (GET_CODE (b->end) != JUMP_INSN
2457 || onlyjump_p (b->end)))
2458 tidy_fallthru_edge (s, b, c);
2462 /* Discover and record the loop depth at the head of each basic block. */
2465 calculate_loop_depth (dump)
2470 /* The loop infrastructure does the real job for us. */
2471 flow_loops_find (&loops);
2474 flow_loops_dump (&loops, dump, 0);
2476 flow_loops_free (&loops);
2479 /* Perform data flow analysis.
2480 F is the first insn of the function and NREGS the number of register numbers
2484 life_analysis (f, nregs, file, remove_dead_code)
2488 int remove_dead_code;
2490 #ifdef ELIMINABLE_REGS
2492 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2497 /* Record which registers will be eliminated. We use this in
2500 CLEAR_HARD_REG_SET (elim_reg_set);
2502 #ifdef ELIMINABLE_REGS
2503 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2504 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2506 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2509 /* We want alias analysis information for local dead store elimination. */
2510 init_alias_analysis ();
2513 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2517 if (! remove_dead_code)
2518 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2521 /* The post-reload life analysis have (on a global basis) the same
2522 registers live as was computed by reload itself. elimination
2523 Otherwise offsets and such may be incorrect.
2525 Reload will make some registers as live even though they do not
2526 appear in the rtl. */
2527 if (reload_completed)
2528 flags &= ~PROP_REG_INFO;
2532 /* Always remove no-op moves. Do this before other processing so
2533 that we don't have to keep re-scanning them. */
2534 delete_noop_moves (f);
2536 /* Some targets can emit simpler epilogues if they know that sp was
2537 not ever modified during the function. After reload, of course,
2538 we've already emitted the epilogue so there's no sense searching. */
2539 if (! reload_completed)
2540 notice_stack_pointer_modification (f);
2542 /* Allocate and zero out data structures that will record the
2543 data from lifetime analysis. */
2544 allocate_reg_life_data ();
2545 allocate_bb_life_data ();
2546 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2547 all_blocks = sbitmap_alloc (n_basic_blocks);
2548 sbitmap_ones (all_blocks);
2550 /* Find the set of registers live on function exit. */
2551 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2553 /* "Update" life info from zero. It'd be nice to begin the
2554 relaxation with just the exit and noreturn blocks, but that set
2555 is not immediately handy. */
2557 if (flags & PROP_REG_INFO)
2558 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2559 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2562 sbitmap_free (all_blocks);
2563 free (reg_next_use);
2564 reg_next_use = NULL;
2565 end_alias_analysis ();
2568 dump_flow_info (file);
2570 free_basic_block_vars (1);
2573 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2574 Search for REGNO. If found, abort if it is not wider than word_mode. */
2577 verify_wide_reg_1 (px, pregno)
2582 unsigned int regno = *(int *) pregno;
2584 if (GET_CODE (x) == REG && REGNO (x) == regno)
2586 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2593 /* A subroutine of verify_local_live_at_start. Search through insns
2594 between HEAD and END looking for register REGNO. */
2597 verify_wide_reg (regno, head, end)
2603 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2604 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, ®no))
2608 head = NEXT_INSN (head);
2611 /* We didn't find the register at all. Something's way screwy. */
2615 /* A subroutine of update_life_info. Verify that there are no untoward
2616 changes in live_at_start during a local update. */
2619 verify_local_live_at_start (new_live_at_start, bb)
2620 regset new_live_at_start;
2623 if (reload_completed)
2625 /* After reload, there are no pseudos, nor subregs of multi-word
2626 registers. The regsets should exactly match. */
2627 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2634 /* Find the set of changed registers. */
2635 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2637 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2639 /* No registers should die. */
2640 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2642 /* Verify that the now-live register is wider than word_mode. */
2643 verify_wide_reg (i, bb->head, bb->end);
2648 /* Updates life information starting with the basic blocks set in BLOCKS.
2650 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2651 expecting local modifications to basic blocks. If we find extra
2652 registers live at the beginning of a block, then we either killed
2653 useful data, or we have a broken split that wants data not provided.
2654 If we find registers removed from live_at_start, that means we have
2655 a broken peephole that is killing a register it shouldn't.
2657 ??? This is not true in one situation -- when a pre-reload splitter
2658 generates subregs of a multi-word pseudo, current life analysis will
2659 lose the kill. So we _can_ have a pseudo go live. How irritating.
2661 BLOCK_FOR_INSN is assumed to be correct.
2663 PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
2664 up reg_next_use. Including PROP_REG_INFO does not properly refresh
2665 regs_ever_live unless the caller resets it to zero. */
2668 update_life_info (blocks, extent, prop_flags)
2670 enum update_life_extent extent;
2674 regset_head tmp_head;
2677 tmp = INITIALIZE_REG_SET (tmp_head);
2679 /* For a global update, we go through the relaxation process again. */
2680 if (extent != UPDATE_LIFE_LOCAL)
2682 calculate_global_regs_live (blocks, blocks,
2683 prop_flags & PROP_SCAN_DEAD_CODE);
2685 /* If asked, remove notes from the blocks we'll update. */
2686 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2687 count_or_remove_death_notes (blocks, 1);
2690 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2692 basic_block bb = BASIC_BLOCK (i);
2694 COPY_REG_SET (tmp, bb->global_live_at_end);
2695 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2697 if (extent == UPDATE_LIFE_LOCAL)
2698 verify_local_live_at_start (tmp, bb);
2703 if (prop_flags & PROP_REG_INFO)
2705 /* The only pseudos that are live at the beginning of the function
2706 are those that were not set anywhere in the function. local-alloc
2707 doesn't know how to handle these correctly, so mark them as not
2708 local to any one basic block. */
2709 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2710 FIRST_PSEUDO_REGISTER, i,
2711 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2713 /* We have a problem with any pseudoreg that lives across the setjmp.
2714 ANSI says that if a user variable does not change in value between
2715 the setjmp and the longjmp, then the longjmp preserves it. This
2716 includes longjmp from a place where the pseudo appears dead.
2717 (In principle, the value still exists if it is in scope.)
2718 If the pseudo goes in a hard reg, some other value may occupy
2719 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2720 Conclusion: such a pseudo must not go in a hard reg. */
2721 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2722 FIRST_PSEUDO_REGISTER, i,
2724 if (regno_reg_rtx[i] != 0)
2726 REG_LIVE_LENGTH (i) = -1;
2727 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2733 /* Free the variables allocated by find_basic_blocks.
2735 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2738 free_basic_block_vars (keep_head_end_p)
2739 int keep_head_end_p;
2741 if (basic_block_for_insn)
2743 VARRAY_FREE (basic_block_for_insn);
2744 basic_block_for_insn = NULL;
2747 if (! keep_head_end_p)
2750 VARRAY_FREE (basic_block_info);
2753 ENTRY_BLOCK_PTR->aux = NULL;
2754 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2755 EXIT_BLOCK_PTR->aux = NULL;
2756 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2760 /* Return nonzero if the destination of SET equals the source. */
2765 rtx src = SET_SRC (set);
2766 rtx dst = SET_DEST (set);
2768 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2770 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2772 src = SUBREG_REG (src);
2773 dst = SUBREG_REG (dst);
2776 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2777 && REGNO (src) == REGNO (dst));
2780 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2786 rtx pat = PATTERN (insn);
2788 /* Insns carrying these notes are useful later on. */
2789 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2792 if (GET_CODE (pat) == SET && set_noop_p (pat))
2795 if (GET_CODE (pat) == PARALLEL)
2798 /* If nothing but SETs of registers to themselves,
2799 this insn can also be deleted. */
2800 for (i = 0; i < XVECLEN (pat, 0); i++)
2802 rtx tem = XVECEXP (pat, 0, i);
2804 if (GET_CODE (tem) == USE
2805 || GET_CODE (tem) == CLOBBER)
2808 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2817 /* Delete any insns that copy a register to itself. */
2820 delete_noop_moves (f)
2824 for (insn = f; insn; insn = NEXT_INSN (insn))
2826 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2828 PUT_CODE (insn, NOTE);
2829 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2830 NOTE_SOURCE_FILE (insn) = 0;
2835 /* Determine if the stack pointer is constant over the life of the function.
2836 Only useful before prologues have been emitted. */
2839 notice_stack_pointer_modification_1 (x, pat, data)
2841 rtx pat ATTRIBUTE_UNUSED;
2842 void *data ATTRIBUTE_UNUSED;
2844 if (x == stack_pointer_rtx
2845 /* The stack pointer is only modified indirectly as the result
2846 of a push until later in flow. See the comments in rtl.texi
2847 regarding Embedded Side-Effects on Addresses. */
2848 || (GET_CODE (x) == MEM
2849 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2850 || GET_CODE (XEXP (x, 0)) == PRE_INC
2851 || GET_CODE (XEXP (x, 0)) == POST_DEC
2852 || GET_CODE (XEXP (x, 0)) == POST_INC)
2853 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2854 current_function_sp_is_unchanging = 0;
2858 notice_stack_pointer_modification (f)
2863 /* Assume that the stack pointer is unchanging if alloca hasn't
2865 current_function_sp_is_unchanging = !current_function_calls_alloca;
2866 if (! current_function_sp_is_unchanging)
2869 for (insn = f; insn; insn = NEXT_INSN (insn))
2871 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2873 /* Check if insn modifies the stack pointer. */
2874 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2876 if (! current_function_sp_is_unchanging)
2882 /* Mark a register in SET. Hard registers in large modes get all
2883 of their component registers set as well. */
2885 mark_reg (reg, xset)
2889 regset set = (regset) xset;
2890 int regno = REGNO (reg);
2892 if (GET_MODE (reg) == BLKmode)
2895 SET_REGNO_REG_SET (set, regno);
2896 if (regno < FIRST_PSEUDO_REGISTER)
2898 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2900 SET_REGNO_REG_SET (set, regno + n);
2904 /* Mark those regs which are needed at the end of the function as live
2905 at the end of the last basic block. */
2907 mark_regs_live_at_end (set)
2912 /* If exiting needs the right stack value, consider the stack pointer
2913 live at the end of the function. */
2914 if ((HAVE_epilogue && reload_completed)
2915 || ! EXIT_IGNORE_STACK
2916 || (! FRAME_POINTER_REQUIRED
2917 && ! current_function_calls_alloca
2918 && flag_omit_frame_pointer)
2919 || current_function_sp_is_unchanging)
2921 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2924 /* Mark the frame pointer if needed at the end of the function. If
2925 we end up eliminating it, it will be removed from the live list
2926 of each basic block by reload. */
2928 if (! reload_completed || frame_pointer_needed)
2930 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2931 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2932 /* If they are different, also mark the hard frame pointer as live */
2933 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2937 #ifdef PIC_OFFSET_TABLE_REGNUM
2938 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2939 /* Many architectures have a GP register even without flag_pic.
2940 Assume the pic register is not in use, or will be handled by
2941 other means, if it is not fixed. */
2942 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2943 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2947 /* Mark all global registers, and all registers used by the epilogue
2948 as being live at the end of the function since they may be
2949 referenced by our caller. */
2950 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2952 #ifdef EPILOGUE_USES
2953 || EPILOGUE_USES (i)
2956 SET_REGNO_REG_SET (set, i);
2958 /* Mark all call-saved registers that we actaully used. */
2959 if (HAVE_epilogue && reload_completed)
2961 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2962 if (! call_used_regs[i] && regs_ever_live[i])
2963 SET_REGNO_REG_SET (set, i);
2966 /* Mark function return value. */
2967 diddle_return_value (mark_reg, set);
2970 /* Propagate global life info around the graph of basic blocks. Begin
2971 considering blocks with their corresponding bit set in BLOCKS_IN.
2972 BLOCKS_OUT is set for every block that was changed. */
2975 calculate_global_regs_live (blocks_in, blocks_out, flags)
2976 sbitmap blocks_in, blocks_out;
2979 basic_block *queue, *qhead, *qtail, *qend;
2980 regset tmp, new_live_at_end;
2981 regset_head tmp_head;
2982 regset_head new_live_at_end_head;
2985 tmp = INITIALIZE_REG_SET (tmp_head);
2986 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
2988 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
2989 because the `head == tail' style test for an empty queue doesn't
2990 work with a full queue. */
2991 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
2993 qhead = qend = queue + n_basic_blocks + 2;
2995 /* Clear out the garbage that might be hanging out in bb->aux. */
2996 for (i = n_basic_blocks - 1; i >= 0; --i)
2997 BASIC_BLOCK (i)->aux = NULL;
2999 /* Queue the blocks set in the initial mask. Do this in reverse block
3000 number order so that we are more likely for the first round to do
3001 useful work. We use AUX non-null to flag that the block is queued. */
3002 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3004 basic_block bb = BASIC_BLOCK (i);
3009 sbitmap_zero (blocks_out);
3011 while (qhead != qtail)
3013 int rescan, changed;
3022 /* Begin by propogating live_at_start from the successor blocks. */
3023 CLEAR_REG_SET (new_live_at_end);
3024 for (e = bb->succ; e ; e = e->succ_next)
3026 basic_block sb = e->dest;
3027 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3030 if (bb == ENTRY_BLOCK_PTR)
3032 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3036 /* On our first pass through this block, we'll go ahead and continue.
3037 Recognize first pass by local_set NULL. On subsequent passes, we
3038 get to skip out early if live_at_end wouldn't have changed. */
3040 if (bb->local_set == NULL)
3042 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3047 /* If any bits were removed from live_at_end, we'll have to
3048 rescan the block. This wouldn't be necessary if we had
3049 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3050 local_live is really dependant on live_at_end. */
3051 CLEAR_REG_SET (tmp);
3052 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3053 new_live_at_end, BITMAP_AND_COMPL);
3057 /* Find the set of changed bits. Take this opportunity
3058 to notice that this set is empty and early out. */
3059 CLEAR_REG_SET (tmp);
3060 changed = bitmap_operation (tmp, bb->global_live_at_end,
3061 new_live_at_end, BITMAP_XOR);
3065 /* If any of the changed bits overlap with local_set,
3066 we'll have to rescan the block. Detect overlap by
3067 the AND with ~local_set turning off bits. */
3068 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3073 /* Let our caller know that BB changed enough to require its
3074 death notes updated. */
3075 SET_BIT (blocks_out, bb->index);
3079 /* Add to live_at_start the set of all registers in
3080 new_live_at_end that aren't in the old live_at_end. */
3082 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3084 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3086 changed = bitmap_operation (bb->global_live_at_start,
3087 bb->global_live_at_start,
3094 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3096 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3097 into live_at_start. */
3098 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3100 /* If live_at start didn't change, no need to go farther. */
3101 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3104 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3107 /* Queue all predecessors of BB so that we may re-examine
3108 their live_at_end. */
3109 for (e = bb->pred; e ; e = e->pred_next)
3111 basic_block pb = e->src;
3112 if (pb->aux == NULL)
3123 FREE_REG_SET (new_live_at_end);
3125 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3127 basic_block bb = BASIC_BLOCK (i);
3128 FREE_REG_SET (bb->local_set);
3134 /* Subroutines of life analysis. */
3136 /* Allocate the permanent data structures that represent the results
3137 of life analysis. Not static since used also for stupid life analysis. */
3140 allocate_bb_life_data ()
3144 for (i = 0; i < n_basic_blocks; i++)
3146 basic_block bb = BASIC_BLOCK (i);
3148 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3149 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3152 ENTRY_BLOCK_PTR->global_live_at_end
3153 = OBSTACK_ALLOC_REG_SET (function_obstack);
3154 EXIT_BLOCK_PTR->global_live_at_start
3155 = OBSTACK_ALLOC_REG_SET (function_obstack);
3157 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3161 allocate_reg_life_data ()
3165 /* Recalculate the register space, in case it has grown. Old style
3166 vector oriented regsets would set regset_{size,bytes} here also. */
3167 allocate_reg_info (max_regno, FALSE, FALSE);
3169 /* Reset all the data we'll collect in propagate_block and its
3171 for (i = 0; i < max_regno; i++)
3175 REG_N_DEATHS (i) = 0;
3176 REG_N_CALLS_CROSSED (i) = 0;
3177 REG_LIVE_LENGTH (i) = 0;
3178 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3182 /* Compute the registers live at the beginning of a basic block
3183 from those live at the end.
3185 When called, OLD contains those live at the end.
3186 On return, it contains those live at the beginning.
3187 FIRST and LAST are the first and last insns of the basic block.
3189 FINAL is nonzero if we are doing the final pass which is not
3190 for computing the life info (since that has already been done)
3191 but for acting on it. On this pass, we delete dead stores,
3192 set up the logical links and dead-variables lists of instructions,
3193 and merge instructions for autoincrement and autodecrement addresses.
3195 SIGNIFICANT is nonzero only the first time for each basic block.
3196 If it is nonzero, it points to a regset in which we store
3197 a 1 for each register that is set within the block.
3199 BNUM is the number of the basic block. */
3202 propagate_block (bb, old, significant, flags)
3211 regset_head live_head;
3213 regset_head dead_head;
3215 /* Find the loop depth for this block. Ignore loop level changes in the
3216 middle of the basic block -- for register allocation purposes, the
3217 important uses will be in the blocks wholely contained within the loop
3218 not in the loop pre-header or post-trailer. */
3219 loop_depth = bb->loop_depth;
3221 dead = INITIALIZE_REG_SET (live_head);
3222 live = INITIALIZE_REG_SET (dead_head);
3226 if (flags & PROP_REG_INFO)
3230 /* Process the regs live at the end of the block.
3231 Mark them as not local to any one basic block. */
3232 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3234 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3238 /* Scan the block an insn at a time from end to beginning. */
3240 for (insn = bb->end; ; insn = prev)
3242 prev = PREV_INSN (insn);
3244 if (GET_CODE (insn) == NOTE)
3246 /* If this is a call to `setjmp' et al,
3247 warn if any non-volatile datum is live. */
3249 if ((flags & PROP_REG_INFO)
3250 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3251 IOR_REG_SET (regs_live_at_setjmp, old);
3254 /* Update the life-status of regs for this insn.
3255 First DEAD gets which regs are set in this insn
3256 then LIVE gets which regs are used in this insn.
3257 Then the regs live before the insn
3258 are those live after, with DEAD regs turned off,
3259 and then LIVE regs turned on. */
3261 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3264 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3265 int insn_is_dead = 0;
3266 int libcall_is_dead = 0;
3268 if (flags & PROP_SCAN_DEAD_CODE)
3270 insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
3272 libcall_is_dead = (insn_is_dead && note != 0
3273 && libcall_dead_p (PATTERN (insn), old,
3277 /* We almost certainly don't want to delete prologue or epilogue
3278 instructions. Warn about probable compiler losage. */
3281 && (((HAVE_epilogue || HAVE_prologue)
3282 && prologue_epilogue_contains (insn))
3283 || (HAVE_sibcall_epilogue
3284 && sibcall_epilogue_contains (insn))))
3286 if (flags & PROP_KILL_DEAD_CODE)
3288 warning ("ICE: would have deleted prologue/epilogue insn");
3289 if (!inhibit_warnings)
3292 libcall_is_dead = insn_is_dead = 0;
3295 /* If an instruction consists of just dead store(s) on final pass,
3296 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3297 We could really delete it with delete_insn, but that
3298 can cause trouble for first or last insn in a basic block. */
3299 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3302 /* If the insn referred to a label, note that the label is
3304 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3306 if (REG_NOTE_KIND (inote) == REG_LABEL)
3308 rtx label = XEXP (inote, 0);
3310 LABEL_NUSES (label)--;
3312 /* If this label was attached to an ADDR_VEC, it's
3313 safe to delete the ADDR_VEC. In fact, it's pretty
3314 much mandatory to delete it, because the ADDR_VEC may
3315 be referencing labels that no longer exist. */
3316 if (LABEL_NUSES (label) == 0
3317 && (next = next_nonnote_insn (label)) != NULL
3318 && GET_CODE (next) == JUMP_INSN
3319 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3320 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3322 rtx pat = PATTERN (next);
3323 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3324 int len = XVECLEN (pat, diff_vec_p);
3326 for (i = 0; i < len; i++)
3327 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3328 PUT_CODE (next, NOTE);
3329 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3330 NOTE_SOURCE_FILE (next) = 0;
3332 if ((next = next_nonnote_insn (label)) != NULL
3333 && GET_CODE (next) == BARRIER)
3335 PUT_CODE (next, NOTE);
3336 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3337 NOTE_SOURCE_FILE (next) = 0;
3343 PUT_CODE (insn, NOTE);
3344 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3345 NOTE_SOURCE_FILE (insn) = 0;
3347 /* CC0 is now known to be dead. Either this insn used it,
3348 in which case it doesn't anymore, or clobbered it,
3349 so the next insn can't use it. */
3352 /* If this insn is copying the return value from a library call,
3353 delete the entire library call. */
3354 if (libcall_is_dead)
3356 rtx first = XEXP (note, 0);
3358 while (INSN_DELETED_P (first))
3359 first = NEXT_INSN (first);
3364 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3365 NOTE_SOURCE_FILE (p) = 0;
3371 CLEAR_REG_SET (dead);
3372 CLEAR_REG_SET (live);
3374 /* See if this is an increment or decrement that can be
3375 merged into a following memory address. */
3378 register rtx x = single_set (insn);
3380 /* Does this instruction increment or decrement a register? */
3381 if (!reload_completed
3382 && (flags & PROP_AUTOINC)
3384 && GET_CODE (SET_DEST (x)) == REG
3385 && (GET_CODE (SET_SRC (x)) == PLUS
3386 || GET_CODE (SET_SRC (x)) == MINUS)
3387 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3388 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3389 /* Ok, look for a following memory ref we can combine with.
3390 If one is found, change the memory ref to a PRE_INC
3391 or PRE_DEC, cancel this insn, and return 1.
3392 Return 0 if nothing has been done. */
3393 && try_pre_increment_1 (insn))
3396 #endif /* AUTO_INC_DEC */
3398 /* If this is not the final pass, and this insn is copying the
3399 value of a library call and it's dead, don't scan the
3400 insns that perform the library call, so that the call's
3401 arguments are not marked live. */
3402 if (libcall_is_dead)
3404 /* Mark the dest reg as `significant'. */
3405 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3406 significant, flags);
3408 insn = XEXP (note, 0);
3409 prev = PREV_INSN (insn);
3411 else if (GET_CODE (PATTERN (insn)) == SET
3412 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3413 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3414 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3415 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3416 /* We have an insn to pop a constant amount off the stack.
3417 (Such insns use PLUS regardless of the direction of the stack,
3418 and any insn to adjust the stack by a constant is always a pop.)
3419 These insns, if not dead stores, have no effect on life. */
3423 /* Any regs live at the time of a call instruction
3424 must not go in a register clobbered by calls.
3425 Find all regs now live and record this for them. */
3427 if (GET_CODE (insn) == CALL_INSN
3428 && (flags & PROP_REG_INFO))
3429 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3431 REG_N_CALLS_CROSSED (i)++;
3434 /* LIVE gets the regs used in INSN;
3435 DEAD gets those set by it. Dead insns don't make anything
3438 mark_set_regs (old, dead, PATTERN (insn),
3439 insn, significant, flags);
3441 /* If an insn doesn't use CC0, it becomes dead since we
3442 assume that every insn clobbers it. So show it dead here;
3443 mark_used_regs will set it live if it is referenced. */
3447 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3449 /* Sometimes we may have inserted something before INSN (such as
3450 a move) when we make an auto-inc. So ensure we will scan
3453 prev = PREV_INSN (insn);
3456 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3462 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3464 note = XEXP (note, 1))
3465 if (GET_CODE (XEXP (note, 0)) == USE)
3466 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3469 /* Each call clobbers all call-clobbered regs that are not
3470 global or fixed. Note that the function-value reg is a
3471 call-clobbered reg, and mark_set_regs has already had
3472 a chance to handle it. */
3474 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3475 if (call_used_regs[i] && ! global_regs[i]
3478 SET_REGNO_REG_SET (dead, i);
3480 SET_REGNO_REG_SET (significant, i);
3483 /* The stack ptr is used (honorarily) by a CALL insn. */
3484 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3486 /* Calls may also reference any of the global registers,
3487 so they are made live. */
3488 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3490 mark_used_regs (old, live,
3491 gen_rtx_REG (reg_raw_mode[i], i),
3494 /* Calls also clobber memory. */
3495 free_EXPR_LIST_list (&mem_set_list);
3498 /* Update OLD for the registers used or set. */
3499 AND_COMPL_REG_SET (old, dead);
3500 IOR_REG_SET (old, live);
3504 /* On final pass, update counts of how many insns in which
3505 each reg is live. */
3506 if (flags & PROP_REG_INFO)
3507 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
3510 if (insn == bb->head)
3514 FREE_REG_SET (dead);
3515 FREE_REG_SET (live);
3516 free_EXPR_LIST_list (&mem_set_list);
3519 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3520 (SET expressions whose destinations are registers dead after the insn).
3521 NEEDED is the regset that says which regs are alive after the insn.
3523 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3525 If X is the entire body of an insn, NOTES contains the reg notes
3526 pertaining to the insn. */
3529 insn_dead_p (x, needed, call_ok, notes)
3533 rtx notes ATTRIBUTE_UNUSED;
3535 enum rtx_code code = GET_CODE (x);
3538 /* If flow is invoked after reload, we must take existing AUTO_INC
3539 expresions into account. */
3540 if (reload_completed)
3542 for ( ; notes; notes = XEXP (notes, 1))
3544 if (REG_NOTE_KIND (notes) == REG_INC)
3546 int regno = REGNO (XEXP (notes, 0));
3548 /* Don't delete insns to set global regs. */
3549 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3550 || REGNO_REG_SET_P (needed, regno))
3557 /* If setting something that's a reg or part of one,
3558 see if that register's altered value will be live. */
3562 rtx r = SET_DEST (x);
3565 if (GET_CODE (r) == CC0)
3569 /* A SET that is a subroutine call cannot be dead. */
3570 if (GET_CODE (SET_SRC (x)) == CALL)
3576 /* Don't eliminate loads from volatile memory or volatile asms. */
3577 else if (volatile_refs_p (SET_SRC (x)))
3580 if (GET_CODE (r) == MEM)
3584 if (MEM_VOLATILE_P (r))
3587 /* Walk the set of memory locations we are currently tracking
3588 and see if one is an identical match to this memory location.
3589 If so, this memory write is dead (remember, we're walking
3590 backwards from the end of the block to the start. */
3591 temp = mem_set_list;
3594 if (rtx_equal_p (XEXP (temp, 0), r))
3596 temp = XEXP (temp, 1);
3601 while (GET_CODE (r) == SUBREG
3602 || GET_CODE (r) == STRICT_LOW_PART
3603 || GET_CODE (r) == ZERO_EXTRACT)
3606 if (GET_CODE (r) == REG)
3608 int regno = REGNO (r);
3611 if (REGNO_REG_SET_P (needed, regno))
3614 /* If this is a hard register, verify that subsequent
3615 words are not needed. */
3616 if (regno < FIRST_PSEUDO_REGISTER)
3618 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3621 if (REGNO_REG_SET_P (needed, regno+n))
3625 /* Don't delete insns to set global regs. */
3626 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3629 /* Make sure insns to set the stack pointer aren't deleted. */
3630 if (regno == STACK_POINTER_REGNUM)
3633 /* Make sure insns to set the frame pointer aren't deleted. */
3634 if (regno == FRAME_POINTER_REGNUM
3635 && (! reload_completed || frame_pointer_needed))
3637 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3638 if (regno == HARD_FRAME_POINTER_REGNUM
3639 && (! reload_completed || frame_pointer_needed))
3643 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3644 /* Make sure insns to set arg pointer are never deleted
3645 (if the arg pointer isn't fixed, there will be a USE
3646 for it, so we can treat it normally). */
3647 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3651 /* Otherwise, the set is dead. */
3657 /* If performing several activities, insn is dead if each activity
3658 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3659 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3661 else if (code == PARALLEL)
3663 int i = XVECLEN (x, 0);
3665 for (i--; i >= 0; i--)
3666 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3667 && GET_CODE (XVECEXP (x, 0, i)) != USE
3668 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3674 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3675 is not necessarily true for hard registers. */
3676 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3677 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3678 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3681 /* We do not check other CLOBBER or USE here. An insn consisting of just
3682 a CLOBBER or just a USE should not be deleted. */
3686 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3687 return 1 if the entire library call is dead.
3688 This is true if X copies a register (hard or pseudo)
3689 and if the hard return reg of the call insn is dead.
3690 (The caller should have tested the destination of X already for death.)
3692 If this insn doesn't just copy a register, then we don't
3693 have an ordinary libcall. In that case, cse could not have
3694 managed to substitute the source for the dest later on,
3695 so we can assume the libcall is dead.
3697 NEEDED is the bit vector of pseudoregs live before this insn.
3698 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3701 libcall_dead_p (x, needed, note, insn)
3707 register RTX_CODE code = GET_CODE (x);
3711 register rtx r = SET_SRC (x);
3712 if (GET_CODE (r) == REG)
3714 rtx call = XEXP (note, 0);
3718 /* Find the call insn. */
3719 while (call != insn && GET_CODE (call) != CALL_INSN)
3720 call = NEXT_INSN (call);
3722 /* If there is none, do nothing special,
3723 since ordinary death handling can understand these insns. */
3727 /* See if the hard reg holding the value is dead.
3728 If this is a PARALLEL, find the call within it. */
3729 call_pat = PATTERN (call);
3730 if (GET_CODE (call_pat) == PARALLEL)
3732 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3733 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3734 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3737 /* This may be a library call that is returning a value
3738 via invisible pointer. Do nothing special, since
3739 ordinary death handling can understand these insns. */
3743 call_pat = XVECEXP (call_pat, 0, i);
3746 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3752 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3753 live at function entry. Don't count global register variables, variables
3754 in registers that can be used for function arg passing, or variables in
3755 fixed hard registers. */
3758 regno_uninitialized (regno)
3761 if (n_basic_blocks == 0
3762 || (regno < FIRST_PSEUDO_REGISTER
3763 && (global_regs[regno]
3764 || fixed_regs[regno]
3765 || FUNCTION_ARG_REGNO_P (regno))))
3768 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3771 /* 1 if register REGNO was alive at a place where `setjmp' was called
3772 and was set more than once or is an argument.
3773 Such regs may be clobbered by `longjmp'. */
3776 regno_clobbered_at_setjmp (regno)
3779 if (n_basic_blocks == 0)
3782 return ((REG_N_SETS (regno) > 1
3783 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3784 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3787 /* INSN references memory, possibly using autoincrement addressing modes.
3788 Find any entries on the mem_set_list that need to be invalidated due
3789 to an address change. */
3791 invalidate_mems_from_autoinc (insn)
3794 rtx note = REG_NOTES (insn);
3795 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3797 if (REG_NOTE_KIND (note) == REG_INC)
3799 rtx temp = mem_set_list;
3800 rtx prev = NULL_RTX;
3805 next = XEXP (temp, 1);
3806 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3808 /* Splice temp out of list. */
3810 XEXP (prev, 1) = next;
3812 mem_set_list = next;
3813 free_EXPR_LIST_node (temp);
3823 /* Process the registers that are set within X. Their bits are set to
3824 1 in the regset DEAD, because they are dead prior to this insn.
3826 If INSN is nonzero, it is the insn being processed.
3828 FLAGS is the set of operations to perform. */
3831 mark_set_regs (needed, dead, x, insn, significant, flags)
3839 register RTX_CODE code = GET_CODE (x);
3841 if (code == SET || code == CLOBBER)
3842 mark_set_1 (needed, dead, x, insn, significant, flags);
3843 else if (code == PARALLEL)
3846 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3848 code = GET_CODE (XVECEXP (x, 0, i));
3849 if (code == SET || code == CLOBBER)
3850 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3851 significant, flags);
3856 /* Process a single SET rtx, X. */
3859 mark_set_1 (needed, dead, x, insn, significant, flags)
3867 register int regno = -1;
3868 register rtx reg = SET_DEST (x);
3870 /* Some targets place small structures in registers for
3871 return values of functions. We have to detect this
3872 case specially here to get correct flow information. */
3873 if (GET_CODE (reg) == PARALLEL
3874 && GET_MODE (reg) == BLKmode)
3878 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3879 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3880 significant, flags);
3884 /* Modifying just one hardware register of a multi-reg value
3885 or just a byte field of a register
3886 does not mean the value from before this insn is now dead.
3887 But it does mean liveness of that register at the end of the block
3890 Within mark_set_1, however, we treat it as if the register is
3891 indeed modified. mark_used_regs will, however, also treat this
3892 register as being used. Thus, we treat these insns as setting a
3893 new value for the register as a function of its old value. This
3894 cases LOG_LINKS to be made appropriately and this will help combine. */
3896 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3897 || GET_CODE (reg) == SIGN_EXTRACT
3898 || GET_CODE (reg) == STRICT_LOW_PART)
3899 reg = XEXP (reg, 0);
3901 /* If this set is a MEM, then it kills any aliased writes.
3902 If this set is a REG, then it kills any MEMs which use the reg. */
3903 if (flags & PROP_SCAN_DEAD_CODE)
3905 if (GET_CODE (reg) == MEM
3906 || GET_CODE (reg) == REG)
3908 rtx temp = mem_set_list;
3909 rtx prev = NULL_RTX;
3914 next = XEXP (temp, 1);
3915 if ((GET_CODE (reg) == MEM
3916 && output_dependence (XEXP (temp, 0), reg))
3917 || (GET_CODE (reg) == REG
3918 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3920 /* Splice this entry out of the list. */
3922 XEXP (prev, 1) = next;
3924 mem_set_list = next;
3925 free_EXPR_LIST_node (temp);
3933 /* If the memory reference had embedded side effects (autoincrement
3934 address modes. Then we may need to kill some entries on the
3936 if (insn && GET_CODE (reg) == MEM)
3937 invalidate_mems_from_autoinc (insn);
3939 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3940 /* We do not know the size of a BLKmode store, so we do not track
3941 them for redundant store elimination. */
3942 && GET_MODE (reg) != BLKmode
3943 /* There are no REG_INC notes for SP, so we can't assume we'll see
3944 everything that invalidates it. To be safe, don't eliminate any
3945 stores though SP; none of them should be redundant anyway. */
3946 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3947 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3950 if (GET_CODE (reg) == REG
3951 && (regno = REGNO (reg),
3952 ! (regno == FRAME_POINTER_REGNUM
3953 && (! reload_completed || frame_pointer_needed)))
3954 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3955 && ! (regno == HARD_FRAME_POINTER_REGNUM
3956 && (! reload_completed || frame_pointer_needed))
3958 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3959 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3961 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3962 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3964 int some_needed = REGNO_REG_SET_P (needed, regno);
3965 int some_not_needed = ! some_needed;
3967 /* Mark it as a significant register for this basic block. */
3969 SET_REGNO_REG_SET (significant, regno);
3971 /* Mark it as dead before this insn. */
3972 SET_REGNO_REG_SET (dead, regno);
3974 /* A hard reg in a wide mode may really be multiple registers.
3975 If so, mark all of them just like the first. */
3976 if (regno < FIRST_PSEUDO_REGISTER)
3980 /* Nothing below is needed for the stack pointer; get out asap.
3981 Eg, log links aren't needed, since combine won't use them. */
3982 if (regno == STACK_POINTER_REGNUM)
3985 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3988 int regno_n = regno + n;
3989 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3991 SET_REGNO_REG_SET (significant, regno_n);
3993 SET_REGNO_REG_SET (dead, regno_n);
3994 some_needed |= needed_regno;
3995 some_not_needed |= ! needed_regno;
3999 /* Additional data to record if this is the final pass. */
4000 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4001 | PROP_DEATH_NOTES | PROP_AUTOINC))
4004 register int blocknum = BLOCK_NUM (insn);
4007 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4008 y = reg_next_use[regno];
4010 /* If this is a hard reg, record this function uses the reg. */
4012 if (regno < FIRST_PSEUDO_REGISTER)
4015 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4017 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4018 for (i = regno; i < endregno; i++)
4020 /* The next use is no longer "next", since a store
4022 reg_next_use[i] = 0;
4025 if (flags & PROP_REG_INFO)
4026 for (i = regno; i < endregno; i++)
4028 regs_ever_live[i] = 1;
4034 /* The next use is no longer "next", since a store
4036 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4037 reg_next_use[regno] = 0;
4039 /* Keep track of which basic blocks each reg appears in. */
4041 if (flags & PROP_REG_INFO)
4043 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4044 REG_BASIC_BLOCK (regno) = blocknum;
4045 else if (REG_BASIC_BLOCK (regno) != blocknum)
4046 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4048 /* Count (weighted) references, stores, etc. This counts a
4049 register twice if it is modified, but that is correct. */
4050 REG_N_SETS (regno)++;
4051 REG_N_REFS (regno) += loop_depth + 1;
4053 /* The insns where a reg is live are normally counted
4054 elsewhere, but we want the count to include the insn
4055 where the reg is set, and the normal counting mechanism
4056 would not count it. */
4057 REG_LIVE_LENGTH (regno)++;
4061 if (! some_not_needed)
4063 if (flags & PROP_LOG_LINKS)
4065 /* Make a logical link from the next following insn
4066 that uses this register, back to this insn.
4067 The following insns have already been processed.
4069 We don't build a LOG_LINK for hard registers containing
4070 in ASM_OPERANDs. If these registers get replaced,
4071 we might wind up changing the semantics of the insn,
4072 even if reload can make what appear to be valid
4073 assignments later. */
4074 if (y && (BLOCK_NUM (y) == blocknum)
4075 && (regno >= FIRST_PSEUDO_REGISTER
4076 || asm_noperands (PATTERN (y)) < 0))
4077 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4080 else if (! some_needed)
4082 if (flags & PROP_REG_INFO)
4083 REG_N_DEATHS (REGNO (reg))++;
4085 if (flags & PROP_DEATH_NOTES)
4087 /* Note that dead stores have already been deleted
4088 when possible. If we get here, we have found a
4089 dead store that cannot be eliminated (because the
4090 same insn does something useful). Indicate this
4091 by marking the reg being set as dying here. */
4093 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4098 if (flags & PROP_DEATH_NOTES)
4100 /* This is a case where we have a multi-word hard register
4101 and some, but not all, of the words of the register are
4102 needed in subsequent insns. Write REG_UNUSED notes
4103 for those parts that were not needed. This case should
4108 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4110 if (!REGNO_REG_SET_P (needed, regno + i))
4114 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4120 else if (GET_CODE (reg) == REG)
4122 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4123 reg_next_use[regno] = 0;
4126 /* If this is the last pass and this is a SCRATCH, show it will be dying
4127 here and count it. */
4128 else if (GET_CODE (reg) == SCRATCH)
4130 if (flags & PROP_DEATH_NOTES)
4132 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4138 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4142 find_auto_inc (needed, x, insn)
4147 rtx addr = XEXP (x, 0);
4148 HOST_WIDE_INT offset = 0;
4151 /* Here we detect use of an index register which might be good for
4152 postincrement, postdecrement, preincrement, or predecrement. */
4154 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4155 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4157 if (GET_CODE (addr) == REG)
4160 register int size = GET_MODE_SIZE (GET_MODE (x));
4163 int regno = REGNO (addr);
4165 /* Is the next use an increment that might make auto-increment? */
4166 if ((incr = reg_next_use[regno]) != 0
4167 && (set = single_set (incr)) != 0
4168 && GET_CODE (set) == SET
4169 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4170 /* Can't add side effects to jumps; if reg is spilled and
4171 reloaded, there's no way to store back the altered value. */
4172 && GET_CODE (insn) != JUMP_INSN
4173 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4174 && XEXP (y, 0) == addr
4175 && GET_CODE (XEXP (y, 1)) == CONST_INT
4176 && ((HAVE_POST_INCREMENT
4177 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4178 || (HAVE_POST_DECREMENT
4179 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4180 || (HAVE_PRE_INCREMENT
4181 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4182 || (HAVE_PRE_DECREMENT
4183 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4184 /* Make sure this reg appears only once in this insn. */
4185 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4186 use != 0 && use != (rtx) 1))
4188 rtx q = SET_DEST (set);
4189 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4190 ? (offset ? PRE_INC : POST_INC)
4191 : (offset ? PRE_DEC : POST_DEC));
4193 if (dead_or_set_p (incr, addr))
4195 /* This is the simple case. Try to make the auto-inc. If
4196 we can't, we are done. Otherwise, we will do any
4197 needed updates below. */
4198 if (! validate_change (insn, &XEXP (x, 0),
4199 gen_rtx_fmt_e (inc_code, Pmode, addr),
4203 else if (GET_CODE (q) == REG
4204 /* PREV_INSN used here to check the semi-open interval
4206 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4207 /* We must also check for sets of q as q may be
4208 a call clobbered hard register and there may
4209 be a call between PREV_INSN (insn) and incr. */
4210 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4212 /* We have *p followed sometime later by q = p+size.
4213 Both p and q must be live afterward,
4214 and q is not used between INSN and its assignment.
4215 Change it to q = p, ...*q..., q = q+size.
4216 Then fall into the usual case. */
4221 emit_move_insn (q, addr);
4222 insns = get_insns ();
4225 bb = BLOCK_FOR_INSN (insn);
4226 for (temp = insns; temp; temp = NEXT_INSN (temp))
4227 set_block_for_insn (temp, bb);
4229 /* If we can't make the auto-inc, or can't make the
4230 replacement into Y, exit. There's no point in making
4231 the change below if we can't do the auto-inc and doing
4232 so is not correct in the pre-inc case. */
4234 validate_change (insn, &XEXP (x, 0),
4235 gen_rtx_fmt_e (inc_code, Pmode, q),
4237 validate_change (incr, &XEXP (y, 0), q, 1);
4238 if (! apply_change_group ())
4241 /* We now know we'll be doing this change, so emit the
4242 new insn(s) and do the updates. */
4243 emit_insns_before (insns, insn);
4245 if (BLOCK_FOR_INSN (insn)->head == insn)
4246 BLOCK_FOR_INSN (insn)->head = insns;
4248 /* INCR will become a NOTE and INSN won't contain a
4249 use of ADDR. If a use of ADDR was just placed in
4250 the insn before INSN, make that the next use.
4251 Otherwise, invalidate it. */
4252 if (GET_CODE (PREV_INSN (insn)) == INSN
4253 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4254 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4255 reg_next_use[regno] = PREV_INSN (insn);
4257 reg_next_use[regno] = 0;
4262 /* REGNO is now used in INCR which is below INSN, but
4263 it previously wasn't live here. If we don't mark
4264 it as needed, we'll put a REG_DEAD note for it
4265 on this insn, which is incorrect. */
4266 SET_REGNO_REG_SET (needed, regno);
4268 /* If there are any calls between INSN and INCR, show
4269 that REGNO now crosses them. */
4270 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4271 if (GET_CODE (temp) == CALL_INSN)
4272 REG_N_CALLS_CROSSED (regno)++;
4277 /* If we haven't returned, it means we were able to make the
4278 auto-inc, so update the status. First, record that this insn
4279 has an implicit side effect. */
4282 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4284 /* Modify the old increment-insn to simply copy
4285 the already-incremented value of our register. */
4286 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4289 /* If that makes it a no-op (copying the register into itself) delete
4290 it so it won't appear to be a "use" and a "set" of this
4292 if (SET_DEST (set) == addr)
4294 PUT_CODE (incr, NOTE);
4295 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4296 NOTE_SOURCE_FILE (incr) = 0;
4299 if (regno >= FIRST_PSEUDO_REGISTER)
4301 /* Count an extra reference to the reg. When a reg is
4302 incremented, spilling it is worse, so we want to make
4303 that less likely. */
4304 REG_N_REFS (regno) += loop_depth + 1;
4306 /* Count the increment as a setting of the register,
4307 even though it isn't a SET in rtl. */
4308 REG_N_SETS (regno)++;
4313 #endif /* AUTO_INC_DEC */
4315 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4316 This is done assuming the registers needed from X
4317 are those that have 1-bits in NEEDED.
4319 FLAGS is the set of enabled operations.
4321 INSN is the containing instruction. If INSN is dead, this function is not
4325 mark_used_regs (needed, live, x, flags, insn)
4332 register RTX_CODE code;
4337 code = GET_CODE (x);
4357 /* If we are clobbering a MEM, mark any registers inside the address
4359 if (GET_CODE (XEXP (x, 0)) == MEM)
4360 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4364 /* Don't bother watching stores to mems if this is not the
4365 final pass. We'll not be deleting dead stores this round. */
4366 if (flags & PROP_SCAN_DEAD_CODE)
4368 /* Invalidate the data for the last MEM stored, but only if MEM is
4369 something that can be stored into. */
4370 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4371 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4372 ; /* needn't clear the memory set list */
4375 rtx temp = mem_set_list;
4376 rtx prev = NULL_RTX;
4381 next = XEXP (temp, 1);
4382 if (anti_dependence (XEXP (temp, 0), x))
4384 /* Splice temp out of the list. */
4386 XEXP (prev, 1) = next;
4388 mem_set_list = next;
4389 free_EXPR_LIST_node (temp);
4397 /* If the memory reference had embedded side effects (autoincrement
4398 address modes. Then we may need to kill some entries on the
4401 invalidate_mems_from_autoinc (insn);
4405 if (flags & PROP_AUTOINC)
4406 find_auto_inc (needed, x, insn);
4411 if (GET_CODE (SUBREG_REG (x)) == REG
4412 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4413 && (GET_MODE_SIZE (GET_MODE (x))
4414 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4415 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4417 /* While we're here, optimize this case. */
4420 /* In case the SUBREG is not of a register, don't optimize */
4421 if (GET_CODE (x) != REG)
4423 mark_used_regs (needed, live, x, flags, insn);
4427 /* ... fall through ... */
4430 /* See a register other than being set
4431 => mark it as needed. */
4435 int some_needed = REGNO_REG_SET_P (needed, regno);
4436 int some_not_needed = ! some_needed;
4438 SET_REGNO_REG_SET (live, regno);
4440 /* A hard reg in a wide mode may really be multiple registers.
4441 If so, mark all of them just like the first. */
4442 if (regno < FIRST_PSEUDO_REGISTER)
4446 /* For stack ptr or fixed arg pointer,
4447 nothing below can be necessary, so waste no more time. */
4448 if (regno == STACK_POINTER_REGNUM
4449 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4450 || (regno == HARD_FRAME_POINTER_REGNUM
4451 && (! reload_completed || frame_pointer_needed))
4453 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4454 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4456 || (regno == FRAME_POINTER_REGNUM
4457 && (! reload_completed || frame_pointer_needed)))
4459 /* If this is a register we are going to try to eliminate,
4460 don't mark it live here. If we are successful in
4461 eliminating it, it need not be live unless it is used for
4462 pseudos, in which case it will have been set live when
4463 it was allocated to the pseudos. If the register will not
4464 be eliminated, reload will set it live at that point. */
4466 if ((flags & PROP_REG_INFO)
4467 && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4468 regs_ever_live[regno] = 1;
4471 /* No death notes for global register variables;
4472 their values are live after this function exits. */
4473 if (global_regs[regno])
4475 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4476 reg_next_use[regno] = insn;
4480 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4483 int regno_n = regno + n;
4484 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4486 SET_REGNO_REG_SET (live, regno_n);
4487 some_needed |= needed_regno;
4488 some_not_needed |= ! needed_regno;
4492 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4494 /* Record where each reg is used, so when the reg
4495 is set we know the next insn that uses it. */
4497 reg_next_use[regno] = insn;
4499 if (flags & PROP_REG_INFO)
4501 if (regno < FIRST_PSEUDO_REGISTER)
4503 /* If a hard reg is being used,
4504 record that this function does use it. */
4506 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4510 regs_ever_live[regno + --i] = 1;
4515 /* Keep track of which basic block each reg appears in. */
4517 register int blocknum = BLOCK_NUM (insn);
4519 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4520 REG_BASIC_BLOCK (regno) = blocknum;
4521 else if (REG_BASIC_BLOCK (regno) != blocknum)
4522 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4524 /* Count (weighted) number of uses of each reg. */
4526 REG_N_REFS (regno) += loop_depth + 1;
4530 /* Record and count the insns in which a reg dies.
4531 If it is used in this insn and was dead below the insn
4532 then it dies in this insn. If it was set in this insn,
4533 we do not make a REG_DEAD note; likewise if we already
4534 made such a note. */
4536 if (flags & PROP_DEATH_NOTES)
4539 && ! dead_or_set_p (insn, x)
4541 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4545 /* Check for the case where the register dying partially
4546 overlaps the register set by this insn. */
4547 if (regno < FIRST_PSEUDO_REGISTER
4548 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4550 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4552 some_needed |= dead_or_set_regno_p (insn, regno + n);
4555 /* If none of the words in X is needed, make a REG_DEAD
4556 note. Otherwise, we must make partial REG_DEAD notes. */
4560 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4561 REG_N_DEATHS (regno)++;
4567 /* Don't make a REG_DEAD note for a part of a register
4568 that is set in the insn. */
4570 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4572 if (!REGNO_REG_SET_P (needed, regno + i)
4573 && ! dead_or_set_regno_p (insn, regno + i))
4576 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4587 register rtx testreg = SET_DEST (x);
4590 /* If storing into MEM, don't show it as being used. But do
4591 show the address as being used. */
4592 if (GET_CODE (testreg) == MEM)
4595 if (flags & PROP_AUTOINC)
4596 find_auto_inc (needed, testreg, insn);
4598 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4599 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4603 /* Storing in STRICT_LOW_PART is like storing in a reg
4604 in that this SET might be dead, so ignore it in TESTREG.
4605 but in some other ways it is like using the reg.
4607 Storing in a SUBREG or a bit field is like storing the entire
4608 register in that if the register's value is not used
4609 then this SET is not needed. */
4610 while (GET_CODE (testreg) == STRICT_LOW_PART
4611 || GET_CODE (testreg) == ZERO_EXTRACT
4612 || GET_CODE (testreg) == SIGN_EXTRACT
4613 || GET_CODE (testreg) == SUBREG)
4615 if (GET_CODE (testreg) == SUBREG
4616 && GET_CODE (SUBREG_REG (testreg)) == REG
4617 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4618 && (GET_MODE_SIZE (GET_MODE (testreg))
4619 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4620 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4622 /* Modifying a single register in an alternate mode
4623 does not use any of the old value. But these other
4624 ways of storing in a register do use the old value. */
4625 if (GET_CODE (testreg) == SUBREG
4626 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4631 testreg = XEXP (testreg, 0);
4634 /* If this is a store into a register,
4635 recursively scan the value being stored. */
4637 if ((GET_CODE (testreg) == PARALLEL
4638 && GET_MODE (testreg) == BLKmode)
4639 || (GET_CODE (testreg) == REG
4640 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4641 && (! reload_completed || frame_pointer_needed)))
4642 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4643 && ! (regno == HARD_FRAME_POINTER_REGNUM
4644 && (! reload_completed || frame_pointer_needed))
4646 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4647 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4650 /* We used to exclude global_regs here, but that seems wrong.
4651 Storing in them is like storing in mem. */
4653 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4655 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4662 case UNSPEC_VOLATILE:
4666 /* Traditional and volatile asm instructions must be considered to use
4667 and clobber all hard registers, all pseudo-registers and all of
4668 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4670 Consider for instance a volatile asm that changes the fpu rounding
4671 mode. An insn should not be moved across this even if it only uses
4672 pseudo-regs because it might give an incorrectly rounded result.
4674 ?!? Unfortunately, marking all hard registers as live causes massive
4675 problems for the register allocator and marking all pseudos as live
4676 creates mountains of uninitialized variable warnings.
4678 So for now, just clear the memory set list and mark any regs
4679 we can find in ASM_OPERANDS as used. */
4680 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4681 free_EXPR_LIST_list (&mem_set_list);
4683 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4684 We can not just fall through here since then we would be confused
4685 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4686 traditional asms unlike their normal usage. */
4687 if (code == ASM_OPERANDS)
4691 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4692 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4703 /* Recursively scan the operands of this expression. */
4706 register const char *fmt = GET_RTX_FORMAT (code);
4709 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4713 /* Tail recursive case: save a function call level. */
4719 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4721 else if (fmt[i] == 'E')
4724 for (j = 0; j < XVECLEN (x, i); j++)
4725 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4734 try_pre_increment_1 (insn)
4737 /* Find the next use of this reg. If in same basic block,
4738 make it do pre-increment or pre-decrement if appropriate. */
4739 rtx x = single_set (insn);
4740 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4741 * INTVAL (XEXP (SET_SRC (x), 1)));
4742 int regno = REGNO (SET_DEST (x));
4743 rtx y = reg_next_use[regno];
4745 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4746 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4747 mode would be better. */
4748 && ! dead_or_set_p (y, SET_DEST (x))
4749 && try_pre_increment (y, SET_DEST (x), amount))
4751 /* We have found a suitable auto-increment
4752 and already changed insn Y to do it.
4753 So flush this increment-instruction. */
4754 PUT_CODE (insn, NOTE);
4755 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4756 NOTE_SOURCE_FILE (insn) = 0;
4757 /* Count a reference to this reg for the increment
4758 insn we are deleting. When a reg is incremented.
4759 spilling it is worse, so we want to make that
4761 if (regno >= FIRST_PSEUDO_REGISTER)
4763 REG_N_REFS (regno) += loop_depth + 1;
4764 REG_N_SETS (regno)++;
4771 /* Try to change INSN so that it does pre-increment or pre-decrement
4772 addressing on register REG in order to add AMOUNT to REG.
4773 AMOUNT is negative for pre-decrement.
4774 Returns 1 if the change could be made.
4775 This checks all about the validity of the result of modifying INSN. */
4778 try_pre_increment (insn, reg, amount)
4780 HOST_WIDE_INT amount;
4784 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4785 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4787 /* Nonzero if we can try to make a post-increment or post-decrement.
4788 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4789 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4790 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4793 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4796 /* From the sign of increment, see which possibilities are conceivable
4797 on this target machine. */
4798 if (HAVE_PRE_INCREMENT && amount > 0)
4800 if (HAVE_POST_INCREMENT && amount > 0)
4803 if (HAVE_PRE_DECREMENT && amount < 0)
4805 if (HAVE_POST_DECREMENT && amount < 0)
4808 if (! (pre_ok || post_ok))
4811 /* It is not safe to add a side effect to a jump insn
4812 because if the incremented register is spilled and must be reloaded
4813 there would be no way to store the incremented value back in memory. */
4815 if (GET_CODE (insn) == JUMP_INSN)
4820 use = find_use_as_address (PATTERN (insn), reg, 0);
4821 if (post_ok && (use == 0 || use == (rtx) 1))
4823 use = find_use_as_address (PATTERN (insn), reg, -amount);
4827 if (use == 0 || use == (rtx) 1)
4830 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4833 /* See if this combination of instruction and addressing mode exists. */
4834 if (! validate_change (insn, &XEXP (use, 0),
4835 gen_rtx_fmt_e (amount > 0
4836 ? (do_post ? POST_INC : PRE_INC)
4837 : (do_post ? POST_DEC : PRE_DEC),
4841 /* Record that this insn now has an implicit side effect on X. */
4842 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4846 #endif /* AUTO_INC_DEC */
4848 /* Find the place in the rtx X where REG is used as a memory address.
4849 Return the MEM rtx that so uses it.
4850 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4851 (plus REG (const_int PLUSCONST)).
4853 If such an address does not appear, return 0.
4854 If REG appears more than once, or is used other than in such an address,
4858 find_use_as_address (x, reg, plusconst)
4861 HOST_WIDE_INT plusconst;
4863 enum rtx_code code = GET_CODE (x);
4864 const char *fmt = GET_RTX_FORMAT (code);
4866 register rtx value = 0;
4869 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4872 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4873 && XEXP (XEXP (x, 0), 0) == reg
4874 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4875 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4878 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4880 /* If REG occurs inside a MEM used in a bit-field reference,
4881 that is unacceptable. */
4882 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4883 return (rtx) (HOST_WIDE_INT) 1;
4887 return (rtx) (HOST_WIDE_INT) 1;
4889 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4893 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4897 return (rtx) (HOST_WIDE_INT) 1;
4899 else if (fmt[i] == 'E')
4902 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4904 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4908 return (rtx) (HOST_WIDE_INT) 1;
4916 /* Write information about registers and basic blocks into FILE.
4917 This is part of making a debugging dump. */
4920 dump_regset (r, outf)
4927 fputs (" (nil)", outf);
4931 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4933 fprintf (outf, " %d", i);
4934 if (i < FIRST_PSEUDO_REGISTER)
4935 fprintf (outf, " [%s]",
4944 dump_regset (r, stderr);
4945 putc ('\n', stderr);
4949 dump_flow_info (file)
4953 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4955 fprintf (file, "%d registers.\n", max_regno);
4956 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4959 enum reg_class class, altclass;
4960 fprintf (file, "\nRegister %d used %d times across %d insns",
4961 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4962 if (REG_BASIC_BLOCK (i) >= 0)
4963 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4965 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4966 (REG_N_SETS (i) == 1) ? "" : "s");
4967 if (REG_USERVAR_P (regno_reg_rtx[i]))
4968 fprintf (file, "; user var");
4969 if (REG_N_DEATHS (i) != 1)
4970 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4971 if (REG_N_CALLS_CROSSED (i) == 1)
4972 fprintf (file, "; crosses 1 call");
4973 else if (REG_N_CALLS_CROSSED (i))
4974 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4975 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4976 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4977 class = reg_preferred_class (i);
4978 altclass = reg_alternate_class (i);
4979 if (class != GENERAL_REGS || altclass != ALL_REGS)
4981 if (altclass == ALL_REGS || class == ALL_REGS)
4982 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4983 else if (altclass == NO_REGS)
4984 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4986 fprintf (file, "; pref %s, else %s",
4987 reg_class_names[(int) class],
4988 reg_class_names[(int) altclass]);
4990 if (REGNO_POINTER_FLAG (i))
4991 fprintf (file, "; pointer");
4992 fprintf (file, ".\n");
4995 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4996 for (i = 0; i < n_basic_blocks; i++)
4998 register basic_block bb = BASIC_BLOCK (i);
5001 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5002 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5004 fprintf (file, "Predecessors: ");
5005 for (e = bb->pred; e ; e = e->pred_next)
5006 dump_edge_info (file, e, 0);
5008 fprintf (file, "\nSuccessors: ");
5009 for (e = bb->succ; e ; e = e->succ_next)
5010 dump_edge_info (file, e, 1);
5012 fprintf (file, "\nRegisters live at start:");
5013 dump_regset (bb->global_live_at_start, file);
5015 fprintf (file, "\nRegisters live at end:");
5016 dump_regset (bb->global_live_at_end, file);
5027 dump_flow_info (stderr);
5031 dump_edge_info (file, e, do_succ)
5036 basic_block side = (do_succ ? e->dest : e->src);
5038 if (side == ENTRY_BLOCK_PTR)
5039 fputs (" ENTRY", file);
5040 else if (side == EXIT_BLOCK_PTR)
5041 fputs (" EXIT", file);
5043 fprintf (file, " %d", side->index);
5047 static const char * const bitnames[] = {
5048 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5051 int i, flags = e->flags;
5055 for (i = 0; flags; i++)
5056 if (flags & (1 << i))
5062 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5063 fputs (bitnames[i], file);
5065 fprintf (file, "%d", i);
5073 /* Print out one basic block with live information at start and end. */
5083 fprintf (outf, ";; Basic block %d, loop depth %d",
5084 bb->index, bb->loop_depth - 1);
5085 if (bb->eh_beg != -1 || bb->eh_end != -1)
5086 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5089 fputs (";; Predecessors: ", outf);
5090 for (e = bb->pred; e ; e = e->pred_next)
5091 dump_edge_info (outf, e, 0);
5094 fputs (";; Registers live at start:", outf);
5095 dump_regset (bb->global_live_at_start, outf);
5098 for (insn = bb->head, last = NEXT_INSN (bb->end);
5100 insn = NEXT_INSN (insn))
5101 print_rtl_single (outf, insn);
5103 fputs (";; Registers live at end:", outf);
5104 dump_regset (bb->global_live_at_end, outf);
5107 fputs (";; Successors: ", outf);
5108 for (e = bb->succ; e; e = e->succ_next)
5109 dump_edge_info (outf, e, 1);
5117 dump_bb (bb, stderr);
5124 dump_bb (BASIC_BLOCK(n), stderr);
5127 /* Like print_rtl, but also print out live information for the start of each
5131 print_rtl_with_bb (outf, rtx_first)
5135 register rtx tmp_rtx;
5138 fprintf (outf, "(nil)\n");
5142 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5143 int max_uid = get_max_uid ();
5144 basic_block *start = (basic_block *)
5145 xcalloc (max_uid, sizeof (basic_block));
5146 basic_block *end = (basic_block *)
5147 xcalloc (max_uid, sizeof (basic_block));
5148 enum bb_state *in_bb_p = (enum bb_state *)
5149 xcalloc (max_uid, sizeof (enum bb_state));
5151 for (i = n_basic_blocks - 1; i >= 0; i--)
5153 basic_block bb = BASIC_BLOCK (i);
5156 start[INSN_UID (bb->head)] = bb;
5157 end[INSN_UID (bb->end)] = bb;
5158 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5160 enum bb_state state = IN_MULTIPLE_BB;
5161 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5163 in_bb_p[INSN_UID(x)] = state;
5170 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5175 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5177 fprintf (outf, ";; Start of basic block %d, registers live:",
5179 dump_regset (bb->global_live_at_start, outf);
5183 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5184 && GET_CODE (tmp_rtx) != NOTE
5185 && GET_CODE (tmp_rtx) != BARRIER)
5186 fprintf (outf, ";; Insn is not within a basic block\n");
5187 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5188 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5190 did_output = print_rtl_single (outf, tmp_rtx);
5192 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5193 fprintf (outf, ";; End of basic block %d\n", bb->index);
5204 if (current_function_epilogue_delay_list != 0)
5206 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5207 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5208 tmp_rtx = XEXP (tmp_rtx, 1))
5209 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5213 /* Compute dominator relationships using new flow graph structures. */
5215 compute_flow_dominators (dominators, post_dominators)
5216 sbitmap *dominators;
5217 sbitmap *post_dominators;
5220 sbitmap *temp_bitmap;
5222 basic_block *worklist, *tos;
5224 /* Allocate a worklist array/queue. Entries are only added to the
5225 list if they were not already on the list. So the size is
5226 bounded by the number of basic blocks. */
5227 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5230 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5231 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5235 /* The optimistic setting of dominators requires us to put every
5236 block on the work list initially. */
5237 for (bb = 0; bb < n_basic_blocks; bb++)
5239 *tos++ = BASIC_BLOCK (bb);
5240 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5243 /* We want a maximal solution, so initially assume everything dominates
5245 sbitmap_vector_ones (dominators, n_basic_blocks);
5247 /* Mark successors of the entry block so we can identify them below. */
5248 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5249 e->dest->aux = ENTRY_BLOCK_PTR;
5251 /* Iterate until the worklist is empty. */
5252 while (tos != worklist)
5254 /* Take the first entry off the worklist. */
5255 basic_block b = *--tos;
5258 /* Compute the intersection of the dominators of all the
5261 If one of the predecessor blocks is the ENTRY block, then the
5262 intersection of the dominators of the predecessor blocks is
5263 defined as the null set. We can identify such blocks by the
5264 special value in the AUX field in the block structure. */
5265 if (b->aux == ENTRY_BLOCK_PTR)
5267 /* Do not clear the aux field for blocks which are
5268 successors of the ENTRY block. That way we we never
5269 add them to the worklist again.
5271 The intersect of dominators of the preds of this block is
5272 defined as the null set. */
5273 sbitmap_zero (temp_bitmap[bb]);
5277 /* Clear the aux field of this block so it can be added to
5278 the worklist again if necessary. */
5280 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5283 /* Make sure each block always dominates itself. */
5284 SET_BIT (temp_bitmap[bb], bb);
5286 /* If the out state of this block changed, then we need to
5287 add the successors of this block to the worklist if they
5288 are not already on the worklist. */
5289 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5291 for (e = b->succ; e; e = e->succ_next)
5293 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5303 if (post_dominators)
5305 /* The optimistic setting of dominators requires us to put every
5306 block on the work list initially. */
5307 for (bb = 0; bb < n_basic_blocks; bb++)
5309 *tos++ = BASIC_BLOCK (bb);
5310 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5313 /* We want a maximal solution, so initially assume everything post
5314 dominates everything else. */
5315 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5317 /* Mark predecessors of the exit block so we can identify them below. */
5318 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5319 e->src->aux = EXIT_BLOCK_PTR;
5321 /* Iterate until the worklist is empty. */
5322 while (tos != worklist)
5324 /* Take the first entry off the worklist. */
5325 basic_block b = *--tos;
5328 /* Compute the intersection of the post dominators of all the
5331 If one of the successor blocks is the EXIT block, then the
5332 intersection of the dominators of the successor blocks is
5333 defined as the null set. We can identify such blocks by the
5334 special value in the AUX field in the block structure. */
5335 if (b->aux == EXIT_BLOCK_PTR)
5337 /* Do not clear the aux field for blocks which are
5338 predecessors of the EXIT block. That way we we never
5339 add them to the worklist again.
5341 The intersect of dominators of the succs of this block is
5342 defined as the null set. */
5343 sbitmap_zero (temp_bitmap[bb]);
5347 /* Clear the aux field of this block so it can be added to
5348 the worklist again if necessary. */
5350 sbitmap_intersection_of_succs (temp_bitmap[bb],
5351 post_dominators, bb);
5354 /* Make sure each block always post dominates itself. */
5355 SET_BIT (temp_bitmap[bb], bb);
5357 /* If the out state of this block changed, then we need to
5358 add the successors of this block to the worklist if they
5359 are not already on the worklist. */
5360 if (sbitmap_a_and_b (post_dominators[bb],
5361 post_dominators[bb],
5364 for (e = b->pred; e; e = e->pred_next)
5366 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5378 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5381 compute_immediate_dominators (idom, dominators)
5383 sbitmap *dominators;
5388 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5390 /* Begin with tmp(n) = dom(n) - { n }. */
5391 for (b = n_basic_blocks; --b >= 0; )
5393 sbitmap_copy (tmp[b], dominators[b]);
5394 RESET_BIT (tmp[b], b);
5397 /* Subtract out all of our dominator's dominators. */
5398 for (b = n_basic_blocks; --b >= 0; )
5400 sbitmap tmp_b = tmp[b];
5403 for (s = n_basic_blocks; --s >= 0; )
5404 if (TEST_BIT (tmp_b, s))
5405 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5408 /* Find the one bit set in the bitmap and put it in the output array. */
5409 for (b = n_basic_blocks; --b >= 0; )
5412 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5415 sbitmap_vector_free (tmp);
5418 /* Count for a single SET rtx, X. */
5421 count_reg_sets_1 (x)
5425 register rtx reg = SET_DEST (x);
5427 /* Find the register that's set/clobbered. */
5428 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5429 || GET_CODE (reg) == SIGN_EXTRACT
5430 || GET_CODE (reg) == STRICT_LOW_PART)
5431 reg = XEXP (reg, 0);
5433 if (GET_CODE (reg) == PARALLEL
5434 && GET_MODE (reg) == BLKmode)
5437 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5438 count_reg_sets_1 (XVECEXP (reg, 0, i));
5442 if (GET_CODE (reg) == REG)
5444 regno = REGNO (reg);
5445 if (regno >= FIRST_PSEUDO_REGISTER)
5447 /* Count (weighted) references, stores, etc. This counts a
5448 register twice if it is modified, but that is correct. */
5449 REG_N_SETS (regno)++;
5450 REG_N_REFS (regno) += loop_depth + 1;
5455 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5456 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5462 register RTX_CODE code = GET_CODE (x);
5464 if (code == SET || code == CLOBBER)
5465 count_reg_sets_1 (x);
5466 else if (code == PARALLEL)
5469 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5471 code = GET_CODE (XVECEXP (x, 0, i));
5472 if (code == SET || code == CLOBBER)
5473 count_reg_sets_1 (XVECEXP (x, 0, i));
5478 /* Increment REG_N_REFS by the current loop depth each register reference
5482 count_reg_references (x)
5485 register RTX_CODE code;
5488 code = GET_CODE (x);
5508 /* If we are clobbering a MEM, mark any registers inside the address
5510 if (GET_CODE (XEXP (x, 0)) == MEM)
5511 count_reg_references (XEXP (XEXP (x, 0), 0));
5515 /* While we're here, optimize this case. */
5518 /* In case the SUBREG is not of a register, don't optimize */
5519 if (GET_CODE (x) != REG)
5521 count_reg_references (x);
5525 /* ... fall through ... */
5528 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5529 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5534 register rtx testreg = SET_DEST (x);
5537 /* If storing into MEM, don't show it as being used. But do
5538 show the address as being used. */
5539 if (GET_CODE (testreg) == MEM)
5541 count_reg_references (XEXP (testreg, 0));
5542 count_reg_references (SET_SRC (x));
5546 /* Storing in STRICT_LOW_PART is like storing in a reg
5547 in that this SET might be dead, so ignore it in TESTREG.
5548 but in some other ways it is like using the reg.
5550 Storing in a SUBREG or a bit field is like storing the entire
5551 register in that if the register's value is not used
5552 then this SET is not needed. */
5553 while (GET_CODE (testreg) == STRICT_LOW_PART
5554 || GET_CODE (testreg) == ZERO_EXTRACT
5555 || GET_CODE (testreg) == SIGN_EXTRACT
5556 || GET_CODE (testreg) == SUBREG)
5558 /* Modifying a single register in an alternate mode
5559 does not use any of the old value. But these other
5560 ways of storing in a register do use the old value. */
5561 if (GET_CODE (testreg) == SUBREG
5562 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5567 testreg = XEXP (testreg, 0);
5570 /* If this is a store into a register,
5571 recursively scan the value being stored. */
5573 if ((GET_CODE (testreg) == PARALLEL
5574 && GET_MODE (testreg) == BLKmode)
5575 || GET_CODE (testreg) == REG)
5577 count_reg_references (SET_SRC (x));
5579 count_reg_references (SET_DEST (x));
5589 /* Recursively scan the operands of this expression. */
5592 register const char *fmt = GET_RTX_FORMAT (code);
5595 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5599 /* Tail recursive case: save a function call level. */
5605 count_reg_references (XEXP (x, i));
5607 else if (fmt[i] == 'E')
5610 for (j = 0; j < XVECLEN (x, i); j++)
5611 count_reg_references (XVECEXP (x, i, j));
5617 /* Recompute register set/reference counts immediately prior to register
5620 This avoids problems with set/reference counts changing to/from values
5621 which have special meanings to the register allocators.
5623 Additionally, the reference counts are the primary component used by the
5624 register allocators to prioritize pseudos for allocation to hard regs.
5625 More accurate reference counts generally lead to better register allocation.
5627 F is the first insn to be scanned.
5629 LOOP_STEP denotes how much loop_depth should be incremented per
5630 loop nesting level in order to increase the ref count more for
5631 references in a loop.
5633 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5634 possibly other information which is used by the register allocators. */
5637 recompute_reg_usage (f, loop_step)
5638 rtx f ATTRIBUTE_UNUSED;
5639 int loop_step ATTRIBUTE_UNUSED;
5645 /* Clear out the old data. */
5646 max_reg = max_reg_num ();
5647 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5653 /* Scan each insn in the chain and count how many times each register is
5655 for (index = 0; index < n_basic_blocks; index++)
5657 basic_block bb = BASIC_BLOCK (index);
5658 loop_depth = bb->loop_depth;
5659 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5661 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5665 /* This call will increment REG_N_SETS for each SET or CLOBBER
5666 of a register in INSN. It will also increment REG_N_REFS
5667 by the loop depth for each set of a register in INSN. */
5668 count_reg_sets (PATTERN (insn));
5670 /* count_reg_sets does not detect autoincrement address modes, so
5671 detect them here by looking at the notes attached to INSN. */
5672 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5674 if (REG_NOTE_KIND (links) == REG_INC)
5675 /* Count (weighted) references, stores, etc. This counts a
5676 register twice if it is modified, but that is correct. */
5677 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5680 /* This call will increment REG_N_REFS by the current loop depth for
5681 each reference to a register in INSN. */
5682 count_reg_references (PATTERN (insn));
5684 /* count_reg_references will not include counts for arguments to
5685 function calls, so detect them here by examining the
5686 CALL_INSN_FUNCTION_USAGE data. */
5687 if (GET_CODE (insn) == CALL_INSN)
5691 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5693 note = XEXP (note, 1))
5694 if (GET_CODE (XEXP (note, 0)) == USE)
5695 count_reg_references (XEXP (XEXP (note, 0), 0));
5698 if (insn == bb->end)
5704 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5705 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5706 of the number of registers that died. */
5709 count_or_remove_death_notes (blocks, kill)
5715 for (i = n_basic_blocks - 1; i >= 0; --i)
5720 if (blocks && ! TEST_BIT (blocks, i))
5723 bb = BASIC_BLOCK (i);
5725 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5727 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5729 rtx *pprev = ®_NOTES (insn);
5734 switch (REG_NOTE_KIND (link))
5737 if (GET_CODE (XEXP (link, 0)) == REG)
5739 rtx reg = XEXP (link, 0);
5742 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5745 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5753 rtx next = XEXP (link, 1);
5754 free_EXPR_LIST_node (link);
5755 *pprev = link = next;
5761 pprev = &XEXP (link, 1);
5768 if (insn == bb->end)
5776 /* Record INSN's block as BB. */
5779 set_block_for_insn (insn, bb)
5783 size_t uid = INSN_UID (insn);
5784 if (uid >= basic_block_for_insn->num_elements)
5788 /* Add one-eighth the size so we don't keep calling xrealloc. */
5789 new_size = uid + (uid + 7) / 8;
5791 VARRAY_GROW (basic_block_for_insn, new_size);
5793 VARRAY_BB (basic_block_for_insn, uid) = bb;
5796 /* Record INSN's block number as BB. */
5797 /* ??? This has got to go. */
5800 set_block_num (insn, bb)
5804 set_block_for_insn (insn, BASIC_BLOCK (bb));
5807 /* Verify the CFG consistency. This function check some CFG invariants and
5808 aborts when something is wrong. Hope that this function will help to
5809 convert many optimization passes to preserve CFG consistent.
5811 Currently it does following checks:
5813 - test head/end pointers
5814 - overlapping of basic blocks
5815 - edge list corectness
5816 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5817 - tails of basic blocks (ensure that boundary is necesary)
5818 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5819 and NOTE_INSN_BASIC_BLOCK
5820 - check that all insns are in the basic blocks
5821 (except the switch handling code, barriers and notes)
5822 - check that all returns are followed by barriers
5824 In future it can be extended check a lot of other stuff as well
5825 (reachability of basic blocks, life information, etc. etc.). */
5830 const int max_uid = get_max_uid ();
5831 const rtx rtx_first = get_insns ();
5832 basic_block *bb_info;
5836 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5838 /* First pass check head/end pointers and set bb_info array used by
5840 for (i = n_basic_blocks - 1; i >= 0; i--)
5842 basic_block bb = BASIC_BLOCK (i);
5844 /* Check the head pointer and make sure that it is pointing into
5846 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5851 error ("Head insn %d for block %d not found in the insn stream.",
5852 INSN_UID (bb->head), bb->index);
5856 /* Check the end pointer and make sure that it is pointing into
5858 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5860 if (bb_info[INSN_UID (x)] != NULL)
5862 error ("Insn %d is in multiple basic blocks (%d and %d)",
5863 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5866 bb_info[INSN_UID (x)] = bb;
5873 error ("End insn %d for block %d not found in the insn stream.",
5874 INSN_UID (bb->end), bb->index);
5879 /* Now check the basic blocks (boundaries etc.) */
5880 for (i = n_basic_blocks - 1; i >= 0; i--)
5882 basic_block bb = BASIC_BLOCK (i);
5883 /* Check corectness of edge lists */
5891 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5893 fprintf (stderr, "Predecessor: ");
5894 dump_edge_info (stderr, e, 0);
5895 fprintf (stderr, "\nSuccessor: ");
5896 dump_edge_info (stderr, e, 1);
5900 if (e->dest != EXIT_BLOCK_PTR)
5902 edge e2 = e->dest->pred;
5903 while (e2 && e2 != e)
5907 error ("Basic block %i edge lists are corrupted", bb->index);
5919 error ("Basic block %d pred edge is corrupted", bb->index);
5920 fputs ("Predecessor: ", stderr);
5921 dump_edge_info (stderr, e, 0);
5922 fputs ("\nSuccessor: ", stderr);
5923 dump_edge_info (stderr, e, 1);
5924 fputc ('\n', stderr);
5927 if (e->src != ENTRY_BLOCK_PTR)
5929 edge e2 = e->src->succ;
5930 while (e2 && e2 != e)
5934 error ("Basic block %i edge lists are corrupted", bb->index);
5941 /* OK pointers are correct. Now check the header of basic
5942 block. It ought to contain optional CODE_LABEL followed
5943 by NOTE_BASIC_BLOCK. */
5945 if (GET_CODE (x) == CODE_LABEL)
5949 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5955 if (GET_CODE (x) != NOTE
5956 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5957 || NOTE_BASIC_BLOCK (x) != bb)
5959 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5966 /* Do checks for empty blocks here */
5973 if (GET_CODE (x) == NOTE
5974 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5976 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5977 INSN_UID (x), bb->index);
5984 if (GET_CODE (x) == JUMP_INSN
5985 || GET_CODE (x) == CODE_LABEL
5986 || GET_CODE (x) == BARRIER)
5988 error ("In basic block %d:", bb->index);
5989 fatal_insn ("Flow control insn inside a basic block", x);
6000 if (!bb_info[INSN_UID (x)])
6002 switch (GET_CODE (x))
6009 /* An addr_vec is placed outside any block block. */
6011 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6012 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6013 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6018 /* But in any case, non-deletable labels can appear anywhere. */
6022 fatal_insn ("Insn outside basic block", x);
6026 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6027 && GET_CODE (x) == JUMP_INSN
6028 && returnjump_p (x) && ! condjump_p (x)
6029 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6030 fatal_insn ("Return not followed by barrier", x);
6042 /* Functions to access an edge list with a vector representation.
6043 Enough data is kept such that given an index number, the
6044 pred and succ that edge reprsents can be determined, or
6045 given a pred and a succ, it's index number can be returned.
6046 This allows algorithms which comsume a lot of memory to
6047 represent the normally full matrix of edge (pred,succ) with a
6048 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6049 wasted space in the client code due to sparse flow graphs. */
6051 /* This functions initializes the edge list. Basically the entire
6052 flowgraph is processed, and all edges are assigned a number,
6053 and the data structure is filed in. */
6057 struct edge_list *elist;
6063 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6067 /* Determine the number of edges in the flow graph by counting successor
6068 edges on each basic block. */
6069 for (x = 0; x < n_basic_blocks; x++)
6071 basic_block bb = BASIC_BLOCK (x);
6073 for (e = bb->succ; e; e = e->succ_next)
6076 /* Don't forget successors of the entry block. */
6077 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6080 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6081 elist->num_blocks = block_count;
6082 elist->num_edges = num_edges;
6083 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6087 /* Follow successors of the entry block, and register these edges. */
6088 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6090 elist->index_to_edge[num_edges] = e;
6094 for (x = 0; x < n_basic_blocks; x++)
6096 basic_block bb = BASIC_BLOCK (x);
6098 /* Follow all successors of blocks, and register these edges. */
6099 for (e = bb->succ; e; e = e->succ_next)
6101 elist->index_to_edge[num_edges] = e;
6108 /* This function free's memory associated with an edge list. */
6110 free_edge_list (elist)
6111 struct edge_list *elist;
6115 free (elist->index_to_edge);
6120 /* This function provides debug output showing an edge list. */
6122 print_edge_list (f, elist)
6124 struct edge_list *elist;
6127 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6128 elist->num_blocks - 2, elist->num_edges);
6130 for (x = 0; x < elist->num_edges; x++)
6132 fprintf (f, " %-4d - edge(", x);
6133 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6134 fprintf (f,"entry,");
6136 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6138 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6139 fprintf (f,"exit)\n");
6141 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6145 /* This function provides an internal consistancy check of an edge list,
6146 verifying that all edges are present, and that there are no
6149 verify_edge_list (f, elist)
6151 struct edge_list *elist;
6153 int x, pred, succ, index;
6156 for (x = 0; x < n_basic_blocks; x++)
6158 basic_block bb = BASIC_BLOCK (x);
6160 for (e = bb->succ; e; e = e->succ_next)
6162 pred = e->src->index;
6163 succ = e->dest->index;
6164 index = EDGE_INDEX (elist, e->src, e->dest);
6165 if (index == EDGE_INDEX_NO_EDGE)
6167 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6170 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6171 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6172 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6173 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6174 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6175 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6178 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6180 pred = e->src->index;
6181 succ = e->dest->index;
6182 index = EDGE_INDEX (elist, e->src, e->dest);
6183 if (index == EDGE_INDEX_NO_EDGE)
6185 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6188 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6189 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6190 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6191 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6192 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6193 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6195 /* We've verified that all the edges are in the list, no lets make sure
6196 there are no spurious edges in the list. */
6198 for (pred = 0 ; pred < n_basic_blocks; pred++)
6199 for (succ = 0 ; succ < n_basic_blocks; succ++)
6201 basic_block p = BASIC_BLOCK (pred);
6202 basic_block s = BASIC_BLOCK (succ);
6206 for (e = p->succ; e; e = e->succ_next)
6212 for (e = s->pred; e; e = e->pred_next)
6218 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6219 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6220 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6222 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6223 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6224 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6225 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6226 BASIC_BLOCK (succ)));
6228 for (succ = 0 ; succ < n_basic_blocks; succ++)
6230 basic_block p = ENTRY_BLOCK_PTR;
6231 basic_block s = BASIC_BLOCK (succ);
6235 for (e = p->succ; e; e = e->succ_next)
6241 for (e = s->pred; e; e = e->pred_next)
6247 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6248 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6249 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6251 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6252 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6253 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6254 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6255 BASIC_BLOCK (succ)));
6257 for (pred = 0 ; pred < n_basic_blocks; pred++)
6259 basic_block p = BASIC_BLOCK (pred);
6260 basic_block s = EXIT_BLOCK_PTR;
6264 for (e = p->succ; e; e = e->succ_next)
6270 for (e = s->pred; e; e = e->pred_next)
6276 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6277 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6278 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6280 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6281 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6282 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6283 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6288 /* This routine will determine what, if any, edge there is between
6289 a specified predecessor and successor. */
6292 find_edge_index (edge_list, pred, succ)
6293 struct edge_list *edge_list;
6294 basic_block pred, succ;
6297 for (x = 0; x < NUM_EDGES (edge_list); x++)
6299 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6300 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6303 return (EDGE_INDEX_NO_EDGE);
6306 /* This function will remove an edge from the flow graph. */
6311 edge last_pred = NULL;
6312 edge last_succ = NULL;
6314 basic_block src, dest;
6317 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6323 last_succ->succ_next = e->succ_next;
6325 src->succ = e->succ_next;
6327 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6333 last_pred->pred_next = e->pred_next;
6335 dest->pred = e->pred_next;
6341 /* This routine will remove any fake successor edges for a basic block.
6342 When the edge is removed, it is also removed from whatever predecessor
6345 remove_fake_successors (bb)
6349 for (e = bb->succ; e ; )
6353 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6358 /* This routine will remove all fake edges from the flow graph. If
6359 we remove all fake successors, it will automatically remove all
6360 fake predecessors. */
6362 remove_fake_edges ()
6366 for (x = 0; x < n_basic_blocks; x++)
6367 remove_fake_successors (BASIC_BLOCK (x));
6369 /* We've handled all successors except the entry block's. */
6370 remove_fake_successors (ENTRY_BLOCK_PTR);
6373 /* This functions will add a fake edge between any block which has no
6374 successors, and the exit block. Some data flow equations require these
6377 add_noreturn_fake_exit_edges ()
6381 for (x = 0; x < n_basic_blocks; x++)
6382 if (BASIC_BLOCK (x)->succ == NULL)
6383 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6386 /* Dump the list of basic blocks in the bitmap NODES. */
6388 flow_nodes_print (str, nodes, file)
6390 const sbitmap nodes;
6395 fprintf (file, "%s { ", str);
6396 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6397 fputs ("}\n", file);
6401 /* Dump the list of exiting edges in the array EDGES. */
6403 flow_exits_print (str, edges, num_edges, file)
6411 fprintf (file, "%s { ", str);
6412 for (i = 0; i < num_edges; i++)
6413 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6414 fputs ("}\n", file);
6418 /* Dump loop related CFG information. */
6420 flow_loops_cfg_dump (loops, file)
6421 const struct loops *loops;
6426 if (! loops->num || ! file || ! loops->cfg.dom)
6429 for (i = 0; i < n_basic_blocks; i++)
6433 fprintf (file, ";; %d succs { ", i);
6434 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6435 fprintf (file, "%d ", succ->dest->index);
6436 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6440 /* Dump the DFS node order. */
6441 if (loops->cfg.dfs_order)
6443 fputs (";; DFS order: ", file);
6444 for (i = 0; i < n_basic_blocks; i++)
6445 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6451 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6453 flow_loop_nested_p (outer, loop)
6457 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6461 /* Dump the loop information specified by LOOPS to the stream FILE. */
6463 flow_loops_dump (loops, file, verbose)
6464 const struct loops *loops;
6471 num_loops = loops->num;
6472 if (! num_loops || ! file)
6475 fprintf (file, ";; %d loops found, %d levels\n",
6476 num_loops, loops->levels);
6478 for (i = 0; i < num_loops; i++)
6480 struct loop *loop = &loops->array[i];
6482 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6483 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6484 loop->header->index, loop->latch->index,
6485 loop->pre_header ? loop->pre_header->index : -1,
6486 loop->depth, loop->level,
6487 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6488 fprintf (file, ";; %d", loop->num_nodes);
6489 flow_nodes_print (" nodes", loop->nodes, file);
6490 fprintf (file, ";; %d", loop->num_exits);
6491 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6497 for (j = 0; j < i; j++)
6499 struct loop *oloop = &loops->array[j];
6501 if (loop->header == oloop->header)
6506 smaller = loop->num_nodes < oloop->num_nodes;
6508 /* If the union of LOOP and OLOOP is different than
6509 the larger of LOOP and OLOOP then LOOP and OLOOP
6510 must be disjoint. */
6511 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6512 smaller ? oloop : loop);
6513 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6514 loop->header->index, i, j,
6515 disjoint ? "disjoint" : "nested");
6522 /* Print diagnostics to compare our concept of a loop with
6523 what the loop notes say. */
6524 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6525 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6526 != NOTE_INSN_LOOP_BEG)
6527 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6528 INSN_UID (PREV_INSN (loop->first->head)));
6529 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6530 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6531 != NOTE_INSN_LOOP_END)
6532 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6533 INSN_UID (NEXT_INSN (loop->last->end)));
6538 flow_loops_cfg_dump (loops, file);
6542 /* Free all the memory allocated for LOOPS. */
6544 flow_loops_free (loops)
6545 struct loops *loops;
6554 /* Free the loop descriptors. */
6555 for (i = 0; i < loops->num; i++)
6557 struct loop *loop = &loops->array[i];
6560 sbitmap_free (loop->nodes);
6564 free (loops->array);
6565 loops->array = NULL;
6568 sbitmap_vector_free (loops->cfg.dom);
6569 if (loops->cfg.dfs_order)
6570 free (loops->cfg.dfs_order);
6572 sbitmap_free (loops->shared_headers);
6577 /* Find the exits from the loop using the bitmap of loop nodes NODES
6578 and store in EXITS array. Return the number of exits from the
6581 flow_loop_exits_find (nodes, exits)
6582 const sbitmap nodes;
6591 /* Check all nodes within the loop to see if there are any
6592 successors not in the loop. Note that a node may have multiple
6595 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6596 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6598 basic_block dest = e->dest;
6600 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6608 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6610 /* Store all exiting edges into an array. */
6612 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6613 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6615 basic_block dest = e->dest;
6617 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6618 (*exits)[num_exits++] = e;
6626 /* Find the nodes contained within the loop with header HEADER and
6627 latch LATCH and store in NODES. Return the number of nodes within
6630 flow_loop_nodes_find (header, latch, nodes)
6639 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6642 /* Start with only the loop header in the set of loop nodes. */
6643 sbitmap_zero (nodes);
6644 SET_BIT (nodes, header->index);
6646 header->loop_depth++;
6648 /* Push the loop latch on to the stack. */
6649 if (! TEST_BIT (nodes, latch->index))
6651 SET_BIT (nodes, latch->index);
6652 latch->loop_depth++;
6654 stack[sp++] = latch;
6663 for (e = node->pred; e; e = e->pred_next)
6665 basic_block ancestor = e->src;
6667 /* If each ancestor not marked as part of loop, add to set of
6668 loop nodes and push on to stack. */
6669 if (ancestor != ENTRY_BLOCK_PTR
6670 && ! TEST_BIT (nodes, ancestor->index))
6672 SET_BIT (nodes, ancestor->index);
6673 ancestor->loop_depth++;
6675 stack[sp++] = ancestor;
6684 /* Compute the depth first search order and store in the array
6685 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6686 number of nodes visited. */
6688 flow_depth_first_order_compute (dfs_order)
6697 /* Allocate stack for back-tracking up CFG. */
6698 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6701 /* Allocate bitmap to track nodes that have been visited. */
6702 visited = sbitmap_alloc (n_basic_blocks);
6704 /* None of the nodes in the CFG have been visited yet. */
6705 sbitmap_zero (visited);
6707 /* Start with the first successor edge from the entry block. */
6708 e = ENTRY_BLOCK_PTR->succ;
6711 basic_block src = e->src;
6712 basic_block dest = e->dest;
6714 /* Mark that we have visited this node. */
6715 if (src != ENTRY_BLOCK_PTR)
6716 SET_BIT (visited, src->index);
6718 /* If this node has not been visited before, push the current
6719 edge on to the stack and proceed with the first successor
6720 edge of this node. */
6721 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6729 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6732 /* DEST has no successors (for example, a non-returning
6733 function is called) so do not push the current edge
6734 but carry on with its next successor. */
6735 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6736 SET_BIT (visited, dest->index);
6739 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6741 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6743 /* Pop edge off stack. */
6751 sbitmap_free (visited);
6753 /* The number of nodes visited should not be greater than
6755 if (dfsnum > n_basic_blocks)
6758 /* There are some nodes left in the CFG that are unreachable. */
6759 if (dfsnum < n_basic_blocks)
6765 /* Return the block for the pre-header of the loop with header
6766 HEADER where DOM specifies the dominator information. Return NULL if
6767 there is no pre-header. */
6769 flow_loop_pre_header_find (header, dom)
6773 basic_block pre_header;
6776 /* If block p is a predecessor of the header and is the only block
6777 that the header does not dominate, then it is the pre-header. */
6779 for (e = header->pred; e; e = e->pred_next)
6781 basic_block node = e->src;
6783 if (node != ENTRY_BLOCK_PTR
6784 && ! TEST_BIT (dom[node->index], header->index))
6786 if (pre_header == NULL)
6790 /* There are multiple edges into the header from outside
6791 the loop so there is no pre-header block. */
6801 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6802 previously added. The insertion algorithm assumes that the loops
6803 are added in the order found by a depth first search of the CFG. */
6805 flow_loop_tree_node_add (prevloop, loop)
6806 struct loop *prevloop;
6810 if (flow_loop_nested_p (prevloop, loop))
6812 prevloop->inner = loop;
6813 loop->outer = prevloop;
6817 while (prevloop->outer)
6819 if (flow_loop_nested_p (prevloop->outer, loop))
6821 prevloop->next = loop;
6822 loop->outer = prevloop->outer;
6825 prevloop = prevloop->outer;
6828 prevloop->next = loop;
6833 /* Build the loop hierarchy tree for LOOPS. */
6835 flow_loops_tree_build (loops)
6836 struct loops *loops;
6841 num_loops = loops->num;
6845 /* Root the loop hierarchy tree with the first loop found.
6846 Since we used a depth first search this should be the
6848 loops->tree = &loops->array[0];
6849 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6851 /* Add the remaining loops to the tree. */
6852 for (i = 1; i < num_loops; i++)
6853 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6857 /* Helper function to compute loop nesting depth and enclosed loop level
6858 for the natural loop specified by LOOP at the loop depth DEPTH.
6859 Returns the loop level. */
6861 flow_loop_level_compute (loop, depth)
6871 /* Traverse loop tree assigning depth and computing level as the
6872 maximum level of all the inner loops of this loop. The loop
6873 level is equivalent to the height of the loop in the loop tree
6874 and corresponds to the number of enclosed loop levels (including
6876 for (inner = loop->inner; inner; inner = inner->next)
6880 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6885 loop->level = level;
6886 loop->depth = depth;
6891 /* Compute the loop nesting depth and enclosed loop level for the loop
6892 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
6896 flow_loops_level_compute (loops)
6897 struct loops *loops;
6903 /* Traverse all the outer level loops. */
6904 for (loop = loops->tree; loop; loop = loop->next)
6906 level = flow_loop_level_compute (loop, 1);
6914 /* Find all the natural loops in the function and save in LOOPS structure
6915 and recalculate loop_depth information in basic block structures.
6916 Return the number of natural loops found. */
6919 flow_loops_find (loops)
6920 struct loops *loops;
6931 loops->array = NULL;
6935 /* Taking care of this degenerate case makes the rest of
6936 this code simpler. */
6937 if (n_basic_blocks == 0)
6940 /* Compute the dominators. */
6941 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6942 compute_flow_dominators (dom, NULL);
6944 /* Count the number of loop edges (back edges). This should be the
6945 same as the number of natural loops. Also clear the loop_depth
6946 and as we work from inner->outer in a loop nest we call
6947 find_loop_nodes_find which will increment loop_depth for nodes
6948 within the current loop, which happens to enclose inner loops. */
6951 for (b = 0; b < n_basic_blocks; b++)
6953 BASIC_BLOCK (b)->loop_depth = 0;
6954 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6956 basic_block latch = e->src;
6958 /* Look for back edges where a predecessor is dominated
6959 by this block. A natural loop has a single entry
6960 node (header) that dominates all the nodes in the
6961 loop. It also has single back edge to the header
6962 from a latch node. Note that multiple natural loops
6963 may share the same header. */
6964 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6971 /* Compute depth first search order of the CFG so that outer
6972 natural loops will be found before inner natural loops. */
6973 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6974 flow_depth_first_order_compute (dfs_order);
6976 /* Allocate loop structures. */
6978 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6980 headers = sbitmap_alloc (n_basic_blocks);
6981 sbitmap_zero (headers);
6983 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6984 sbitmap_zero (loops->shared_headers);
6986 /* Find and record information about all the natural loops
6989 for (b = 0; b < n_basic_blocks; b++)
6993 /* Search the nodes of the CFG in DFS order that we can find
6994 outer loops first. */
6995 header = BASIC_BLOCK (dfs_order[b]);
6997 /* Look for all the possible latch blocks for this header. */
6998 for (e = header->pred; e; e = e->pred_next)
7000 basic_block latch = e->src;
7002 /* Look for back edges where a predecessor is dominated
7003 by this block. A natural loop has a single entry
7004 node (header) that dominates all the nodes in the
7005 loop. It also has single back edge to the header
7006 from a latch node. Note that multiple natural loops
7007 may share the same header. */
7008 if (latch != ENTRY_BLOCK_PTR
7009 && TEST_BIT (dom[latch->index], header->index))
7013 loop = loops->array + num_loops;
7015 loop->header = header;
7016 loop->latch = latch;
7018 /* Keep track of blocks that are loop headers so
7019 that we can tell which loops should be merged. */
7020 if (TEST_BIT (headers, header->index))
7021 SET_BIT (loops->shared_headers, header->index);
7022 SET_BIT (headers, header->index);
7024 /* Find nodes contained within the loop. */
7025 loop->nodes = sbitmap_alloc (n_basic_blocks);
7027 = flow_loop_nodes_find (header, latch, loop->nodes);
7029 /* Compute first and last blocks within the loop.
7030 These are often the same as the loop header and
7031 loop latch respectively, but this is not always
7034 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7036 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7038 /* Find edges which exit the loop. Note that a node
7039 may have several exit edges. */
7041 = flow_loop_exits_find (loop->nodes, &loop->exits);
7043 /* Look to see if the loop has a pre-header node. */
7045 = flow_loop_pre_header_find (header, dom);
7052 /* Natural loops with shared headers may either be disjoint or
7053 nested. Disjoint loops with shared headers cannot be inner
7054 loops and should be merged. For now just mark loops that share
7056 for (i = 0; i < num_loops; i++)
7057 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7058 loops->array[i].shared = 1;
7060 sbitmap_free (headers);
7063 loops->num = num_loops;
7065 /* Save CFG derived information to avoid recomputing it. */
7066 loops->cfg.dom = dom;
7067 loops->cfg.dfs_order = dfs_order;
7069 /* Build the loop hierarchy tree. */
7070 flow_loops_tree_build (loops);
7072 /* Assign the loop nesting depth and enclosed loop level for each
7074 loops->levels = flow_loops_level_compute (loops);
7080 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7082 flow_loop_outside_edge_p (loop, e)
7083 const struct loop *loop;
7086 if (e->dest != loop->header)
7088 return (e->src == ENTRY_BLOCK_PTR)
7089 || ! TEST_BIT (loop->nodes, e->src->index);
7093 /* Clear LOG_LINKS fields of insns in a chain. */
7095 clear_log_links (insns)
7099 for (i = insns; i; i = NEXT_INSN (i))
7100 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')