1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
REG_DEAD notes.
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value
that is never used.
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation
127 #include "basic-block.h"
128 #include "insn-config.h"
130 #include "hard-reg-set.h"
133 #include "function.h"
137 #include "insn-flags.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

/* Zero fallbacks for the optional target prologue/epilogue patterns so
   the rest of the file can test them unconditionally.  */
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
/* File-scope state for the flow pass: the basic block array, the special
   ENTRY/EXIT block pair, and scratch data used by propagate_block.
   NOTE(review): this extract is missing interior lines (the embedded
   original line numbers skip) -- the entry_exit_blocks initializer below
   lacks braces and several fields, and some declarations that the comments
   describe (e.g. n_basic_blocks, n_edges, max_regno) were dropped.  Code is
   left byte-identical; recover the full text before compiling.  */
164 /* The contents of the current function definition are allocated
165 in this obstack, and all are freed at the end of the function.
166 For top-level functions, this is temporary_obstack.
167 Separate obstacks are made for nested functions. */
169 extern struct obstack *function_obstack;
171 /* Number of basic blocks in the current function. */
175 /* Number of edges in the current function. */
179 /* The basic block array. */
181 varray_type basic_block_info;
183 /* The special entry and exit blocks. */
185 struct basic_block_def entry_exit_blocks[2]
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
194 ENTRY_BLOCK, /* index */
196 -1, -1 /* eh_beg, eh_end */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
207 EXIT_BLOCK, /* index */
209 -1, -1 /* eh_beg, eh_end */
213 /* Nonzero if the second flow pass has completed. */
216 /* Maximum register number used in this function, plus one. */
220 /* Indexed by n, giving various register information */
222 varray_type reg_n_info;
224 /* Size of the reg_n_info table. */
226 unsigned int reg_n_max;
228 /* Element N is the next insn that uses (hard or pseudo) register number N
229 within the current basic block; or zero, if there is no such insn.
230 This is valid only during the final backward scan in propagate_block. */
232 static rtx *reg_next_use;
234 /* Size of a regset for the current function,
235 in (1) bytes and (2) elements. */
240 /* Regset of regs live when calls to `setjmp'-like functions happen. */
241 /* ??? Does this exist only for the setjmp-clobbered warning message? */
243 regset regs_live_at_setjmp;
245 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
246 that have to go in the same hard reg.
247 The first two regs in the list are a pair, and the next two
248 are another pair, etc. */
251 /* Depth within loops of basic block being scanned for lifetime analysis,
252 plus one. This is the weight attached to references to registers. */
254 static int loop_depth;
256 /* During propagate_block, this is non-zero if the value of CC0 is live. */
260 /* During propagate_block, this contains a list of all the MEMs we are
261 tracking for dead store elimination. */
263 static rtx mem_set_list;
265 /* Set of registers that may be eliminable. These are handled specially
266 in updating regs_ever_live. */
268 static HARD_REG_SET elim_reg_set;
270 /* The basic block structure for every insn, indexed by uid. */
272 varray_type basic_block_for_insn;
274 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
275 /* ??? Should probably be using LABEL_NUSES instead. It would take a
276 bit of surgery to be able to use or co-opt the routines in jump. */
278 static rtx label_value_list;
/* Prototypes for the static helpers defined later in this file.
   NOTE(review): several PARAMS argument lists are cut mid-declaration
   (their continuation lines were dropped in this extract, e.g.
   make_label_edge, propagate_block, mark_set_regs); left byte-identical.  */
280 /* Forward declarations */
281 static int count_basic_blocks PARAMS ((rtx));
282 static rtx find_basic_blocks_1 PARAMS ((rtx));
283 static void clear_edges PARAMS ((void));
284 static void make_edges PARAMS ((rtx));
285 static void make_label_edge PARAMS ((sbitmap *, basic_block,
287 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
288 basic_block, rtx, int));
289 static void mark_critical_edges PARAMS ((void));
290 static void move_stray_eh_region_notes PARAMS ((void));
291 static void record_active_eh_regions PARAMS ((rtx));
293 static void commit_one_edge_insertion PARAMS ((edge));
295 static void delete_unreachable_blocks PARAMS ((void));
296 static void delete_eh_regions PARAMS ((void));
297 static int can_delete_note_p PARAMS ((rtx));
298 static int delete_block PARAMS ((basic_block));
299 static void expunge_block PARAMS ((basic_block));
300 static int can_delete_label_p PARAMS ((rtx));
301 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
303 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
305 static void merge_blocks_nomove PARAMS ((basic_block, basic_block));
306 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
307 static void try_merge_blocks PARAMS ((void));
308 static void tidy_fallthru_edge PARAMS ((edge,basic_block,basic_block));
309 static void tidy_fallthru_edges PARAMS ((void));
310 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
311 static void verify_wide_reg PARAMS ((int, rtx, rtx));
312 static void verify_local_live_at_start PARAMS ((regset, basic_block));
313 static int set_noop_p PARAMS ((rtx));
314 static int noop_move_p PARAMS ((rtx));
315 static void delete_noop_moves PARAMS ((rtx));
316 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
317 static void notice_stack_pointer_modification PARAMS ((rtx));
318 static void mark_reg PARAMS ((rtx, void *));
319 static void mark_regs_live_at_end PARAMS ((regset));
320 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
321 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
322 static void propagate_block PARAMS ((basic_block, regset,
324 static int insn_dead_p PARAMS ((rtx, regset, int, rtx));
325 static int libcall_dead_p PARAMS ((rtx, regset, rtx, rtx));
326 static void mark_set_regs PARAMS ((regset, regset, rtx,
328 static void mark_set_1 PARAMS ((regset, regset, rtx,
331 static void find_auto_inc PARAMS ((regset, rtx, rtx));
332 static int try_pre_increment_1 PARAMS ((rtx));
333 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
335 static void mark_used_regs PARAMS ((regset, regset, rtx, int, rtx));
336 void dump_flow_info PARAMS ((FILE *));
337 void debug_flow_info PARAMS ((void));
338 static void dump_edge_info PARAMS ((FILE *, edge, int));
340 static void count_reg_sets_1 PARAMS ((rtx));
341 static void count_reg_sets PARAMS ((rtx));
342 static void count_reg_references PARAMS ((rtx));
343 static void invalidate_mems_from_autoinc PARAMS ((rtx));
344 static void remove_fake_successors PARAMS ((basic_block));
345 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
346 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
347 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
348 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
349 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
350 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
351 static int flow_depth_first_order_compute PARAMS ((int *));
352 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
353 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
354 static void flow_loops_tree_build PARAMS ((struct loops *));
355 static int flow_loop_level_compute PARAMS ((struct loop *, int));
356 static int flow_loops_level_compute PARAMS ((struct loops *));
/* Entry point of CFG construction: frees any prior basic_block_info,
   counts blocks, builds them via find_basic_blocks_1, maps insns to
   blocks (compute_bb_for_insn), then discovers edges (make_edges) and
   tidies fallthrus and critical edges.
   NOTE(review): interior lines were dropped in this extract (return
   type, opening brace, declarations of `f'/`i'/`max_uid', closing
   braces, and the body of the ENABLE_CHECKING section are missing);
   code left byte-identical, comments only.  */
358 /* Find basic blocks of the current function.
359 F is the first insn of the function and NREGS the number of register
363 find_basic_blocks (f, nregs, file)
365 int nregs ATTRIBUTE_UNUSED;
366 FILE *file ATTRIBUTE_UNUSED;
370 /* Flush out existing data. */
371 if (basic_block_info != NULL)
377 /* Clear bb->aux on all extant basic blocks. We'll use this as a
378 tag for reuse during create_basic_block, just in case some pass
379 copies around basic block notes improperly. */
380 for (i = 0; i < n_basic_blocks; ++i)
381 BASIC_BLOCK (i)->aux = NULL;
383 VARRAY_FREE (basic_block_info);
386 n_basic_blocks = count_basic_blocks (f);
388 /* Size the basic block table. The actual structures will be allocated
389 by find_basic_blocks_1, since we want to keep the structure pointers
390 stable across calls to find_basic_blocks. */
391 /* ??? This whole issue would be much simpler if we called find_basic_blocks
392 exactly once, and thereafter we don't have a single long chain of
393 instructions at all until close to the end of compilation when we
394 actually lay them out. */
396 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
398 label_value_list = find_basic_blocks_1 (f);
400 /* Record the block to which an insn belongs. */
401 /* ??? This should be done another way, by which (perhaps) a label is
402 tagged directly with the basic block that it starts. It is used for
403 more than that currently, but IMO that is the only valid use. */
405 max_uid = get_max_uid ();
407 /* Leave space for insns life_analysis makes in some cases for auto-inc.
408 These cases are rare, so we don't need too much space. */
409 max_uid += max_uid / 10;
412 compute_bb_for_insn (max_uid);
414 /* Discover the edges of our cfg. */
415 record_active_eh_regions (f);
416 make_edges (label_value_list);
418 /* Do very simple cleanup now, for the benefit of code that runs between
419 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
420 tidy_fallthru_edges ();
422 mark_critical_edges ();
424 #ifdef ENABLE_CHECKING
/* Scan the insn chain and count how many basic blocks the function will
   have: a block starts at a CODE_LABEL, or at any insn following a jump,
   a barrier, or a call that had an abnormal edge (EH region/rethrow note
   or a possible nonlocal goto).  Emits a no-op USE so the function is
   never completely empty.
   NOTE(review): interior lines were dropped in this extract (the `static
   int' line, `rtx insn', `count++', the eh_region counter updates, the
   return statement, and several braces are missing); code left
   byte-identical, comments only.  */
429 /* Count the basic blocks of the function. */
432 count_basic_blocks (f)
436 register RTX_CODE prev_code;
437 register int count = 0;
439 int call_had_abnormal_edge = 0;
440 rtx prev_call = NULL_RTX;
442 prev_code = JUMP_INSN;
443 for (insn = f; insn; insn = NEXT_INSN (insn))
445 register RTX_CODE code = GET_CODE (insn);
447 if (code == CODE_LABEL
448 || (GET_RTX_CLASS (code) == 'i'
449 && (prev_code == JUMP_INSN
450 || prev_code == BARRIER
451 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
456 /* Record whether this call created an edge. */
457 if (code == CALL_INSN)
459 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
460 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
462 call_had_abnormal_edge = 0;
464 /* If there is an EH region or rethrow, we have an edge. */
465 if ((eh_region && region > 0)
466 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
467 call_had_abnormal_edge = 1;
470 /* If there is a nonlocal goto label and the specified
471 region number isn't -1, we have an edge. (0 means
472 no throw, but might have a nonlocal goto). */
473 if (nonlocal_goto_handler_labels && region >= 0)
474 call_had_abnormal_edge = 1;
477 else if (code != NOTE)
478 prev_call = NULL_RTX;
482 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
484 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
489 /* The rest of the compiler works a bit smoother when we don't have to
490 check for the edge case of do-nothing functions with no basic blocks. */
493 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
/* Second pass of CFG construction: walk the insn chain once, closing a
   block at each jump/barrier/abnormal call and opening one at each label
   or first insn, calling create_basic_block for each.  Maintains a LIFO
   list of active EH region notes, unthreads NOTE_INSN_BASIC_BLOCK notes
   for reuse, and collects labels referenced by REG_LABEL notes into
   label_value_list (returned for computed-goto edge construction).
   Sanity-checks the final block count against count_basic_blocks.
   NOTE(review): interior lines were dropped in this extract (the `static
   rtx' line, declarations of `i'/`head'/`end'/`note', the switch header
   and its case labels, the new_bb_inclusive/new_bb_exclusive label
   definitions, and many braces are missing); code left byte-identical,
   comments only.  */
500 /* Find all basic blocks of the function whose first insn is F.
502 Collect and return a list of labels whose addresses are taken. This
503 will be used in make_edges for use with computed gotos. */
506 find_basic_blocks_1 (f)
509 register rtx insn, next;
510 int call_has_abnormal_edge = 0;
512 rtx bb_note = NULL_RTX;
513 rtx eh_list = NULL_RTX;
514 rtx label_value_list = NULL_RTX;
518 /* We process the instructions in a slightly different way than we did
519 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
520 closed out the previous block, so that it gets attached at the proper
521 place. Since this form should be equivalent to the previous,
522 count_basic_blocks continues to use the old form as a check. */
524 for (insn = f; insn; insn = next)
526 enum rtx_code code = GET_CODE (insn);
528 next = NEXT_INSN (insn);
530 if (code == CALL_INSN)
532 /* Record whether this call created an edge. */
533 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
534 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
535 call_has_abnormal_edge = 0;
537 /* If there is an EH region or rethrow, we have an edge. */
538 if ((eh_list && region > 0)
539 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
540 call_has_abnormal_edge = 1;
543 /* If there is a nonlocal goto label and the specified
544 region number isn't -1, we have an edge. (0 means
545 no throw, but might have a nonlocal goto). */
546 if (nonlocal_goto_handler_labels && region >= 0)
547 call_has_abnormal_edge = 1;
555 int kind = NOTE_LINE_NUMBER (insn);
557 /* Keep a LIFO list of the currently active exception notes. */
558 if (kind == NOTE_INSN_EH_REGION_BEG)
559 eh_list = alloc_INSN_LIST (insn, eh_list);
560 else if (kind == NOTE_INSN_EH_REGION_END)
563 eh_list = XEXP (eh_list, 1);
564 free_INSN_LIST_node (t);
567 /* Look for basic block notes with which to keep the
568 basic_block_info pointers stable. Unthread the note now;
569 we'll put it back at the right place in create_basic_block.
570 Or not at all if we've already found a note in this block. */
571 else if (kind == NOTE_INSN_BASIC_BLOCK)
573 if (bb_note == NULL_RTX)
575 next = flow_delete_insn (insn);
582 /* A basic block starts at a label. If we've closed one off due
583 to a barrier or some such, no need to do it again. */
584 if (head != NULL_RTX)
586 /* While we now have edge lists with which other portions of
587 the compiler might determine a call ending a basic block
588 does not imply an abnormal edge, it will be a bit before
589 everything can be updated. So continue to emit a noop at
590 the end of such a block. */
591 if (GET_CODE (end) == CALL_INSN
592 && ! SIBLING_CALL_P (end))
594 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
595 end = emit_insn_after (nop, end);
598 create_basic_block (i++, head, end, bb_note);
605 /* A basic block ends at a jump. */
606 if (head == NULL_RTX)
610 /* ??? Make a special check for table jumps. The way this
611 happens is truly and amazingly gross. We are about to
612 create a basic block that contains just a code label and
613 an addr*vec jump insn. Worse, an addr_diff_vec creates
614 its own natural loop.
616 Prevent this bit of brain damage, pasting things together
617 correctly in make_edges.
619 The correct solution involves emitting the table directly
620 on the tablejump instruction as a note, or JUMP_LABEL. */
622 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
623 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
631 goto new_bb_inclusive;
634 /* A basic block ends at a barrier. It may be that an unconditional
635 jump already closed the basic block -- no need to do it again. */
636 if (head == NULL_RTX)
639 /* While we now have edge lists with which other portions of the
640 compiler might determine a call ending a basic block does not
641 imply an abnormal edge, it will be a bit before everything can
642 be updated. So continue to emit a noop at the end of such a
644 if (GET_CODE (end) == CALL_INSN
645 && ! SIBLING_CALL_P (end))
647 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
648 end = emit_insn_after (nop, end);
650 goto new_bb_exclusive;
653 /* A basic block ends at a call that can either throw or
654 do a non-local goto. */
655 if (call_has_abnormal_edge)
658 if (head == NULL_RTX)
663 create_basic_block (i++, head, end, bb_note);
664 head = end = NULL_RTX;
671 if (GET_RTX_CLASS (code) == 'i')
673 if (head == NULL_RTX)
680 if (GET_RTX_CLASS (code) == 'i')
684 /* Make a list of all labels referred to other than by jumps
685 (which just don't have the REG_LABEL notes).
687 Make a special exception for labels followed by an ADDR*VEC,
688 as this would be a part of the tablejump setup code.
690 Make a special exception for the eh_return_stub_label, which
691 we know isn't part of any otherwise visible control flow. */
693 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
694 if (REG_NOTE_KIND (note) == REG_LABEL)
696 rtx lab = XEXP (note, 0), next;
698 if (lab == eh_return_stub_label)
700 else if ((next = next_nonnote_insn (lab)) != NULL
701 && GET_CODE (next) == JUMP_INSN
702 && (GET_CODE (PATTERN (next)) == ADDR_VEC
703 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
707 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
712 if (head != NULL_RTX)
713 create_basic_block (i++, head, end, bb_note);
715 if (i != n_basic_blocks)
718 return label_value_list;
/* CFG cleanup pass body: removes unreachable blocks, moves stray EH
   region notes into a block, recomputes active EH regions, re-marks
   critical edges, and drops label_value_list.
   NOTE(review): the function header (name, parameters, braces) was
   dropped in this extract -- presumably this is the cleanup_cfg entry
   point referenced at original line 419; confirm against the full
   source.  Code left byte-identical, comments only.  */
721 /* Tidy the CFG by deleting unreachable code and whatnot. */
727 delete_unreachable_blocks ();
728 move_stray_eh_region_notes ();
729 record_active_eh_regions (f);
731 mark_critical_edges ();
733 /* Kill the data we won't maintain. */
734 label_value_list = NULL_RTX;
/* Build basic block INDEX spanning HEAD..END.  Reuses the basic_block
   struct attached to BB_NOTE when one is supplied and valid (re-threading
   the note after a leading CODE_LABEL, otherwise before HEAD); otherwise
   allocates a zeroed struct on the function obstack and emits a fresh
   NOTE_INSN_BASIC_BLOCK.  The bb note is always kept inside the block.
   NOTE(review): interior lines were dropped in this extract (the `static
   void' line, the declaration of `bb', the head/end assignments, the aux
   tagging statement, and several braces are missing); code left
   byte-identical, comments only.  */
737 /* Create a new basic block consisting of the instructions between
738 HEAD and END inclusive. Reuses the note and basic block struct
739 in BB_NOTE, if any. */
742 create_basic_block (index, head, end, bb_note)
744 rtx head, end, bb_note;
749 && ! RTX_INTEGRATED_P (bb_note)
750 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
753 /* If we found an existing note, thread it back onto the chain. */
755 if (GET_CODE (head) == CODE_LABEL)
756 add_insn_after (bb_note, head);
759 add_insn_before (bb_note, head);
765 /* Otherwise we must create a note and a basic block structure.
766 Since we allow basic block structs in rtl, give the struct
767 the same lifetime by allocating it off the function obstack
768 rather than using malloc. */
770 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
771 memset (bb, 0, sizeof (*bb));
773 if (GET_CODE (head) == CODE_LABEL)
774 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
777 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
780 NOTE_BASIC_BLOCK (bb_note) = bb;
783 /* Always include the bb note in the block. */
784 if (NEXT_INSN (end) == bb_note)
790 BASIC_BLOCK (index) = bb;
792 /* Tag the block so that we know it has been used when considering
793 other basic block notes. */
/* Rebuild the basic_block_for_insn varray of size MAX: for each block,
   walk its insns and record the owning block under each INSN_UID.
   NOTE(review): interior lines were dropped in this extract (function
   return type, declarations of `i'/`insn'/`end', the loop-termination
   test against bb->end, and braces are missing); code left
   byte-identical, comments only.  */
797 /* Records the basic block struct in BB_FOR_INSN, for every instruction
798 indexed by INSN_UID. MAX is the size of the array. */
801 compute_bb_for_insn (max)
806 if (basic_block_for_insn)
807 VARRAY_FREE (basic_block_for_insn);
808 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
810 for (i = 0; i < n_basic_blocks; ++i)
812 basic_block bb = BASIC_BLOCK (i);
819 int uid = INSN_UID (insn);
821 VARRAY_BB (basic_block_for_insn, uid) = bb;
824 insn = NEXT_INSN (insn);
/* Release every edge structure: walk each block's successor list and the
   entry block's successors, then clear the entry/exit lists.
   NOTE(review): interior lines were dropped in this extract (the function
   header, declarations of `e'/`n', the free() calls, the clearing of
   bb->succ/bb->pred, and braces are missing); code left byte-identical,
   comments only.  */
829 /* Free the memory associated with the edge structures. */
837 for (i = 0; i < n_basic_blocks; ++i)
839 basic_block bb = BASIC_BLOCK (i);
841 for (e = bb->succ; e ; e = n)
851 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
857 ENTRY_BLOCK_PTR->succ = 0;
858 EXIT_BLOCK_PTR->pred = 0;
/* Discover all CFG edges.  Adds the entry->block-0 fallthru edge, then
   for each block examines its last insn: tablejumps get one edge per
   table entry (plus the ARM-style out-of-range target), computed jumps
   get abnormal edges to everything on label_value_list and forced_labels,
   return jumps get an edge to EXIT, plain jumps an edge to JUMP_LABEL.
   Sibling calls get an edge to EXIT; calls (and, with asynchronous
   exceptions, all insns) get EH edges via make_eh_edge and abnormal-call
   edges to nonlocal goto handlers; the last block may get an edge to
   eh_return_stub_label (__throw special case).  Finally fallthru edges to
   the next block or EXIT are added.  An sbitmap edge cache is allocated
   when computed gotos are possible, to avoid O(E) duplicate searches.
   NOTE(review): interior lines were dropped in this extract
   (declarations of `i'/`insn'/`x'/`code'/`tmp'/`vec'/`j', the
   CASE_DROPS_THROUGH handling, the region argument of one make_eh_edge
   call, and many braces are missing); code left byte-identical, comments
   only.  */
863 /* Identify the edges between basic blocks.
865 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
866 that are otherwise unreachable may be reachable with a non-local goto.
868 BB_EH_END is an array indexed by basic block number in which we record
869 the list of exception regions active at the end of the basic block. */
872 make_edges (label_value_list)
873 rtx label_value_list;
876 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
877 sbitmap *edge_cache = NULL;
879 /* Assume no computed jump; revise as we create edges. */
880 current_function_has_computed_jump = 0;
882 /* Heavy use of computed goto in machine-generated code can lead to
883 nearly fully-connected CFGs. In that case we spend a significant
884 amount of time searching the edge lists for duplicates. */
885 if (forced_labels || label_value_list)
887 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
888 sbitmap_vector_zero (edge_cache, n_basic_blocks);
891 /* By nature of the way these get numbered, block 0 is always the entry. */
892 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
894 for (i = 0; i < n_basic_blocks; ++i)
896 basic_block bb = BASIC_BLOCK (i);
899 int force_fallthru = 0;
901 /* Examine the last instruction of the block, and discover the
902 ways we can leave the block. */
905 code = GET_CODE (insn);
908 if (code == JUMP_INSN)
912 /* ??? Recognize a tablejump and do the right thing. */
913 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
914 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
915 && GET_CODE (tmp) == JUMP_INSN
916 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
917 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
922 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
923 vec = XVEC (PATTERN (tmp), 0);
925 vec = XVEC (PATTERN (tmp), 1);
927 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
928 make_label_edge (edge_cache, bb,
929 XEXP (RTVEC_ELT (vec, j), 0), 0);
931 /* Some targets (eg, ARM) emit a conditional jump that also
932 contains the out-of-range target. Scan for these and
933 add an edge if necessary. */
934 if ((tmp = single_set (insn)) != NULL
935 && SET_DEST (tmp) == pc_rtx
936 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
937 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
938 make_label_edge (edge_cache, bb,
939 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
941 #ifdef CASE_DROPS_THROUGH
942 /* Silly VAXen. The ADDR_VEC is going to be in the way of
943 us naturally detecting fallthru into the next block. */
948 /* If this is a computed jump, then mark it as reaching
949 everything on the label_value_list and forced_labels list. */
950 else if (computed_jump_p (insn))
952 current_function_has_computed_jump = 1;
954 for (x = label_value_list; x; x = XEXP (x, 1))
955 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
957 for (x = forced_labels; x; x = XEXP (x, 1))
958 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
961 /* Returns create an exit out. */
962 else if (returnjump_p (insn))
963 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
965 /* Otherwise, we have a plain conditional or unconditional jump. */
968 if (! JUMP_LABEL (insn))
970 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
974 /* If this is a sibling call insn, then this is in effect a
975 combined call and return, and so we need an edge to the
976 exit block. No need to worry about EH edges, since we
977 wouldn't have created the sibling call in the first place. */
979 if (code == CALL_INSN && SIBLING_CALL_P (insn))
980 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
983 /* If this is a CALL_INSN, then mark it as reaching the active EH
984 handler for this CALL_INSN. If we're handling asynchronous
985 exceptions then any insn can reach any of the active handlers.
987 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
989 if (code == CALL_INSN || asynchronous_exceptions)
991 /* Add any appropriate EH edges. We do this unconditionally
992 since there may be a REG_EH_REGION or REG_EH_RETHROW note
993 on the call, and this needn't be within an EH region. */
994 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
996 /* If we have asynchronous exceptions, do the same for *all*
997 exception regions active in the block. */
998 if (asynchronous_exceptions
999 && bb->eh_beg != bb->eh_end)
1001 if (bb->eh_beg >= 0)
1002 make_eh_edge (edge_cache, eh_nest_info, bb,
1003 NULL_RTX, bb->eh_beg);
1005 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1006 if (GET_CODE (x) == NOTE
1007 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1008 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1010 int region = NOTE_EH_HANDLER (x);
1011 make_eh_edge (edge_cache, eh_nest_info, bb,
1016 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1018 /* ??? This could be made smarter: in some cases it's possible
1019 to tell that certain calls will not do a nonlocal goto.
1021 For example, if the nested functions that do the nonlocal
1022 gotos do not have their addresses taken, then only calls to
1023 those functions or to other nested functions that use them
1024 could possibly do nonlocal gotos. */
1025 /* We do know that a REG_EH_REGION note with a value less
1026 than 0 is guaranteed not to perform a non-local goto. */
1027 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1028 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1029 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1030 make_label_edge (edge_cache, bb, XEXP (x, 0),
1031 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1035 /* We know something about the structure of the function __throw in
1036 libgcc2.c. It is the only function that ever contains eh_stub
1037 labels. It modifies its return address so that the last block
1038 returns to one of the eh_stub labels within it. So we have to
1039 make additional edges in the flow graph. */
1040 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1041 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1043 /* Find out if we can drop through to the next block. */
1044 insn = next_nonnote_insn (insn);
1045 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1046 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1047 else if (i + 1 < n_basic_blocks)
1049 rtx tmp = BLOCK_HEAD (i + 1);
1050 if (GET_CODE (tmp) == NOTE)
1051 tmp = next_nonnote_insn (tmp);
1052 if (force_fallthru || insn == tmp)
1053 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1057 free_eh_nesting_info (eh_nest_info);
1059 sbitmap_vector_free (edge_cache);
/* Add an edge SRC->DST with FLAGS, suppressing duplicates: via the
   sbitmap cache for ordinary blocks, or by scanning src->succ when the
   cache is unavailable (or for ENTRY/EXIT, which have no cache rows).
   New edges are xcalloc'd and pushed onto both adjacency lists.
   NOTE(review): interior lines were dropped in this extract (the
   function header/return type, the duplicate-found early return inside
   the scan loop, the e->src/e->dst/e->flags assignments, n_edges
   accounting, and braces are missing); code left byte-identical,
   comments only.  */
1062 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1063 about the edge that is accumulated between calls. */
1066 make_edge (edge_cache, src, dst, flags)
1067 sbitmap *edge_cache;
1068 basic_block src, dst;
1074 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1075 many edges to them, and we didn't allocate memory for it. */
1076 use_edge_cache = (edge_cache
1077 && src != ENTRY_BLOCK_PTR
1078 && dst != EXIT_BLOCK_PTR);
1080 /* Make sure we don't add duplicate edges. */
1081 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1082 for (e = src->succ; e ; e = e->succ_next)
1089 e = (edge) xcalloc (1, sizeof (*e));
1092 e->succ_next = src->succ;
1093 e->pred_next = dst->pred;
1102 SET_BIT (edge_cache[src->index], dst->index);
/* Thin wrapper over make_edge targeting the block that starts at LABEL.
   Silently ignores non-CODE_LABEL rtx and labels that were never emitted
   (INSN_UID == 0, e.g. after a syntax error) to avoid crashing in
   BLOCK_FOR_INSN.
   NOTE(review): interior lines were dropped in this extract (function
   header, remaining parameter declarations, the early `return'
   statements, and braces are missing); code left byte-identical,
   comments only.  */
1105 /* Create an edge from a basic block to a label. */
1108 make_label_edge (edge_cache, src, label, flags)
1109 sbitmap *edge_cache;
1114 if (GET_CODE (label) != CODE_LABEL)
1117 /* If the label was never emitted, this insn is junk, but avoid a
1118 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1119 as a result of a syntax error and a diagnostic has already been
1122 if (INSN_UID (label) == 0)
1125 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
/* Add EH edges from SRC to every handler reachable from REGION (per
   reachable_handlers); edges are EDGE_ABNORMAL|EDGE_EH, plus
   EDGE_ABNORMAL_CALL when INSN is a CALL_INSN.
   NOTE(review): interior lines were dropped in this extract (function
   header, declarations of `num'/`is_call' and remaining parameters, the
   loop decrementing `num', the free of handler_list, and braces are
   missing); code left byte-identical, comments only.  */
1128 /* Create the edges generated by INSN in REGION. */
1131 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1132 sbitmap *edge_cache;
1133 eh_nesting_info *eh_nest_info;
1138 handler_info **handler_list;
1141 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1142 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1145 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1146 EDGE_ABNORMAL | EDGE_EH | is_call);
1150 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1151 dangerous if we intend to move basic blocks around. Move such notes
1152 into the following block. */
1155 move_stray_eh_region_notes ()
/* Nothing to do unless there are at least two blocks with a gap
   between them.  */
1160 if (n_basic_blocks < 2)
/* Walk adjacent block pairs (B1, B2) from the end of the function
   backwards; B2 is always the block following B1.  */
1163 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1164 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1166 rtx insn, next, list = NULL_RTX;
1168 b1 = BASIC_BLOCK (i);
/* Scan the insns strictly between B1's end and B2's head.  */
1169 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1171 next = NEXT_INSN (insn);
1172 if (GET_CODE (insn) == NOTE
1173 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1174 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1176 /* Unlink from the insn chain. */
1177 NEXT_INSN (PREV_INSN (insn)) = next;
1178 PREV_INSN (next) = PREV_INSN (insn);
/* Collect the stray note onto LIST (chained through NEXT_INSN).  */
1181 NEXT_INSN (insn) = list;
/* If no stray EH notes were found for this gap, move on.  */
1186 if (list == NULL_RTX)
1189 /* Find where to insert these things. */
/* Skip B2's leading label so the notes land inside the block proper.
   NOTE(review): the assignment of INSN to B2's head is elided here.  */
1191 if (GET_CODE (insn) == CODE_LABEL)
1192 insn = NEXT_INSN (insn);
/* Re-insert the collected notes one at a time after INSN.  NOTE(review):
   the loop header around these two statements is elided.  */
1196 next = NEXT_INSN (list);
1197 add_insn_after (list, insn);
1203 /* Recompute eh_beg/eh_end for each basic block. */
1206 record_active_eh_regions (f)
/* EH_LIST is a stack (an INSN_LIST) of the currently-open EH region
   notes as we scan the insn stream from F.  */
1209 rtx insn, eh_list = NULL_RTX;
1211 basic_block bb = BASIC_BLOCK (0);
1213 for (insn = f; insn ; insn = NEXT_INSN (insn))
/* At a block head, the innermost open region (if any) is the block's
   active region on entry.  */
1215 if (bb->head == insn)
1216 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1218 if (GET_CODE (insn) == NOTE)
1220 int kind = NOTE_LINE_NUMBER (insn);
/* Push on region-begin, pop on region-end.  */
1221 if (kind == NOTE_INSN_EH_REGION_BEG)
1222 eh_list = alloc_INSN_LIST (insn, eh_list);
1223 else if (kind == NOTE_INSN_EH_REGION_END)
1225 rtx t = XEXP (eh_list, 1);
1226 free_INSN_LIST_node (eh_list);
/* At a block end, record the active region on exit and advance to
   the next block (stopping after the last one).  */
1231 if (bb->end == insn)
1233 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1235 if (i == n_basic_blocks)
1237 bb = BASIC_BLOCK (i)
1242 /* Identify critical edges and set the bits appropriately. */
1245 mark_critical_edges ()
1247 int i, n = n_basic_blocks;
1250 /* We begin with the entry block. This is not terribly important now,
1251 but could be if a front end (Fortran) implemented alternate entry
1253 bb = ENTRY_BLOCK_PTR;
1260 /* (1) Critical edges must have a source with multiple successors. */
1261 if (bb->succ && bb->succ->succ_next)
1263 for (e = bb->succ; e ; e = e->succ_next)
1265 /* (2) Critical edges must have a destination with multiple
1266 predecessors. Note that we know there is at least one
1267 predecessor -- the edge we followed to get here. */
1268 if (e->dest->pred->pred_next)
1269 e->flags |= EDGE_CRITICAL;
1271 e->flags &= ~EDGE_CRITICAL;
/* Single-successor source: none of its out-edges can be critical.  */
1276 for (e = bb->succ; e ; e = e->succ_next)
1277 e->flags &= ~EDGE_CRITICAL;
/* Advance to the next block in index order.  NOTE(review): the
   surrounding loop/termination logic is elided in this excerpt.  */
1282 bb = BASIC_BLOCK (i);
1286 /* Split a (typically critical) edge. Return the new block.
1287 Abort on abnormal edges.
1289 ??? The code generally expects to be called on critical edges.
1290 The case of a block ending in an unconditional jump to a
1291 block with multiple predecessors is not handled optimally. */
1294 split_edge (edge_in)
1297 basic_block old_pred, bb, old_succ;
1302 /* Abnormal edges cannot be split. */
1303 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1306 old_pred = edge_in->src;
1307 old_succ = edge_in->dest;
1309 /* Remove the existing edge from the destination's pred list. */
1312 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1314 *pp = edge_in->pred_next;
1315 edge_in->pred_next = NULL;
1318 /* Create the new structures. */
1319 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1320 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1323 memset (bb, 0, sizeof (*bb));
1324 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1325 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1327 /* ??? This info is likely going to be out of date very soon. */
1328 if (old_succ->global_live_at_start)
/* Seed the new block's life info from the successor's entry set; both
   ends use the successor's AT_START set since the block is empty.  */
1330 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1331 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1335 CLEAR_REG_SET (bb->global_live_at_start);
1336 CLEAR_REG_SET (bb->global_live_at_end);
/* Wire the new block: its single successor edge is EDGE_OUT; the
   original edge EDGE_IN will be redirected into BB.  The split edge
   is no longer critical.  */
1341 bb->succ = edge_out;
1344 edge_in->flags &= ~EDGE_CRITICAL;
1346 edge_out->pred_next = old_succ->pred;
1347 edge_out->succ_next = NULL;
1349 edge_out->dest = old_succ;
1350 edge_out->flags = EDGE_FALLTHRU;
1351 edge_out->probability = REG_BR_PROB_BASE;
1353 old_succ->pred = edge_out;
1355 /* Tricky case -- if there existed a fallthru into the successor
1356 (and we're not it) we must add a new unconditional jump around
1357 the new block we're actually interested in.
1359 Further, if that edge is critical, this means a second new basic
1360 block must be created to hold it. In order to simplify correct
1361 insn placement, do this before we touch the existing basic block
1362 ordering for the block we were really wanting. */
1363 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
/* Search OLD_SUCC's other predecessors for a fallthru edge.  */
1366 for (e = edge_out->pred_next; e ; e = e->pred_next)
1367 if (e->flags & EDGE_FALLTHRU)
1372 basic_block jump_block;
1375 if ((e->flags & EDGE_CRITICAL) == 0
1376 && e->src != ENTRY_BLOCK_PTR)
1378 /* Non critical -- we can simply add a jump to the end
1379 of the existing predecessor. */
1380 jump_block = e->src;
1384 /* We need a new block to hold the jump. The simplest
1385 way to do the bulk of the work here is to recursively
1387 jump_block = split_edge (e);
1388 e = jump_block->succ;
1391 /* Now add the jump insn ... */
1392 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1394 jump_block->end = pos;
1395 if (basic_block_for_insn)
1396 set_block_for_insn (pos, jump_block);
1397 emit_barrier_after (pos);
1399 /* ... let jump know that label is in use, ... */
1400 JUMP_LABEL (pos) = old_succ->head;
1401 ++LABEL_NUSES (old_succ->head);
1403 /* ... and clear fallthru on the outgoing edge. */
1404 e->flags &= ~EDGE_FALLTHRU;
1406 /* Continue splitting the interesting edge. */
1410 /* Place the new block just in front of the successor. */
1411 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1412 if (old_succ == EXIT_BLOCK_PTR)
1413 j = n_basic_blocks - 1;
1415 j = old_succ->index;
/* Shift blocks J..end up one slot to make room for BB at index J.  */
1416 for (i = n_basic_blocks - 1; i > j; --i)
1418 basic_block tmp = BASIC_BLOCK (i - 1);
1419 BASIC_BLOCK (i) = tmp;
1422 BASIC_BLOCK (i) = bb;
1425 /* Create the basic block note.
1427 Where we place the note can have a noticeable impact on the generated
1428 code. Consider this cfg:
1439 If we need to insert an insn on the edge from block 0 to block 1,
1440 we want to ensure the instructions we insert are outside of any
1441 loop notes that physically sit between block 0 and block 1. Otherwise
1442 we confuse the loop optimizer into thinking the loop is a phony. */
1443 if (old_succ != EXIT_BLOCK_PTR
1444 && PREV_INSN (old_succ->head)
1445 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1446 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1447 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1448 PREV_INSN (old_succ->head));
1449 else if (old_succ != EXIT_BLOCK_PTR)
1450 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1452 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1453 NOTE_BASIC_BLOCK (bb_note) = bb;
1454 bb->head = bb->end = bb_note;
1456 /* Not quite simple -- for non-fallthru edges, we must adjust the
1457 predecessor's jump instruction to target our new block. */
1458 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1460 rtx tmp, insn = old_pred->end;
1461 rtx old_label = old_succ->head;
1462 rtx new_label = gen_label_rtx ();
1464 if (GET_CODE (insn) != JUMP_INSN
1467 /* ??? Recognize a tablejump and adjust all matching cases. */
1468 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1469 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1470 && GET_CODE (tmp) == JUMP_INSN
1471 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1472 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
/* ADDR_VEC keeps its labels in element 0; ADDR_DIFF_VEC in element 1.  */
1477 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1478 vec = XVEC (PATTERN (tmp), 0);
1480 vec = XVEC (PATTERN (tmp), 1);
/* Redirect every table entry that targeted the old label.  */
1482 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1483 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1485 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1486 --LABEL_NUSES (old_label);
1487 ++LABEL_NUSES (new_label);
1490 /* Handle casesi dispatch insns */
1491 if ((tmp = single_set (insn)) != NULL
1492 && SET_DEST (tmp) == pc_rtx
1493 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1494 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1495 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1497 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1499 --LABEL_NUSES (old_label);
1500 ++LABEL_NUSES (new_label);
1505 /* This would have indicated an abnormal edge. */
1506 if (computed_jump_p (insn))
1509 /* A return instruction can't be redirected. */
1510 if (returnjump_p (insn))
1513 /* If the insn doesn't go where we think, we're confused. */
1514 if (JUMP_LABEL (insn) != old_label)
/* Simple jump: retarget it at the new label.  */
1517 redirect_jump (insn, new_label);
/* Emit the new label at the head of the new block.  */
1520 emit_label_before (new_label, bb_note);
1521 bb->head = new_label;
1527 /* Queue instructions for insertion on an edge between two basic blocks.
1528 The new instructions and basic blocks (if any) will not appear in the
1529 CFG until commit_edge_insertions is called. */
1532 insert_insn_on_edge (pattern, e)
1536 /* We cannot insert instructions on an abnormal critical edge.
1537 It will be easier to find the culprit if we die now. */
1538 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1539 == (EDGE_ABNORMAL|EDGE_CRITICAL))
/* Append PATTERN to the edge's pending insn sequence; start a fresh
   sequence if none is queued yet.  NOTE(review): the start_sequence
   branch for the empty case is elided in this excerpt.  */
1542 if (e->insns == NULL_RTX)
1545 push_to_sequence (e->insns);
1547 emit_insn (pattern);
/* Save the accumulated sequence back on the edge.  */
1549 e->insns = get_insns ();
1553 /* Update the CFG for the instructions queued on edge E. */
1556 commit_one_edge_insertion (e)
1559 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1562 /* Pull the insns off the edge now since the edge might go away. */
1564 e->insns = NULL_RTX;
1566 /* Figure out where to put these things. If the destination has
1567 one predecessor, insert there. Except for the exit block. */
1568 if (e->dest->pred->pred_next == NULL
1569 && e->dest != EXIT_BLOCK_PTR)
1573 /* Get the location correct wrt a code label, and "nice" wrt
1574 a basic block note, and before everything else. */
1576 if (GET_CODE (tmp) == CODE_LABEL)
1577 tmp = NEXT_INSN (tmp)
1578 if (GET_CODE (tmp) == NOTE
1579 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1580 tmp = NEXT_INSN (tmp);
/* If we skipped past nothing, insert before the block head;
   otherwise insert after the label/note we just skipped.  */
1581 if (tmp == bb->head)
1584 after = PREV_INSN (tmp);
1587 /* If the source has one successor and the edge is not abnormal,
1588 insert there. Except for the entry block. */
1589 else if ((e->flags & EDGE_ABNORMAL) == 0
1590 && e->src->succ->succ_next == NULL
1591 && e->src != ENTRY_BLOCK_PTR)
1594 /* It is possible to have a non-simple jump here. Consider a target
1595 where some forms of unconditional jumps clobber a register. This
1596 happens on the fr30 for example.
1598 We know this block has a single successor, so we can just emit
1599 the queued insns before the jump. */
1600 if (GET_CODE (bb->end) == JUMP_INSN)
1606 /* We'd better be fallthru, or we've lost track of what's what. */
1607 if ((e->flags & EDGE_FALLTHRU) == 0)
1614 /* Otherwise we must split the edge. */
1617 bb = split_edge (e);
1621 /* Now that we've found the spot, do the insertion. */
1623 /* Set the new block number for these insns, if structure is allocated. */
1624 if (basic_block_for_insn)
1627 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1628 set_block_for_insn (i, bb);
/* Insert the queued sequence either BEFORE or AFTER the chosen point,
   updating the block's head/end if the boundary insn moved.
   NOTE(review): the branch selecting between the two cases and the
   head/end updates are partially elided in this excerpt.  */
1633 emit_insns_before (insns, before);
1634 if (before == bb->head)
1639 rtx last = emit_insns_after (insns, after);
1640 if (after == bb->end)
1644 if (GET_CODE (last) == JUMP_INSN)
1646 if (returnjump_p (last))
1648 /* ??? Remove all outgoing edges from BB and add one
1649 for EXIT. This is not currently a problem because
1650 this only happens for the (single) epilogue, which
1651 already has a fallthru edge to EXIT. */
/* Sanity: a return insn may only end a fallthru edge to EXIT.  */
1654 if (e->dest != EXIT_BLOCK_PTR
1655 || e->succ_next != NULL
1656 || (e->flags & EDGE_FALLTHRU) == 0)
1658 e->flags &= ~EDGE_FALLTHRU;
1660 emit_barrier_after (last);
1669 /* Update the CFG for all queued instructions. */
1672 commit_edge_insertions ()
1677 #ifdef ENABLE_CHECKING
1678 verify_flow_info ();
/* Walk every block starting from ENTRY, committing each successor
   edge's queued insns.  Grab the next pointer first since committing
   can delete or replace the edge.  */
1682 bb = ENTRY_BLOCK_PTR;
1687 for (e = bb->succ; e ; e = next)
1689 next = e->succ_next;
1691 commit_one_edge_insertion (e);
/* Advance through the block array until it is exhausted.  */
1694 if (++i >= n_basic_blocks)
1696 bb = BASIC_BLOCK (i);
1700 /* Delete all unreachable basic blocks. */
1703 delete_unreachable_blocks ()
1705 basic_block *worklist, *tos;
1706 int deleted_handler;
/* TOS is the stack top of a simple worklist of reachable blocks.  */
1711 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1713 /* Use basic_block->aux as a marker. Clear them all. */
1715 for (i = 0; i < n; ++i)
1716 BASIC_BLOCK (i)->aux = NULL;
1718 /* Add our starting points to the worklist. Almost always there will
1719 be only one. It isn't inconceivable that we might one day directly
1720 support Fortran alternate entry points. */
1722 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1726 /* Mark the block with a handy non-null value. */
1730 /* Iterate: find everything reachable from what we've already seen. */
1732 while (tos != worklist)
1734 basic_block b = *--tos;
/* Push each unmarked successor.  NOTE(review): the mark-and-push
   statements inside this loop are elided in this excerpt.  */
1736 for (e = b->succ; e ; e = e->succ_next)
1744 /* Delete all unreachable basic blocks. Count down so that we don't
1745 interfere with the block renumbering that happens in delete_block. */
1747 deleted_handler = 0;
1749 for (i = n - 1; i >= 0; --i)
1751 basic_block b = BASIC_BLOCK (i);
1754 /* This block was found. Tidy up the mark. */
/* Unreached block: delete it, remembering if it was an EH handler.  */
1757 deleted_handler |= delete_block (b);
1760 tidy_fallthru_edges ();
1762 /* If we deleted an exception handler, we may have EH region begin/end
1763 blocks to remove as well. */
1764 if (deleted_handler)
1765 delete_eh_regions ();
1770 /* Find EH regions for which there is no longer a handler, and delete them. */
1773 delete_eh_regions ()
/* Refresh rethrow usage info before deciding which regions are dead.  */
1777 update_rethrow_references ();
1779 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1780 if (GET_CODE (insn) == NOTE)
1782 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1783 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1785 int num = NOTE_EH_HANDLER (insn);
1786 /* A NULL handler indicates a region is no longer needed,
1787 as long as its rethrow label isn't used. */
1788 if (get_first_handler (num) == NULL && ! rethrow_used (num))
/* Turn the region note into a deleted note rather than unlinking it.  */
1790 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1791 NOTE_SOURCE_FILE (insn) = 0;
1797 /* Return true if NOTE is not one of the ones that must be kept paired,
1798 so that we may simply delete them. */
1801 can_delete_note_p (note)
/* Only already-deleted notes and basic-block notes are freely deletable;
   everything else (loop, EH region, block begin/end notes) is paired.  */
1804 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1805 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1808 /* Unlink a chain of insns between START and FINISH, leaving notes
1809 that must be paired. */
1812 flow_delete_insn_chain (start, finish)
1815 /* Unchain the insns one by one. It would be quicker to delete all
1816 of these with a single unchaining, rather than one at a time, but
1817 we need to keep the NOTE's. */
1823 next = NEXT_INSN (start);
/* Skip notes that must stay paired and labels we cannot delete;
   everything else is physically removed from the chain.  */
1824 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1826 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1829 next = flow_delete_insn (start);
/* Stop once FINISH itself has been processed (inclusive range).  */
1831 if (start == finish)
1837 /* Delete the insns in a (non-live) block. We physically delete every
1838 non-deleted-note insn, and update the flow graph appropriately.
1840 Return nonzero if we deleted an exception handler. */
1842 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1843 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1849 int deleted_handler = 0;
1852 /* If the head of this block is a CODE_LABEL, then it might be the
1853 label for an exception handler which can't be reached.
1855 We need to remove the label from the exception_handler_label list
1856 and remove the associated NOTE_INSN_EH_REGION_BEG and
1857 NOTE_INSN_EH_REGION_END notes. */
/* Warn the user about unreachable code before we destroy it.  */
1861 never_reached_warning (insn);
1863 if (GET_CODE (insn) == CODE_LABEL)
1865 rtx x, *prev = &exception_handler_labels;
1867 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1869 if (XEXP (x, 0) == insn)
1871 /* Found a match, splice this label out of the EH label list. */
1872 *prev = XEXP (x, 1);
1873 XEXP (x, 1) = NULL_RTX;
1874 XEXP (x, 0) = NULL_RTX;
1876 /* Remove the handler from all regions */
1877 remove_handler (insn);
1878 deleted_handler = 1;
/* Not this label: keep walking the EH label list.  */
1881 prev = &XEXP (x, 1);
1884 /* This label may be referenced by code solely for its value, or
1885 referenced by static data, or something. We have determined
1886 that it is not reachable, but cannot delete the label itself.
1887 Save code space and continue to delete the balance of the block,
1888 along with properly updating the cfg. */
1889 if (!can_delete_label_p (insn))
1891 /* If we've only got one of these, skip the whole deleting
1894 goto no_delete_insns;
1895 insn = NEXT_INSN (insn);
1899 /* Include any jump table following the basic block. */
1901 if (GET_CODE (end) == JUMP_INSN
1902 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1903 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1904 && GET_CODE (tmp) == JUMP_INSN
1905 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1906 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1909 /* Include any barrier that may follow the basic block. */
1910 tmp = next_nonnote_insn (end);
1911 if (tmp && GET_CODE (tmp) == BARRIER)
1914 /* Selectively delete the entire chain. */
1915 flow_delete_insn_chain (insn, end);
1919 /* Remove the edges into and out of this block. Note that there may
1920 indeed be edges in, if we are removing an unreachable loop. */
/* For each incoming edge, unlink it from the source's successor list;
   for each outgoing edge, unlink it from the dest's predecessor list.
   NOTE(review): the splice/free statements inside these loops are
   elided in this excerpt.  */
1924 for (e = b->pred; e ; e = next)
1926 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1929 next = e->pred_next;
1933 for (e = b->succ; e ; e = next)
1935 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1938 next = e->succ_next;
1947 /* Remove the basic block from the array, and compact behind it. */
1950 return deleted_handler;
1953 /* Remove block B from the basic block array and compact behind it. */
1959 int i, n = n_basic_blocks;
/* Slide every block after B down one slot, renumbering as we go.
   NOTE(review): the x->index update inside the loop is elided here.  */
1961 for (i = b->index; i + 1 < n; ++i)
1963 basic_block x = BASIC_BLOCK (i + 1);
1964 BASIC_BLOCK (i) = x;
/* Shrink the varray; the global block count is decremented as well
   (elided in this excerpt).  */
1968 basic_block_info->num_elements--;
1972 /* Delete INSN by patching it out. Return the next insn. */
1975 flow_delete_insn (insn)
1978 rtx prev = PREV_INSN (insn);
1979 rtx next = NEXT_INSN (insn);
/* Detach INSN completely from the doubly-linked insn chain.  */
1982 PREV_INSN (insn) = NULL_RTX;
1983 NEXT_INSN (insn) = NULL_RTX;
1986 NEXT_INSN (prev) = next;
1988 PREV_INSN (next) = prev;
/* Deleting the last insn: update the chain's recorded tail.  */
1990 set_last_insn (prev);
/* A deleted label cannot remain a nonlocal-goto handler.  */
1992 if (GET_CODE (insn) == CODE_LABEL)
1993 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1995 /* If deleting a jump, decrement the use count of the label. Deleting
1996 the label itself should happen in the normal course of block merging. */
1997 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1998 LABEL_NUSES (JUMP_LABEL (insn))--;
2000 /* Also if deleting an insn that references a label. */
2001 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2002 LABEL_NUSES (XEXP (note, 0))--;
2007 /* True if a given label can be deleted. */
2010 can_delete_label_p (label)
/* A label explicitly marked as preserved must stay.  */
2015 if (LABEL_PRESERVE_P (label))
/* Likewise labels referenced by computed gotos, label-value
   expressions, or the exception handler list.  */
2018 for (x = forced_labels; x ; x = XEXP (x, 1))
2019 if (label == XEXP (x, 0))
2021 for (x = label_value_list; x ; x = XEXP (x, 1))
2022 if (label == XEXP (x, 0))
2024 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2025 if (label == XEXP (x, 0))
2028 /* User declared labels must be preserved. */
2029 if (LABEL_NAME (label) != 0)
2035 /* Blocks A and B are to be merged into a single block. A has no incoming
2036 fallthru edge, so it can be moved before B without adding or modifying
2037 any jumps (aside from the jump from A to B). */
2040 merge_blocks_move_predecessor_nojumps (a, b)
2043 rtx start, end, barrier;
2049 /* We want to delete the BARRIER after the end of the insns we are
2050 going to move. If we don't find a BARRIER, then do nothing. This
2051 can happen in some cases if we have labels we can not delete.
2053 Similarly, do nothing if we can not delete the label at the start
2054 of the target block. */
2055 barrier = next_nonnote_insn (end);
2056 if (GET_CODE (barrier) != BARRIER
2057 || (GET_CODE (b->head) == CODE_LABEL
2058 && ! can_delete_label_p (b->head)))
2061 flow_delete_insn (barrier);
2063 /* Move block and loop notes out of the chain so that we do not
2064 disturb their order.
2066 ??? A better solution would be to squeeze out all the non-nested notes
2067 and adjust the block trees appropriately. Even better would be to have
2068 a tighter connection between block trees and rtl so that this is not
2070 start = squeeze_notes (start, end);
2072 /* Scramble the insn chain. */
/* Move A's insns to just before B's head, unless already adjacent.  */
2073 if (end != PREV_INSN (b->head))
2074 reorder_insns (start, end, PREV_INSN (b->head));
2078 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2079 a->index, b->index);
2082 /* Swap the records for the two blocks around. Although we are deleting B,
2083 A is now where B was and we want to compact the BB array from where
2085 BASIC_BLOCK(a->index) = b;
2086 BASIC_BLOCK(b->index) = a;
2088 a->index = b->index;
2091 /* Now blocks A and B are contiguous. Merge them. */
2092 merge_blocks_nomove (a, b);
2097 /* Blocks A and B are to be merged into a single block. B has no outgoing
2098 fallthru edge, so it can be moved after A without adding or modifying
2099 any jumps (aside from the jump from A to B). */
2102 merge_blocks_move_successor_nojumps (a, b)
2105 rtx start, end, barrier;
2110 /* We want to delete the BARRIER after the end of the insns we are
2111 going to move. If we don't find a BARRIER, then do nothing. This
2112 can happen in some cases if we have labels we can not delete.
2114 Similarly, do nothing if we can not delete the label at the start
2115 of the target block. */
2116 barrier = next_nonnote_insn (end);
2117 if (GET_CODE (barrier) != BARRIER
2118 || (GET_CODE (b->head) == CODE_LABEL
2119 && ! can_delete_label_p (b->head)))
2122 flow_delete_insn (barrier);
2124 /* Move block and loop notes out of the chain so that we do not
2125 disturb their order.
2127 ??? A better solution would be to squeeze out all the non-nested notes
2128 and adjust the block trees appropriately. Even better would be to have
2129 a tighter connection between block trees and rtl so that this is not
2131 start = squeeze_notes (start, end);
2133 /* Scramble the insn chain. */
/* Move B's insns to directly follow A's end; blocks become adjacent.  */
2134 reorder_insns (start, end, a->end);
2136 /* Now blocks A and B are contiguous. Merge them. */
2137 merge_blocks_nomove (a, b);
2141 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2142 b->index, a->index);
2148 /* Blocks A and B are to be merged into a single block. The insns
2149 are already contiguous, hence `nomove'. */
2152 merge_blocks_nomove (a, b)
2156 rtx b_head, b_end, a_end;
2159 /* If there was a CODE_LABEL beginning B, delete it. */
2162 if (GET_CODE (b_head) == CODE_LABEL)
2164 /* Detect basic blocks with nothing but a label. This can happen
2165 in particular at the end of a function. */
2166 if (b_head == b_end)
2168 b_head = flow_delete_insn (b_head);
2171 /* Delete the basic block note. */
2172 if (GET_CODE (b_head) == NOTE
2173 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2175 if (b_head == b_end)
2177 b_head = flow_delete_insn (b_head);
2180 /* If there was a jump out of A, delete it. */
2182 if (GET_CODE (a_end) == JUMP_INSN)
/* Find the insn before the jump so we can re-anchor A's end.  */
2186 prev = prev_nonnote_insn (a_end);
2191 /* If this was a conditional jump, we need to also delete
2192 the insn that set cc0. */
2194 if (prev && sets_cc0_p (prev))
2197 prev = prev_nonnote_insn (prev);
/* Delete the cc0 setter.  NOTE(review): the cc0-setter/jump
   bookkeeping around TMP is partially elided in this excerpt.  */
2200 flow_delete_insn (tmp);
2204 /* Note that a->head != a->end, since we should have at least a
2205 bb note plus the jump, so prev != insn. */
2206 flow_delete_insn (a_end);
2210 /* By definition, there should only be one successor of A, and that is
2211 B. Free that edge struct. */
2215 /* Adjust the edges out of B for the new owner. */
2216 for (e = b->succ; e ; e = e->succ_next)
2220 /* Reassociate the insns of B with A. */
2223 BLOCK_FOR_INSN (b_head) = a;
2224 while (b_head != b_end)
2226 b_head = NEXT_INSN (b_head);
2227 BLOCK_FOR_INSN (b_head) = a;
2233 /* Compact the basic block array. */
2237 /* Attempt to merge basic blocks that are potentially non-adjacent.
2238 Return true iff the attempt succeeded. */
2241 merge_blocks (e, b, c)
2245 /* If B has a fallthru edge to C, no need to move anything. */
2246 if (e->flags & EDGE_FALLTHRU)
2248 /* If a label still appears somewhere and we cannot delete the label,
2249 then we cannot merge the blocks. The edge was tidied already. */
2251 rtx insn, stop = NEXT_INSN (c->head);
2252 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2253 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2256 merge_blocks_nomove (b, c);
2260 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2261 b->index, c->index);
2270 int c_has_outgoing_fallthru;
2271 int b_has_incoming_fallthru;
2273 /* We must make sure to not munge nesting of exception regions,
2274 lexical blocks, and loop notes.
2276 The first is taken care of by requiring that the active eh
2277 region at the end of one block always matches the active eh
2278 region at the beginning of the next block.
2280 The later two are taken care of by squeezing out all the notes. */
2282 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2283 executed and we may want to treat blocks which have two out
2284 edges, one normal, one abnormal as only having one edge for
2285 block merging purposes. */
/* Determine whether C has an outgoing fallthru and whether B has an
   incoming fallthru; these decide which move direction is legal.  */
2287 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2288 if (tmp_edge->flags & EDGE_FALLTHRU)
2290 c_has_outgoing_fallthru = (tmp_edge != NULL);
2292 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2293 if (tmp_edge->flags & EDGE_FALLTHRU)
2295 b_has_incoming_fallthru = (tmp_edge != NULL);
2297 /* If B does not have an incoming fallthru, and the exception regions
2298 match, then it can be moved immediately before C without introducing
2301 C can not be the first block, so we do not have to worry about
2302 accessing a non-existent block. */
2303 d = BASIC_BLOCK (c->index - 1);
2304 if (! b_has_incoming_fallthru
2305 && d->eh_end == b->eh_beg
2306 && b->eh_end == c->eh_beg)
2307 return merge_blocks_move_predecessor_nojumps (b, c);
2309 /* Otherwise, we're going to try to move C after B. Make sure the
2310 exception regions match.
2312 If B is the last basic block, then we must not try to access the
2313 block structure for block B + 1. Luckily in that case we do not
2314 need to worry about matching exception regions. */
2315 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2316 if (b->eh_end == c->eh_beg
2317 && (d == NULL || c->eh_end == d->eh_beg))
2319 /* If C does not have an outgoing fallthru, then it can be moved
2320 immediately after B without introducing or modifying jumps. */
2321 if (! c_has_outgoing_fallthru)
2322 return merge_blocks_move_successor_nojumps (b, c);
2324 /* Otherwise, we'll need to insert an extra jump, and possibly
2325 a new block to contain it. */
2326 /* ??? Not implemented yet. */
2333 /* Top level driver for merge_blocks. */
2340 /* Attempt to merge blocks as made possible by edge removal. If a block
2341 has only one successor, and the successor has only one predecessor,
2342 they may be combined. */
2344 for (i = 0; i < n_basic_blocks; )
2346 basic_block c, b = BASIC_BLOCK (i);
2349 /* A loop because chains of blocks might be combineable. */
/* Keep merging B with its sole successor C while the edge is the only
   one on both sides, is not an EH edge, C is not the exit block, and
   B's terminating jump (if any) has no side effects.  */
2350 while ((s = b->succ) != NULL
2351 && s->succ_next == NULL
2352 && (s->flags & EDGE_EH) == 0
2353 && (c = s->dest) != EXIT_BLOCK_PTR
2354 && c->pred->pred_next == NULL
2355 /* If the jump insn has side effects, we can't kill the edge. */
2356 && (GET_CODE (b->end) != JUMP_INSN
2357 || onlyjump_p (b->end))
2358 && merge_blocks (s, b, c))
2361 /* Don't get confused by the index shift caused by deleting blocks. */
2366 /* The given edge should potentially be a fallthru edge. If that is in
2367 fact true, delete the jump and barriers that are in the way. */
2370 tidy_fallthru_edge (e, b, c)
2376 /* ??? In a late-running flow pass, other folks may have deleted basic
2377 blocks by nopping out blocks, leaving multiple BARRIERs between here
2378 and the target label. They ought to be chastized and fixed.
2380 We can also wind up with a sequence of undeletable labels between
2381 one block and the next.
2383 So search through a sequence of barriers, labels, and notes for
2384 the head of block C and assert that we really do fall through. */
2386 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2389 /* Remove what will soon cease being the jump insn from the source block.
2390 If block B consisted only of this single jump, turn it into a deleted
2393 if (GET_CODE (q) == JUMP_INSN)
2396 /* If this was a conditional jump, we need to also delete
2397 the insn that set cc0. */
2398 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
/* The single-jump block case: demote the jump to a deleted note so
   the block does not become empty.  */
2405 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2406 NOTE_SOURCE_FILE (q) = 0;
/* Otherwise retreat B's end past the jump we are removing.  */
2409 b->end = q = PREV_INSN (q);
2412 /* Selectively unlink the sequence. */
2413 if (q != PREV_INSN (c->head))
2414 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
/* The edge is now a genuine fallthru.  */
2416 e->flags |= EDGE_FALLTHRU;
2419 /* Fix up edges that now fall through, or rather should now fall through
2420 but previously required a jump around now deleted blocks. Simplify
2421 the search by only examining blocks numerically adjacent, since this
2422 is how find_basic_blocks created them. */
2425 tidy_fallthru_edges ()
/* Examine each numerically-adjacent pair (B, C).  */
2429 for (i = 1; i < n_basic_blocks; ++i)
2431 basic_block b = BASIC_BLOCK (i - 1);
2432 basic_block c = BASIC_BLOCK (i);
2435 /* We care about simple conditional or unconditional jumps with
2438 If we had a conditional branch to the next instruction when
2439 find_basic_blocks was called, then there will only be one
2440 out edge for the block which ended with the conditional
2441 branch (since we do not create duplicate edges).
2443 Furthermore, the edge will be marked as a fallthru because we
2444 merge the flags for the duplicate edges. So we do not want to
2445 check that the edge is not a FALLTHRU edge. */
2446 if ((s = b->succ) != NULL
2447 && s->succ_next == NULL
2449 /* If the jump insn has side effects, we can't tidy the edge. */
2450 && (GET_CODE (b->end) != JUMP_INSN
2451 || onlyjump_p (b->end)))
2452 tidy_fallthru_edge (s, b, c);
2456 /* Discover and record the loop depth at the head of each basic block. */
2459 calculate_loop_depth (dump)
2464 /* The loop infrastructure does the real job for us. */
2465 flow_loops_find (&loops);
/* Optionally dump the discovered loops, then release the structure;
   the per-block loop_depth fields persist after the free.  */
2468 flow_loops_dump (&loops, dump, 0);
2470 flow_loops_free (&loops);
2473 /* Perform data flow analysis.
2474 F is the first insn of the function and NREGS the number of register numbers
2478 life_analysis (f, nregs, file, remove_dead_code)
2482 int remove_dead_code;
2484 #ifdef ELIMINABLE_REGS
2486 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2491 /* Dead code elimination changes basic block structure and therefore
2492 breaks the SSA phi representation. Particularly, a phi node
2493 can have an alternative value for each incoming block, referenced
2494 by the block number. Removing dead code can bump entire blocks
2495 and therefore cause blocks to be renumbered, invalidating the
2496 numbering of phi alternatives. */
2497 if (remove_dead_code && in_ssa_form)
2500 /* Record which registers will be eliminated. We use this in
2503 CLEAR_HARD_REG_SET (elim_reg_set);
2505 #ifdef ELIMINABLE_REGS
2506 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2507 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2509 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2512 /* We want alias analysis information for local dead store elimination. */
2513 init_alias_analysis ();
2516 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2520 if (! remove_dead_code)
2521 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2524 /* The post-reload life analysis must have (on a global basis) the same
2525 registers live as was computed by reload itself, or register
2526 elimination offsets and such may be incorrect.
2528 Reload will make some registers as live even though they do not
2529 appear in the rtl. */
2530 if (reload_completed)
2531 flags &= ~PROP_REG_INFO;
2535 /* Always remove no-op moves. Do this before other processing so
2536 that we don't have to keep re-scanning them. */
2537 delete_noop_moves (f);
2539 /* Some targets can emit simpler epilogues if they know that sp was
2540 not ever modified during the function. After reload, of course,
2541 we've already emitted the epilogue so there's no sense searching. */
2542 if (! reload_completed)
2543 notice_stack_pointer_modification (f);
2545 /* Allocate and zero out data structures that will record the
2546 data from lifetime analysis. */
2547 allocate_reg_life_data ();
2548 allocate_bb_life_data ();
2549 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2550 all_blocks = sbitmap_alloc (n_basic_blocks);
2551 sbitmap_ones (all_blocks);
2553 /* Find the set of registers live on function exit. */
2554 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2556 /* "Update" life info from zero. It'd be nice to begin the
2557 relaxation with just the exit and noreturn blocks, but that set
2558 is not immediately handy. */
2560 if (flags & PROP_REG_INFO)
2561 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2562 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
/* Release the analysis scratch data.  */
2565 sbitmap_free (all_blocks);
2566 free (reg_next_use);
2567 reg_next_use = NULL;
2568 end_alias_analysis ();
/* Optionally dump the results, then free the per-block variables
   (keeping what later passes still need).  */
2571 dump_flow_info (file);
2573 free_basic_block_vars (1);
2576 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2577 Search for REGNO. If found, abort if it is not wider than word_mode. */
/* NOTE(review): sampled extract -- the return type, the declarations of
   PX/PREGNO, the `x = *px' unpacking, and the abort/return lines are on
   lines missing from this view.  */
2580 verify_wide_reg_1 (px, pregno)
2585 unsigned int regno = *(int *) pregno;
2587 if (GET_CODE (x) == REG && REGNO (x) == regno)
/* A match that fits in a single word is the failure case being checked.  */
2589 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2596 /* A subroutine of verify_local_live_at_start. Search through insns
2597 between HEAD and END looking for register REGNO. */
2600 verify_wide_reg (regno, head, end)
/* NOTE(review): "®no" below is a mojibake of "&regno" (the "&reg"
   prefix was decoded as the HTML entity for the registered-trademark
   sign); restore "&regno" when editing the real source.  */
2606 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2607 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, ®no)
/* Walk forward one insn at a time until END (loop header/exit test is on
   missing lines).  */
2611 head = NEXT_INSN (head);
2614 /* We didn't find the register at all. Something's way screwy. */
2618 /* A subroutine of update_life_info. Verify that there are no untoward
2619 changes in live_at_start during a local update. */
/* NOTE(review): sampled extract -- the BB parameter declaration and the
   abort calls in the failure branches are on lines not visible here.  */
2622 verify_local_live_at_start (new_live_at_start, bb)
2623 regset new_live_at_start;
2626 if (reload_completed)
2628 /* After reload, there are no pseudos, nor subregs of multi-word
2629 registers. The regsets should exactly match. */
2630 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2637 /* Find the set of changed registers. */
/* Destructive: NEW_LIVE_AT_START becomes the symmetric difference.  */
2638 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2640 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2642 /* No registers should die. */
2643 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2645 /* Verify that the now-live register is wider than word_mode. */
2646 verify_wide_reg (i, bb->head, bb->end);
2651 /* Updates life information starting with the basic blocks set in BLOCKS.
2653 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2654 expecting local modifications to basic blocks. If we find extra
2655 registers live at the beginning of a block, then we either killed
2656 useful data, or we have a broken split that wants data not provided.
2657 If we find registers removed from live_at_start, that means we have
2658 a broken peephole that is killing a register it shouldn't.
2660 ??? This is not true in one situation -- when a pre-reload splitter
2661 generates subregs of a multi-word pseudo, current life analysis will
2662 lose the kill. So we _can_ have a pseudo go live. How irritating.
2664 BLOCK_FOR_INSN is assumed to be correct.
2666 PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
2667 up reg_next_use. Including PROP_REG_INFO does not properly refresh
2668 regs_ever_live unless the caller resets it to zero. */
/* NOTE(review): sampled extract -- declarations of BLOCKS and PROP_FLAGS,
   the FREE_REG_SET of TMP, and the closing statements are on lines not
   visible here.  */
2671 update_life_info (blocks, extent, prop_flags)
2673 enum update_life_extent extent;
2677 regset_head tmp_head;
2680 tmp = INITIALIZE_REG_SET (tmp_head);
2682 /* For a global update, we go through the relaxation process again. */
2683 if (extent != UPDATE_LIFE_LOCAL)
2685 calculate_global_regs_live (blocks, blocks,
2686 prop_flags & PROP_SCAN_DEAD_CODE);
2688 /* If asked, remove notes from the blocks we'll update. */
2689 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2690 count_or_remove_death_notes (blocks, 1);
/* Re-scan each requested block insn-by-insn to refresh its local info.  */
2693 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2695 basic_block bb = BASIC_BLOCK (i);
2697 COPY_REG_SET (tmp, bb->global_live_at_end);
2698 propagate_block (bb, tmp, (regset) NULL, prop_flags);
/* For a local update, TMP now holds the recomputed live_at_start;
   verify it matches what the blocks already claim.  */
2700 if (extent == UPDATE_LIFE_LOCAL)
2701 verify_local_live_at_start (tmp, bb);
2706 if (prop_flags & PROP_REG_INFO)
2708 /* The only pseudos that are live at the beginning of the function
2709 are those that were not set anywhere in the function. local-alloc
2710 doesn't know how to handle these correctly, so mark them as not
2711 local to any one basic block. */
2712 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2713 FIRST_PSEUDO_REGISTER, i,
2714 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2716 /* We have a problem with any pseudoreg that lives across the setjmp.
2717 ANSI says that if a user variable does not change in value between
2718 the setjmp and the longjmp, then the longjmp preserves it. This
2719 includes longjmp from a place where the pseudo appears dead.
2720 (In principle, the value still exists if it is in scope.)
2721 If the pseudo goes in a hard reg, some other value may occupy
2722 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2723 Conclusion: such a pseudo must not go in a hard reg. */
2724 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2725 FIRST_PSEUDO_REGISTER, i,
2727 if (regno_reg_rtx[i] != 0)
/* LIVE_LENGTH of -1 tells the allocator to keep this pseudo in memory.  */
2729 REG_LIVE_LENGTH (i) = -1;
2730 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2736 /* Free the variables allocated by find_basic_blocks.
2738 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
/* NOTE(review): sampled extract -- braces and a few statements (e.g. the
   reset of n_basic_blocks, presumably) are on lines not visible here.  */
2741 free_basic_block_vars (keep_head_end_p)
2742 int keep_head_end_p;
2744 if (basic_block_for_insn)
2746 VARRAY_FREE (basic_block_for_insn);
2747 basic_block_for_insn = NULL;
2750 if (! keep_head_end_p)
2753 VARRAY_FREE (basic_block_info);
/* Clear the fake entry/exit blocks' per-function pointers so stale data
   is not carried into the next function.  */
2756 ENTRY_BLOCK_PTR->aux = NULL;
2757 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2758 EXIT_BLOCK_PTR->aux = NULL;
2759 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2763 /* Return nonzero if the destination of SET equals the source. */
/* NOTE(review): the function header line itself (name and SET parameter)
   is missing from this sampled extract; only the body survives.  */
2768 rtx src = SET_SRC (set);
2769 rtx dst = SET_DEST (set);
2771 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
/* Different words of the same register are not a no-op copy.  */
2773 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2775 src = SUBREG_REG (src);
2776 dst = SUBREG_REG (dst);
2779 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2780 && REGNO (src) == REGNO (dst));
2783 /* Return nonzero if an insn consists only of SETs, each of which only sets a
/* NOTE(review): the rest of this comment, the function header, and the
   early `return' statements in each branch are on missing lines.  */
2789 rtx pat = PATTERN (insn);
2791 /* Insns carrying these notes are useful later on. */
2792 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2795 if (GET_CODE (pat) == SET && set_noop_p (pat))
2798 if (GET_CODE (pat) == PARALLEL)
2801 /* If nothing but SETs of registers to themselves,
2802 this insn can also be deleted. */
2803 for (i = 0; i < XVECLEN (pat, 0); i++)
2805 rtx tem = XVECEXP (pat, 0, i);
/* USE and CLOBBER elements do not disqualify the insn.  */
2807 if (GET_CODE (tem) == USE
2808 || GET_CODE (tem) == CLOBBER)
2811 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2820 /* Delete any insns that copy a register to itself. */
/* NOTE(review): F's declaration and the loop braces are on missing lines.
   "Deletion" here turns the insn into a NOTE_INSN_DELETED note rather
   than unlinking it, so insn chains and block head/end stay valid.  */
2823 delete_noop_moves (f)
2827 for (insn = f; insn; insn = NEXT_INSN (insn))
2829 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2831 PUT_CODE (insn, NOTE);
2832 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2833 NOTE_SOURCE_FILE (insn) = 0;
2838 /* Determine if the stack pointer is constant over the life of the function.
2839 Only useful before prologues have been emitted. */
/* Callback for note_stores: if X (a stored-to location) is the stack
   pointer, or a push/pop-style MEM whose address auto-modifies the stack
   pointer, record that sp changes during this function.  */
2842 notice_stack_pointer_modification_1 (x, pat, data)
2844 rtx pat ATTRIBUTE_UNUSED;
2845 void *data ATTRIBUTE_UNUSED;
2847 if (x == stack_pointer_rtx
2848 /* The stack pointer is only modified indirectly as the result
2849 of a push until later in flow. See the comments in rtl.texi
2850 regarding Embedded Side-Effects on Addresses. */
2851 || (GET_CODE (x) == MEM
2852 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2853 || GET_CODE (XEXP (x, 0)) == PRE_INC
2854 || GET_CODE (XEXP (x, 0)) == POST_DEC
2855 || GET_CODE (XEXP (x, 0)) == POST_INC)
2856 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2857 current_function_sp_is_unchanging = 0;
/* Scan all insns from F onward and clear current_function_sp_is_unchanging
   if any of them stores to the stack pointer (directly or via push/pop
   addressing).  NOTE(review): sampled extract -- the return type, F's
   declaration, and the early-return/break lines are not visible here.  */
2861 notice_stack_pointer_modification (f)
2866 /* Assume that the stack pointer is unchanging if alloca hasn't
2868 current_function_sp_is_unchanging = !current_function_calls_alloca;
2869 if (! current_function_sp_is_unchanging)
2872 for (insn = f; insn; insn = NEXT_INSN (insn))
2874 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2876 /* Check if insn modifies the stack pointer. */
2877 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
/* Once a modification is seen there is no point scanning further.  */
2879 if (! current_function_sp_is_unchanging)
2885 /* Mark a register in SET. Hard registers in large modes get all
2886 of their component registers set as well. */
/* NOTE(review): REG's declaration, the abort for BLKmode, and the loop
   header counting down over N hard registers are on missing lines.  */
2888 mark_reg (reg, xset)
2892 regset set = (regset) xset;
2893 int regno = REGNO (reg);
2895 if (GET_MODE (reg) == BLKmode)
2898 SET_REGNO_REG_SET (set, regno);
2899 if (regno < FIRST_PSEUDO_REGISTER)
2901 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
/* Presumably inside a loop decrementing N -- marks each component reg.  */
2903 SET_REGNO_REG_SET (set, regno + n);
2907 /* Mark those regs which are needed at the end of the function as live
2908 at the end of the last basic block. */
/* NOTE(review): sampled extract -- SET's declaration, braces, and several
   #endif lines are not visible.  The function seeds EXIT's live set with:
   sp (when its exit value matters), the (hard) frame pointer, the PIC
   register, global/epilogue-used hard regs, used call-saved regs after
   reload, and the function return value.  */
2910 mark_regs_live_at_end (set)
2915 /* If exiting needs the right stack value, consider the stack pointer
2916 live at the end of the function. */
2917 if ((HAVE_epilogue && reload_completed)
2918 || ! EXIT_IGNORE_STACK
2919 || (! FRAME_POINTER_REQUIRED
2920 && ! current_function_calls_alloca
2921 && flag_omit_frame_pointer)
2922 || current_function_sp_is_unchanging)
2924 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2927 /* Mark the frame pointer if needed at the end of the function. If
2928 we end up eliminating it, it will be removed from the live list
2929 of each basic block by reload. */
2931 if (! reload_completed || frame_pointer_needed)
2933 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2934 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2935 /* If they are different, also mark the hard frame pointer as live */
2936 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2940 #ifdef PIC_OFFSET_TABLE_REGNUM
2941 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2942 /* Many architectures have a GP register even without flag_pic.
2943 Assume the pic register is not in use, or will be handled by
2944 other means, if it is not fixed. */
2945 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2946 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2950 /* Mark all global registers, and all registers used by the epilogue
2951 as being live at the end of the function since they may be
2952 referenced by our caller. */
2953 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
/* The global_regs[i] half of this condition is on a missing line.  */
2955 #ifdef EPILOGUE_USES
2956 || EPILOGUE_USES (i)
2959 SET_REGNO_REG_SET (set, i);
2961 /* Mark all call-saved registers that we actually used. */
2962 if (HAVE_epilogue && reload_completed)
2964 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2965 if (! call_used_regs[i] && regs_ever_live[i])
2966 SET_REGNO_REG_SET (set, i);
2969 /* Mark function return value. */
2970 diddle_return_value (mark_reg, set);
2973 /* Callback function for for_each_successor_phi. DATA is a regset.
2974 Sets the SRC_REGNO, the regno of the phi alternative for phi node
2975 INSN, in the regset. */
/* NOTE(review): the return type, SRC_REGNO/DATA declarations, and the
   `return 0' (presumably) are on missing lines.  */
2978 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2979 rtx insn ATTRIBUTE_UNUSED;
2980 int dest_regno ATTRIBUTE_UNUSED;
2984 regset live = (regset) data;
2985 SET_REGNO_REG_SET (live, src_regno);
2989 /* Propagate global life info around the graph of basic blocks. Begin
2990 considering blocks with their corresponding bit set in BLOCKS_IN.
2991 BLOCKS_OUT is set for every block that was changed. */
/* NOTE(review): sampled extract -- the FLAGS declaration, the queue
   enqueue/dequeue statements, several braces, and the final frees are on
   missing lines.  This is a classic backward-dataflow worklist
   relaxation: live_at_end of a block is the union of live_at_start of
   its successors (plus phi-alternative regs), and propagate_block turns
   live_at_end into live_at_start.  */
2994 calculate_global_regs_live (blocks_in, blocks_out, flags)
2995 sbitmap blocks_in, blocks_out;
2998 basic_block *queue, *qhead, *qtail, *qend;
2999 regset tmp, new_live_at_end;
3000 regset_head tmp_head;
3001 regset_head new_live_at_end_head;
3004 tmp = INITIALIZE_REG_SET (tmp_head);
3005 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3007 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3008 because the `head == tail' style test for an empty queue doesn't
3009 work with a full queue. */
3010 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3012 qhead = qend = queue + n_basic_blocks + 2;
3014 /* Clear out the garbage that might be hanging out in bb->aux. */
3015 for (i = n_basic_blocks - 1; i >= 0; --i)
3016 BASIC_BLOCK (i)->aux = NULL;
3018 /* Queue the blocks set in the initial mask. Do this in reverse block
3019 number order so that we are more likely for the first round to do
3020 useful work. We use AUX non-null to flag that the block is queued. */
3021 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3023 basic_block bb = BASIC_BLOCK (i);
/* Presumably guarded by `if (blocks_out)' on a missing line.  */
3028 sbitmap_zero (blocks_out);
3030 while (qhead != qtail)
3032 int rescan, changed;
3041 /* Begin by propagating live_at_start from the successor blocks. */
3042 CLEAR_REG_SET (new_live_at_end);
3043 for (e = bb->succ; e ; e = e->succ_next)
3045 basic_block sb = e->dest;
3046 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3049 /* Regs used in phi nodes are not included in
3050 global_live_at_start, since they are live only along a
3051 particular edge. Set those regs that are live because of a
3052 phi node alternative corresponding to this particular block. */
3053 for_each_successor_phi (bb->index, &set_phi_alternative_reg,
3056 if (bb == ENTRY_BLOCK_PTR)
3058 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3062 /* On our first pass through this block, we'll go ahead and continue.
3063 Recognize first pass by local_set NULL. On subsequent passes, we
3064 get to skip out early if live_at_end wouldn't have changed. */
3066 if (bb->local_set == NULL)
3068 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3073 /* If any bits were removed from live_at_end, we'll have to
3074 rescan the block. This wouldn't be necessary if we had
3075 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3076 local_live is really dependent on live_at_end. */
3077 CLEAR_REG_SET (tmp);
3078 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3079 new_live_at_end, BITMAP_AND_COMPL);
3083 /* Find the set of changed bits. Take this opportunity
3084 to notice that this set is empty and early out. */
3085 CLEAR_REG_SET (tmp);
3086 changed = bitmap_operation (tmp, bb->global_live_at_end,
3087 new_live_at_end, BITMAP_XOR);
3091 /* If any of the changed bits overlap with local_set,
3092 we'll have to rescan the block. Detect overlap by
3093 the AND with ~local_set turning off bits. */
3094 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3099 /* Let our caller know that BB changed enough to require its
3100 death notes updated. */
3101 SET_BIT (blocks_out, bb->index);
3105 /* Add to live_at_start the set of all registers in
3106 new_live_at_end that aren't in the old live_at_end. */
3108 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3110 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3112 changed = bitmap_operation (bb->global_live_at_start,
3113 bb->global_live_at_start,
/* Full-rescan path: recompute live_at_start from scratch.  */
3120 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3122 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3123 into live_at_start. */
3124 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3126 /* If live_at start didn't change, no need to go farther. */
3127 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3130 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3133 /* Queue all predecessors of BB so that we may re-examine
3134 their live_at_end. */
3135 for (e = bb->pred; e ; e = e->pred_next)
3137 basic_block pb = e->src;
3138 if (pb->aux == NULL)
3149 FREE_REG_SET (new_live_at_end);
/* Release the per-block local_set scratch sets for changed blocks.  */
3151 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3153 basic_block bb = BASIC_BLOCK (i);
3154 FREE_REG_SET (bb->local_set);
3160 /* Subroutines of life analysis. */
3162 /* Allocate the permanent data structures that represent the results
3163 of life analysis. Not static since used also for stupid life analysis. */
/* Allocates live_at_start/live_at_end regsets for every real block, the
   entry block's live_at_end, the exit block's live_at_start, and the
   regs_live_at_setjmp set -- all on the function obstack.  */
3166 allocate_bb_life_data ()
3170 for (i = 0; i < n_basic_blocks; i++)
3172 basic_block bb = BASIC_BLOCK (i);
3174 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3175 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3178 ENTRY_BLOCK_PTR->global_live_at_end
3179 = OBSTACK_ALLOC_REG_SET (function_obstack);
3180 EXIT_BLOCK_PTR->global_live_at_start
3181 = OBSTACK_ALLOC_REG_SET (function_obstack);
3183 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
/* (Re)allocate and reset the per-register statistics gathered by
   propagate_block: death counts, calls crossed, live length and home
   basic block.  NOTE(review): REG_N_SETS/REG_N_REFS resets, if present
   in the original, fall on lines missing from this extract.  */
3187 allocate_reg_life_data ()
3191 /* Recalculate the register space, in case it has grown. Old style
3192 vector oriented regsets would set regset_{size,bytes} here also. */
3193 allocate_reg_info (max_regno, FALSE, FALSE);
3195 /* Reset all the data we'll collect in propagate_block and its
3197 for (i = 0; i < max_regno; i++)
3201 REG_N_DEATHS (i) = 0;
3202 REG_N_CALLS_CROSSED (i) = 0;
3203 REG_LIVE_LENGTH (i) = 0;
3204 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3208 /* Compute the registers live at the beginning of a basic block
3209 from those live at the end.
3211 When called, OLD contains those live at the end.
3212 On return, it contains those live at the beginning.
3213 FIRST and LAST are the first and last insns of the basic block.
3215 FINAL is nonzero if we are doing the final pass which is not
3216 for computing the life info (since that has already been done)
3217 but for acting on it. On this pass, we delete dead stores,
3218 set up the logical links and dead-variables lists of instructions,
3219 and merge instructions for autoincrement and autodecrement addresses.
3221 SIGNIFICANT is nonzero only the first time for each basic block.
3222 If it is nonzero, it points to a regset in which we store
3223 a 1 for each register that is set within the block.
3225 BNUM is the number of the basic block. */
/* NOTE(review): the comment above predates this signature -- the actual
   parameters are (BB, OLD, SIGNIFICANT, FLAGS); FIRST/LAST/FINAL/BNUM
   are historical.  This extract is line-sampled: many declarations,
   braces, #ifdef AUTO_INC_DEC / HAVE_cc0 guards, and `continue/goto'
   lines are missing.  The overall shape: walk the block's insns from
   bb->end back to bb->head, computing DEAD (regs set) and LIVE (regs
   used) per insn and updating OLD := (OLD - DEAD) | LIVE, optionally
   deleting dead insns and whole dead libcall sequences.  */
3228 propagate_block (bb, old, significant, flags)
3237 regset_head live_head;
3239 regset_head dead_head;
3241 /* Find the loop depth for this block. Ignore loop level changes in the
3242 middle of the basic block -- for register allocation purposes, the
3243 important uses will be in the blocks wholly contained within the loop
3244 not in the loop pre-header or post-trailer. */
3245 loop_depth = bb->loop_depth;
/* NOTE(review): the head names are swapped relative to the pointers
   (dead gets live_head, live gets dead_head); harmless since the heads
   are interchangeable backing storage, but worth normalizing upstream. */
3247 dead = INITIALIZE_REG_SET (live_head);
3248 live = INITIALIZE_REG_SET (dead_head);
3252 if (flags & PROP_REG_INFO)
3256 /* Process the regs live at the end of the block.
3257 Mark them as not local to any one basic block. */
3258 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3260 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3264 /* Scan the block an insn at a time from end to beginning. */
3266 for (insn = bb->end; ; insn = prev)
3268 prev = PREV_INSN (insn);
3270 if (GET_CODE (insn) == NOTE)
3272 /* If this is a call to `setjmp' et al,
3273 warn if any non-volatile datum is live. */
3275 if ((flags & PROP_REG_INFO)
3276 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3277 IOR_REG_SET (regs_live_at_setjmp, old);
3280 /* Update the life-status of regs for this insn.
3281 First DEAD gets which regs are set in this insn
3282 then LIVE gets which regs are used in this insn.
3283 Then the regs live before the insn
3284 are those live after, with DEAD regs turned off,
3285 and then LIVE regs turned on. */
3287 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3290 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3291 int insn_is_dead = 0;
3292 int libcall_is_dead = 0;
3294 if (flags & PROP_SCAN_DEAD_CODE)
3296 insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
3298 libcall_is_dead = (insn_is_dead && note != 0
3299 && libcall_dead_p (PATTERN (insn), old,
3303 /* We almost certainly don't want to delete prologue or epilogue
3304 instructions. Warn about probable compiler losage. */
3307 && (((HAVE_epilogue || HAVE_prologue)
3308 && prologue_epilogue_contains (insn))
3309 || (HAVE_sibcall_epilogue
3310 && sibcall_epilogue_contains (insn))))
3312 if (flags & PROP_KILL_DEAD_CODE)
3314 warning ("ICE: would have deleted prologue/epilogue insn");
3315 if (!inhibit_warnings)
/* Recover rather than delete: treat the insn as live.  */
3318 libcall_is_dead = insn_is_dead = 0;
3321 /* If an instruction consists of just dead store(s) on final pass,
3322 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3323 We could really delete it with delete_insn, but that
3324 can cause trouble for first or last insn in a basic block. */
3325 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3328 /* If the insn referred to a label, note that the label is
3330 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3332 if (REG_NOTE_KIND (inote) == REG_LABEL)
3334 rtx label = XEXP (inote, 0);
3336 LABEL_NUSES (label)--;
3338 /* If this label was attached to an ADDR_VEC, it's
3339 safe to delete the ADDR_VEC. In fact, it's pretty
3340 much mandatory to delete it, because the ADDR_VEC may
3341 be referencing labels that no longer exist. */
3342 if (LABEL_NUSES (label) == 0
3343 && (next = next_nonnote_insn (label)) != NULL
3344 && GET_CODE (next) == JUMP_INSN
3345 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3346 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3348 rtx pat = PATTERN (next);
3349 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3350 int len = XVECLEN (pat, diff_vec_p);
/* Drop each reference the dispatch table held on its labels.  */
3352 for (i = 0; i < len; i++)
3353 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3354 PUT_CODE (next, NOTE);
3355 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3356 NOTE_SOURCE_FILE (next) = 0;
/* Also delete a BARRIER left dangling after the label.  */
3358 if ((next = next_nonnote_insn (label)) != NULL
3359 && GET_CODE (next) == BARRIER)
3361 PUT_CODE (next, NOTE);
3362 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3363 NOTE_SOURCE_FILE (next) = 0;
/* Finally "delete" the dead insn itself by converting it to a note.  */
3369 PUT_CODE (insn, NOTE);
3370 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3371 NOTE_SOURCE_FILE (insn) = 0;
3373 /* CC0 is now known to be dead. Either this insn used it,
3374 in which case it doesn't anymore, or clobbered it,
3375 so the next insn can't use it. */
3378 /* If this insn is copying the return value from a library call,
3379 delete the entire library call. */
3380 if (libcall_is_dead)
3382 rtx first = XEXP (note, 0);
/* Skip over insns already turned into deleted notes.  */
3384 while (INSN_DELETED_P (first))
3385 first = NEXT_INSN (first);
/* The loop converting each libcall insn P to a note is on
   missing lines; only its body fragments survive here.  */
3390 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3391 NOTE_SOURCE_FILE (p) = 0;
3397 CLEAR_REG_SET (dead);
3398 CLEAR_REG_SET (live);
3400 /* See if this is an increment or decrement that can be
3401 merged into a following memory address. */
3404 register rtx x = single_set (insn);
3406 /* Does this instruction increment or decrement a register? */
3407 if (!reload_completed
3408 && (flags & PROP_AUTOINC)
3410 && GET_CODE (SET_DEST (x)) == REG
3411 && (GET_CODE (SET_SRC (x)) == PLUS
3412 || GET_CODE (SET_SRC (x)) == MINUS)
3413 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3414 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3415 /* Ok, look for a following memory ref we can combine with.
3416 If one is found, change the memory ref to a PRE_INC
3417 or PRE_DEC, cancel this insn, and return 1.
3418 Return 0 if nothing has been done. */
3419 && try_pre_increment_1 (insn))
3422 #endif /* AUTO_INC_DEC */
3424 /* If this is not the final pass, and this insn is copying the
3425 value of a library call and it's dead, don't scan the
3426 insns that perform the library call, so that the call's
3427 arguments are not marked live. */
3428 if (libcall_is_dead)
3430 /* Mark the dest reg as `significant'. */
3431 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3432 significant, flags);
/* Jump the scan back to the first insn of the libcall.  */
3434 insn = XEXP (note, 0);
3435 prev = PREV_INSN (insn);
3437 else if (GET_CODE (PATTERN (insn)) == SET
3438 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3439 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3440 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3441 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3442 /* We have an insn to pop a constant amount off the stack.
3443 (Such insns use PLUS regardless of the direction of the stack,
3444 and any insn to adjust the stack by a constant is always a pop.)
3445 These insns, if not dead stores, have no effect on life. */
3449 /* Any regs live at the time of a call instruction
3450 must not go in a register clobbered by calls.
3451 Find all regs now live and record this for them. */
3453 if (GET_CODE (insn) == CALL_INSN
3454 && (flags & PROP_REG_INFO))
3455 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3457 REG_N_CALLS_CROSSED (i)++;
3460 /* LIVE gets the regs used in INSN;
3461 DEAD gets those set by it. Dead insns don't make anything
3464 mark_set_regs (old, dead, PATTERN (insn),
3465 insn, significant, flags);
3467 /* If an insn doesn't use CC0, it becomes dead since we
3468 assume that every insn clobbers it. So show it dead here;
3469 mark_used_regs will set it live if it is referenced. */
3473 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3475 /* Sometimes we may have inserted something before INSN (such as
3476 a move) when we make an auto-inc. So ensure we will scan
3479 prev = PREV_INSN (insn);
3482 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
/* Mark regs mentioned in the CALL_INSN_FUNCTION_USAGE USEs live.  */
3488 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3490 note = XEXP (note, 1))
3491 if (GET_CODE (XEXP (note, 0)) == USE)
3492 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3495 /* Each call clobbers all call-clobbered regs that are not
3496 global or fixed. Note that the function-value reg is a
3497 call-clobbered reg, and mark_set_regs has already had
3498 a chance to handle it. */
3500 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3501 if (call_used_regs[i] && ! global_regs[i]
3504 SET_REGNO_REG_SET (dead, i);
3506 SET_REGNO_REG_SET (significant, i);
3509 /* The stack ptr is used (honorarily) by a CALL insn. */
3510 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3512 /* Calls may also reference any of the global registers,
3513 so they are made live. */
3514 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
/* Presumably guarded by `if (global_regs[i])' on a missing line.  */
3516 mark_used_regs (old, live,
3517 gen_rtx_REG (reg_raw_mode[i], i),
3520 /* Calls also clobber memory. */
3521 free_EXPR_LIST_list (&mem_set_list);
3524 /* Update OLD for the registers used or set. */
3525 AND_COMPL_REG_SET (old, dead);
3526 IOR_REG_SET (old, live);
3530 /* On final pass, update counts of how many insns in which
3531 each reg is live. */
3532 if (flags & PROP_REG_INFO)
3533 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
/* Terminate after processing the block's first insn.  */
3536 if (insn == bb->head)
3540 FREE_REG_SET (dead);
3541 FREE_REG_SET (live);
3542 free_EXPR_LIST_list (&mem_set_list);
3545 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3546 (SET expressions whose destinations are registers dead after the insn).
3547 NEEDED is the regset that says which regs are alive after the insn.
3549 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3551 If X is the entire body of an insn, NOTES contains the reg notes
3552 pertaining to the insn. */
/* NOTE(review): sampled extract -- parameter declarations, `return'
   statements in most branches, and #ifdef guards (AUTO_INC_DEC, HAVE_cc0)
   are on missing lines.  Returning 0 means "needed"; each early exit in
   the SET branch marks a reason the store must be kept.  */
3555 insn_dead_p (x, needed, call_ok, notes)
3559 rtx notes ATTRIBUTE_UNUSED;
3561 enum rtx_code code = GET_CODE (x);
3564 /* If flow is invoked after reload, we must take existing AUTO_INC
3565 expressions into account. */
3566 if (reload_completed)
3568 for ( ; notes; notes = XEXP (notes, 1))
3570 if (REG_NOTE_KIND (notes) == REG_INC)
3572 int regno = REGNO (XEXP (notes, 0));
3574 /* Don't delete insns to set global regs. */
3575 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3576 || REGNO_REG_SET_P (needed, regno))
3583 /* If setting something that's a reg or part of one,
3584 see if that register's altered value will be live. */
/* The `if (code == SET)' test is on a missing line.  */
3588 rtx r = SET_DEST (x);
3591 if (GET_CODE (r) == CC0)
3595 /* A SET that is a subroutine call cannot be dead. */
3596 if (GET_CODE (SET_SRC (x)) == CALL)
3602 /* Don't eliminate loads from volatile memory or volatile asms. */
3603 else if (volatile_refs_p (SET_SRC (x)))
3606 if (GET_CODE (r) == MEM)
3610 if (MEM_VOLATILE_P (r))
3613 /* Walk the set of memory locations we are currently tracking
3614 and see if one is an identical match to this memory location.
3615 If so, this memory write is dead (remember, we're walking
3616 backwards from the end of the block to the start. */
3617 temp = mem_set_list;
3620 if (rtx_equal_p (XEXP (temp, 0), r))
3622 temp = XEXP (temp, 1);
/* Strip wrappers to reach the underlying REG, if any.  */
3627 while (GET_CODE (r) == SUBREG
3628 || GET_CODE (r) == STRICT_LOW_PART
3629 || GET_CODE (r) == ZERO_EXTRACT)
3632 if (GET_CODE (r) == REG)
3634 int regno = REGNO (r);
/* The stored-to register is still needed -- not dead.  */
3637 if (REGNO_REG_SET_P (needed, regno))
3640 /* If this is a hard register, verify that subsequent
3641 words are not needed. */
3642 if (regno < FIRST_PSEUDO_REGISTER)
3644 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3647 if (REGNO_REG_SET_P (needed, regno+n))
3651 /* Don't delete insns to set global regs. */
3652 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3655 /* Make sure insns to set the stack pointer aren't deleted. */
3656 if (regno == STACK_POINTER_REGNUM)
3659 /* Make sure insns to set the frame pointer aren't deleted. */
3660 if (regno == FRAME_POINTER_REGNUM
3661 && (! reload_completed || frame_pointer_needed))
3663 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3664 if (regno == HARD_FRAME_POINTER_REGNUM
3665 && (! reload_completed || frame_pointer_needed))
3669 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3670 /* Make sure insns to set arg pointer are never deleted
3671 (if the arg pointer isn't fixed, there will be a USE
3672 for it, so we can treat it normally). */
3673 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3677 /* Otherwise, the set is dead. */
3683 /* If performing several activities, insn is dead if each activity
3684 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3685 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3687 else if (code == PARALLEL)
3689 int i = XVECLEN (x, 0);
3691 for (i--; i >= 0; i--)
3692 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3693 && GET_CODE (XVECEXP (x, 0, i)) != USE
3694 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX)
3700 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3701 is not necessarily true for hard registers. */
3702 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3703 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3704 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3707 /* We do not check other CLOBBER or USE here. An insn consisting of just
3708 a CLOBBER or just a USE should not be deleted. */
3712 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3713 return 1 if the entire library call is dead.
3714 This is true if X copies a register (hard or pseudo)
3715 and if the hard return reg of the call insn is dead.
3716 (The caller should have tested the destination of X already for death.)
3718 If this insn doesn't just copy a register, then we don't
3719 have an ordinary libcall. In that case, cse could not have
3720 managed to substitute the source for the dest later on,
3721 so we can assume the libcall is dead.
3723 NEEDED is the bit vector of pseudoregs live before this insn.
3724 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
/* NOTE(review): sampled extract -- parameter declarations, the `if (code
   == SET)' test, and several `return 1' lines are missing here.  */
3727 libcall_dead_p (x, needed, note, insn)
3733 register RTX_CODE code = GET_CODE (x);
3737 register rtx r = SET_SRC (x);
3738 if (GET_CODE (r) == REG)
3740 rtx call = XEXP (note, 0);
3744 /* Find the call insn. */
3745 while (call != insn && GET_CODE (call) != CALL_INSN)
3746 call = NEXT_INSN (call);
3748 /* If there is none, do nothing special,
3749 since ordinary death handling can understand these insns. */
3753 /* See if the hard reg holding the value is dead.
3754 If this is a PARALLEL, find the call within it. */
3755 call_pat = PATTERN (call);
3756 if (GET_CODE (call_pat) == PARALLEL)
3758 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3759 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3760 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3763 /* This may be a library call that is returning a value
3764 via invisible pointer. Do nothing special, since
3765 ordinary death handling can understand these insns. */
/* Presumably `if (i < 0) return 0;' on missing lines -- TODO confirm.  */
3769 call_pat = XVECEXP (call_pat, 0, i);
/* CALL_OK is passed as 1: the call itself does not keep the insn alive
   here, since we are asking whether the whole libcall is dead.  */
3772 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3778 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3779 live at function entry. Don't count global register variables, variables
3780 in registers that can be used for function arg passing, or variables in
3781 fixed hard registers. */
/* Return nonzero if register REGNO is live at function entry, i.e. it
   may be used before it is set.  Global register variables, fixed hard
   registers and argument-passing registers are deliberately excluded
   (they are "initialized" by convention).  With no basic blocks there
   is nothing to report.  */
3784 regno_uninitialized (regno)
3787   if (n_basic_blocks == 0
3788       || (regno < FIRST_PSEUDO_REGISTER
3789 	  && (global_regs[regno]
3790 	      || fixed_regs[regno]
3791 	      || FUNCTION_ARG_REGNO_P (regno))))
/* Live at the start of block 0 == live on entry to the function.  */
3794   return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3797 /* 1 if register REGNO was alive at a place where `setjmp' was called
3798 and was set more than once or is an argument.
3799 Such regs may be clobbered by `longjmp'. */
/* Return nonzero if REGNO may be clobbered by a longjmp: it was live
   where setjmp was called AND it is either set more than once or is
   live on function entry (i.e. it is an argument).  */
3802 regno_clobbered_at_setjmp (regno)
3805   if (n_basic_blocks == 0)
3808   return ((REG_N_SETS (regno) > 1
3809 	   || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3810 	  && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3813 /* INSN references memory, possibly using autoincrement addressing modes.
3814 Find any entries on the mem_set_list that need to be invalidated due
3815 to an address change. */
/* INSN references memory with autoincrement addressing (it carries
   REG_INC notes).  Remove from mem_set_list every remembered store
   whose address mentions an autoincremented register, since the
   address has changed and the store is no longer a valid match for
   redundant-store elimination.  */
3817 invalidate_mems_from_autoinc (insn)
3820   rtx note = REG_NOTES (insn);
3821   for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3823       if (REG_NOTE_KIND (note) == REG_INC)
3825 	  rtx temp = mem_set_list;
3826 	  rtx prev = NULL_RTX;
/* Walk the list, splicing out entries that overlap the incremented
   register.  PREV tracks the preceding node so the unlink is O(1).  */
3831 	      next = XEXP (temp, 1);
3832 	      if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3834 		  /* Splice temp out of list.  */
3836 		    XEXP (prev, 1) = next;
3838 		    mem_set_list = next;
3839 		  free_EXPR_LIST_node (temp);
3849 /* Process the registers that are set within X. Their bits are set to
3850 1 in the regset DEAD, because they are dead prior to this insn.
3852 If INSN is nonzero, it is the insn being processed.
3854 FLAGS is the set of operations to perform. */
/* Dispatch over X, marking every register it sets as dead-before-INSN
   in DEAD (see the comment above).  A bare SET or CLOBBER is handled
   directly; a PARALLEL is scanned for SET/CLOBBER elements, each
   forwarded to mark_set_1.  Other codes set no registers and are
   ignored.  */
3857 mark_set_regs (needed, dead, x, insn, significant, flags)
3865   register RTX_CODE code = GET_CODE (x);
3867   if (code == SET || code == CLOBBER)
3868     mark_set_1 (needed, dead, x, insn, significant, flags);
3869   else if (code == PARALLEL)
3872       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3874 	  code = GET_CODE (XVECEXP (x, 0, i));
3875 	  if (code == SET || code == CLOBBER)
3876 	    mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3877 			significant, flags);
3882 /* Process a single SET rtx, X. */
/* Process a single SET (or CLOBBER) rtx X for insn INSN: record the
   destination register(s) as dead before INSN, maintain mem_set_list
   for redundant-store elimination, and — on the final pass — update
   LOG_LINKS, register statistics and REG_UNUSED notes as requested by
   FLAGS.  NOTE(review): this listing elides several original lines;
   some braces/returns are implicit.  */
3885 mark_set_1 (needed, dead, x, insn, significant, flags)
3893   register int regno = -1;
3894   register rtx reg = SET_DEST (x);
3896   /* Some targets place small structures in registers for
3897      return values of functions.  We have to detect this
3898      case specially here to get correct flow information.  */
3899   if (GET_CODE (reg) == PARALLEL
3900       && GET_MODE (reg) == BLKmode)
3904       for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3905 	mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3906 		    significant, flags);
3910   /* Modifying just one hardware register of a multi-reg value
3911      or just a byte field of a register
3912      does not mean the value from before this insn is now dead.
3913      But it does mean liveness of that register at the end of the block
3916      Within mark_set_1, however, we treat it as if the register is
3917      indeed modified.  mark_used_regs will, however, also treat this
3918      register as being used.  Thus, we treat these insns as setting a
3919      new value for the register as a function of its old value.  This
3920      causes LOG_LINKS to be made appropriately and this will help combine.  */
/* Strip wrappers so REG below is the underlying REG or MEM.  */
3922   while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3923 	 || GET_CODE (reg) == SIGN_EXTRACT
3924 	 || GET_CODE (reg) == STRICT_LOW_PART)
3925     reg = XEXP (reg, 0);
3927   /* If this set is a MEM, then it kills any aliased writes.
3928      If this set is a REG, then it kills any MEMs which use the reg.  */
3929   if (flags & PROP_SCAN_DEAD_CODE)
3931       if (GET_CODE (reg) == MEM
3932 	  || GET_CODE (reg) == REG)
3934 	  rtx temp = mem_set_list;
3935 	  rtx prev = NULL_RTX;
3940 	      next = XEXP (temp, 1);
3941 	      if ((GET_CODE (reg) == MEM
3942 		   && output_dependence (XEXP (temp, 0), reg))
3943 		  || (GET_CODE (reg) == REG
3944 		      && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3946 		  /* Splice this entry out of the list.  */
3948 		    XEXP (prev, 1) = next;
3950 		    mem_set_list = next;
3951 		  free_EXPR_LIST_node (temp);
3959       /* If the memory reference had embedded side effects (autoincrement
3960 	 address modes), then we may need to kill some entries on the
3962       if (insn && GET_CODE (reg) == MEM)
3963 	invalidate_mems_from_autoinc (insn);
/* Remember this store so a later identical store can be recognized as
   redundant; skip cases we cannot track safely.  */
3965       if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3966 	  /* We do not know the size of a BLKmode store, so we do not track
3967 	     them for redundant store elimination.  */
3968 	  && GET_MODE (reg) != BLKmode
3969 	  /* There are no REG_INC notes for SP, so we can't assume we'll see
3970 	     everything that invalidates it.  To be safe, don't eliminate any
3971 	     stores though SP; none of them should be redundant anyway.  */
3972 	  && ! reg_mentioned_p (stack_pointer_rtx, reg))
3973 	mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
/* Only ordinary registers get liveness bookkeeping; the (hard) frame
   pointer, arg pointer and global regs are excluded while still live
   by convention.  */
3976   if (GET_CODE (reg) == REG
3977       && (regno = REGNO (reg),
3978 	  ! (regno == FRAME_POINTER_REGNUM
3979 	     && (! reload_completed || frame_pointer_needed)))
3980 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3981       && ! (regno == HARD_FRAME_POINTER_REGNUM
3982 	    && (! reload_completed || frame_pointer_needed))
3984 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3985       && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3987       && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3988     /* && regno != STACK_POINTER_REGNUM) -- let's try without this.  */
3990       int some_needed = REGNO_REG_SET_P (needed, regno);
3991       int some_not_needed = ! some_needed;
3993       /* Mark it as a significant register for this basic block.  */
3995 	SET_REGNO_REG_SET (significant, regno);
3997       /* Mark it as dead before this insn.  */
3998 	SET_REGNO_REG_SET (dead, regno);
4000       /* A hard reg in a wide mode may really be multiple registers.
4001 	 If so, mark all of them just like the first.  */
4002       if (regno < FIRST_PSEUDO_REGISTER)
4006 	  /* Nothing below is needed for the stack pointer; get out asap.
4007 	     Eg, log links aren't needed, since combine won't use them.  */
4008 	  if (regno == STACK_POINTER_REGNUM)
4011 	  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4014 	      int regno_n = regno + n;
4015 	      int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4017 		SET_REGNO_REG_SET (significant, regno_n);
4019 		SET_REGNO_REG_SET (dead, regno_n);
4020 	      some_needed |= needed_regno;
4021 	      some_not_needed |= ! needed_regno;
4025       /* Additional data to record if this is the final pass.  */
4026       if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4027 		   | PROP_DEATH_NOTES | PROP_AUTOINC))
4030 	  register int blocknum = BLOCK_NUM (insn);
4033 	  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4034 	    y = reg_next_use[regno];
4036 	  /* If this is a hard reg, record this function uses the reg.  */
4038 	  if (regno < FIRST_PSEUDO_REGISTER)
4041 	      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4043 	      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4044 		for (i = regno; i < endregno; i++)
4046 		    /* The next use is no longer "next", since a store
4048 		    reg_next_use[i] = 0;
4051 	      if (flags & PROP_REG_INFO)
4052 		for (i = regno; i < endregno; i++)
4054 		    regs_ever_live[i] = 1;
4060 	      /* The next use is no longer "next", since a store
4062 	      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4063 		reg_next_use[regno] = 0;
4065 	      /* Keep track of which basic blocks each reg appears in.  */
4067 	      if (flags & PROP_REG_INFO)
4069 		  if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4070 		    REG_BASIC_BLOCK (regno) = blocknum;
4071 		  else if (REG_BASIC_BLOCK (regno) != blocknum)
4072 		    REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4074 		  /* Count (weighted) references, stores, etc.  This counts a
4075 		     register twice if it is modified, but that is correct.  */
4076 		  REG_N_SETS (regno)++;
4077 		  REG_N_REFS (regno) += loop_depth + 1;
4079 		  /* The insns where a reg is live are normally counted
4080 		     elsewhere, but we want the count to include the insn
4081 		     where the reg is set, and the normal counting mechanism
4082 		     would not count it.  */
4083 		  REG_LIVE_LENGTH (regno)++;
/* All words of the destination were live after INSN: the old value is
   fully dead, so build a LOG_LINK from the next user back to INSN.  */
4087 	  if (! some_not_needed)
4089 	      if (flags & PROP_LOG_LINKS)
4091 		  /* Make a logical link from the next following insn
4092 		     that uses this register, back to this insn.
4093 		     The following insns have already been processed.
4095 		     We don't build a LOG_LINK for hard registers containing
4096 		     in ASM_OPERANDs.  If these registers get replaced,
4097 		     we might wind up changing the semantics of the insn,
4098 		     even if reload can make what appear to be valid
4099 		     assignments later.  */
4100 		  if (y && (BLOCK_NUM (y) == blocknum)
4101 		      && (regno >= FIRST_PSEUDO_REGISTER
4102 			  || asm_noperands (PATTERN (y)) < 0))
4103 		    LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
/* No word of the destination is live after INSN: the store is unused.  */
4106 	  else if (! some_needed)
4108 	      if (flags & PROP_REG_INFO)
4109 		REG_N_DEATHS (REGNO (reg))++;
4111 	      if (flags & PROP_DEATH_NOTES)
4113 		  /* Note that dead stores have already been deleted
4114 		     when possible.  If we get here, we have found a
4115 		     dead store that cannot be eliminated (because the
4116 		     same insn does something useful).  Indicate this
4117 		     by marking the reg being set as dying here.  */
4119 		    = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
/* Mixed case: some hard-reg words live, some dead.  */
4124 	      if (flags & PROP_DEATH_NOTES)
4126 		  /* This is a case where we have a multi-word hard register
4127 		     and some, but not all, of the words of the register are
4128 		     needed in subsequent insns.  Write REG_UNUSED notes
4129 		     for those parts that were not needed.  This case should
4134 		  for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4136 		    if (!REGNO_REG_SET_P (needed, regno + i))
4140 			 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
/* Destination was an excluded REG (frame/arg pointer etc.): just drop
   any stale next-use record.  */
4146   else if (GET_CODE (reg) == REG)
4148       if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4149 	reg_next_use[regno] = 0;
4152   /* If this is the last pass and this is a SCRATCH, show it will be dying
4153      here and count it.  */
4154   else if (GET_CODE (reg) == SCRATCH)
4156       if (flags & PROP_DEATH_NOTES)
4158 	  = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4164 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
/* X is a MEM found in INSN.  Try to rewrite its address into an
   auto-increment/decrement form, merging a nearby separate add of the
   address register into the memory reference, and update the liveness
   bookkeeping accordingly.  NOTE(review): this listing elides several
   original lines; some control flow is implicit.  */
4168 find_auto_inc (needed, x, insn)
4173   rtx addr = XEXP (x, 0);
4174   HOST_WIDE_INT offset = 0;
4177   /* Here we detect use of an index register which might be good for
4178      postincrement, postdecrement, preincrement, or predecrement.  */
/* Canonicalize (plus REG CONST) into REG + OFFSET.  */
4180   if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4181     offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4183   if (GET_CODE (addr) == REG)
4186       register int size = GET_MODE_SIZE (GET_MODE (x));
4189       int regno = REGNO (addr);
4191       /* Is the next use an increment that might make auto-increment?  */
4192       if ((incr = reg_next_use[regno]) != 0
4193 	  && (set = single_set (incr)) != 0
4194 	  && GET_CODE (set) == SET
4195 	  && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4196 	  /* Can't add side effects to jumps; if reg is spilled and
4197 	     reloaded, there's no way to store back the altered value.  */
4198 	  && GET_CODE (insn) != JUMP_INSN
4199 	  && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4200 	  && XEXP (y, 0) == addr
4201 	  && GET_CODE (XEXP (y, 1)) == CONST_INT
/* The increment amount and the MEM's offset jointly select which of
   the four addressing modes fits.  */
4202 	  && ((HAVE_POST_INCREMENT
4203 	       && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4204 	      || (HAVE_POST_DECREMENT
4205 		  && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4206 	      || (HAVE_PRE_INCREMENT
4207 		  && (INTVAL (XEXP (y, 1)) == size && offset == size))
4208 	      || (HAVE_PRE_DECREMENT
4209 		  && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4210 	  /* Make sure this reg appears only once in this insn.  */
4211 	  && (use = find_use_as_address (PATTERN (insn), addr, offset),
4212 	      use != 0 && use != (rtx) 1))
4214 	  rtx q = SET_DEST (set);
4215 	  enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4216 				    ? (offset ? PRE_INC : POST_INC)
4217 				    : (offset ? PRE_DEC : POST_DEC));
4219 	  if (dead_or_set_p (incr, addr))
4221 	      /* This is the simple case.  Try to make the auto-inc.  If
4222 		 we can't, we are done.  Otherwise, we will do any
4223 		 needed updates below.  */
4224 	      if (! validate_change (insn, &XEXP (x, 0),
4225 				     gen_rtx_fmt_e (inc_code, Pmode, addr),
4229 	  else if (GET_CODE (q) == REG
4230 		   /* PREV_INSN used here to check the semi-open interval
4232 		   && ! reg_used_between_p (q,  PREV_INSN (insn), incr)
4233 		   /* We must also check for sets of q as q may be
4234 		      a call clobbered hard register and there may
4235 		      be a call between PREV_INSN (insn) and incr.  */
4236 		   && ! reg_set_between_p (q,  PREV_INSN (insn), incr))
4238 	      /* We have *p followed sometime later by q = p+size.
4239 		 Both p and q must be live afterward,
4240 		 and q is not used between INSN and its assignment.
4241 		 Change it to q = p, ...*q..., q = q+size.
4242 		 Then fall into the usual case.  */
4247 	      emit_move_insn (q, addr);
4248 	      insns = get_insns ();
4251 	      bb = BLOCK_FOR_INSN (insn);
4252 	      for (temp = insns; temp; temp = NEXT_INSN (temp))
4253 		set_block_for_insn (temp, bb);
4255 	      /* If we can't make the auto-inc, or can't make the
4256 		 replacement into Y, exit.  There's no point in making
4257 		 the change below if we can't do the auto-inc and doing
4258 		 so is not correct in the pre-inc case.  */
4260 	      validate_change (insn, &XEXP (x, 0),
4261 			       gen_rtx_fmt_e (inc_code, Pmode, q),
4263 	      validate_change (incr, &XEXP (y, 0), q, 1);
4264 	      if (! apply_change_group ())
4267 	      /* We now know we'll be doing this change, so emit the
4268 		 new insn(s) and do the updates.  */
4269 	      emit_insns_before (insns, insn);
4271 	      if (BLOCK_FOR_INSN (insn)->head == insn)
4272 		BLOCK_FOR_INSN (insn)->head = insns;
4274 	      /* INCR will become a NOTE and INSN won't contain a
4275 		 use of ADDR.  If a use of ADDR was just placed in
4276 		 the insn before INSN, make that the next use.
4277 		 Otherwise, invalidate it.  */
4278 	      if (GET_CODE (PREV_INSN (insn)) == INSN
4279 		  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4280 		  && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4281 		reg_next_use[regno] = PREV_INSN (insn);
4283 		reg_next_use[regno] = 0;
4288 	      /* REGNO is now used in INCR which is below INSN, but
4289 		 it previously wasn't live here.  If we don't mark
4290 		 it as needed, we'll put a REG_DEAD note for it
4291 		 on this insn, which is incorrect.  */
4292 	      SET_REGNO_REG_SET (needed, regno);
4294 	      /* If there are any calls between INSN and INCR, show
4295 		 that REGNO now crosses them.  */
4296 	      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4297 		if (GET_CODE (temp) == CALL_INSN)
4298 		  REG_N_CALLS_CROSSED (regno)++;
4303 	  /* If we haven't returned, it means we were able to make the
4304 	     auto-inc, so update the status.  First, record that this insn
4305 	     has an implicit side effect.  */
4308 	    = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4310 	  /* Modify the old increment-insn to simply copy
4311 	     the already-incremented value of our register.  */
4312 	  if (! validate_change (incr, &SET_SRC (set), addr, 0))
4315 	  /* If that makes it a no-op (copying the register into itself) delete
4316 	     it so it won't appear to be a "use" and a "set" of this
4318 	  if (SET_DEST (set) == addr)
4320 	      PUT_CODE (incr, NOTE);
4321 	      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4322 	      NOTE_SOURCE_FILE (incr) = 0;
4325 	  if (regno >= FIRST_PSEUDO_REGISTER)
4327 	      /* Count an extra reference to the reg.  When a reg is
4328 		 incremented, spilling it is worse, so we want to make
4329 		 that less likely.  */
4330 	      REG_N_REFS (regno) += loop_depth + 1;
4332 	      /* Count the increment as a setting of the register,
4333 		 even though it isn't a SET in rtl.  */
4334 	      REG_N_SETS (regno)++;
4339 #endif /* AUTO_INC_DEC */
4341 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4342 This is done assuming the registers needed from X
4343 are those that have 1-bits in NEEDED.
4345 FLAGS is the set of enabled operations.
4347 INSN is the containing instruction. If INSN is dead, this function is not
/* Recursively scan expression X, setting a bit in LIVE for each
   register X uses, with NEEDED giving the registers live after INSN.
   Also maintains mem_set_list, REG_DEAD notes, reg_next_use and
   per-register statistics according to FLAGS.  NOTE(review): this
   listing elides several original lines (case labels, braces), so the
   switch structure below is partly implicit.  */
4351 mark_used_regs (needed, live, x, flags, insn)
4358   register RTX_CODE code;
4363   code = GET_CODE (x);
4383       /* If we are clobbering a MEM, mark any registers inside the address
4385       if (GET_CODE (XEXP (x, 0)) == MEM)
4386 	mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4390       /* Don't bother watching stores to mems if this is not the
4391 	 final pass.  We'll not be deleting dead stores this round.  */
4392       if (flags & PROP_SCAN_DEAD_CODE)
4394 	  /* Invalidate the data for the last MEM stored, but only if MEM is
4395 	     something that can be stored into.  */
4396 	  if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4397 	      && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4398 	    ; /* needn't clear the memory set list */
4401 	      rtx temp = mem_set_list;
4402 	      rtx prev = NULL_RTX;
/* A load may alias remembered stores; drop any entry this read could
   depend on.  */
4407 		  next = XEXP (temp, 1);
4408 		  if (anti_dependence (XEXP (temp, 0), x))
4410 		      /* Splice temp out of the list.  */
4412 			XEXP (prev, 1) = next;
4414 			mem_set_list = next;
4415 		      free_EXPR_LIST_node (temp);
4423 	  /* If the memory reference had embedded side effects (autoincrement
4424 	     address modes), then we may need to kill some entries on the
4427 	    invalidate_mems_from_autoinc (insn);
4431       if (flags & PROP_AUTOINC)
4432 	find_auto_inc (needed, x, insn);
/* SUBREG of a pseudo in a different-size mode: record the size change
   for the register allocator.  */
4437       if (GET_CODE (SUBREG_REG (x)) == REG
4438 	  && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4439 	  && (GET_MODE_SIZE (GET_MODE (x))
4440 	      != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4441 	REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4443       /* While we're here, optimize this case.  */
4446       /* In case the SUBREG is not of a register, don't optimize */
4447       if (GET_CODE (x) != REG)
4449 	  mark_used_regs (needed, live, x, flags, insn);
4453       /* ... fall through ...  */
4456       /* See a register other than being set
4457 	 => mark it as needed.  */
4461 	int some_needed = REGNO_REG_SET_P (needed, regno);
4462 	int some_not_needed = ! some_needed;
4464 	SET_REGNO_REG_SET (live, regno);
4466 	/* A hard reg in a wide mode may really be multiple registers.
4467 	   If so, mark all of them just like the first.  */
4468 	if (regno < FIRST_PSEUDO_REGISTER)
4472 	    /* For stack ptr or fixed arg pointer,
4473 	       nothing below can be necessary, so waste no more time.  */
4474 	    if (regno == STACK_POINTER_REGNUM
4475 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4476 		|| (regno == HARD_FRAME_POINTER_REGNUM
4477 		    && (! reload_completed || frame_pointer_needed))
4479 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4480 		|| (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4482 		|| (regno == FRAME_POINTER_REGNUM
4483 		    && (! reload_completed || frame_pointer_needed)))
4485 		/* If this is a register we are going to try to eliminate,
4486 		   don't mark it live here.  If we are successful in
4487 		   eliminating it, it need not be live unless it is used for
4488 		   pseudos, in which case it will have been set live when
4489 		   it was allocated to the pseudos.  If the register will not
4490 		   be eliminated, reload will set it live at that point.  */
4492 		if ((flags & PROP_REG_INFO)
4493 		    && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4494 		  regs_ever_live[regno] = 1;
4497 	    /* No death notes for global register variables;
4498 	       their values are live after this function exits.  */
4499 	    if (global_regs[regno])
4501 		if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4502 		  reg_next_use[regno] = insn;
4506 	    n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4509 		int regno_n = regno + n;
4510 		int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4512 		SET_REGNO_REG_SET (live, regno_n);
4513 		some_needed |= needed_regno;
4514 		some_not_needed |= ! needed_regno;
4518 	if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4520 	    /* Record where each reg is used, so when the reg
4521 	       is set we know the next insn that uses it.  */
4523 	    reg_next_use[regno] = insn;
4525 	if (flags & PROP_REG_INFO)
4527 	    if (regno < FIRST_PSEUDO_REGISTER)
4529 		/* If a hard reg is being used,
4530 		   record that this function does use it.  */
4532 		i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4536 		  regs_ever_live[regno + --i] = 1;
4541 		/* Keep track of which basic block each reg appears in.  */
4543 		register int blocknum = BLOCK_NUM (insn);
4545 		if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4546 		  REG_BASIC_BLOCK (regno) = blocknum;
4547 		else if (REG_BASIC_BLOCK (regno) != blocknum)
4548 		  REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4550 		/* Count (weighted) number of uses of each reg.  */
4552 		REG_N_REFS (regno) += loop_depth + 1;
4556 	/* Record and count the insns in which a reg dies.
4557 	   If it is used in this insn and was dead below the insn
4558 	   then it dies in this insn.  If it was set in this insn,
4559 	   we do not make a REG_DEAD note; likewise if we already
4560 	   made such a note.  */
4562 	if (flags & PROP_DEATH_NOTES)
4565 	    && ! dead_or_set_p (insn, x)
4567 	    && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4571 	    /* Check for the case where the register dying partially
4572 	       overlaps the register set by this insn.  */
4573 	    if (regno < FIRST_PSEUDO_REGISTER
4574 		&& HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4576 		int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4578 		  some_needed |= dead_or_set_regno_p (insn, regno + n);
4581 	    /* If none of the words in X is needed, make a REG_DEAD
4582 	       note.  Otherwise, we must make partial REG_DEAD notes.  */
4586 		  = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4587 		REG_N_DEATHS (regno)++;
4593 		/* Don't make a REG_DEAD note for a part of a register
4594 		   that is set in the insn.  */
4596 		for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4598 		  if (!REGNO_REG_SET_P (needed, regno + i)
4599 		      && ! dead_or_set_regno_p (insn, regno + i))
4602 			 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4613 	register rtx testreg = SET_DEST (x);
4616 	/* If storing into MEM, don't show it as being used.  But do
4617 	   show the address as being used.  */
4618 	if (GET_CODE (testreg) == MEM)
4621 	    if (flags & PROP_AUTOINC)
4622 	      find_auto_inc (needed, testreg, insn);
4624 	    mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4625 	    mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4629 	/* Storing in STRICT_LOW_PART is like storing in a reg
4630 	   in that this SET might be dead, so ignore it in TESTREG.
4631 	   but in some other ways it is like using the reg.
4633 	   Storing in a SUBREG or a bit field is like storing the entire
4634 	   register in that if the register's value is not used
4635 	   then this SET is not needed.  */
4636 	while (GET_CODE (testreg) == STRICT_LOW_PART
4637 	       || GET_CODE (testreg) == ZERO_EXTRACT
4638 	       || GET_CODE (testreg) == SIGN_EXTRACT
4639 	       || GET_CODE (testreg) == SUBREG)
4641 	    if (GET_CODE (testreg) == SUBREG
4642 		&& GET_CODE (SUBREG_REG (testreg)) == REG
4643 		&& REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4644 		&& (GET_MODE_SIZE (GET_MODE (testreg))
4645 		    != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4646 	      REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4648 	    /* Modifying a single register in an alternate mode
4649 	       does not use any of the old value.  But these other
4650 	       ways of storing in a register do use the old value.  */
4651 	    if (GET_CODE (testreg) == SUBREG
4652 		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4657 	    testreg = XEXP (testreg, 0);
4660 	/* If this is a store into a register,
4661 	   recursively scan the value being stored.  */
4663 	if ((GET_CODE (testreg) == PARALLEL
4664 	     && GET_MODE (testreg) == BLKmode)
4665 	    || (GET_CODE (testreg) == REG
4666 		&& (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4667 						&& (! reload_completed || frame_pointer_needed)))
4668 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4669 		&& ! (regno == HARD_FRAME_POINTER_REGNUM
4670 		      && (! reload_completed || frame_pointer_needed))
4672 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4673 		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4676 	    /* We used to exclude global_regs here, but that seems wrong.
4677 	       Storing in them is like storing in mem.  */
4679 	    mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4681 	      mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4688     case UNSPEC_VOLATILE:
4692       /* Traditional and volatile asm instructions must be considered to use
4693 	 and clobber all hard registers, all pseudo-registers and all of
4694 	 memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.
4696 	 Consider for instance a volatile asm that changes the fpu rounding
4697 	 mode.  An insn should not be moved across this even if it only uses
4698 	 pseudo-regs because it might give an incorrectly rounded result.
4700 	 ?!? Unfortunately, marking all hard registers as live causes massive
4701 	 problems for the register allocator and marking all pseudos as live
4702 	 creates mountains of uninitialized variable warnings.
4704 	 So for now, just clear the memory set list and mark any regs
4705 	 we can find in ASM_OPERANDS as used.  */
4706       if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4707 	free_EXPR_LIST_list (&mem_set_list);
4709       /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4710 	 We can not just fall through here since then we would be confused
4711 	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4712 	 traditional asms unlike their normal usage.  */
4713       if (code == ASM_OPERANDS)
4717 	  for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4718 	    mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4725       /* We _do_not_ want to scan operands of phi nodes.  Operands of
4726 	 a phi function are evaluated only when control reaches this
4727 	 block along a particular edge.  Therefore, regs that appear
4728 	 as arguments to phi should not be added to the global live at
4736   /* Recursively scan the operands of this expression.  */
4739     register const char *fmt = GET_RTX_FORMAT (code);
4742     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4746 	    /* Tail recursive case: save a function call level.  */
4752 	    mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4754 	else if (fmt[i] == 'E')
4757 	    for (j = 0; j < XVECLEN (x, i); j++)
4758 	      mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
/* INSN is an add of a constant to a register.  If the next use of that
   register is in the same basic block and can absorb the add as a
   pre-increment/decrement addressing mode, rewrite that use and delete
   INSN (by turning it into a deleted note).  */
4767 try_pre_increment_1 (insn)
4770   /* Find the next use of this reg.  If in same basic block,
4771      make it do pre-increment or pre-decrement if appropriate.  */
4772   rtx x = single_set (insn);
/* Signed increment amount: positive for PLUS, negated otherwise.  */
4773   HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4774 			  * INTVAL (XEXP (SET_SRC (x), 1)));
4775   int regno = REGNO (SET_DEST (x));
4776   rtx y = reg_next_use[regno];
4778       && BLOCK_NUM (y) == BLOCK_NUM (insn)
4779       /* Don't do this if the reg dies, or gets set in y; a standard addressing
4780 	 mode would be better.  */
4781       && ! dead_or_set_p (y, SET_DEST (x))
4782       && try_pre_increment (y, SET_DEST (x), amount))
4784       /* We have found a suitable auto-increment
4785 	 and already changed insn Y to do it.
4786 	 So flush this increment-instruction.  */
4787       PUT_CODE (insn, NOTE);
4788       NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4789       NOTE_SOURCE_FILE (insn) = 0;
4790       /* Count a reference to this reg for the increment
4791 	 insn we are deleting.  When a reg is incremented,
4792 	 spilling it is worse, so we want to make that
4794       if (regno >= FIRST_PSEUDO_REGISTER)
4796 	  REG_N_REFS (regno) += loop_depth + 1;
4797 	  REG_N_SETS (regno)++;
4804 /* Try to change INSN so that it does pre-increment or pre-decrement
4805 addressing on register REG in order to add AMOUNT to REG.
4806 AMOUNT is negative for pre-decrement.
4807 Returns 1 if the change could be made.
4808 This checks all about the validity of the result of modifying INSN. */
/* Try to change INSN so that it does pre- or post-increment/decrement
   addressing on REG, absorbing an add of AMOUNT (negative for
   decrement).  Returns 1 on success.  NOTE(review): this listing
   elides some lines, including the assignments that set pre_ok,
   post_ok and do_post.  */
4811 try_pre_increment (insn, reg, amount)
4813      HOST_WIDE_INT amount;
4817   /* Nonzero if we can try to make a pre-increment or pre-decrement.
4818      For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
4820   /* Nonzero if we can try to make a post-increment or post-decrement.
4821      For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4822      It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4823      supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
4826   /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
4829   /* From the sign of increment, see which possibilities are conceivable
4830      on this target machine.  */
4831   if (HAVE_PRE_INCREMENT && amount > 0)
4833   if (HAVE_POST_INCREMENT && amount > 0)
4836   if (HAVE_PRE_DECREMENT && amount < 0)
4838   if (HAVE_POST_DECREMENT && amount < 0)
4841   if (! (pre_ok || post_ok))
4844   /* It is not safe to add a side effect to a jump insn
4845      because if the incremented register is spilled and must be reloaded
4846      there would be no way to store the incremented value back in memory.  */
4848   if (GET_CODE (insn) == JUMP_INSN)
/* Look for exactly one use of REG as a memory address.  A post-form
   needs the use at offset 0; a pre-form at offset -AMOUNT.  */
4853     use = find_use_as_address (PATTERN (insn), reg, 0);
4854   if (post_ok && (use == 0 || use == (rtx) 1))
4856       use = find_use_as_address (PATTERN (insn), reg, -amount);
4860   if (use == 0 || use == (rtx) 1)
/* The access size must equal the increment amount for the side effect
   to be equivalent.  */
4863   if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4866   /* See if this combination of instruction and addressing mode exists.  */
4867   if (! validate_change (insn, &XEXP (use, 0),
4868 			 gen_rtx_fmt_e (amount > 0
4869 					? (do_post ? POST_INC : PRE_INC)
4870 					: (do_post ? POST_DEC : PRE_DEC),
4874   /* Record that this insn now has an implicit side effect on X.  */
4875   REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4879 #endif /* AUTO_INC_DEC */
4881 /* Find the place in the rtx X where REG is used as a memory address.
4882 Return the MEM rtx that so uses it.
4883 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4884 (plus REG (const_int PLUSCONST)).
4886 If such an address does not appear, return 0.
4887 If REG appears more than once, or is used other than in such an address,
/* Return the unique MEM in X that uses REG (plus PLUSCONST) as its
   address; 0 if no such use appears; (rtx) 1 if REG appears more than
   once or is used other than as such an address.  */
4891 find_use_as_address (x, reg, plusconst)
4894      HOST_WIDE_INT plusconst;
4896   enum rtx_code code = GET_CODE (x);
4897   const char *fmt = GET_RTX_FORMAT (code);
4899   register rtx value = 0;
/* Direct match: (mem REG) with PLUSCONST == 0.  */
4902   if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
/* Offset match: (mem (plus REG (const_int PLUSCONST))).  */
4905   if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4906       && XEXP (XEXP (x, 0), 0) == reg
4907       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4908       && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4911   if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4913       /* If REG occurs inside a MEM used in a bit-field reference,
4914 	 that is unacceptable.  */
4915       if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4916 	return (rtx) (HOST_WIDE_INT) 1;
4920     return (rtx) (HOST_WIDE_INT) 1;
/* Recurse over all sub-expressions; more than one hit means failure
   ((rtx) 1), which the elided lines between the calls establish.  */
4922   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4926 	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4930 	    return (rtx) (HOST_WIDE_INT) 1;
4932       else if (fmt[i] == 'E')
4935 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4937 	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4941 		return (rtx) (HOST_WIDE_INT) 1;
4949 /* Write information about registers and basic blocks into FILE.
4950 This is part of making a debugging dump. */
/* Print register set R to OUTF as a space-separated list of register
   numbers; hard registers are followed by their names in brackets.
   A null set prints " (nil)".  */
4953 dump_regset (r, outf)
4960       fputs (" (nil)", outf);
4964   EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4966       fprintf (outf, " %d", i);
4967       if (i < FIRST_PSEUDO_REGISTER)
4968 	fprintf (outf, " [%s]",
4977 dump_regset (r, stderr);
4978 putc ('\n', stderr);
/* Write a human-readable summary of register usage statistics and the
   CFG (blocks, edges, live-in/live-out sets) to FILE, for debugging
   dumps.  */
4982 dump_flow_info (file)
4986   static const char * const reg_class_names[] = REG_CLASS_NAMES;
4988   fprintf (file, "%d registers.\n", max_regno);
/* One line of statistics per pseudo register.  */
4989   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4992 	enum reg_class class, altclass;
4993 	fprintf (file, "\nRegister %d used %d times across %d insns",
4994 		 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4995 	if (REG_BASIC_BLOCK (i) >= 0)
4996 	  fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4998 	  fprintf (file, "; set %d time%s", REG_N_SETS (i),
4999 		   (REG_N_SETS (i) == 1) ? "" : "s");
5000 	if (REG_USERVAR_P (regno_reg_rtx[i]))
5001 	  fprintf (file, "; user var");
5002 	if (REG_N_DEATHS (i) != 1)
5003 	  fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5004 	if (REG_N_CALLS_CROSSED (i) == 1)
5005 	  fprintf (file, "; crosses 1 call");
5006 	else if (REG_N_CALLS_CROSSED (i))
5007 	  fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5008 	if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5009 	  fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5010 	class = reg_preferred_class (i);
5011 	altclass = reg_alternate_class (i);
5012 	if (class != GENERAL_REGS || altclass != ALL_REGS)
5014 	    if (altclass == ALL_REGS || class == ALL_REGS)
5015 	      fprintf (file, "; pref %s", reg_class_names[(int) class]);
5016 	    else if (altclass == NO_REGS)
5017 	      fprintf (file, "; %s or none", reg_class_names[(int) class]);
5019 	      fprintf (file, "; pref %s, else %s",
5020 		       reg_class_names[(int) class],
5021 		       reg_class_names[(int) altclass]);
5023 	if (REGNO_POINTER_FLAG (i))
5024 	  fprintf (file, "; pointer");
5025 	fprintf (file, ".\n");
/* Then the CFG: one section per basic block.  */
5028   fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5029   for (i = 0; i < n_basic_blocks; i++)
5031       register basic_block bb = BASIC_BLOCK (i);
5034       fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5035 	       i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5037       fprintf (file, "Predecessors: ");
5038       for (e = bb->pred; e ; e = e->pred_next)
5039 	dump_edge_info (file, e, 0);
5041       fprintf (file, "\nSuccessors: ");
5042       for (e = bb->succ; e ; e = e->succ_next)
5043 	dump_edge_info (file, e, 1);
5045       fprintf (file, "\nRegisters live at start:");
5046       dump_regset (bb->global_live_at_start, file);
5048       fprintf (file, "\nRegisters live at end:");
5049       dump_regset (bb->global_live_at_end, file);
5060 dump_flow_info (stderr);
/* Print one edge E to FILE.  DO_SUCC nonzero means E is being listed as a
   successor, so print its destination; otherwise print its source.  Edge
   flag bits are decoded via the bitnames table when in range.  */
5064 dump_edge_info (file, e, do_succ)
5069 basic_block side = (do_succ ? e->dest : e->src);
5071 if (side == ENTRY_BLOCK_PTR)
5072 fputs (" ENTRY", file);
5073 else if (side == EXIT_BLOCK_PTR)
5074 fputs (" EXIT", file);
5076 fprintf (file, " %d", side->index);
5080 static const char * const bitnames[] = {
5081 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5084 int i, flags = e->flags;
/* Walk the set flag bits; unknown bits (beyond the table) print as a
   raw bit index.  */
5088 for (i = 0; flags; i++)
5089 if (flags & (1 << i))
5095 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5096 fputs (bitnames[i], file);
5098 fprintf (file, "%d", i);
5106 /* Print out one basic block with live information at start and end.  */
5116 fprintf (outf, ";; Basic block %d, loop depth %d",
5117 bb->index, bb->loop_depth - 1);
5118 if (bb->eh_beg != -1 || bb->eh_end != -1)
5119 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5122 fputs (";; Predecessors: ", outf);
5123 for (e = bb->pred; e ; e = e->pred_next)
5124 dump_edge_info (outf, e, 0);
5127 fputs (";; Registers live at start:", outf);
5128 dump_regset (bb->global_live_at_start, outf);
/* Print every insn in the block, from head up to (not including) the
   insn after end.  */
5131 for (insn = bb->head, last = NEXT_INSN (bb->end);
5133 insn = NEXT_INSN (insn))
5134 print_rtl_single (outf, insn);
5136 fputs (";; Registers live at end:", outf);
5137 dump_regset (bb->global_live_at_end, outf);
5140 fputs (";; Successors: ", outf);
5141 for (e = bb->succ; e; e = e->succ_next)
5142 dump_edge_info (outf, e, 1);
/* NOTE(review): the two lines below are the bodies of debug_bb and
   debug_bb_n (headers missing from this excerpt); both dump to stderr.  */
5150 dump_bb (bb, stderr);
5157 dump_bb (BASIC_BLOCK(n), stderr);
5160 /* Like print_rtl, but also print out live information for the start of each
/* Annotate the insn stream with basic-block boundaries: three per-UID side
   tables record each block's start insn, end insn, and whether an insn
   belongs to zero, one, or multiple blocks.  */
5164 print_rtl_with_bb (outf, rtx_first)
5168 register rtx tmp_rtx;
5171 fprintf (outf, "(nil)\n");
5175 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5176 int max_uid = get_max_uid ();
5177 basic_block *start = (basic_block *)
5178 xcalloc (max_uid, sizeof (basic_block));
5179 basic_block *end = (basic_block *)
5180 xcalloc (max_uid, sizeof (basic_block));
5181 enum bb_state *in_bb_p = (enum bb_state *)
5182 xcalloc (max_uid, sizeof (enum bb_state));
/* First pass: fill the side tables from the basic-block array.  */
5184 for (i = n_basic_blocks - 1; i >= 0; i--)
5186 basic_block bb = BASIC_BLOCK (i);
5189 start[INSN_UID (bb->head)] = bb;
5190 end[INSN_UID (bb->end)] = bb;
5191 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5193 enum bb_state state = IN_MULTIPLE_BB;
5194 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5196 in_bb_p[INSN_UID(x)] = state;
/* Second pass: walk the insn chain, printing block headers/footers and
   flagging insns outside or in multiple blocks.  */
5203 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5208 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5210 fprintf (outf, ";; Start of basic block %d, registers live:",
5212 dump_regset (bb->global_live_at_start, outf);
5216 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5217 && GET_CODE (tmp_rtx) != NOTE
5218 && GET_CODE (tmp_rtx) != BARRIER)
5219 fprintf (outf, ";; Insn is not within a basic block\n");
5220 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5221 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5223 did_output = print_rtl_single (outf, tmp_rtx);
5225 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5226 fprintf (outf, ";; End of basic block %d\n", bb->index);
5237 if (current_function_epilogue_delay_list != 0)
5239 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5240 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5241 tmp_rtx = XEXP (tmp_rtx, 1))
5242 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5246 /* Compute dominator relationships using new flow graph structures.  */
/* Iterative worklist dataflow: DOMINATORS and POST_DOMINATORS (either may
   be computed, per the visible code paths) start as the full set and are
   narrowed by intersecting over predecessors (resp. successors) until a
   fixed point.  The bb->aux field doubles as on-worklist marker and as a
   sentinel for entry/exit adjacency.  */
5248 compute_flow_dominators (dominators, post_dominators)
5249 sbitmap *dominators;
5250 sbitmap *post_dominators;
5253 sbitmap *temp_bitmap;
5255 basic_block *worklist, *workend, *qin, *qout;
5258 /* Allocate a worklist array/queue.  Entries are only added to the
5259 list if they were not already on the list.  So the size is
5260 bounded by the number of basic blocks.  */
5261 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5262 workend = &worklist[n_basic_blocks];
5264 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5265 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5269 /* The optimistic setting of dominators requires us to put every
5270 block on the work list initially.  */
5271 qin = qout = worklist;
5272 for (bb = 0; bb < n_basic_blocks; bb++)
5274 *qin++ = BASIC_BLOCK (bb);
5275 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5277 qlen = n_basic_blocks;
5280 /* We want a maximal solution, so initially assume everything dominates
5282 sbitmap_vector_ones (dominators, n_basic_blocks);
5284 /* Mark successors of the entry block so we can identify them below.  */
5285 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5286 e->dest->aux = ENTRY_BLOCK_PTR;
5288 /* Iterate until the worklist is empty.  */
5291 /* Take the first entry off the worklist.  */
5292 basic_block b = *qout++;
/* Circular queue: wrap qout when it hits the end of the array.  */
5293 if (qout >= workend)
5299 /* Compute the intersection of the dominators of all the
5302 If one of the predecessor blocks is the ENTRY block, then the
5303 intersection of the dominators of the predecessor blocks is
5304 defined as the null set.  We can identify such blocks by the
5305 special value in the AUX field in the block structure.  */
5306 if (b->aux == ENTRY_BLOCK_PTR)
5308 /* Do not clear the aux field for blocks which are
5309 successors of the ENTRY block.  That way we never
5310 add them to the worklist again.
5312 The intersect of dominators of the preds of this block is
5313 defined as the null set.  */
5314 sbitmap_zero (temp_bitmap[bb]);
5318 /* Clear the aux field of this block so it can be added to
5319 the worklist again if necessary.  */
5321 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5324 /* Make sure each block always dominates itself.  */
5325 SET_BIT (temp_bitmap[bb], bb);
5327 /* If the out state of this block changed, then we need to
5328 add the successors of this block to the worklist if they
5329 are not already on the worklist.  */
5330 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5332 for (e = b->succ; e; e = e->succ_next)
5334 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
/* Second phase: same fixed-point scheme in reverse, intersecting over
   successors, using the EXIT block as the sentinel.  */
5348 if (post_dominators)
5350 /* The optimistic setting of dominators requires us to put every
5351 block on the work list initially.  */
5352 qin = qout = worklist;
5353 for (bb = 0; bb < n_basic_blocks; bb++)
5355 *qin++ = BASIC_BLOCK (bb);
5356 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5358 qlen = n_basic_blocks;
5361 /* We want a maximal solution, so initially assume everything post
5362 dominates everything else.  */
5363 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5365 /* Mark predecessors of the exit block so we can identify them below.  */
5366 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5367 e->src->aux = EXIT_BLOCK_PTR;
5369 /* Iterate until the worklist is empty.  */
5372 /* Take the first entry off the worklist.  */
5373 basic_block b = *qout++;
5374 if (qout >= workend)
5380 /* Compute the intersection of the post dominators of all the
5383 If one of the successor blocks is the EXIT block, then the
5384 intersection of the dominators of the successor blocks is
5385 defined as the null set.  We can identify such blocks by the
5386 special value in the AUX field in the block structure.  */
5387 if (b->aux == EXIT_BLOCK_PTR)
5389 /* Do not clear the aux field for blocks which are
5390 predecessors of the EXIT block.  That way we never
5391 add them to the worklist again.
5393 The intersect of dominators of the succs of this block is
5394 defined as the null set.  */
5395 sbitmap_zero (temp_bitmap[bb])
5399 /* Clear the aux field of this block so it can be added to
5400 the worklist again if necessary.  */
5402 sbitmap_intersection_of_succs (temp_bitmap[bb],
5403 post_dominators, bb);
5406 /* Make sure each block always post dominates itself.  */
5407 SET_BIT (temp_bitmap[bb], bb);
5409 /* If the out state of this block changed, then we need to
5410 add the successors of this block to the worklist if they
5411 are not already on the worklist.  */
5412 if (sbitmap_a_and_b (post_dominators[bb],
5413 post_dominators[bb],
5416 for (e = b->pred; e; e = e->pred_next)
5418 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5435 /* Given DOMINATORS, compute the immediate dominators into IDOM.  */
/* Classic reduction: tmp(n) = dom(n) - {n}, then subtract each remaining
   dominator's own dominator set; exactly one bit survives per block, and
   that bit index is the immediate dominator.  */
5438 compute_immediate_dominators (idom, dominators)
5440 sbitmap *dominators;
5445 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5447 /* Begin with tmp(n) = dom(n) - { n }.  */
5448 for (b = n_basic_blocks; --b >= 0; )
5450 sbitmap_copy (tmp[b], dominators[b]);
5451 RESET_BIT (tmp[b], b);
5454 /* Subtract out all of our dominator's dominators.  */
5455 for (b = n_basic_blocks; --b >= 0; )
5457 sbitmap tmp_b = tmp[b];
5460 for (s = n_basic_blocks; --s >= 0; )
5461 if (TEST_BIT (tmp_b, s))
5462 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5465 /* Find the one bit set in the bitmap and put it in the output array.  */
5466 for (b = n_basic_blocks; --b >= 0; )
5469 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5472 sbitmap_vector_free (tmp);
5475 /* Count for a single SET rtx, X.  */
/* Strip wrappers (SUBREG, extractions, STRICT_LOW_PART) off SET_DEST,
   recurse into BLKmode PARALLELs, then bump REG_N_SETS and the
   loop-depth-weighted REG_N_REFS for pseudo registers.  */
5478 count_reg_sets_1 (x)
5482 register rtx reg = SET_DEST (x);
5484 /* Find the register that's set/clobbered.  */
5485 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5486 || GET_CODE (reg) == SIGN_EXTRACT
5487 || GET_CODE (reg) == STRICT_LOW_PART)
5488 reg = XEXP (reg, 0);
5490 if (GET_CODE (reg) == PARALLEL
5491 && GET_MODE (reg) == BLKmode)
5494 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5495 count_reg_sets_1 (XVECEXP (reg, 0, i));
5499 if (GET_CODE (reg) == REG)
5501 regno = REGNO (reg);
/* Only pseudo registers are counted; hard regs are skipped.  */
5502 if (regno >= FIRST_PSEUDO_REGISTER)
5504 /* Count (weighted) references, stores, etc.  This counts a
5505 register twice if it is modified, but that is correct.  */
5506 REG_N_SETS (regno)++;
5507 REG_N_REFS (regno) += loop_depth + 1;
5512 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5513 REG_N_REFS by the current loop depth for each SET or CLOBBER found.  */
/* NOTE(review): the function-header line itself is missing from this
   excerpt; the body dispatches SET/CLOBBER (and each element of a
   PARALLEL) to count_reg_sets_1.  */
5519 register RTX_CODE code = GET_CODE (x);
5521 if (code == SET || code == CLOBBER)
5522 count_reg_sets_1 (x);
5523 else if (code == PARALLEL)
5526 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5528 code = GET_CODE (XVECEXP (x, 0, i));
5529 if (code == SET || code == CLOBBER)
5530 count_reg_sets_1 (XVECEXP (x, 0, i));
5535 /* Increment REG_N_REFS by the current loop depth each register reference
/* Recursive rtx walk counting register uses.  Mirrors the life-analysis
   use-marking logic: MEM addresses count as uses, stored-to registers
   mostly do not (except via STRICT_LOW_PART/SUBREG wrappers).  */
5539 count_reg_references (x)
5542 register RTX_CODE code;
5545 code = GET_CODE (x);
5565 /* If we are clobbering a MEM, mark any registers inside the address
5567 if (GET_CODE (XEXP (x, 0)) == MEM)
5568 count_reg_references (XEXP (XEXP (x, 0), 0));
5572 /* While we're here, optimize this case.  */
5575 /* In case the SUBREG is not of a register, don't optimize */
5576 if (GET_CODE (x) != REG)
5578 count_reg_references (x);
5582 /* ... fall through ...  */
/* Only pseudo registers are counted, weighted by loop depth.  */
5585 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5586 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5591 register rtx testreg = SET_DEST (x);
5594 /* If storing into MEM, don't show it as being used.  But do
5595 show the address as being used.  */
5596 if (GET_CODE (testreg) == MEM)
5598 count_reg_references (XEXP (testreg, 0));
5599 count_reg_references (SET_SRC (x));
5603 /* Storing in STRICT_LOW_PART is like storing in a reg
5604 in that this SET might be dead, so ignore it in TESTREG.
5605 but in some other ways it is like using the reg.
5607 Storing in a SUBREG or a bit field is like storing the entire
5608 register in that if the register's value is not used
5609 then this SET is not needed.  */
5610 while (GET_CODE (testreg) == STRICT_LOW_PART
5611 || GET_CODE (testreg) == ZERO_EXTRACT
5612 || GET_CODE (testreg) == SIGN_EXTRACT
5613 || GET_CODE (testreg) == SUBREG)
5615 /* Modifying a single register in an alternate mode
5616 does not use any of the old value.  But these other
5617 ways of storing in a register do use the old value.  */
5618 if (GET_CODE (testreg) == SUBREG
5619 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5624 testreg = XEXP (testreg, 0);
5627 /* If this is a store into a register,
5628 recursively scan the value being stored.  */
5630 if ((GET_CODE (testreg) == PARALLEL
5631 && GET_MODE (testreg) == BLKmode)
5632 || GET_CODE (testreg) == REG)
5634 count_reg_references (SET_SRC (x));
5636 count_reg_references (SET_DEST (x));
5646 /* Recursively scan the operands of this expression.  */
5649 register const char *fmt = GET_RTX_FORMAT (code);
5652 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5656 /* Tail recursive case: save a function call level.  */
5662 count_reg_references (XEXP (x, i));
5664 else if (fmt[i] == 'E')
5667 for (j = 0; j < XVECLEN (x, i); j++)
5668 count_reg_references (XVECEXP (x, i, j));
5674 /* Recompute register set/reference counts immediately prior to register
5677 This avoids problems with set/reference counts changing to/from values
5678 which have special meanings to the register allocators.
5680 Additionally, the reference counts are the primary component used by the
5681 register allocators to prioritize pseudos for allocation to hard regs.
5682 More accurate reference counts generally lead to better register allocation.
5684 F is the first insn to be scanned.
5686 LOOP_STEP denotes how much loop_depth should be incremented per
5687 loop nesting level in order to increase the ref count more for
5688 references in a loop.
5690 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5691 possibly other information which is used by the register allocators.  */
5694 recompute_reg_usage (f, loop_step)
5695 rtx f ATTRIBUTE_UNUSED;
5696 int loop_step ATTRIBUTE_UNUSED;
5702 /* Clear out the old data.  */
5703 max_reg = max_reg_num ();
5704 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5710 /* Scan each insn in the chain and count how many times each register is
5712 for (index = 0; index < n_basic_blocks; index++)
5714 basic_block bb = BASIC_BLOCK (index);
5715 loop_depth = bb->loop_depth;
5716 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5718 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5722 /* This call will increment REG_N_SETS for each SET or CLOBBER
5723 of a register in INSN.  It will also increment REG_N_REFS
5724 by the loop depth for each set of a register in INSN.  */
5725 count_reg_sets (PATTERN (insn));
5727 /* count_reg_sets does not detect autoincrement address modes, so
5728 detect them here by looking at the notes attached to INSN.  */
5729 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5731 if (REG_NOTE_KIND (links) == REG_INC)
5732 /* Count (weighted) references, stores, etc.  This counts a
5733 register twice if it is modified, but that is correct.  */
5734 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5737 /* This call will increment REG_N_REFS by the current loop depth for
5738 each reference to a register in INSN.  */
5739 count_reg_references (PATTERN (insn));
5741 /* count_reg_references will not include counts for arguments to
5742 function calls, so detect them here by examining the
5743 CALL_INSN_FUNCTION_USAGE data.  */
5744 if (GET_CODE (insn) == CALL_INSN)
5748 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5750 note = XEXP (note, 1))
5751 if (GET_CODE (XEXP (note, 0)) == USE)
5752 count_reg_references (XEXP (XEXP (note, 0), 0));
/* Stop after processing the block's last insn.  */
5755 if (insn == bb->end)
5761 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5762 blocks.  If BLOCKS is NULL, assume the universal set.  Returns a count
5763 of the number of registers that died.  */
/* Walk each selected block's insns, inspecting REG_NOTES; dead-register
   notes are counted (hard regs weighted by HARD_REGNO_NREGS) and, when
   KILL, unlinked and freed via free_EXPR_LIST_node.  */
5766 count_or_remove_death_notes (blocks, kill)
5772 for (i = n_basic_blocks - 1; i >= 0; --i)
5777 if (blocks && ! TEST_BIT (blocks, i))
5780 bb = BASIC_BLOCK (i);
5782 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5784 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
/* FIX(review): the original listing had the mojibake "®_NOTES" here --
   "&REG" mis-decoded as the (R) sign.  Restored the address-of operator.  */
5786 rtx *pprev = &REG_NOTES (insn);
5791 switch (REG_NOTE_KIND (link))
5794 if (GET_CODE (XEXP (link, 0)) == REG)
5796 rtx reg = XEXP (link, 0);
5799 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5802 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
/* Unlink and free the note, keeping pprev pointing at the link slot.  */
5810 rtx next = XEXP (link, 1);
5811 free_EXPR_LIST_node (link);
5812 *pprev = link = next;
5818 pprev = &XEXP (link, 1);
5825 if (insn == bb->end)
5833 /* Record INSN's block as BB.  */
/* Store BB into the basic_block_for_insn varray, growing it (with slack)
   when INSN's uid is beyond the current size.  */
5836 set_block_for_insn (insn, bb)
5840 size_t uid = INSN_UID (insn);
5841 if (uid >= basic_block_for_insn->num_elements)
5845 /* Add one-eighth the size so we don't keep calling xrealloc.  */
5846 new_size = uid + (uid + 7) / 8;
5848 VARRAY_GROW (basic_block_for_insn, new_size);
5850 VARRAY_BB (basic_block_for_insn, uid) = bb;
5853 /* Record INSN's block number as BB.  */
5854 /* ??? This has got to go.  */
/* Thin wrapper: translate the block index BB into a basic_block pointer.  */
5857 set_block_num (insn, bb)
5861 set_block_for_insn (insn, BASIC_BLOCK (bb));
5864 /* Verify the CFG consistency.  This function checks some CFG invariants and
5865 aborts when something is wrong.  Hope that this function will help to
5866 convert many optimization passes to keep the CFG consistent.
5868 Currently it does following checks:
5870 - test head/end pointers
5871 - overlapping of basic blocks
5872 - edge list correctness
5873 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5874 - tails of basic blocks (ensure that boundary is necessary)
5875 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5876 and NOTE_INSN_BASIC_BLOCK
5877 - check that all insns are in the basic blocks
5878 (except the switch handling code, barriers and notes)
5879 - check that all returns are followed by barriers
5881 In future it can be extended check a lot of other stuff as well
5882 (reachability of basic blocks, life information, etc. etc.).  */
5887 const int max_uid = get_max_uid ();
5888 const rtx rtx_first = get_insns ();
5889 basic_block *bb_info;
5893 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5895 /* First pass check head/end pointers and set bb_info array used by
5897 for (i = n_basic_blocks - 1; i >= 0; i--)
5899 basic_block bb = BASIC_BLOCK (i);
5901 /* Check the head pointer and make sure that it is pointing into
5903 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5908 error ("Head insn %d for block %d not found in the insn stream.",
5909 INSN_UID (bb->head), bb->index);
5913 /* Check the end pointer and make sure that it is pointing into
5915 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5917 if (bb_info[INSN_UID (x)] != NULL)
5919 error ("Insn %d is in multiple basic blocks (%d and %d)",
5920 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5923 bb_info[INSN_UID (x)] = bb;
5930 error ("End insn %d for block %d not found in the insn stream.",
5931 INSN_UID (bb->end), bb->index);
5936 /* Now check the basic blocks (boundaries etc.) */
5937 for (i = n_basic_blocks - 1; i >= 0; i--)
5939 basic_block bb = BASIC_BLOCK (i);
5940 /* Check correctness of edge lists */
5948 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5950 fprintf (stderr, "Predecessor: ");
5951 dump_edge_info (stderr, e, 0);
5952 fprintf (stderr, "\nSuccessor: ");
5953 dump_edge_info (stderr, e, 1);
/* Each successor edge must appear on its destination's pred list.  */
5957 if (e->dest != EXIT_BLOCK_PTR)
5959 edge e2 = e->dest->pred;
5960 while (e2 && e2 != e)
5964 error ("Basic block %i edge lists are corrupted", bb->index);
5976 error ("Basic block %d pred edge is corrupted", bb->index);
5977 fputs ("Predecessor: ", stderr);
5978 dump_edge_info (stderr, e, 0);
5979 fputs ("\nSuccessor: ", stderr);
5980 dump_edge_info (stderr, e, 1);
5981 fputc ('\n', stderr);
/* Symmetric check: each predecessor edge on its source's succ list.  */
5984 if (e->src != ENTRY_BLOCK_PTR)
5986 edge e2 = e->src->succ;
5987 while (e2 && e2 != e)
5991 error ("Basic block %i edge lists are corrupted", bb->index);
5998 /* OK pointers are correct.  Now check the header of basic
5999 block.  It ought to contain optional CODE_LABEL followed
6000 by NOTE_BASIC_BLOCK.  */
6002 if (GET_CODE (x) == CODE_LABEL)
6006 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6012 if (GET_CODE (x) != NOTE
6013 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6014 || NOTE_BASIC_BLOCK (x) != bb)
6016 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6023 /* Do checks for empty blocks here */
6030 if (GET_CODE (x) == NOTE
6031 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK
6033 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6034 INSN_UID (x), bb->index);
6041 if (GET_CODE (x) == JUMP_INSN
6042 || GET_CODE (x) == CODE_LABEL
6043 || GET_CODE (x) == BARRIER)
6045 error ("In basic block %d:", bb->index);
6046 fatal_insn ("Flow control insn inside a basic block", x);
/* Final pass: any insn not assigned to a block must be one of the
   tolerated kinds (addr_vec labels, non-deletable labels, etc.).  */
6057 if (!bb_info[INSN_UID (x)])
6059 switch (GET_CODE (x))
6066 /* An addr_vec is placed outside any basic block.  */
6068 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6069 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6070 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6075 /* But in any case, non-deletable labels can appear anywhere.  */
6079 fatal_insn ("Insn outside basic block", x);
6083 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6084 && GET_CODE (x) == JUMP_INSN
6085 && returnjump_p (x) && ! condjump_p (x)
6086 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6087 fatal_insn ("Return not followed by barrier", x);
6099 /* Functions to access an edge list with a vector representation.
6100 Enough data is kept such that given an index number, the
6101 pred and succ that edge represents can be determined, or
6102 given a pred and a succ, its index number can be returned.
6103 This allows algorithms which consume a lot of memory to
6104 represent the normally full matrix of edge (pred,succ) with a
6105 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6106 wasted space in the client code due to sparse flow graphs.  */
6108 /* This function initializes the edge list.  Basically the entire
6109 flowgraph is processed, and all edges are assigned a number,
6110 and the data structure is filled in.  */
/* Two passes: first count successor edges (including the entry block's),
   then allocate and fill index_to_edge in the same traversal order.  */
6114 struct edge_list *elist;
6120 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks.  */
6124 /* Determine the number of edges in the flow graph by counting successor
6125 edges on each basic block.  */
6126 for (x = 0; x < n_basic_blocks; x++)
6128 basic_block bb = BASIC_BLOCK (x);
6130 for (e = bb->succ; e; e = e->succ_next)
6133 /* Don't forget successors of the entry block.  */
6134 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6137 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6138 elist->num_blocks = block_count;
6139 elist->num_edges = num_edges;
6140 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6144 /* Follow successors of the entry block, and register these edges.  */
6145 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6147 elist->index_to_edge[num_edges] = e;
6151 for (x = 0; x < n_basic_blocks; x++)
6153 basic_block bb = BASIC_BLOCK (x);
6155 /* Follow all successors of blocks, and register these edges.  */
6156 for (e = bb->succ; e; e = e->succ_next)
6158 elist->index_to_edge[num_edges] = e;
6165 /* This function frees memory associated with an edge list.  */
6167 free_edge_list (elist)
6168 struct edge_list *elist;
6172 free (elist->index_to_edge);
6177 /* This function provides debug output showing an edge list.  */
/* One "index - edge(src,dest)" line per edge; entry/exit blocks print
   as "entry"/"exit" instead of an index.  */
6179 print_edge_list (f, elist)
6181 struct edge_list *elist;
6184 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6185 elist->num_blocks - 2, elist->num_edges);
6187 for (x = 0; x < elist->num_edges; x++)
6189 fprintf (f, " %-4d - edge(", x);
6190 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6191 fprintf (f,"entry,");
6193 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6195 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6196 fprintf (f,"exit)\n");
6198 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6202 /* This function provides an internal consistency check of an edge list,
6203 verifying that all edges are present, and that there are no
/* Forward check: every CFG edge must have an index whose recorded
   pred/succ match; reverse check: every indexed (pred,succ) pair must
   correspond to a real edge.  Entry and exit blocks are handled by
   their own loops since they are outside the BASIC_BLOCK array.  */
6206 verify_edge_list (f, elist)
6208 struct edge_list *elist;
6210 int x, pred, succ, index;
6213 for (x = 0; x < n_basic_blocks; x++)
6215 basic_block bb = BASIC_BLOCK (x);
6217 for (e = bb->succ; e; e = e->succ_next)
6219 pred = e->src->index;
6220 succ = e->dest->index;
6221 index = EDGE_INDEX (elist, e->src, e->dest);
6222 if (index == EDGE_INDEX_NO_EDGE)
6224 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6227 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6228 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6229 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6230 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6231 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6232 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6235 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6237 pred = e->src->index;
6238 succ = e->dest->index;
6239 index = EDGE_INDEX (elist, e->src, e->dest);
6240 if (index == EDGE_INDEX_NO_EDGE)
6242 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6245 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6246 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6247 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6248 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6249 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6250 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6252 /* We've verified that all the edges are in the list, now let's make sure
6253 there are no spurious edges in the list.  */
6255 for (pred = 0 ; pred < n_basic_blocks; pred++)
6256 for (succ = 0 ; succ < n_basic_blocks; succ++)
6258 basic_block p = BASIC_BLOCK (pred);
6259 basic_block s = BASIC_BLOCK (succ);
6263 for (e = p->succ; e; e = e->succ_next)
6269 for (e = s->pred; e; e = e->pred_next)
6275 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6276 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6277 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6279 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6280 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6281 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6282 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6283 BASIC_BLOCK (succ)));
6285 for (succ = 0 ; succ < n_basic_blocks; succ++)
6287 basic_block p = ENTRY_BLOCK_PTR;
6288 basic_block s = BASIC_BLOCK (succ);
6292 for (e = p->succ; e; e = e->succ_next)
6298 for (e = s->pred; e; e = e->pred_next)
6304 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6305 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6306 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6308 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6309 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6310 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6311 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6312 BASIC_BLOCK (succ)));
6314 for (pred = 0 ; pred < n_basic_blocks; pred++)
6316 basic_block p = BASIC_BLOCK (pred);
6317 basic_block s = EXIT_BLOCK_PTR;
6321 for (e = p->succ; e; e = e->succ_next)
6327 for (e = s->pred; e; e = e->pred_next)
6333 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6334 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6335 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6337 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6338 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6339 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6340 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6345 /* This routine will determine what, if any, edge there is between
6346 a specified predecessor and successor.  */
/* Linear scan of the edge list; returns the edge's index, or
   EDGE_INDEX_NO_EDGE when no (PRED, SUCC) edge is recorded.  */
6349 find_edge_index (edge_list, pred, succ)
6350 struct edge_list *edge_list;
6351 basic_block pred, succ;
6354 for (x = 0; x < NUM_EDGES (edge_list); x++)
6356 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6357 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6360 return (EDGE_INDEX_NO_EDGE);
6363 /* This function will remove an edge from the flow graph.  */
/* Unlink E from both its source's successor list and its destination's
   predecessor list, tracking the previous node in each singly linked
   list so the head can be patched when E is first.  */
6368 edge last_pred = NULL;
6369 edge last_succ = NULL;
6371 basic_block src, dest;
6374 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6380 last_succ->succ_next = e->succ_next;
6382 src->succ = e->succ_next;
6384 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6390 last_pred->pred_next = e->pred_next;
6392 dest->pred = e->pred_next;
6398 /* This routine will remove any fake successor edges for a basic block.
6399 When the edge is removed, it is also removed from whatever predecessor
/* Walk BB's successor list, removing edges whose EDGE_FAKE flag is set.  */
6402 remove_fake_successors (bb)
6406 for (e = bb->succ; e ; )
6410 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6415 /* This routine will remove all fake edges from the flow graph.  If
6416 we remove all fake successors, it will automatically remove all
6417 fake predecessors.  */
6419 remove_fake_edges ()
6423 for (x = 0; x < n_basic_blocks; x++)
6424 remove_fake_successors (BASIC_BLOCK (x));
6426 /* We've handled all successors except the entry block's.  */
6427 remove_fake_successors (ENTRY_BLOCK_PTR);
6430 /* This function will add a fake edge between any block which has no
6431 successors, and the exit block.  Some data flow equations require these
6434 add_noreturn_fake_exit_edges ()
6438 for (x = 0; x < n_basic_blocks; x++)
6439 if (BASIC_BLOCK (x)->succ == NULL)
6440 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6443 /* Dump the list of basic blocks in the bitmap NODES.  */
/* Output format: "STR { n1 n2 ... }\n".  */
6445 flow_nodes_print (str, nodes, file)
6447 const sbitmap nodes;
6452 fprintf (file, "%s { ", str);
6453 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6454 fputs ("}\n", file);
6458 /* Dump the list of exiting edges in the array EDGES.  */
/* Output format: "STR { src->dest ... }\n" over NUM_EDGES entries.  */
6460 flow_exits_print (str, edges, num_edges, file)
6468 fprintf (file, "%s { ", str);
6469 for (i = 0; i < num_edges; i++)
6470 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6471 fputs ("}\n", file);
6475 /* Dump loop related CFG information.  */
/* For each block: its successors plus its dominator set; then, when
   available, the DFS visit order.  No-op when LOOPS is empty, FILE is
   null, or dominators were not computed.  */
6477 flow_loops_cfg_dump (loops, file)
6478 const struct loops *loops;
6483 if (! loops->num || ! file || ! loops->cfg.dom)
6486 for (i = 0; i < n_basic_blocks; i++)
6490 fprintf (file, ";; %d succs { ", i);
6491 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6492 fprintf (file, "%d ", succ->dest->index);
6493 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6497 /* Dump the DFS node order.  */
6498 if (loops->cfg.dfs_order)
6500 fputs (";; DFS order: ", file);
6501 for (i = 0; i < n_basic_blocks; i++)
6502 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6508 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6510 flow_loop_nested_p (outer, loop)
/* Nesting test is pure set containment on the per-loop node bitmaps.  */
6514 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6518 /* Dump the loop information specified by LOOPS to the stream FILE. */
6520 flow_loops_dump (loops, file, verbose)
6521 const struct loops *loops;
6528 num_loops = loops->num;
/* Nothing to do with no loops or no output stream.  */
6529 if (! num_loops || ! file)
6532 fprintf (file, ";; %d loops found, %d levels\n",
6533 num_loops, loops->levels);
/* One summary line per loop: header/latch insn UIDs and block indices,
   pre-header (-1 if none), depth, level, and the outer loop's array
   index (-1 for a top-level loop).  */
6535 for (i = 0; i < num_loops; i++)
6537 struct loop *loop = &loops->array[i];
6539 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6540 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6541 loop->header->index, loop->latch->index,
6542 loop->pre_header ? loop->pre_header->index : -1,
6543 loop->depth, loop->level,
6544 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6545 fprintf (file, ";; %d", loop->num_nodes);
6546 flow_nodes_print (" nodes", loop->nodes, file);
6547 fprintf (file, ";; %d", loop->num_exits);
6548 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
/* Compare against every earlier loop to report shared headers.  */
6554 for (j = 0; j < i; j++)
6556 struct loop *oloop = &loops->array[j];
6558 if (loop->header == oloop->header)
6563 smaller = loop->num_nodes < oloop->num_nodes;
6565 /* If the union of LOOP and OLOOP is different than
6566 the larger of LOOP and OLOOP then LOOP and OLOOP
6567 must be disjoint. */
6568 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6569 smaller ? oloop : loop);
6570 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6571 loop->header->index, i, j,
6572 disjoint ? "disjoint" : "nested");
6579 /* Print diagnostics to compare our concept of a loop with
6580 what the loop notes say. */
6581 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6582 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6583 != NOTE_INSN_LOOP_BEG
6584 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6585 INSN_UID (PREV_INSN (loop->first->head)));
6586 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6587 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6588 != NOTE_INSN_LOOP_END
6589 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6590 INSN_UID (NEXT_INSN (loop->last->end)));
/* Finally dump the CFG-derived info (dominators, DFS order).  */
6595 flow_loops_cfg_dump (loops, file);
6599 /* Free all the memory allocated for LOOPS. */
6601 flow_loops_free (loops)
6602 struct loops *loops;
6611 /* Free the loop descriptors. */
6612 for (i = 0; i < loops->num; i++)
6614 struct loop *loop = &loops->array[i];
/* Per-loop node bitmap; the exits array is presumably freed in lines
   not visible in this excerpt -- TODO confirm.  */
6617 sbitmap_free (loop->nodes);
6621 free (loops->array);
6622 loops->array = NULL;
/* Release the cached CFG-derived data as well.  */
6625 sbitmap_vector_free (loops->cfg.dom);
6626 if (loops->cfg.dfs_order)
6627 free (loops->cfg.dfs_order);
6629 sbitmap_free (loops->shared_headers);
6634 /* Find the exits from the loop using the bitmap of loop nodes NODES
6635 and store in EXITS array. Return the number of exits from the
6638 flow_loop_exits_find (nodes, exits)
6639 const sbitmap nodes;
6648 /* Check all nodes within the loop to see if there are any
6649 successors not in the loop. Note that a node may have multiple
/* First pass: count exiting edges (successor outside NODES or the
   exit block itself).  */
6652 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6653 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6655 basic_block dest = e->dest;
6657 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
/* NOTE(review): sizing by sizeof (edge *) for an array of `edge'
   elements; harmless only because `edge' is itself a pointer typedef
   in this codebase -- sizeof (edge) would be the correct spelling.  */
6665 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6667 /* Store all exiting edges into an array. */
/* Second pass: same test, now recording each exiting edge.  NUM_EXITS
   is presumably reset to zero between the passes (line not visible).  */
6669 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6670 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6672 basic_block dest = e->dest;
6674 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6675 (*exits)[num_exits++] = e;
6683 /* Find the nodes contained within the loop with header HEADER and
6684 latch LATCH and store in NODES. Return the number of nodes within
6687 flow_loop_nodes_find (header, latch, nodes)
/* Worklist for the backward reachability walk from the latch; at most
   one entry per basic block.  */
6696 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6699 /* Start with only the loop header in the set of loop nodes. */
6700 sbitmap_zero (nodes);
6701 SET_BIT (nodes, header->index);
6703 header->loop_depth++;
6705 /* Push the loop latch on to the stack. */
6706 if (! TEST_BIT (nodes, latch->index))
6708 SET_BIT (nodes, latch->index);
6709 latch->loop_depth++;
6711 stack[sp++] = latch;
/* Pop a node and scan its predecessors; everything that reaches the
   latch without passing through the header belongs to the loop.  */
6720 for (e = node->pred; e; e = e->pred_next)
6722 basic_block ancestor = e->src;
6724 /* If each ancestor not marked as part of loop, add to set of
6725 loop nodes and push on to stack. */
6726 if (ancestor != ENTRY_BLOCK_PTR
6727 && ! TEST_BIT (nodes, ancestor->index))
6729 SET_BIT (nodes, ancestor->index);
6730 ancestor->loop_depth++;
6732 stack[sp++] = ancestor;
6741 /* Compute the depth first search order and store in the array
6742 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6743 number of nodes visited. */
6745 flow_depth_first_order_compute (dfs_order)
6754 /* Allocate stack for back-tracking up CFG. */
6755 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6758 /* Allocate bitmap to track nodes that have been visited. */
6759 visited = sbitmap_alloc (n_basic_blocks);
6761 /* None of the nodes in the CFG have been visited yet. */
6762 sbitmap_zero (visited);
6764 /* Start with the first successor edge from the entry block. */
6765 e = ENTRY_BLOCK_PTR->succ;
6768 basic_block src = e->src;
6769 basic_block dest = e->dest;
6771 /* Mark that we have visited this node. */
6772 if (src != ENTRY_BLOCK_PTR)
6773 SET_BIT (visited, src->index);
6775 /* If this node has not been visited before, push the current
6776 edge on to the stack and proceed with the first successor
6777 edge of this node. */
6778 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6786 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6789 /* DEST has no successors (for example, a non-returning
6790 function is called) so do not push the current edge
6791 but carry on with its next successor. */
/* Nodes are numbered in *reverse completion order*: the first node to
   finish gets index n_basic_blocks - 1, so DFS_ORDER holds a reverse
   post-order of the CFG.  */
6792 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6793 SET_BIT (visited, dest->index);
/* Backtrack: number SRC once all its successor edges are exhausted.  */
6796 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6798 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6800 /* Pop edge off stack. */
6808 sbitmap_free (visited);
6810 /* The number of nodes visited should not be greater than
6812 if (dfsnum > n_basic_blocks)
6815 /* There are some nodes left in the CFG that are unreachable. */
6816 if (dfsnum < n_basic_blocks)
6822 /* Return the block for the pre-header of the loop with header
6823 HEADER where DOM specifies the dominator information. Return NULL if
6824 there is no pre-header. */
6826 flow_loop_pre_header_find (header, dom)
6830 basic_block pre_header;
6833 /* If block p is a predecessor of the header and is the only block
6834 that the header does not dominate, then it is the pre-header. */
6836 for (e = header->pred; e; e = e->pred_next)
6838 basic_block node = e->src;
/* A predecessor not dominated by HEADER enters from outside the loop;
   exactly one such predecessor means it is the pre-header.  */
6840 if (node != ENTRY_BLOCK_PTR
6841 && ! TEST_BIT (dom[node->index], header->index))
6843 if (pre_header == NULL)
6847 /* There are multiple edges into the header from outside
6848 the loop so there is no pre-header block. */
6858 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6859 previously added. The insertion algorithm assumes that the loops
6860 are added in the order found by a depth first search of the CFG. */
6862 flow_loop_tree_node_add (prevloop, loop)
6863 struct loop *prevloop;
/* Case 1: LOOP is nested inside PREVLOOP -- make it its first inner
   child.  */
6867 if (flow_loop_nested_p (prevloop, loop))
6869 prevloop->inner = loop;
6870 loop->outer = prevloop;
/* Case 2: climb outward until an ancestor of PREVLOOP encloses LOOP,
   then chain LOOP as that level's next sibling.  */
6874 while (prevloop->outer)
6876 if (flow_loop_nested_p (prevloop->outer, loop))
6878 prevloop->next = loop;
6879 loop->outer = prevloop->outer;
6882 prevloop = prevloop->outer;
/* Case 3: no enclosing loop found -- LOOP is a new top-level sibling.  */
6885 prevloop->next = loop;
6890 /* Build the loop hierarchy tree for LOOPS. */
6892 flow_loops_tree_build (loops)
6893 struct loops *loops;
6898 num_loops = loops->num;
6902 /* Root the loop hierarchy tree with the first loop found.
6903 Since we used a depth first search this should be the
6905 loops->tree = &loops->array[0];
6906 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6908 /* Add the remaining loops to the tree. */
/* Insert in discovery (DFS) order, as flow_loop_tree_node_add
   requires; each loop is placed relative to the previous one.  */
6909 for (i = 1; i < num_loops; i++)
6910 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6914 /* Helper function to compute loop nesting depth and enclosed loop level
6915 for the natural loop specified by LOOP at the loop depth DEPTH.
6916 Returns the loop level. */
6918 flow_loop_level_compute (loop, depth)
6928 /* Traverse loop tree assigning depth and computing level as the
6929 maximum level of all the inner loops of this loop. The loop
6930 level is equivalent to the height of the loop in the loop tree
6931 and corresponds to the number of enclosed loop levels (including
6933 for (inner = loop->inner; inner; inner = inner->next)
/* Recurse into each child one depth deeper; +1 converts the child's
   level to this loop's candidate level.  */
6937 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6942 loop->level = level;
6943 loop->depth = depth;
6948 /* Compute the loop nesting depth and enclosed loop level for the loop
6949 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
6953 flow_loops_level_compute (loops)
6954 struct loops *loops;
6960 /* Traverse all the outer level loops. */
/* Top-level loops start at depth 1; the overall result is presumably
   the maximum level over these calls (accumulation line not visible).  */
6961 for (loop = loops->tree; loop; loop = loop->next)
6963 level = flow_loop_level_compute (loop, 1);
6971 /* Find all the natural loops in the function and save in LOOPS structure
6972 and recalculate loop_depth information in basic block structures.
6973 Return the number of natural loops found. */
6976 flow_loops_find (loops)
6977 struct loops *loops;
6988 loops->array = NULL;
6992 /* Taking care of this degenerate case makes the rest of
6993 this code simpler. */
6994 if (n_basic_blocks == 0)
6997 /* Compute the dominators. */
6998 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6999 compute_flow_dominators (dom, NULL);
7001 /* Count the number of loop edges (back edges). This should be the
7002 same as the number of natural loops. Also clear the loop_depth
7003 and as we work from inner->outer in a loop nest we call
7004 find_loop_nodes_find which will increment loop_depth for nodes
7005 within the current loop, which happens to enclose inner loops. */
/* Pass 1: count back edges so the loop array can be sized up front.  */
7008 for (b = 0; b < n_basic_blocks; b++)
7010 BASIC_BLOCK (b)->loop_depth = 0;
7011 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7013 basic_block latch = e->src;
7015 /* Look for back edges where a predecessor is dominated
7016 by this block. A natural loop has a single entry
7017 node (header) that dominates all the nodes in the
7018 loop. It also has single back edge to the header
7019 from a latch node. Note that multiple natural loops
7020 may share the same header. */
7021 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7028 /* Compute depth first search order of the CFG so that outer
7029 natural loops will be found before inner natural loops. */
7030 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7031 flow_depth_first_order_compute (dfs_order);
7033 /* Allocate loop structures. */
7035 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
/* HEADERS marks every block seen as a loop header; a second hit on the
   same block records it in SHARED_HEADERS for the merge pass below.  */
7037 headers = sbitmap_alloc (n_basic_blocks);
7038 sbitmap_zero (headers);
7040 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7041 sbitmap_zero (loops->shared_headers);
7043 /* Find and record information about all the natural loops
/* Pass 2: re-discover each back edge in DFS order and fill in the loop
   descriptors.  */
7046 for (b = 0; b < n_basic_blocks; b++)
7050 /* Search the nodes of the CFG in DFS order that we can find
7051 outer loops first. */
7052 header = BASIC_BLOCK (dfs_order[b]);
7054 /* Look for all the possible latch blocks for this header. */
7055 for (e = header->pred; e; e = e->pred_next)
7057 basic_block latch = e->src;
7059 /* Look for back edges where a predecessor is dominated
7060 by this block. A natural loop has a single entry
7061 node (header) that dominates all the nodes in the
7062 loop. It also has single back edge to the header
7063 from a latch node. Note that multiple natural loops
7064 may share the same header. */
7065 if (latch != ENTRY_BLOCK_PTR
7066 && TEST_BIT (dom[latch->index], header->index))
7070 loop = loops->array + num_loops;
7072 loop->header = header;
7073 loop->latch = latch;
7075 /* Keep track of blocks that are loop headers so
7076 that we can tell which loops should be merged. */
7077 if (TEST_BIT (headers, header->index))
7078 SET_BIT (loops->shared_headers, header->index);
7079 SET_BIT (headers, header->index);
7081 /* Find nodes contained within the loop. */
7082 loop->nodes = sbitmap_alloc (n_basic_blocks)
7084 = flow_loop_nodes_find (header, latch, loop->nodes);
7086 /* Compute first and last blocks within the loop.
7087 These are often the same as the loop header and
7088 loop latch respectively, but this is not always
7091 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7093 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7095 /* Find edges which exit the loop. Note that a node
7096 may have several exit edges. */
7098 = flow_loop_exits_find (loop->nodes, &loop->exits);
7100 /* Look to see if the loop has a pre-header node. */
7102 = flow_loop_pre_header_find (header, dom);
7109 /* Natural loops with shared headers may either be disjoint or
7110 nested. Disjoint loops with shared headers cannot be inner
7111 loops and should be merged. For now just mark loops that share
7113 for (i = 0; i < num_loops; i++)
7114 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7115 loops->array[i].shared = 1;
7117 sbitmap_free (headers);
7120 loops->num = num_loops;
7122 /* Save CFG derived information to avoid recomputing it. */
7123 loops->cfg.dom = dom;
7124 loops->cfg.dfs_order = dfs_order;
7126 /* Build the loop hierarchy tree. */
7127 flow_loops_tree_build (loops);
7129 /* Assign the loop nesting depth and enclosed loop level for each
7131 loops->levels = flow_loops_level_compute (loops);
7137 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7139 flow_loop_outside_edge_p (loop, e)
7140 const struct loop *loop;
/* Only edges targeting the header can enter the loop from outside.  */
7143 if (e->dest != loop->header)
/* Outside means: from the entry block, or from a block not in the
   loop's node set.  */
7145 return (e->src == ENTRY_BLOCK_PTR)
7146 || ! TEST_BIT (loop->nodes, e->src->index);
7150 /* Clear LOG_LINKS fields of insns in a chain. */
7152 clear_log_links (insns)
7156 for (i = insns; i; i = NEXT_INSN (i))
7157 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')