/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.
   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.
   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.
   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
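   For illustration, such a bit-vector can be modeled in plain C as an
   array of words with one bit per register number.  This is only a
   sketch with made-up names, not the real regset.h interface:

	#define WORD_BITS (8 * sizeof (unsigned long))
	#define REG_LIVE_P(set, r) \
	  (((set)[(r) / WORD_BITS] >> ((r) % WORD_BITS)) & 1)
	#define SET_REG_LIVE(set, r) \
	  ((set)[(r) / WORD_BITS] |= 1UL << ((r) % WORD_BITS))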
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.
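   For example (schematic RTL; a real insn carries more fields), in

	(insn 10 ... (set (reg 58) (plus (reg 58) (reg 59)))
	   (expr_list:REG_DEAD (reg 59) (nil)))

   register 59 gets a REG_DEAD note because insn 10 is its last use,
   while register 58 does not: the insn replaces its value rather than
   leaving it dead.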
   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
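   A minimal self-contained sketch of that forward scan, in plain C with
   a hypothetical one-byte-per-register LIVE array and made-up predicates
   (reg_dead_note_p, insn_sets_reg_p, reg_unused_note_p), none of which
   are the interfaces used in this file:

	void
	scan_block_forward (rtx *insns, int n_insns, char *live, int n_regs)
	{
	  int i, r;
	  for (i = 0; i < n_insns; ++i)
	    for (r = 0; r < n_regs; ++r)
	      {
		if (reg_dead_note_p (insns[i], r))
		  live[r] = 0;		-- last use: R dies here
		if (insn_sets_reg_p (insns[i], r)
		    && ! reg_unused_note_p (insns[i], r))
		  live[r] = 1;		-- R set, and the value is used later
	      }
	}

   After scanning insn I, LIVE describes exactly the registers that are
   live following I.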
   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.
   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
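   Schematically (exact RTL elided), the pair of insns

	(set (reg 65) (plus (reg 65) (const_int 4)))
	(set (mem (reg 65)) (reg 66))

   becomes the single insn

	(set (mem (pre_inc (reg 65))) (reg 66))

   with (reg 65) recorded in a REG_INC note on that insn.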
   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
 */
#include "basic-block.h"
#include "insn-config.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;
183 /* The special entry and exit blocks. */
185 struct basic_block_def entry_exit_blocks[2]
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
194 ENTRY_BLOCK, /* index */
196 -1, -1 /* eh_beg, eh_end */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
207 EXIT_BLOCK, /* index */
209 -1, -1 /* eh_beg, eh_end */
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;
/* Element N is the next insn that uses (hard or pseudo) register number N
   within the current basic block; or zero, if there is no such insn.
   This is valid only during the final backward scan in propagate_block.  */

static rtx *reg_next_use;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;
/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;
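/* For illustration: since that list is a plain EXPR_LIST chain (walked
   with the usual XEXP accessors), the pairs can be visited two nodes at
   a time.  A minimal sketch, not code that appears in this file:

	rtx x;
	for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
	  {
	    rtx r1 = XEXP (x, 0);
	    rtx r2 = XEXP (XEXP (x, 1), 0);
	    ... R1 and R2 must receive the same hard register ...
	  }
 */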
/* Depth within loops of basic block being scanned for lifetime analysis,
   plus one.  This is the weight attached to references to registers.  */

static int loop_depth;

/* During propagate_block, this is non-zero if the value of CC0 is live.  */

static int cc0_live;

/* During propagate_block, this contains a list of all the MEMs we are
   tracking for dead store elimination.  */

static rtx mem_set_list;
/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
/* Forward declarations */
static int count_basic_blocks PARAMS ((rtx));
static rtx find_basic_blocks_1 PARAMS ((rtx));
static void clear_edges PARAMS ((void));
static void make_edges PARAMS ((rtx));
static void make_label_edge PARAMS ((sbitmap *, basic_block, rtx, int));
static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
				  basic_block, rtx, int));
static void mark_critical_edges PARAMS ((void));
static void move_stray_eh_region_notes PARAMS ((void));
static void record_active_eh_regions PARAMS ((rtx));

static void commit_one_edge_insertion PARAMS ((edge));

static void delete_unreachable_blocks PARAMS ((void));
static void delete_eh_regions PARAMS ((void));
static int can_delete_note_p PARAMS ((rtx));
static int delete_block PARAMS ((basic_block));
static void expunge_block PARAMS ((basic_block));
static int can_delete_label_p PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static void merge_blocks_nomove PARAMS ((basic_block, basic_block));
static int merge_blocks PARAMS ((edge, basic_block, basic_block));
static void try_merge_blocks PARAMS ((void));
static void tidy_fallthru_edge PARAMS ((edge, basic_block, basic_block));
static void tidy_fallthru_edges PARAMS ((void));
static int verify_wide_reg_1 PARAMS ((rtx *, void *));
static void verify_wide_reg PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start PARAMS ((regset, basic_block));
static int set_noop_p PARAMS ((rtx));
static int noop_move_p PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg PARAMS ((rtx, void *));
static void mark_regs_live_at_end PARAMS ((regset));
static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
static void propagate_block PARAMS ((basic_block, regset, regset, int));
static int insn_dead_p PARAMS ((rtx, regset, int, rtx));
static int libcall_dead_p PARAMS ((rtx, regset, rtx, rtx));
static void mark_set_regs PARAMS ((regset, regset, rtx, rtx, rtx));
static void mark_set_1 PARAMS ((regset, regset, rtx, rtx, rtx));
static void find_auto_inc PARAMS ((regset, rtx, rtx));
static int try_pre_increment_1 PARAMS ((rtx));
static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
static void mark_used_regs PARAMS ((regset, regset, rtx, int, rtx));
void dump_flow_info PARAMS ((FILE *));
void debug_flow_info PARAMS ((void));
static void dump_edge_info PARAMS ((FILE *, edge, int));

static void count_reg_sets_1 PARAMS ((rtx));
static void count_reg_sets PARAMS ((rtx));
static void count_reg_references PARAMS ((rtx));
static void invalidate_mems_from_autoinc PARAMS ((rtx));
static void remove_fake_successors PARAMS ((basic_block));
static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build PARAMS ((struct loops *));
static int flow_loop_level_compute PARAMS ((struct loop *, int));
static int flow_loops_level_compute PARAMS ((struct loops *));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();

  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;
  rtx prev_call = NULL_RTX;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_had_abnormal_edge = 1;
	    }

	  prev_call = insn;
	}
      else if (code != NOTE)
	prev_call = NULL_RTX;

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int call_has_abnormal_edge = 0;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if (code == CALL_INSN)
	{
	  /* Record whether this call created an edge.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	  call_has_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_list && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_has_abnormal_edge = 1;
	  else if (nonlocal_goto_handler_labels && region >= 0)
	    /* If there is a nonlocal goto label and the specified
	       region number isn't -1, we have an edge.  (0 means
	       no throw, but might have a nonlocal goto).  */
	    call_has_abnormal_edge = 1;
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;
		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		next = flow_delete_insn (insn);
	      }

	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN
		  && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }
	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN
	      && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  /* A basic block ends at a call that can either throw or
	     do a non-local goto.  */
	  if (call_has_abnormal_edge)
	    {
	    new_bb_inclusive:
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;

	    new_bb_exclusive:
	      create_basic_block (i++, head, end, bb_note);
	      head = end = NULL_RTX;
	      bb_note = NULL_RTX;
	      break;
	    }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);

  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      if (GET_CODE (head) == CODE_LABEL)
	add_insn_after (bb_note, head);
      else
	{
	  add_insn_before (bb_note, head);
	  head = bb_note;
	}
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);

	  VARRAY_BB (basic_block_for_insn, uid) = bb;

	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* Add any appropriate EH edges.  We do this unconditionally
	     since there may be a REG_EH_REGION or REG_EH_RETHROW note
	     on the call, and this needn't be within an EH region.  */
	  make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		     EDGE_ABNORMAL | EDGE_EH | is_call);
}
/* EH_REGION notes appearing between basic blocks is ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  ++i;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

			E
			|
			0
			|
			1 <--+
			|    |
			+----+
			|
			2

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */

  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	before = bb->end;
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;
    }
  else
    {
      rtx last = emit_insns_after (insns, after);
      if (after == bb->end)
	{
	  bb->end = last;

	  if (GET_CODE (last) == JUMP_INSN)
	    {
	      if (returnjump_p (last))
		{
		  /* ??? Remove all outgoing edges from BB and add one
		     for EXIT.  This is not currently a problem because
		     this only happens for the (single) epilogue, which
		     already has a fallthru edge to EXIT.  */

		  e = bb->succ;
		  if (e->dest != EXIT_BLOCK_PTR
		      || e->succ_next != NULL
		      || (e->flags & EDGE_FALLTHRU) == 0)
		    abort ();
		  e->flags &= ~EDGE_FALLTHRU;

		  emit_barrier_after (last);
		}
	      else
		abort ();
	    }
	}
    }
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	    || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static int
delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}

      /* This label may be referenced by code solely for its value, or
	 referenced by static data, or something.  We have determined
	 that it is not reachable, but cannot delete the label itself.
	 Save code space and continue to delete the balance of the block,
	 along with properly updating the cfg.  */
      if (!can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     insns thing.  */
	  if (insn == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (GET_CODE (end) == JUMP_INSN
      && (tmp = JUMP_LABEL (end)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  flow_delete_insn_chain (insn, end);

 no_delete_insns:

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e ; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e ; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}
/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      BASIC_BLOCK (i) = x;
      x->index = i;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}
/* Delete INSN by patching it out.  Return the next insn.  */

rtx
flow_delete_insn (insn)
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  rtx next = NEXT_INSN (insn);
  rtx note;

  PREV_INSN (insn) = NULL_RTX;
  NEXT_INSN (insn) = NULL_RTX;

  if (prev)
    NEXT_INSN (prev) = next;
  if (next)
    PREV_INSN (next) = prev;
  else
    set_last_insn (prev);

  if (GET_CODE (insn) == CODE_LABEL)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
    LABEL_NUSES (XEXP (note, 0))--;

  return next;
}
/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  rtx x;

  if (LABEL_PRESERVE_P (label))
    return 0;

  for (x = forced_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = label_value_list; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;

  /* User declared labels must be preserved.  */
  if (LABEL_NAME (label) != 0)
    return 0;

  return 1;
}
/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_predecessor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;
  int index;

  start = a->head;
  end = a->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  if (end != PREV_INSN (b->head))
    reorder_insns (start, end, PREV_INSN (b->head));

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
	       a->index, b->index);
    }

  /* Swap the records for the two blocks around.  Although we are deleting B,
     A is now where B was and we want to compact the BB array from where
     A used to be.  */
  BASIC_BLOCK(a->index) = b;
  BASIC_BLOCK(b->index) = a;
  index = a->index;
  a->index = b->index;
  b->index = index;

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  return 1;
}
/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_successor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;

  start = b->head;
  end = b->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  reorder_insns (start, end, a->end);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
	       b->index, a->index);
    }

  return 1;
}
/* Blocks A and B are to be merged into a single block.  The insns
   are already contiguous, hence `nomove'.  */

static void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  edge e;
  rtx b_head, b_end, a_end;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  b_head = b->head;
  b_end = b->end;
  a_end = a->end;
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;
      b_head = flow_delete_insn (b_head);
    }

  /* Delete the basic block note.  */
  if (GET_CODE (b_head) == NOTE
      && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
    {
      if (b_head == b_end)
	b_empty = 1;
      b_head = flow_delete_insn (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      prev = prev_nonnote_insn (a_end);
      if (!prev)
	prev = a->head;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */

      if (prev && sets_cc0_p (prev))
	{
	  rtx tmp = prev;
	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  flow_delete_insn (tmp);
	}
#endif

      /* Note that a->head != a->end, since we should have at least a
	 bb note plus the jump, so prev != insn.  */
      flow_delete_insn (a_end);
      a_end = prev;
    }

  /* By definition, there should only be one successor of A, and that is
     B.  Free that edge struct.  */
  n_edges--;
  free (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e ; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      BLOCK_FOR_INSN (b_head) = a;
      while (b_head != b_end)
	{
	  b_head = NEXT_INSN (b_head);
	  BLOCK_FOR_INSN (b_head) = a;
	}
      a_end = b_end;
    }
  a->end = a_end;

  /* Compact the basic block array.  */
  expunge_block (b);
}
2236 /* Attempt to merge basic blocks that are potentially non-adjacent.
2237 Return true iff the attempt succeeded. */
2240 merge_blocks (e, b, c)
2244 /* If B has a fallthru edge to C, no need to move anything. */
2245 if (e->flags & EDGE_FALLTHRU)
2247 /* If a label still appears somewhere and we cannot delete the label,
2248 then we cannot merge the blocks. The edge was tidied already. */
2250 rtx insn, stop = NEXT_INSN (c->head);
2251 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2252 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2255 merge_blocks_nomove (b, c);
2259 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2260 b->index, c->index);
2262 return 1;
2264 else
2266 edge tmp_edge;
2267 basic_block d;
2269 int c_has_outgoing_fallthru;
2270 int b_has_incoming_fallthru;
2272 /* We must make sure to not munge nesting of exception regions,
2273 lexical blocks, and loop notes.
2275 The first is taken care of by requiring that the active eh
2276 region at the end of one block always matches the active eh
2277 region at the beginning of the next block.
2279 The latter two are taken care of by squeezing out all the notes. */
2281 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2282 executed and we may want to treat blocks which have two out
2283 edges, one normal, one abnormal as only having one edge for
2284 block merging purposes. */
2286 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2287 if (tmp_edge->flags & EDGE_FALLTHRU)
2288 break;
2289 c_has_outgoing_fallthru = (tmp_edge != NULL);
2291 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2292 if (tmp_edge->flags & EDGE_FALLTHRU)
2293 break;
2294 b_has_incoming_fallthru = (tmp_edge != NULL);
2296 /* If B does not have an incoming fallthru, and the exception regions
2297 match, then it can be moved immediately before C without introducing
2298 or modifying jumps.
2300 C cannot be the first block, so we do not have to worry about
2301 accessing a non-existent block. */
2302 d = BASIC_BLOCK (c->index - 1);
2303 if (! b_has_incoming_fallthru
2304 && d->eh_end == b->eh_beg
2305 && b->eh_end == c->eh_beg)
2306 return merge_blocks_move_predecessor_nojumps (b, c);
2308 /* Otherwise, we're going to try to move C after B. Make sure the
2309 exception regions match.
2311 If B is the last basic block, then we must not try to access the
2312 block structure for block B + 1. Luckily in that case we do not
2313 need to worry about matching exception regions. */
2314 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2315 if (b->eh_end == c->eh_beg
2316 && (d == NULL || c->eh_end == d->eh_beg))
2318 /* If C does not have an outgoing fallthru, then it can be moved
2319 immediately after B without introducing or modifying jumps. */
2320 if (! c_has_outgoing_fallthru)
2321 return merge_blocks_move_successor_nojumps (b, c);
2323 /* Otherwise, we'll need to insert an extra jump, and possibly
2324 a new block to contain it. */
2325 /* ??? Not implemented yet. */
2328 return 0;
2332 /* Top level driver for merge_blocks. */
2334 static void
2335 try_merge_blocks ()
2337 register int i;
2339 /* Attempt to merge blocks as made possible by edge removal. If a block
2340 has only one successor, and the successor has only one predecessor,
2341 they may be combined. */
2343 for (i = 0; i < n_basic_blocks; )
2345 basic_block c, b = BASIC_BLOCK (i);
2346 edge s;
2348 /* A loop because chains of blocks might be combinable. */
2349 while ((s = b->succ) != NULL
2350 && s->succ_next == NULL
2351 && (s->flags & EDGE_EH) == 0
2352 && (c = s->dest) != EXIT_BLOCK_PTR
2353 && c->pred->pred_next == NULL
2354 /* If the jump insn has side effects, we can't kill the edge. */
2355 && (GET_CODE (b->end) != JUMP_INSN
2356 || onlyjump_p (b->end))
2357 && merge_blocks (s, b, c))
2358 continue;
2360 /* Don't get confused by the index shift caused by deleting blocks. */
2361 i = b->index + 1;
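/* Illustrative only, not part of the pass: the merge test applied in the
   loop above, restated as a standalone predicate. It assumes only the
   edge and basic_block fields already used throughout this file. */
#if 0
static int
blocks_mergeable_p (b, c)
     basic_block b, c;
{
  return (b->succ != NULL
	  && b->succ->succ_next == NULL		/* B has exactly one successor, */
	  && b->succ->dest == c			/* namely C, */
	  && c->pred != NULL
	  && c->pred->pred_next == NULL);	/* and C has exactly one predecessor. */
}
#endif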
2365 /* The given edge should potentially be a fallthru edge. If that is in
2366 fact true, delete the jump and barriers that are in the way. */
2368 static void
2369 tidy_fallthru_edge (e, b, c)
2370 edge e;
2371 basic_block b, c;
2373 rtx q;
2375 /* ??? In a late-running flow pass, other folks may have deleted basic
2376 blocks by nopping out blocks, leaving multiple BARRIERs between here
2377 and the target label. They ought to be chastised and fixed.
2379 We can also wind up with a sequence of undeletable labels between
2380 one block and the next.
2382 So search through a sequence of barriers, labels, and notes for
2383 the head of block C and assert that we really do fall through. */
2385 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2386 abort ();
2388 /* Remove what will soon cease being the jump insn from the source block.
2389 If block B consisted only of this single jump, turn it into a deleted
2390 note. */
2391 q = b->end;
2392 if (GET_CODE (q) == JUMP_INSN)
2394 #ifdef HAVE_cc0
2395 /* If this was a conditional jump, we need to also delete
2396 the insn that set cc0. */
2397 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2398 q = PREV_INSN (q);
2399 #endif
2403 PUT_CODE (q, NOTE);
2404 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2405 NOTE_SOURCE_FILE (q) = 0;
2408 b->end = q = PREV_INSN (q);
2411 /* Selectively unlink the sequence. */
2412 if (q != PREV_INSN (c->head))
2413 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2415 e->flags |= EDGE_FALLTHRU;
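/* For example (an illustrative sketch, not rtl produced here): if block B
   ends in (jump_insn (set (pc) (label_ref L))) and L is the CODE_LABEL
   heading block C, with only barriers, undeletable labels, and notes in
   between, the jump and barriers are removed and the edge is marked
   EDGE_FALLTHRU. */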
2418 /* Fix up edges that now fall through, or rather should now fall through
2419 but previously required a jump around now deleted blocks. Simplify
2420 the search by only examining blocks numerically adjacent, since this
2421 is how find_basic_blocks created them. */
2423 static void
2424 tidy_fallthru_edges ()
2426 int i;
2428 for (i = 1; i < n_basic_blocks; ++i)
2430 basic_block b = BASIC_BLOCK (i - 1);
2431 basic_block c = BASIC_BLOCK (i);
2432 edge s;
2434 /* We care about simple conditional or unconditional jumps with
2435 a single successor.
2437 If we had a conditional branch to the next instruction when
2438 find_basic_blocks was called, then there will only be one
2439 out edge for the block which ended with the conditional
2440 branch (since we do not create duplicate edges).
2442 Furthermore, the edge will be marked as a fallthru because we
2443 merge the flags for the duplicate edges. So we do not want to
2444 check that the edge is not a FALLTHRU edge. */
2445 if ((s = b->succ) != NULL
2446 && s->succ_next == NULL
2447 && s->dest == c
2448 /* If the jump insn has side effects, we can't tidy the edge. */
2449 && (GET_CODE (b->end) != JUMP_INSN
2450 || onlyjump_p (b->end)))
2451 tidy_fallthru_edge (s, b, c);
2455 /* Discover and record the loop depth at the head of each basic block. */
2457 static void
2458 calculate_loop_depth (dump)
2459 FILE *dump;
2461 struct loops loops;
2463 /* The loop infrastructure does the real job for us. */
2464 flow_loops_find (&loops);
2466 if (dump)
2467 flow_loops_dump (&loops, dump, 0);
2469 flow_loops_free (&loops);
2472 /* Perform data flow analysis.
2473 F is the first insn of the function and NREGS the number of register numbers
2474 in use. */
2476 void
2477 life_analysis (f, nregs, file, remove_dead_code)
2478 rtx f;
2479 int nregs;
2480 FILE *file;
2481 int remove_dead_code;
2483 #ifdef ELIMINABLE_REGS
2484 register int i;
2485 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2486 #endif
2487 int flags;
2488 sbitmap all_blocks;
2490 /* Record which registers will be eliminated. We use this in
2491 mark_used_regs. */
2493 CLEAR_HARD_REG_SET (elim_reg_set);
2495 #ifdef ELIMINABLE_REGS
2496 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2497 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2498 #else
2499 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2500 #endif
2502 /* We want alias analysis information for local dead store elimination. */
2503 init_alias_analysis ();
2506 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2510 if (! remove_dead_code)
2511 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2514 /* The post-reload life analysis has (on a global basis) the same
2515 registers live as were computed by reload itself; otherwise offsets
2516 and such may be incorrect.
2518 Reload will mark some registers as live even though they do not
2519 appear in the rtl. */
2520 if (reload_completed)
2521 flags &= ~PROP_REG_INFO;
2525 /* Always remove no-op moves. Do this before other processing so
2526 that we don't have to keep re-scanning them. */
2527 delete_noop_moves (f);
2529 /* Some targets can emit simpler epilogues if they know that sp was
2530 not ever modified during the function. After reload, of course,
2531 we've already emitted the epilogue so there's no sense searching. */
2532 if (! reload_completed)
2533 notice_stack_pointer_modification (f);
2535 /* Allocate and zero out data structures that will record the
2536 data from lifetime analysis. */
2537 allocate_reg_life_data ();
2538 allocate_bb_life_data ();
2539 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2540 all_blocks = sbitmap_alloc (n_basic_blocks);
2541 sbitmap_ones (all_blocks);
2543 /* Find the set of registers live on function exit. */
2544 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2546 /* "Update" life info from zero. It'd be nice to begin the
2547 relaxation with just the exit and noreturn blocks, but that set
2548 is not immediately handy. */
2550 if (flags & PROP_REG_INFO)
2551 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2552 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2555 sbitmap_free (all_blocks);
2556 free (reg_next_use);
2557 reg_next_use = NULL;
2558 end_alias_analysis ();
2560 if (file)
2561 dump_flow_info (file);
2563 free_basic_block_vars (1);
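/* A minimal usage sketch (an assumed caller, not code from this file):
   a pass driver might recompute liveness for the whole function with
     life_analysis (get_insns (), max_reg_num (), rtl_dump_file, 1);
   where the final argument requests dead code removal. */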
2566 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2567 Search for REGNO. If found, abort if it is not wider than word_mode. */
2569 static int
2570 verify_wide_reg_1 (px, pregno)
2571 rtx *px;
2572 void *pregno;
2574 rtx x = *px;
2575 unsigned int regno = *(int *) pregno;
2577 if (GET_CODE (x) == REG && REGNO (x) == regno)
2579 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2580 abort ();
2581 return 1;
2583 return 0;
2586 /* A subroutine of verify_local_live_at_start. Search through insns
2587 between HEAD and END looking for register REGNO. */
2589 static void
2590 verify_wide_reg (regno, head, end)
2591 int regno;
2592 rtx head, end;
2594 while (1)
2596 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2597 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2598 return;
2599 if (head == end)
2600 break;
2601 head = NEXT_INSN (head);
2604 /* We didn't find the register at all. Something's way screwy. */
2605 abort ();
2608 /* A subroutine of update_life_info. Verify that there are no untoward
2609 changes in live_at_start during a local update. */
2611 static void
2612 verify_local_live_at_start (new_live_at_start, bb)
2613 regset new_live_at_start;
2614 basic_block bb;
2616 if (reload_completed)
2618 /* After reload, there are no pseudos, nor subregs of multi-word
2619 registers. The regsets should exactly match. */
2620 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2621 abort ();
2623 else
2625 int i;
2627 /* Find the set of changed registers. */
2628 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2630 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2632 /* No registers should die. */
2633 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2634 abort ();
2635 /* Verify that the now-live register is wider than word_mode. */
2636 verify_wide_reg (i, bb->head, bb->end);
2641 /* Updates life information starting with the basic blocks set in BLOCKS.
2643 If LOCAL_ONLY, such as after splitting or peepholing, we are only
2644 expecting local modifications to basic blocks. If we find extra
2645 registers live at the beginning of a block, then we either killed
2646 useful data, or we have a broken split that wants data not provided.
2647 If we find registers removed from live_at_start, that means we have
2648 a broken peephole that is killing a register it shouldn't.
2650 ??? This is not true in one situation -- when a pre-reload splitter
2651 generates subregs of a multi-word pseudo, current life analysis will
2652 lose the kill. So we _can_ have a pseudo go live. How irritating.
2654 BLOCK_FOR_INSN is assumed to be correct.
2656 PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
2657 up reg_next_use. Including PROP_REG_INFO does not properly refresh
2658 regs_ever_live unless the caller resets it to zero. */
2660 void
2661 update_life_info (blocks, extent, prop_flags)
2662 sbitmap blocks;
2663 enum update_life_extent extent;
2664 int prop_flags;
2666 regset tmp;
2667 regset_head tmp_head;
2668 int i;
2670 tmp = INITIALIZE_REG_SET (tmp_head);
2672 /* For a global update, we go through the relaxation process again. */
2673 if (extent != UPDATE_LIFE_LOCAL)
2675 calculate_global_regs_live (blocks, blocks,
2676 prop_flags & PROP_SCAN_DEAD_CODE);
2678 /* If asked, remove notes from the blocks we'll update. */
2679 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2680 count_or_remove_death_notes (blocks, 1);
2683 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2685 basic_block bb = BASIC_BLOCK (i);
2687 COPY_REG_SET (tmp, bb->global_live_at_end);
2688 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2690 if (extent == UPDATE_LIFE_LOCAL)
2691 verify_local_live_at_start (tmp, bb);
2694 FREE_REG_SET (tmp);
2696 if (prop_flags & PROP_REG_INFO)
2698 /* The only pseudos that are live at the beginning of the function
2699 are those that were not set anywhere in the function. local-alloc
2700 doesn't know how to handle these correctly, so mark them as not
2701 local to any one basic block. */
2702 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2703 FIRST_PSEUDO_REGISTER, i,
2704 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2706 /* We have a problem with any pseudoreg that lives across the setjmp.
2707 ANSI says that if a user variable does not change in value between
2708 the setjmp and the longjmp, then the longjmp preserves it. This
2709 includes longjmp from a place where the pseudo appears dead.
2710 (In principle, the value still exists if it is in scope.)
2711 If the pseudo goes in a hard reg, some other value may occupy
2712 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2713 Conclusion: such a pseudo must not go in a hard reg. */
2714 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2715 FIRST_PSEUDO_REGISTER, i,
2717 if (regno_reg_rtx[i] != 0)
2719 REG_LIVE_LENGTH (i) = -1;
2720 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2726 /* Free the variables allocated by find_basic_blocks.
2728 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2731 free_basic_block_vars (keep_head_end_p)
2732 int keep_head_end_p;
2734 if (basic_block_for_insn)
2736 VARRAY_FREE (basic_block_for_insn);
2737 basic_block_for_insn = NULL;
2740 if (! keep_head_end_p)
2742 if (basic_block_info)
2743 VARRAY_FREE (basic_block_info);
2744 basic_block_info = NULL;
2745 n_basic_blocks = 0;
2746 ENTRY_BLOCK_PTR->aux = NULL;
2747 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2748 EXIT_BLOCK_PTR->aux = NULL;
2749 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2753 /* Return nonzero if the destination of SET equals the source. */
2755 static int
2756 set_noop_p (set)
2757 rtx set;
2758 rtx src = SET_SRC (set);
2759 rtx dst = SET_DEST (set);
2761 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2763 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2764 return 0;
2765 src = SUBREG_REG (src);
2766 dst = SUBREG_REG (dst);
2769 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2770 && REGNO (src) == REGNO (dst));
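/* For example (illustrative rtl): an insn whose body is
     (set (reg:SI 65) (reg:SI 65))
   or one copying a word of a register onto itself,
     (set (subreg:SI (reg:DI 66) 0) (subreg:SI (reg:DI 66) 0)),
   satisfies set_noop_p. */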
2773 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2774 value to itself. */
2775 int
2776 noop_move_p (insn)
2777 rtx insn;
2779 rtx pat = PATTERN (insn);
2781 /* Insns carrying these notes are useful later on. */
2782 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2783 return 0;
2785 if (GET_CODE (pat) == SET && set_noop_p (pat))
2786 return 1;
2788 if (GET_CODE (pat) == PARALLEL)
2790 int i;
2791 /* If nothing but SETs of registers to themselves,
2792 this insn can also be deleted. */
2793 for (i = 0; i < XVECLEN (pat, 0); i++)
2795 rtx tem = XVECEXP (pat, 0, i);
2797 if (GET_CODE (tem) == USE
2798 || GET_CODE (tem) == CLOBBER)
2799 continue;
2801 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2802 return 0;
2805 return 1;
2807 return 0;
2810 /* Delete any insns that copy a register to itself. */
2812 static void
2813 delete_noop_moves (f)
2814 rtx f;
2816 rtx insn;
2817 for (insn = f; insn; insn = NEXT_INSN (insn))
2819 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2821 PUT_CODE (insn, NOTE);
2822 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2823 NOTE_SOURCE_FILE (insn) = 0;
2828 /* Determine if the stack pointer is constant over the life of the function.
2829 Only useful before prologues have been emitted. */
2831 static void
2832 notice_stack_pointer_modification_1 (x, pat, data)
2833 rtx x;
2834 rtx pat ATTRIBUTE_UNUSED;
2835 void *data ATTRIBUTE_UNUSED;
2837 if (x == stack_pointer_rtx
2838 /* The stack pointer is only modified indirectly as the result
2839 of a push until later in flow. See the comments in rtl.texi
2840 regarding Embedded Side-Effects on Addresses. */
2841 || (GET_CODE (x) == MEM
2842 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2843 || GET_CODE (XEXP (x, 0)) == PRE_INC
2844 || GET_CODE (XEXP (x, 0)) == POST_DEC
2845 || GET_CODE (XEXP (x, 0)) == POST_INC)
2846 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2847 current_function_sp_is_unchanging = 0;
2850 static void
2851 notice_stack_pointer_modification (f)
2852 rtx f;
2854 rtx insn;
2856 /* Assume that the stack pointer is unchanging if alloca hasn't
2857 been used. */
2858 current_function_sp_is_unchanging = !current_function_calls_alloca;
2859 if (! current_function_sp_is_unchanging)
2860 return;
2862 for (insn = f; insn; insn = NEXT_INSN (insn))
2864 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2866 /* Check if insn modifies the stack pointer. */
2867 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2868 NULL);
2869 if (! current_function_sp_is_unchanging)
2870 return;
2875 /* Mark a register in SET. Hard registers in large modes get all
2876 of their component registers set as well. */
2877 static void
2878 mark_reg (reg, xset)
2879 rtx reg;
2880 void *xset;
2882 regset set = (regset) xset;
2883 int regno = REGNO (reg);
2885 if (GET_MODE (reg) == BLKmode)
2886 abort ();
2888 SET_REGNO_REG_SET (set, regno);
2889 if (regno < FIRST_PSEUDO_REGISTER)
2891 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2892 while (--n > 0)
2893 SET_REGNO_REG_SET (set, regno + n);
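/* For example (illustrative and target-dependent): on a 32-bit target
   where HARD_REGNO_NREGS (2, DImode) == 2, marking (reg:DI 2) sets the
   bits for hard registers 2 and 3 in SET. */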
2897 /* Mark those regs which are needed at the end of the function as live
2898 at the end of the last basic block. */
2899 static void
2900 mark_regs_live_at_end (set)
2901 regset set;
2903 int i;
2905 /* If exiting needs the right stack value, consider the stack pointer
2906 live at the end of the function. */
2907 if ((HAVE_epilogue && reload_completed)
2908 || ! EXIT_IGNORE_STACK
2909 || (! FRAME_POINTER_REQUIRED
2910 && ! current_function_calls_alloca
2911 && flag_omit_frame_pointer)
2912 || current_function_sp_is_unchanging)
2914 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2917 /* Mark the frame pointer if needed at the end of the function. If
2918 we end up eliminating it, it will be removed from the live list
2919 of each basic block by reload. */
2921 if (! reload_completed || frame_pointer_needed)
2923 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2924 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2925 /* If they are different, also mark the hard frame pointer as live */
2926 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2930 #ifdef PIC_OFFSET_TABLE_REGNUM
2931 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2932 /* Many architectures have a GP register even without flag_pic.
2933 Assume the pic register is not in use, or will be handled by
2934 other means, if it is not fixed. */
2935 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2936 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2937 #endif
2938 #endif
2940 /* Mark all global registers, and all registers used by the epilogue
2941 as being live at the end of the function since they may be
2942 referenced by our caller. */
2943 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2944 if (global_regs[i]
2945 #ifdef EPILOGUE_USES
2946 || EPILOGUE_USES (i)
2947 #endif
2948 )
2949 SET_REGNO_REG_SET (set, i);
2951 /* Mark all call-saved registers that we actually used. */
2952 if (HAVE_epilogue && reload_completed)
2954 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2955 if (! call_used_regs[i] && regs_ever_live[i])
2956 SET_REGNO_REG_SET (set, i);
2959 /* Mark function return value. */
2960 diddle_return_value (mark_reg, set);
2963 /* Propagate global life info around the graph of basic blocks. Begin
2964 considering blocks with their corresponding bit set in BLOCKS_IN.
2965 BLOCKS_OUT is set for every block that was changed. */
2967 static void
2968 calculate_global_regs_live (blocks_in, blocks_out, flags)
2969 sbitmap blocks_in, blocks_out;
2970 int flags;
2972 basic_block *queue, *qhead, *qtail, *qend;
2973 regset tmp, new_live_at_end;
2974 regset_head tmp_head;
2975 regset_head new_live_at_end_head;
2976 int i;
2978 tmp = INITIALIZE_REG_SET (tmp_head);
2979 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
2981 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
2982 because the `head == tail' style test for an empty queue doesn't
2983 work with a full queue. */
2984 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
2985 qtail = queue;
2986 qhead = qend = queue + n_basic_blocks + 2;
2988 /* Clear out the garbage that might be hanging out in bb->aux. */
2989 for (i = n_basic_blocks - 1; i >= 0; --i)
2990 BASIC_BLOCK (i)->aux = NULL;
2992 /* Queue the blocks set in the initial mask. Do this in reverse block
2993 number order so that the first round is more likely to do
2994 useful work. We use AUX non-null to flag that the block is queued. */
2995 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
2997 basic_block bb = BASIC_BLOCK (i);
2998 *--qhead = bb;
2999 bb->aux = bb;
3002 sbitmap_zero (blocks_out);
3004 while (qhead != qtail)
3006 int rescan, changed;
3007 basic_block bb;
3008 edge e;
3010 bb = *qhead++;
3011 if (qhead == qend)
3012 qhead = queue;
3013 bb->aux = NULL;
3015 /* Begin by propagating live_at_start from the successor blocks. */
3016 CLEAR_REG_SET (new_live_at_end);
3017 for (e = bb->succ; e ; e = e->succ_next)
3019 basic_block sb = e->dest;
3020 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3023 if (bb == ENTRY_BLOCK_PTR)
3025 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3026 continue;
3029 /* On our first pass through this block, we'll go ahead and continue.
3030 Recognize first pass by local_set NULL. On subsequent passes, we
3031 get to skip out early if live_at_end wouldn't have changed. */
3033 if (bb->local_set == NULL)
3035 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3036 rescan = 1;
3038 else
3040 /* If any bits were removed from live_at_end, we'll have to
3041 rescan the block. This wouldn't be necessary if we had
3042 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3043 local_live is really dependent on live_at_end. */
3044 CLEAR_REG_SET (tmp);
3045 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3046 new_live_at_end, BITMAP_AND_COMPL);
3048 if (! rescan)
3050 /* Find the set of changed bits. Take this opportunity
3051 to notice that this set is empty and early out. */
3052 CLEAR_REG_SET (tmp);
3053 changed = bitmap_operation (tmp, bb->global_live_at_end,
3054 new_live_at_end, BITMAP_XOR);
3055 if (! changed)
3056 continue;
3058 /* If any of the changed bits overlap with local_set,
3059 we'll have to rescan the block. Detect overlap by
3060 the AND with ~local_set turning off bits. */
3061 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3062 BITMAP_AND_COMPL);
3066 /* Let our caller know that BB changed enough to require its
3067 death notes updated. */
3068 SET_BIT (blocks_out, bb->index);
3072 /* Add to live_at_start the set of all registers in
3073 new_live_at_end that aren't in the old live_at_end. */
3075 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3076 BITMAP_AND_COMPL);
3077 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3079 changed = bitmap_operation (bb->global_live_at_start,
3080 bb->global_live_at_start,
3081 tmp, BITMAP_IOR);
3082 if (! changed)
3083 continue;
3085 else
3087 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3089 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3090 into live_at_start. */
3091 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3093 /* If live_at_start didn't change, no need to go further. */
3094 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3095 continue;
3097 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3100 /* Queue all predecessors of BB so that we may re-examine
3101 their live_at_end. */
3102 for (e = bb->pred; e ; e = e->pred_next)
3104 basic_block pb = e->src;
3105 if (pb->aux == NULL)
3107 *qtail++ = pb;
3108 if (qtail == qend)
3109 qtail = queue;
3110 pb->aux = pb;
3115 FREE_REG_SET (tmp);
3116 FREE_REG_SET (new_live_at_end);
3118 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3120 basic_block bb = BASIC_BLOCK (i);
3121 FREE_REG_SET (bb->local_set);
3123 free (queue);
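/* In dataflow terms, the fixed point computed above is (a sketch using
   the field names of this file):
     global_live_at_end (bb)   = union of global_live_at_start (succ)
				 over all successors succ of bb
     global_live_at_start (bb) = uses (bb)
				 | (global_live_at_end (bb) & ~local_set (bb))
   where propagate_block implements the second equation one insn at a time
   and blocks are requeued until nothing changes. */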
3127 /* Subroutines of life analysis. */
3129 /* Allocate the permanent data structures that represent the results
3130 of life analysis. Not static since used also for stupid life analysis. */
3132 void
3133 allocate_bb_life_data ()
3135 register int i;
3137 for (i = 0; i < n_basic_blocks; i++)
3139 basic_block bb = BASIC_BLOCK (i);
3141 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3142 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3145 ENTRY_BLOCK_PTR->global_live_at_end
3146 = OBSTACK_ALLOC_REG_SET (function_obstack);
3147 EXIT_BLOCK_PTR->global_live_at_start
3148 = OBSTACK_ALLOC_REG_SET (function_obstack);
3150 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3153 void
3154 allocate_reg_life_data ()
3156 int i;
3158 /* Recalculate the register space, in case it has grown. Old style
3159 vector oriented regsets would set regset_{size,bytes} here also. */
3160 allocate_reg_info (max_regno, FALSE, FALSE);
3162 /* Reset all the data we'll collect in propagate_block and its
3163 subroutines. */
3164 for (i = 0; i < max_regno; i++)
3166 REG_N_SETS (i) = 0;
3167 REG_N_REFS (i) = 0;
3168 REG_N_DEATHS (i) = 0;
3169 REG_N_CALLS_CROSSED (i) = 0;
3170 REG_LIVE_LENGTH (i) = 0;
3171 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3175 /* Compute the registers live at the beginning of basic block BB
3176 from those live at the end.
3178 When called, OLD contains those live at the end.
3179 On return, it contains those live at the beginning.
3181 FLAGS is the set of PROP_* operations to perform. Depending on the
3182 flags, we delete dead stores,
3185 set up the logical links and dead-variables lists of instructions,
3186 and merge instructions for autoincrement and autodecrement addresses.
3188 SIGNIFICANT is nonzero only the first time for each basic block.
3189 If it is nonzero, it points to a regset in which we store
3190 a 1 for each register that is set within the block. */
3194 static void
3195 propagate_block (bb, old, significant, flags)
3196 basic_block bb;
3197 regset old;
3198 regset significant;
3199 int flags;
3201 register rtx insn;
3202 rtx prev;
3203 regset live;
3204 regset_head live_head;
3205 regset dead;
3206 regset_head dead_head;
3208 /* Find the loop depth for this block. Ignore loop level changes in the
3209 middle of the basic block -- for register allocation purposes, the
3210 important uses will be in the blocks wholly contained within the loop,
3211 not in the loop pre-header or post-trailer. */
3212 loop_depth = bb->loop_depth;
3214 dead = INITIALIZE_REG_SET (dead_head);
3215 live = INITIALIZE_REG_SET (live_head);
3219 if (flags & PROP_REG_INFO)
3223 /* Process the regs live at the end of the block.
3224 Mark them as not local to any one basic block. */
3225 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3227 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3231 /* Scan the block an insn at a time from end to beginning. */
3233 for (insn = bb->end; ; insn = prev)
3235 prev = PREV_INSN (insn);
3237 if (GET_CODE (insn) == NOTE)
3239 /* If this is a call to `setjmp' et al,
3240 warn if any non-volatile datum is live. */
3242 if ((flags & PROP_REG_INFO)
3243 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3244 IOR_REG_SET (regs_live_at_setjmp, old);
3247 /* Update the life-status of regs for this insn.
3248 First DEAD gets which regs are set in this insn
3249 then LIVE gets which regs are used in this insn.
3250 Then the regs live before the insn
3251 are those live after, with DEAD regs turned off,
3252 and then LIVE regs turned on. */
3254 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3257 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3258 int insn_is_dead = 0;
3259 int libcall_is_dead = 0;
3261 if (flags & PROP_SCAN_DEAD_CODE)
3263 insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
3264 REG_NOTES (insn));
3265 libcall_is_dead = (insn_is_dead && note != 0
3266 && libcall_dead_p (PATTERN (insn), old,
3267 note, insn));
3270 /* We almost certainly don't want to delete prologue or epilogue
3271 instructions. Warn about probable compiler lossage. */
3272 if (insn_is_dead
3273 && reload_completed
3274 && (((HAVE_epilogue || HAVE_prologue)
3275 && prologue_epilogue_contains (insn))
3276 || (HAVE_sibcall_epilogue
3277 && sibcall_epilogue_contains (insn))))
3279 if (flags & PROP_KILL_DEAD_CODE)
3281 warning ("ICE: would have deleted prologue/epilogue insn");
3282 if (!inhibit_warnings)
3283 abort ();
3285 libcall_is_dead = insn_is_dead = 0;
3288 /* If an instruction consists of just dead store(s) on final pass,
3289 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3290 We could really delete it with delete_insn, but that
3291 can cause trouble for first or last insn in a basic block. */
3292 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3293 rtx inote;
3295 /* If the insn referred to a label, note that the label is
3296 now less used. */
3297 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3299 if (REG_NOTE_KIND (inote) == REG_LABEL)
3301 rtx label = XEXP (inote, 0);
3303 LABEL_NUSES (label)--;
3305 /* If this label was attached to an ADDR_VEC, it's
3306 safe to delete the ADDR_VEC. In fact, it's pretty
3307 much mandatory to delete it, because the ADDR_VEC may
3308 be referencing labels that no longer exist. */
3309 if (LABEL_NUSES (label) == 0
3310 && (next = next_nonnote_insn (label)) != NULL
3311 && GET_CODE (next) == JUMP_INSN
3312 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3313 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3315 rtx pat = PATTERN (next);
3316 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3317 int len = XVECLEN (pat, diff_vec_p);
3319 for (i = 0; i < len; i++)
3320 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3321 PUT_CODE (next, NOTE);
3322 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3323 NOTE_SOURCE_FILE (next) = 0;
3325 if ((next = next_nonnote_insn (label)) != NULL
3326 && GET_CODE (next) == BARRIER)
3328 PUT_CODE (next, NOTE);
3329 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3330 NOTE_SOURCE_FILE (next) = 0;
3336 PUT_CODE (insn, NOTE);
3337 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3338 NOTE_SOURCE_FILE (insn) = 0;
3339 #ifdef HAVE_cc0
3340 /* CC0 is now known to be dead. Either this insn used it,
3341 in which case it doesn't anymore, or clobbered it,
3342 so the next insn can't use it. */
3343 cc0_live = 0;
3344 #endif
3345 /* If this insn is copying the return value from a library call,
3346 delete the entire library call. */
3347 if (libcall_is_dead)
3349 rtx first = XEXP (note, 0);
3350 rtx p = insn;
3351 while (INSN_DELETED_P (first))
3352 first = NEXT_INSN (first);
3353 while (p != first)
3355 p = PREV_INSN (p);
3356 PUT_CODE (p, NOTE);
3357 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3358 NOTE_SOURCE_FILE (p) = 0;
3364 CLEAR_REG_SET (dead);
3365 CLEAR_REG_SET (live);
3367 /* See if this is an increment or decrement that can be
3368 merged into a following memory address. */
3369 #ifdef AUTO_INC_DEC
3371 register rtx x = single_set (insn);
3373 /* Does this instruction increment or decrement a register? */
3374 if (!reload_completed
3375 && (flags & PROP_AUTOINC)
3376 && x != 0
3377 && GET_CODE (SET_DEST (x)) == REG
3378 && (GET_CODE (SET_SRC (x)) == PLUS
3379 || GET_CODE (SET_SRC (x)) == MINUS)
3380 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3381 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3382 /* Ok, look for a following memory ref we can combine with.
3383 If one is found, change the memory ref to a PRE_INC
3384 or PRE_DEC, cancel this insn, and return 1.
3385 Return 0 if nothing has been done. */
3386 && try_pre_increment_1 (insn))
3389 #endif /* AUTO_INC_DEC */
3391 /* If this is not the final pass, and this insn is copying the
3392 value of a library call and it's dead, don't scan the
3393 insns that perform the library call, so that the call's
3394 arguments are not marked live. */
3395 if (libcall_is_dead)
3397 /* Mark the dest reg as `significant'. */
3398 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3399 significant, flags);
3401 insn = XEXP (note, 0);
3402 prev = PREV_INSN (insn);
3404 else if (GET_CODE (PATTERN (insn)) == SET
3405 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3406 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3407 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3408 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3409 /* We have an insn to pop a constant amount off the stack.
3410 (Such insns use PLUS regardless of the direction of the stack,
3411 and any insn to adjust the stack by a constant is always a pop.)
3412 These insns, if not dead stores, have no effect on life. */
3416 /* Any regs live at the time of a call instruction
3417 must not go in a register clobbered by calls.
3418 Find all regs now live and record this for them. */
3420 if (GET_CODE (insn) == CALL_INSN
3421 && (flags & PROP_REG_INFO))
3422 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3424 REG_N_CALLS_CROSSED (i)++;
3427 /* LIVE gets the regs used in INSN;
3428 DEAD gets those set by it. Dead insns don't make anything
3429 come to life. */
3431 mark_set_regs (old, dead, PATTERN (insn),
3432 insn, significant, flags);
3433 #ifdef HAVE_cc0
3434 /* If an insn doesn't use CC0, it becomes dead since we
3435 assume that every insn clobbers it. So show it dead here;
3436 mark_used_regs will set it live if it is referenced. */
3437 cc0_live = 0;
3438 #endif
3440 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3442 /* Sometimes we may have inserted something before INSN (such as
3443 a move) when we make an auto-inc. So ensure we will scan
3444 those insns. */
3446 prev = PREV_INSN (insn);
3449 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3451 register int i;
3452 rtx note;
3455 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3456 note;
3457 note = XEXP (note, 1))
3458 if (GET_CODE (XEXP (note, 0)) == USE)
3459 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3460 flags, insn);
3462 /* Each call clobbers all call-clobbered regs that are not
3463 global or fixed. Note that the function-value reg is a
3464 call-clobbered reg, and mark_set_regs has already had
3465 a chance to handle it. */
3467 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3468 if (call_used_regs[i] && ! global_regs[i]
3469 && ! fixed_regs[i])
3471 SET_REGNO_REG_SET (dead, i);
3472 if (significant)
3473 SET_REGNO_REG_SET (significant, i);
3476 /* The stack ptr is used (honorarily) by a CALL insn. */
3477 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3479 /* Calls may also reference any of the global registers,
3480 so they are made live. */
3481 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3482 if (global_regs[i])
3483 mark_used_regs (old, live,
3484 gen_rtx_REG (reg_raw_mode[i], i),
3485 flags, insn);
3487 /* Calls also clobber memory. */
3488 free_EXPR_LIST_list (&mem_set_list);
3491 /* Update OLD for the registers used or set. */
3492 AND_COMPL_REG_SET (old, dead);
3493 IOR_REG_SET (old, live);
3497 /* On final pass, update counts of how many insns in which
3498 each reg is live. */
3499 if (flags & PROP_REG_INFO)
3500 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
3503 if (insn == bb->head)
3504 break;
3507 FREE_REG_SET (dead);
3508 FREE_REG_SET (live);
3509 free_EXPR_LIST_list (&mem_set_list);
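/* Schematically, the per-insn update performed by the loop above is
     old = (old & ~dead) | live
   i.e. registers set by the insn are killed and registers used by it are
   made live; this is the AND_COMPL_REG_SET / IOR_REG_SET pair near the
   bottom of the loop. */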
3512 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3513 (SET expressions whose destinations are registers dead after the insn).
3514 NEEDED is the regset that says which regs are alive after the insn.
3516 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3518 If X is the entire body of an insn, NOTES contains the reg notes
3519 pertaining to the insn. */
3521 static int
3522 insn_dead_p (x, needed, call_ok, notes)
3523 rtx x;
3524 regset needed;
3525 int call_ok;
3526 rtx notes ATTRIBUTE_UNUSED;
3528 enum rtx_code code = GET_CODE (x);
3531 /* If flow is invoked after reload, we must take existing AUTO_INC
3532 expressions into account. */
3533 if (reload_completed)
3535 for ( ; notes; notes = XEXP (notes, 1))
3537 if (REG_NOTE_KIND (notes) == REG_INC)
3539 int regno = REGNO (XEXP (notes, 0));
3541 /* Don't delete insns to set global regs. */
3542 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3543 || REGNO_REG_SET_P (needed, regno))
3544 return 0;
3550 /* If setting something that's a reg or part of one,
3551 see if that register's altered value will be live. */
3553 if (code == SET)
3555 rtx r = SET_DEST (x);
3557 #ifdef HAVE_cc0
3558 if (GET_CODE (r) == CC0)
3559 return ! cc0_live;
3560 #endif
3562 /* A SET that is a subroutine call cannot be dead. */
3563 if (GET_CODE (SET_SRC (x)) == CALL)
3565 if (! call_ok)
3566 return 0;
3569 /* Don't eliminate loads from volatile memory or volatile asms. */
3570 else if (volatile_refs_p (SET_SRC (x)))
3571 return 0;
3573 if (GET_CODE (r) == MEM)
3575 rtx temp;
3577 if (MEM_VOLATILE_P (r))
3578 return 0;
3580 /* Walk the set of memory locations we are currently tracking
3581 and see if one is an identical match to this memory location.
3582 If so, this memory write is dead (remember, we're walking
3583 backwards from the end of the block to the start). */
3584 temp = mem_set_list;
3585 while (temp)
3587 if (rtx_equal_p (XEXP (temp, 0), r))
3588 return 1;
3589 temp = XEXP (temp, 1);
3594 while (GET_CODE (r) == SUBREG
3595 || GET_CODE (r) == STRICT_LOW_PART
3596 || GET_CODE (r) == ZERO_EXTRACT)
3597 r = XEXP (r, 0);
3599 if (GET_CODE (r) == REG)
3601 int regno = REGNO (r);
3604 if (REGNO_REG_SET_P (needed, regno))
3605 return 0;
3607 /* If this is a hard register, verify that subsequent
3608 words are not needed. */
3609 if (regno < FIRST_PSEUDO_REGISTER)
3611 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3613 while (--n > 0)
3614 if (REGNO_REG_SET_P (needed, regno+n))
3615 return 0;
3618 /* Don't delete insns to set global regs. */
3619 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3620 return 0;
3622 /* Make sure insns to set the stack pointer aren't deleted. */
3623 if (regno == STACK_POINTER_REGNUM)
3624 return 0;
3626 /* Make sure insns to set the frame pointer aren't deleted. */
3627 if (regno == FRAME_POINTER_REGNUM
3628 && (! reload_completed || frame_pointer_needed))
3629 return 0;
3630 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3631 if (regno == HARD_FRAME_POINTER_REGNUM
3632 && (! reload_completed || frame_pointer_needed))
3633 return 0;
3634 #endif
3636 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3637 /* Make sure insns to set arg pointer are never deleted
3638 (if the arg pointer isn't fixed, there will be a USE
3639 for it, so we can treat it normally). */
3640 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3641 return 0;
3642 #endif
3644 /* Otherwise, the set is dead. */
3645 return 1;
3650 /* If performing several activities, insn is dead if each activity
3651 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3652 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3653 needed. */
3654 else if (code == PARALLEL)
3656 int i = XVECLEN (x, 0);
3658 for (i--; i >= 0; i--)
3659 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3660 && GET_CODE (XVECEXP (x, 0, i)) != USE
3661 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3662 return 0;
3664 return 1;
3667 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3668 is not necessarily true for hard registers. */
3669 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3670 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3671 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3672 return 1;
3674 /* We do not check other CLOBBER or USE here. An insn consisting of just
3675 a CLOBBER or just a USE should not be deleted. */
3676 return 0;
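/* For example (illustrative): with pseudo 70 absent from NEEDED,
     (set (reg:SI 70) (plus:SI (reg:SI 71) (reg:SI 72)))
   is a dead store, while
     (set (mem:SI (reg:SI 100)) (reg:SI 71))
   is dead only if an identical (mem:SI (reg:SI 100)) is already on
   mem_set_list, i.e. a later store to the same location shadows it. */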
3679 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3680 return 1 if the entire library call is dead.
3681 This is true if X copies a register (hard or pseudo)
3682 and if the hard return reg of the call insn is dead.
3683 (The caller should have tested the destination of X already for death.)
3685 If this insn doesn't just copy a register, then we don't
3686 have an ordinary libcall. In that case, cse could not have
3687 managed to substitute the source for the dest later on,
3688 so we can assume the libcall is dead.
3690 NEEDED is the bit vector of pseudoregs live before this insn.
3691 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3693 static int
3694 libcall_dead_p (x, needed, note, insn)
3695 rtx x;
3696 regset needed;
3697 rtx note;
3698 rtx insn;
3700 register RTX_CODE code = GET_CODE (x);
3702 if (code == SET)
3704 register rtx r = SET_SRC (x);
3705 if (GET_CODE (r) == REG)
3707 rtx call = XEXP (note, 0);
3708 rtx call_pat;
3709 register int i;
3711 /* Find the call insn. */
3712 while (call != insn && GET_CODE (call) != CALL_INSN)
3713 call = NEXT_INSN (call);
3715 /* If there is none, do nothing special,
3716 since ordinary death handling can understand these insns. */
3717 if (call == insn)
3718 return 0;
3720 /* See if the hard reg holding the value is dead.
3721 If this is a PARALLEL, find the call within it. */
3722 call_pat = PATTERN (call);
3723 if (GET_CODE (call_pat) == PARALLEL)
3725 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3726 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3727 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3728 break;
3730 /* This may be a library call that is returning a value
3731 via invisible pointer. Do nothing special, since
3732 ordinary death handling can understand these insns. */
3733 if (i < 0)
3734 return 0;
3736 call_pat = XVECEXP (call_pat, 0, i);
3739 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3742 return 1;
3745 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3746 live at function entry. Don't count global register variables, variables
3747 in registers that can be used for function arg passing, or variables in
3748 fixed hard registers. */
3750 int
3751 regno_uninitialized (regno)
3752 int regno;
3754 if (n_basic_blocks == 0
3755 || (regno < FIRST_PSEUDO_REGISTER
3756 && (global_regs[regno]
3757 || fixed_regs[regno]
3758 || FUNCTION_ARG_REGNO_P (regno))))
3759 return 0;
3761 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3764 /* 1 if register REGNO was alive at a place where `setjmp' was called
3765 and was set more than once or is an argument.
3766 Such regs may be clobbered by `longjmp'. */
3768 int
3769 regno_clobbered_at_setjmp (regno)
3770 int regno;
3772 if (n_basic_blocks == 0)
3773 return 0;
3775 return ((REG_N_SETS (regno) > 1
3776 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3777 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3780 /* INSN references memory, possibly using autoincrement addressing modes.
3781 Find any entries on the mem_set_list that need to be invalidated due
3782 to an address change. */
3783 static void
3784 invalidate_mems_from_autoinc (insn)
3785 rtx insn;
3787 rtx note;
3788 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3790 if (REG_NOTE_KIND (note) == REG_INC)
3792 rtx temp = mem_set_list;
3793 rtx prev = NULL_RTX;
3794 rtx next;
3796 while (temp)
3798 next = XEXP (temp, 1);
3799 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3801 /* Splice temp out of list. */
3802 if (prev)
3803 XEXP (prev, 1) = next;
3804 else
3805 mem_set_list = next;
3806 free_EXPR_LIST_node (temp);
3808 else
3809 prev = temp;
3810 temp = next;
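/* For example (illustrative): if INSN carries a note
     (expr_list:REG_INC (reg:SI 100))
   then a tracked entry such as (mem:SI (reg:SI 100)) mentions the
   incremented register and is spliced out of mem_set_list above. */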
3816 /* Process the registers that are set within X. Their bits are set to
3817 1 in the regset DEAD, because they are dead prior to this insn.
3819 If INSN is nonzero, it is the insn being processed.
3821 FLAGS is the set of operations to perform. */
3823 static void
3824 mark_set_regs (needed, dead, x, insn, significant, flags)
3825 regset needed;
3826 regset dead;
3827 rtx x;
3828 rtx insn;
3829 regset significant;
3830 int flags;
3832 register RTX_CODE code = GET_CODE (x);
3834 if (code == SET || code == CLOBBER)
3835 mark_set_1 (needed, dead, x, insn, significant, flags);
3836 else if (code == PARALLEL)
3838 register int i;
3839 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3841 code = GET_CODE (XVECEXP (x, 0, i));
3842 if (code == SET || code == CLOBBER)
3843 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3844 significant, flags);
3849 /* Process a single SET rtx, X. */
3851 static void
3852 mark_set_1 (needed, dead, x, insn, significant, flags)
3853 regset needed;
3854 regset dead;
3855 rtx x;
3856 rtx insn;
3857 regset significant;
3858 int flags;
3860 register int regno = -1;
3861 register rtx reg = SET_DEST (x);
3863 /* Some targets place small structures in registers for
3864 return values of functions. We have to detect this
3865 case specially here to get correct flow information. */
3866 if (GET_CODE (reg) == PARALLEL
3867 && GET_MODE (reg) == BLKmode)
3869 register int i;
3871 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3872 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3873 significant, flags);
3874 return;
3877 /* Modifying just one hardware register of a multi-reg value
3878 or just a byte field of a register
3879 does not mean the value from before this insn is now dead.
3880 But it does mean liveness of that register at the end of the block
3881 is significant.
3883 Within mark_set_1, however, we treat it as if the register is
3884 indeed modified. mark_used_regs will, however, also treat this
3885 register as being used. Thus, we treat these insns as setting a
3886 new value for the register as a function of its old value. This
3887 causes LOG_LINKS to be made appropriately and this will help combine. */
3889 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3890 || GET_CODE (reg) == SIGN_EXTRACT
3891 || GET_CODE (reg) == STRICT_LOW_PART)
3892 reg = XEXP (reg, 0);
3894 /* If this set is a MEM, then it kills any aliased writes.
3895 If this set is a REG, then it kills any MEMs which use the reg. */
3896 if (flags & PROP_SCAN_DEAD_CODE)
3898 if (GET_CODE (reg) == MEM
3899 || GET_CODE (reg) == REG)
3901 rtx temp = mem_set_list;
3902 rtx prev = NULL_RTX;
3907 next = XEXP (temp, 1);
3908 if ((GET_CODE (reg) == MEM
3909 && output_dependence (XEXP (temp, 0), reg))
3910 || (GET_CODE (reg) == REG
3911 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3913 /* Splice this entry out of the list. */
3914 if (prev)
3915 XEXP (prev, 1) = next;
3916 else
3917 mem_set_list = next;
3918 free_EXPR_LIST_node (temp);
3920 else
3921 prev = temp;
3922 temp = next;
3926 /* If the memory reference had embedded side effects (autoincrement
3927 address modes), then we may need to kill some entries on the
3928 memory set list. */
3929 if (insn && GET_CODE (reg) == MEM)
3930 invalidate_mems_from_autoinc (insn);
3932 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3933 /* We do not know the size of a BLKmode store, so we do not track
3934 them for redundant store elimination. */
3935 && GET_MODE (reg) != BLKmode
3936 /* There are no REG_INC notes for SP, so we can't assume we'll see
3937 everything that invalidates it. To be safe, don't eliminate any
3938 stores through SP; none of them should be redundant anyway. */
3939 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3940 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3943 if (GET_CODE (reg) == REG
3944 && (regno = REGNO (reg),
3945 ! (regno == FRAME_POINTER_REGNUM
3946 && (! reload_completed || frame_pointer_needed)))
3947 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3948 && ! (regno == HARD_FRAME_POINTER_REGNUM
3949 && (! reload_completed || frame_pointer_needed))
3951 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3952 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3954 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3955 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3957 int some_needed = REGNO_REG_SET_P (needed, regno);
3958 int some_not_needed = ! some_needed;
3960 /* Mark it as a significant register for this basic block. */
3961 if (significant)
3962 SET_REGNO_REG_SET (significant, regno);
3964 /* Mark it as dead before this insn. */
3965 SET_REGNO_REG_SET (dead, regno);
3967 /* A hard reg in a wide mode may really be multiple registers.
3968 If so, mark all of them just like the first. */
3969 if (regno < FIRST_PSEUDO_REGISTER)
3973 /* Nothing below is needed for the stack pointer; get out asap.
3974 Eg, log links aren't needed, since combine won't use them. */
3975 if (regno == STACK_POINTER_REGNUM)
3976 return;
3978 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3979 while (--n > 0)
3981 int regno_n = regno + n;
3982 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3983 if (significant)
3984 SET_REGNO_REG_SET (significant, regno_n);
3986 SET_REGNO_REG_SET (dead, regno_n);
3987 some_needed |= needed_regno;
3988 some_not_needed |= ! needed_regno;
3992 /* Additional data to record if this is the final pass. */
3993 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3994 | PROP_DEATH_NOTES | PROP_AUTOINC))
3996 register rtx y = NULL_RTX;
3997 register int blocknum = BLOCK_NUM (insn);
4000 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4001 y = reg_next_use[regno];
4003 /* If this is a hard reg, record this function uses the reg. */
4005 if (regno < FIRST_PSEUDO_REGISTER)
4007 register int i;
4008 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4010 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4011 for (i = regno; i < endregno; i++)
4013 /* The next use is no longer "next", since a store
4014 intervenes. */
4015 reg_next_use[i] = 0;
4018 if (flags & PROP_REG_INFO)
4019 for (i = regno; i < endregno; i++)
4021 regs_ever_live[i] = 1;
4022 REG_N_SETS (i)++;
4026 else
4027 /* The next use is no longer "next", since a store
4028 intervenes. */
4029 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4030 reg_next_use[regno] = 0;
4032 /* Keep track of which basic blocks each reg appears in. */
4034 if (flags & PROP_REG_INFO)
4036 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4037 REG_BASIC_BLOCK (regno) = blocknum;
4038 else if (REG_BASIC_BLOCK (regno) != blocknum)
4039 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4041 /* Count (weighted) references, stores, etc. This counts a
4042 register twice if it is modified, but that is correct. */
4043 REG_N_SETS (regno)++;
4044 REG_N_REFS (regno) += loop_depth + 1;
4046 /* The insns where a reg is live are normally counted
4047 elsewhere, but we want the count to include the insn
4048 where the reg is set, and the normal counting mechanism
4049 would not count it. */
4050 REG_LIVE_LENGTH (regno)++;
4054 if (! some_not_needed)
4056 if (flags & PROP_LOG_LINKS)
4058 /* Make a logical link from the next following insn
4059 that uses this register, back to this insn.
4060 The following insns have already been processed.
4062 We don't build a LOG_LINK for hard registers contained
4063 in ASM_OPERANDs. If these registers get replaced,
4064 we might wind up changing the semantics of the insn,
4065 even if reload can make what appear to be valid
4066 assignments later. */
4067 if (y && (BLOCK_NUM (y) == blocknum)
4068 && (regno >= FIRST_PSEUDO_REGISTER
4069 || asm_noperands (PATTERN (y)) < 0))
4070 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4073 else if (! some_needed)
4075 if (flags & PROP_REG_INFO)
4076 REG_N_DEATHS (REGNO (reg))++;
4078 if (flags & PROP_DEATH_NOTES)
4080 /* Note that dead stores have already been deleted
4081 when possible. If we get here, we have found a
4082 dead store that cannot be eliminated (because the
4083 same insn does something useful). Indicate this
4084 by marking the reg being set as dying here. */
4085 REG_NOTES (insn)
4086 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4091 if (flags & PROP_DEATH_NOTES)
4093 /* This is a case where we have a multi-word hard register
4094 and some, but not all, of the words of the register are
4095 needed in subsequent insns. Write REG_UNUSED notes
4096 for those parts that were not needed. This case should
4097 be rare. */
4101 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4102 i >= 0; i--)
4103 if (!REGNO_REG_SET_P (needed, regno + i))
4104 REG_NOTES (insn)
4105 = (alloc_EXPR_LIST
4106 (REG_UNUSED,
4107 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4108 REG_NOTES (insn)));
4113 else if (GET_CODE (reg) == REG)
4115 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4116 reg_next_use[regno] = 0;
4119 /* If this is the last pass and this is a SCRATCH, show it will be dying
4120 here and count it. */
4121 else if (GET_CODE (reg) == SCRATCH)
4123 if (flags & PROP_DEATH_NOTES)
4124 REG_NOTES (insn)
4125 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4131 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4132 address. */
4133 #ifdef AUTO_INC_DEC
4134 static void
4135 find_auto_inc (needed, x, insn)
4136 regset needed;
4137 rtx x;
4138 rtx insn;
4140 rtx addr = XEXP (x, 0);
4141 HOST_WIDE_INT offset = 0;
4142 rtx set;
4144 /* Here we detect use of an index register which might be good for
4145 postincrement, postdecrement, preincrement, or predecrement. */
4147 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4148 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4150 if (GET_CODE (addr) == REG)
4152 register rtx y;
4153 register int size = GET_MODE_SIZE (GET_MODE (x));
4154 rtx use;
4155 rtx incr;
4156 int regno = REGNO (addr);
4158 /* Is the next use an increment that might make auto-increment? */
4159 if ((incr = reg_next_use[regno]) != 0
4160 && (set = single_set (incr)) != 0
4161 && GET_CODE (set) == SET
4162 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4163 /* Can't add side effects to jumps; if reg is spilled and
4164 reloaded, there's no way to store back the altered value. */
4165 && GET_CODE (insn) != JUMP_INSN
4166 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4167 && XEXP (y, 0) == addr
4168 && GET_CODE (XEXP (y, 1)) == CONST_INT
4169 && ((HAVE_POST_INCREMENT
4170 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4171 || (HAVE_POST_DECREMENT
4172 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4173 || (HAVE_PRE_INCREMENT
4174 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4175 || (HAVE_PRE_DECREMENT
4176 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4177 /* Make sure this reg appears only once in this insn. */
4178 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4179 use != 0 && use != (rtx) 1))
4181 rtx q = SET_DEST (set);
4182 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4183 ? (offset ? PRE_INC : POST_INC)
4184 : (offset ? PRE_DEC : POST_DEC));
4186 if (dead_or_set_p (incr, addr))
4188 /* This is the simple case. Try to make the auto-inc. If
4189 we can't, we are done. Otherwise, we will do any
4190 needed updates below. */
4191 if (! validate_change (insn, &XEXP (x, 0),
4192 gen_rtx_fmt_e (inc_code, Pmode, addr),
4193 0))
4194 return;
4196 else if (GET_CODE (q) == REG
4197 /* PREV_INSN used here to check the semi-open interval
4198 [INSN, INCR). */
4199 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4200 /* We must also check for sets of q as q may be
4201 a call clobbered hard register and there may
4202 be a call between PREV_INSN (insn) and incr. */
4203 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4205 /* We have *p followed sometime later by q = p+size.
4206 Both p and q must be live afterward,
4207 and q is not used between INSN and its assignment.
4208 Change it to q = p, ...*q..., q = q+size.
4209 Then fall into the usual case. */
4210 rtx insns, temp;
4211 basic_block bb;
4213 start_sequence ();
4214 emit_move_insn (q, addr);
4215 insns = get_insns ();
4216 end_sequence ();
4218 bb = BLOCK_FOR_INSN (insn);
4219 for (temp = insns; temp; temp = NEXT_INSN (temp))
4220 set_block_for_insn (temp, bb);
4222 /* If we can't make the auto-inc, or can't make the
4223 replacement into Y, exit. There's no point in making
4224 the change below if we can't do the auto-inc and doing
4225 so is not correct in the pre-inc case. */
4227 validate_change (insn, &XEXP (x, 0),
4228 gen_rtx_fmt_e (inc_code, Pmode, q),
4229 1);
4230 validate_change (incr, &XEXP (y, 0), q, 1);
4231 if (! apply_change_group ())
4232 return;
4234 /* We now know we'll be doing this change, so emit the
4235 new insn(s) and do the updates. */
4236 emit_insns_before (insns, insn);
4238 if (BLOCK_FOR_INSN (insn)->head == insn)
4239 BLOCK_FOR_INSN (insn)->head = insns;
4241 /* INCR will become a NOTE and INSN won't contain a
4242 use of ADDR. If a use of ADDR was just placed in
4243 the insn before INSN, make that the next use.
4244 Otherwise, invalidate it. */
4245 if (GET_CODE (PREV_INSN (insn)) == INSN
4246 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4247 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4248 reg_next_use[regno] = PREV_INSN (insn);
4250 reg_next_use[regno] = 0;
4255 /* REGNO is now used in INCR which is below INSN, but
4256 it previously wasn't live here. If we don't mark
4257 it as needed, we'll put a REG_DEAD note for it
4258 on this insn, which is incorrect. */
4259 SET_REGNO_REG_SET (needed, regno);
4261 /* If there are any calls between INSN and INCR, show
4262 that REGNO now crosses them. */
4263 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4264 if (GET_CODE (temp) == CALL_INSN)
4265 REG_N_CALLS_CROSSED (regno)++;
4270 /* If we haven't returned, it means we were able to make the
4271 auto-inc, so update the status. First, record that this insn
4272 has an implicit side effect. */
4274 REG_NOTES (insn)
4275 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4277 /* Modify the old increment-insn to simply copy
4278 the already-incremented value of our register. */
4279 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4280 abort ();
4282 /* If that makes it a no-op (copying the register into itself) delete
4283 it so it won't appear to be a "use" and a "set" of this
4284 register. */
4285 if (SET_DEST (set) == addr)
4287 PUT_CODE (incr, NOTE);
4288 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4289 NOTE_SOURCE_FILE (incr) = 0;
4292 if (regno >= FIRST_PSEUDO_REGISTER)
4294 /* Count an extra reference to the reg. When a reg is
4295 incremented, spilling it is worse, so we want to make
4296 that less likely. */
4297 REG_N_REFS (regno) += loop_depth + 1;
4299 /* Count the increment as a setting of the register,
4300 even though it isn't a SET in rtl. */
4301 REG_N_SETS (regno)++;
4306 #endif /* AUTO_INC_DEC */
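/* An illustrative before/after for the transformation above on a target
   with HAVE_POST_INCREMENT (a sketch; exact rtl is target-dependent):
     (set (mem:SI (reg:SI 100)) (reg:SI 101))
     (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))
   becomes
     (set (mem:SI (post_inc:SI (reg:SI 100))) (reg:SI 101))
   with the increment insn turned into a NOTE_INSN_DELETED note and a
   REG_INC note for (reg:SI 100) added to the memory insn. */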
4308 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4309 This is done assuming the registers needed from X
4310 are those that have 1-bits in NEEDED.
4312 FLAGS is the set of enabled operations.
4314 INSN is the containing instruction. If INSN is dead, this function is not
4315 called. */
4317 static void
4318 mark_used_regs (needed, live, x, flags, insn)
4319 regset needed;
4320 regset live;
4321 rtx x;
4322 int flags;
4323 rtx insn;
4325 register RTX_CODE code;
4326 register int regno;
4328 retry:
4330 code = GET_CODE (x);
4331 switch (code)
4348 case CLOBBER:
4350 /* If we are clobbering a MEM, mark any registers inside the address
4351 as being used. */
4352 if (GET_CODE (XEXP (x, 0)) == MEM)
4353 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4355 case MEM:
4357 /* Don't bother watching stores to mems if this is not the
4358 final pass. We'll not be deleting dead stores this round. */
4359 if (flags & PROP_SCAN_DEAD_CODE)
4361 /* Invalidate the data for the last MEM stored, but only if MEM is
4362 something that can be stored into. */
4363 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4364 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4365 ; /* needn't clear the memory set list */
4368 rtx temp = mem_set_list;
4369 rtx prev = NULL_RTX;
4374 next = XEXP (temp, 1);
4375 if (anti_dependence (XEXP (temp, 0), x))
4377 /* Splice temp out of the list. */
4378 if (prev)
4379 XEXP (prev, 1) = next;
4380 else
4381 mem_set_list = next;
4382 free_EXPR_LIST_node (temp);
4384 else
4385 prev = temp;
4386 temp = next;
4390 /* If the memory reference had embedded side effects (autoincrement
4391 address modes), then we may need to kill some entries on the
4392 memory set list. */
4394 invalidate_mems_from_autoinc (insn);
4397 #ifdef AUTO_INC_DEC
4398 if (flags & PROP_AUTOINC)
4399 find_auto_inc (needed, x, insn);
4400 #endif
4401 break;
4403 case SUBREG:
4404 if (GET_CODE (SUBREG_REG (x)) == REG
4405 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4406 && (GET_MODE_SIZE (GET_MODE (x))
4407 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4408 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4410 /* While we're here, optimize this case. */
4411 x = SUBREG_REG (x);
4413 /* In case the SUBREG is not of a register, don't optimize */
4414 if (GET_CODE (x) != REG)
4416 mark_used_regs (needed, live, x, flags, insn);
4417 return;
4420 /* ... fall through ... */
4422 case REG:
4423 /* A register referenced other than being set
4424 => mark it as needed. */
4426 regno = REGNO (x);
4428 int some_needed = REGNO_REG_SET_P (needed, regno);
4429 int some_not_needed = ! some_needed;
4431 SET_REGNO_REG_SET (live, regno);
4433 /* A hard reg in a wide mode may really be multiple registers.
4434 If so, mark all of them just like the first. */
4435 if (regno < FIRST_PSEUDO_REGISTER)
4437 int n;
4439 /* For stack ptr or fixed arg pointer,
4440 nothing below can be necessary, so waste no more time. */
4441 if (regno == STACK_POINTER_REGNUM
4442 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4443 || (regno == HARD_FRAME_POINTER_REGNUM
4444 && (! reload_completed || frame_pointer_needed))
4446 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4447 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4449 || (regno == FRAME_POINTER_REGNUM
4450 && (! reload_completed || frame_pointer_needed)))
4452 /* If this is a register we are going to try to eliminate,
4453 don't mark it live here. If we are successful in
4454 eliminating it, it need not be live unless it is used for
4455 pseudos, in which case it will have been set live when
4456 it was allocated to the pseudos. If the register will not
4457 be eliminated, reload will set it live at that point. */
4459 if ((flags & PROP_REG_INFO)
4460 && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4461 regs_ever_live[regno] = 1;
4464 /* No death notes for global register variables;
4465 their values are live after this function exits. */
4466 if (global_regs[regno])
4468 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4469 reg_next_use[regno] = insn;
4473 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4476 int regno_n = regno + n;
4477 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4479 SET_REGNO_REG_SET (live, regno_n);
4480 some_needed |= needed_regno;
4481 some_not_needed |= ! needed_regno;
4485 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4487 /* Record where each reg is used, so when the reg
4488 is set we know the next insn that uses it. */
4490 reg_next_use[regno] = insn;
4492 if (flags & PROP_REG_INFO)
4494 if (regno < FIRST_PSEUDO_REGISTER)
4496 /* If a hard reg is being used,
4497 record that this function does use it. */
4499 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4503 regs_ever_live[regno + --i] = 1;
4508 /* Keep track of which basic block each reg appears in. */
4510 register int blocknum = BLOCK_NUM (insn);
4512 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4513 REG_BASIC_BLOCK (regno) = blocknum;
4514 else if (REG_BASIC_BLOCK (regno) != blocknum)
4515 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4517 /* Count (weighted) number of uses of each reg. */
4519 REG_N_REFS (regno) += loop_depth + 1;
4523 /* Record and count the insns in which a reg dies.
4524 If it is used in this insn and was dead below the insn
4525 then it dies in this insn. If it was set in this insn,
4526 we do not make a REG_DEAD note; likewise if we already
4527 made such a note. */
4529 if (flags & PROP_DEATH_NOTES)
4532 && ! dead_or_set_p (insn, x)
4534 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4538 /* Check for the case where the register dying partially
4539 overlaps the register set by this insn. */
4540 if (regno < FIRST_PSEUDO_REGISTER
4541 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4543 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4545 some_needed |= dead_or_set_regno_p (insn, regno + n);
4548 /* If none of the words in X is needed, make a REG_DEAD
4549 note. Otherwise, we must make partial REG_DEAD notes. */
4553 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4554 REG_N_DEATHS (regno)++;
4560 /* Don't make a REG_DEAD note for a part of a register
4561 that is set in the insn. */
4563 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4565 if (!REGNO_REG_SET_P (needed, regno + i)
4566 && ! dead_or_set_regno_p (insn, regno + i))
4569 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4580 register rtx testreg = SET_DEST (x);
4583 /* If storing into MEM, don't show it as being used. But do
4584 show the address as being used. */
4585 if (GET_CODE (testreg) == MEM)
4588 if (flags & PROP_AUTOINC)
4589 find_auto_inc (needed, testreg, insn);
4591 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4592 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4596 /* Storing in STRICT_LOW_PART is like storing in a reg
4597 in that this SET might be dead, so ignore it in TESTREG,
4598 but in some other ways it is like using the reg.
4600 Storing in a SUBREG or a bit field is like storing the entire
4601 register in that if the register's value is not used
4602 then this SET is not needed. */
4603 while (GET_CODE (testreg) == STRICT_LOW_PART
4604 || GET_CODE (testreg) == ZERO_EXTRACT
4605 || GET_CODE (testreg) == SIGN_EXTRACT
4606 || GET_CODE (testreg) == SUBREG)
4608 if (GET_CODE (testreg) == SUBREG
4609 && GET_CODE (SUBREG_REG (testreg)) == REG
4610 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4611 && (GET_MODE_SIZE (GET_MODE (testreg))
4612 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4613 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4615 /* Modifying a single register in an alternate mode
4616 does not use any of the old value. But these other
4617 ways of storing in a register do use the old value. */
4618 if (GET_CODE (testreg) == SUBREG
4619 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4624 testreg = XEXP (testreg, 0);
4627 /* If this is a store into a register,
4628 recursively scan the value being stored. */
4630 if ((GET_CODE (testreg) == PARALLEL
4631 && GET_MODE (testreg) == BLKmode)
4632 || (GET_CODE (testreg) == REG
4633 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4634 && (! reload_completed || frame_pointer_needed)))
4635 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4636 && ! (regno == HARD_FRAME_POINTER_REGNUM
4637 && (! reload_completed || frame_pointer_needed))
4639 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4640 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4643 /* We used to exclude global_regs here, but that seems wrong.
4644 Storing in them is like storing in mem. */
4646 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4648 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4655 case UNSPEC_VOLATILE:
4659 /* Traditional and volatile asm instructions must be considered to use
4660 and clobber all hard registers, all pseudo-registers and all of
4661 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4663 Consider for instance a volatile asm that changes the fpu rounding
4664 mode. An insn should not be moved across this even if it only uses
4665 pseudo-regs because it might give an incorrectly rounded result.
4667 ?!? Unfortunately, marking all hard registers as live causes massive
4668 problems for the register allocator and marking all pseudos as live
4669 creates mountains of uninitialized variable warnings.
4671 So for now, just clear the memory set list and mark any regs
4672 we can find in ASM_OPERANDS as used. */
4673 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4674 free_EXPR_LIST_list (&mem_set_list);
4676 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4677 We cannot just fall through here, since then we would be confused
4678 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
4679 a traditional asm, unlike its normal usage. */
4680 if (code == ASM_OPERANDS)
4684 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4685 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4696 /* Recursively scan the operands of this expression. */
4699 register const char *fmt = GET_RTX_FORMAT (code);
4702 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4706 /* Tail recursive case: save a function call level. */
4712 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4714 else if (fmt[i] == 'E')
4717 for (j = 0; j < XVECLEN (x, i); j++)
4718 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
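#if 0
/* A minimal usage sketch (hypothetical; the actual caller is the life
   analysis walk over each insn): mark every register used by INSN's
   pattern as live, given the set NEEDED of registers live below it.  */
mark_used_regs (needed, live, PATTERN (insn), flags, insn);
#endif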
4727 try_pre_increment_1 (insn)
4730 /* Find the next use of this reg. If in same basic block,
4731 make it do pre-increment or pre-decrement if appropriate. */
4732 rtx x = single_set (insn);
4733 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4734 * INTVAL (XEXP (SET_SRC (x), 1)));
4735 int regno = REGNO (SET_DEST (x));
4736 rtx y = reg_next_use[regno];
4738 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4739 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4740 mode would be better. */
4741 && ! dead_or_set_p (y, SET_DEST (x))
4742 && try_pre_increment (y, SET_DEST (x), amount))
4744 /* We have found a suitable auto-increment
4745 and already changed insn Y to do it.
4746 So flush this increment-instruction. */
4747 PUT_CODE (insn, NOTE);
4748 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4749 NOTE_SOURCE_FILE (insn) = 0;
4750 /* Count a reference to this reg for the increment
4751 insn we are deleting. When a reg is incremented,
4752 spilling it is worse, so we want to make that less likely. */
4754 if (regno >= FIRST_PSEUDO_REGISTER)
4756 REG_N_REFS (regno) += loop_depth + 1;
4757 REG_N_SETS (regno)++;
4764 /* Try to change INSN so that it does pre-increment or pre-decrement
4765 addressing on register REG in order to add AMOUNT to REG.
4766 AMOUNT is negative for pre-decrement.
4767 Returns 1 if the change could be made.
4768 This checks everything about the validity of the result of modifying INSN. */
4771 try_pre_increment (insn, reg, amount)
4773 HOST_WIDE_INT amount;
4777 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4778 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4780 /* Nonzero if we can try to make a post-increment or post-decrement.
4781 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4782 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4783 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4786 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4789 /* From the sign of increment, see which possibilities are conceivable
4790 on this target machine. */
4791 if (HAVE_PRE_INCREMENT && amount > 0)
4793 if (HAVE_POST_INCREMENT && amount > 0)
4796 if (HAVE_PRE_DECREMENT && amount < 0)
4798 if (HAVE_POST_DECREMENT && amount < 0)
4801 if (! (pre_ok || post_ok))
4804 /* It is not safe to add a side effect to a jump insn
4805 because if the incremented register is spilled and must be reloaded
4806 there would be no way to store the incremented value back in memory. */
4808 if (GET_CODE (insn) == JUMP_INSN)
4813 use = find_use_as_address (PATTERN (insn), reg, 0);
4814 if (post_ok && (use == 0 || use == (rtx) 1))
4816 use = find_use_as_address (PATTERN (insn), reg, -amount);
4820 if (use == 0 || use == (rtx) 1)
4823 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4826 /* See if this combination of instruction and addressing mode exists. */
4827 if (! validate_change (insn, &XEXP (use, 0),
4828 gen_rtx_fmt_e (amount > 0
4829 ? (do_post ? POST_INC : PRE_INC)
4830 : (do_post ? POST_DEC : PRE_DEC),
4834 /* Record that this insn now has an implicit side effect on X. */
4835 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4839 #endif /* AUTO_INC_DEC */
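/* A worked illustration of the two forms, following the examples in the
   comments above (hedged; the exact patterns are target-dependent).
   After the separate add insn is deleted, a use of the incremented
   address

       (mem (reg 1))                        becomes (mem (pre_inc (reg 1)))

   while a use of the pre-increment address

       (mem (plus (reg 1) (const_int -4)))  becomes (mem (post_inc (reg 1)))

   and in both cases a REG_INC note records the implicit side effect.  */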
4841 /* Find the place in the rtx X where REG is used as a memory address.
4842 Return the MEM rtx that so uses it.
4843 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4844 (plus REG (const_int PLUSCONST)).
4846 If such an address does not appear, return 0.
4847 If REG appears more than once, or is used other than in such an address, don't return it; return (rtx) 1 as a signal. */
4851 find_use_as_address (x, reg, plusconst)
4854 HOST_WIDE_INT plusconst;
4856 enum rtx_code code = GET_CODE (x);
4857 const char *fmt = GET_RTX_FORMAT (code);
4859 register rtx value = 0;
4862 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4865 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4866 && XEXP (XEXP (x, 0), 0) == reg
4867 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4868 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4871 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4873 /* If REG occurs inside a MEM used in a bit-field reference,
4874 that is unacceptable. */
4875 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4876 return (rtx) (HOST_WIDE_INT) 1;
4880 return (rtx) (HOST_WIDE_INT) 1;
4882 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4886 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4890 return (rtx) (HOST_WIDE_INT) 1;
4892 else if (fmt[i] == 'E')
4895 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4897 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4901 return (rtx) (HOST_WIDE_INT) 1;
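/* A worked example (hedged, with made-up register numbers): for

       (set (reg 2) (mem (plus (reg 1) (const_int 4))))

   find_use_as_address (PATTERN (insn), reg1, 4) returns the MEM rtx,
   while a second appearance of (reg 1) anywhere in the pattern would
   make the result (rtx) 1 instead.  */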
4909 /* Write information about registers and basic blocks into FILE.
4910 This is part of making a debugging dump. */
4913 dump_regset (r, outf)
4920 fputs (" (nil)", outf);
4924 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4926 fprintf (outf, " %d", i);
4927 if (i < FIRST_PSEUDO_REGISTER)
4928 fprintf (outf, " [%s]",
4937 dump_regset (r, stderr);
4938 putc ('\n', stderr);
4942 dump_flow_info (file)
4946 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4948 fprintf (file, "%d registers.\n", max_regno);
4949 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4952 enum reg_class class, altclass;
4953 fprintf (file, "\nRegister %d used %d times across %d insns",
4954 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4955 if (REG_BASIC_BLOCK (i) >= 0)
4956 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4958 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4959 (REG_N_SETS (i) == 1) ? "" : "s");
4960 if (REG_USERVAR_P (regno_reg_rtx[i]))
4961 fprintf (file, "; user var");
4962 if (REG_N_DEATHS (i) != 1)
4963 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4964 if (REG_N_CALLS_CROSSED (i) == 1)
4965 fprintf (file, "; crosses 1 call");
4966 else if (REG_N_CALLS_CROSSED (i))
4967 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4968 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4969 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4970 class = reg_preferred_class (i);
4971 altclass = reg_alternate_class (i);
4972 if (class != GENERAL_REGS || altclass != ALL_REGS)
4974 if (altclass == ALL_REGS || class == ALL_REGS)
4975 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4976 else if (altclass == NO_REGS)
4977 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4979 fprintf (file, "; pref %s, else %s",
4980 reg_class_names[(int) class],
4981 reg_class_names[(int) altclass]);
4983 if (REGNO_POINTER_FLAG (i))
4984 fprintf (file, "; pointer");
4985 fprintf (file, ".\n");
4988 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4989 for (i = 0; i < n_basic_blocks; i++)
4991 register basic_block bb = BASIC_BLOCK (i);
4994 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
4995 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
4997 fprintf (file, "Predecessors: ");
4998 for (e = bb->pred; e ; e = e->pred_next)
4999 dump_edge_info (file, e, 0);
5001 fprintf (file, "\nSuccessors: ");
5002 for (e = bb->succ; e ; e = e->succ_next)
5003 dump_edge_info (file, e, 1);
5005 fprintf (file, "\nRegisters live at start:");
5006 dump_regset (bb->global_live_at_start, file);
5008 fprintf (file, "\nRegisters live at end:");
5009 dump_regset (bb->global_live_at_end, file);
5020 dump_flow_info (stderr);
5024 dump_edge_info (file, e, do_succ)
5029 basic_block side = (do_succ ? e->dest : e->src);
5031 if (side == ENTRY_BLOCK_PTR)
5032 fputs (" ENTRY", file);
5033 else if (side == EXIT_BLOCK_PTR)
5034 fputs (" EXIT", file);
5036 fprintf (file, " %d", side->index);
5040 static const char * const bitnames[] = {
5041 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5044 int i, flags = e->flags;
5048 for (i = 0; flags; i++)
5049 if (flags & (1 << i))
5055 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5056 fputs (bitnames[i], file);
5058 fprintf (file, "%d", i);
5066 /* Print out one basic block with live information at start and end. */
5076 fprintf (outf, ";; Basic block %d, loop depth %d",
5077 bb->index, bb->loop_depth - 1);
5078 if (bb->eh_beg != -1 || bb->eh_end != -1)
5079 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5082 fputs (";; Predecessors: ", outf);
5083 for (e = bb->pred; e ; e = e->pred_next)
5084 dump_edge_info (outf, e, 0);
5087 fputs (";; Registers live at start:", outf);
5088 dump_regset (bb->global_live_at_start, outf);
5091 for (insn = bb->head, last = NEXT_INSN (bb->end);
5093 insn = NEXT_INSN (insn))
5094 print_rtl_single (outf, insn);
5096 fputs (";; Registers live at end:", outf);
5097 dump_regset (bb->global_live_at_end, outf);
5100 fputs (";; Successors: ", outf);
5101 for (e = bb->succ; e; e = e->succ_next)
5102 dump_edge_info (outf, e, 1);
5110 dump_bb (bb, stderr);
5117 dump_bb (BASIC_BLOCK(n), stderr);
5120 /* Like print_rtl, but also print out live information for the start of each basic block. */
5124 print_rtl_with_bb (outf, rtx_first)
5128 register rtx tmp_rtx;
5131 fprintf (outf, "(nil)\n");
5135 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5136 int max_uid = get_max_uid ();
5137 basic_block *start = (basic_block *)
5138 xcalloc (max_uid, sizeof (basic_block));
5139 basic_block *end = (basic_block *)
5140 xcalloc (max_uid, sizeof (basic_block));
5141 enum bb_state *in_bb_p = (enum bb_state *)
5142 xcalloc (max_uid, sizeof (enum bb_state));
5144 for (i = n_basic_blocks - 1; i >= 0; i--)
5146 basic_block bb = BASIC_BLOCK (i);
5149 start[INSN_UID (bb->head)] = bb;
5150 end[INSN_UID (bb->end)] = bb;
5151 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5153 enum bb_state state = IN_MULTIPLE_BB;
5154 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5156 in_bb_p[INSN_UID(x)] = state;
5163 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5168 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5170 fprintf (outf, ";; Start of basic block %d, registers live:",
5172 dump_regset (bb->global_live_at_start, outf);
5176 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5177 && GET_CODE (tmp_rtx) != NOTE
5178 && GET_CODE (tmp_rtx) != BARRIER)
5179 fprintf (outf, ";; Insn is not within a basic block\n");
5180 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5181 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5183 did_output = print_rtl_single (outf, tmp_rtx);
5185 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5186 fprintf (outf, ";; End of basic block %d\n", bb->index);
5197 if (current_function_epilogue_delay_list != 0)
5199 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5200 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5201 tmp_rtx = XEXP (tmp_rtx, 1))
5202 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5206 /* Compute dominator relationships using new flow graph structures. */
5208 compute_flow_dominators (dominators, post_dominators)
5209 sbitmap *dominators;
5210 sbitmap *post_dominators;
5213 sbitmap *temp_bitmap;
5215 basic_block *worklist, *tos;
5217 /* Allocate a worklist array/queue. Entries are only added to the
5218 list if they were not already on the list. So the size is
5219 bounded by the number of basic blocks. */
5220 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5223 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5224 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5228 /* The optimistic setting of dominators requires us to put every
5229 block on the work list initially. */
5230 for (bb = 0; bb < n_basic_blocks; bb++)
5232 *tos++ = BASIC_BLOCK (bb);
5233 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5236 /* We want a maximal solution, so initially assume everything dominates everything else. */
5238 sbitmap_vector_ones (dominators, n_basic_blocks);
5240 /* Mark successors of the entry block so we can identify them below. */
5241 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5242 e->dest->aux = ENTRY_BLOCK_PTR;
5244 /* Iterate until the worklist is empty. */
5245 while (tos != worklist)
5247 /* Take the first entry off the worklist. */
5248 basic_block b = *--tos;
5251 /* Compute the intersection of the dominators of all the predecessor blocks.
5254 If one of the predecessor blocks is the ENTRY block, then the
5255 intersection of the dominators of the predecessor blocks is
5256 defined as the null set. We can identify such blocks by the
5257 special value in the AUX field in the block structure. */
5258 if (b->aux == ENTRY_BLOCK_PTR)
5260 /* Do not clear the aux field for blocks which are
5261 successors of the ENTRY block. That way we never
5262 add them to the worklist again.
5264 The intersection of the dominators of the preds of this block is
5265 defined as the null set. */
5266 sbitmap_zero (temp_bitmap[bb]);
5270 /* Clear the aux field of this block so it can be added to
5271 the worklist again if necessary. */
5273 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5276 /* Make sure each block always dominates itself. */
5277 SET_BIT (temp_bitmap[bb], bb);
5279 /* If the out state of this block changed, then we need to
5280 add the successors of this block to the worklist if they
5281 are not already on the worklist. */
5282 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5284 for (e = b->succ; e; e = e->succ_next)
5286 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5296 if (post_dominators)
5298 /* The optimistic setting of dominators requires us to put every
5299 block on the work list initially. */
5300 for (bb = 0; bb < n_basic_blocks; bb++)
5302 *tos++ = BASIC_BLOCK (bb);
5303 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5306 /* We want a maximal solution, so initially assume everything post
5307 dominates everything else. */
5308 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5310 /* Mark predecessors of the exit block so we can identify them below. */
5311 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5312 e->src->aux = EXIT_BLOCK_PTR;
5314 /* Iterate until the worklist is empty. */
5315 while (tos != worklist)
5317 /* Take the first entry off the worklist. */
5318 basic_block b = *--tos;
5321 /* Compute the intersection of the post dominators of all the successor blocks.
5324 If one of the successor blocks is the EXIT block, then the
5325 intersection of the dominators of the successor blocks is
5326 defined as the null set. We can identify such blocks by the
5327 special value in the AUX field in the block structure. */
5328 if (b->aux == EXIT_BLOCK_PTR)
5330 /* Do not clear the aux field for blocks which are
5331 predecessors of the EXIT block. That way we never
5332 add them to the worklist again.
5334 The intersection of the dominators of the succs of this block is
5335 defined as the null set. */
5336 sbitmap_zero (temp_bitmap[bb]);
5340 /* Clear the aux field of this block so it can be added to
5341 the worklist again if necessary. */
5343 sbitmap_intersection_of_succs (temp_bitmap[bb],
5344 post_dominators, bb);
5347 /* Make sure each block always post dominates itself. */
5348 SET_BIT (temp_bitmap[bb], bb);
5350 /* If the out state of this block changed, then we need to
5351 add the predecessors of this block to the worklist if they
5352 are not already on the worklist. */
5353 if (sbitmap_a_and_b (post_dominators[bb],
5354 post_dominators[bb],
5357 for (e = b->pred; e; e = e->pred_next)
5359 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
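#if 0
/* Minimal usage sketch, mirroring the call made in flow_loops_find
   below; the second argument may be NULL to skip post-dominators.  */
sbitmap *dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
compute_flow_dominators (dom, NULL);
/* ... consume the dom[bb] bitmaps ... */
sbitmap_vector_free (dom);
#endif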
5371 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5374 compute_immediate_dominators (idom, dominators)
5376 sbitmap *dominators;
5381 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5383 /* Begin with tmp(n) = dom(n) - { n }. */
5384 for (b = n_basic_blocks; --b >= 0; )
5386 sbitmap_copy (tmp[b], dominators[b]);
5387 RESET_BIT (tmp[b], b);
5390 /* Subtract out all of our dominators' dominators. */
5391 for (b = n_basic_blocks; --b >= 0; )
5393 sbitmap tmp_b = tmp[b];
5396 for (s = n_basic_blocks; --s >= 0; )
5397 if (TEST_BIT (tmp_b, s))
5398 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5401 /* Find the one bit set in the bitmap and put it in the output array. */
5402 for (b = n_basic_blocks; --b >= 0; )
5405 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5408 sbitmap_vector_free (tmp);
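/* A worked example (hedged): for the diamond CFG 0->1, 0->2, 1->3, 2->3
   the dominator sets are dom(0)={0}, dom(1)={0,1}, dom(2)={0,2} and
   dom(3)={0,3}.  Removing each block from its own set and subtracting
   the dominators' dominators leaves a single bit per block, giving
   idom[1] = idom[2] = idom[3] = 0.  */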
5411 /* Count for a single SET rtx, X. */
5414 count_reg_sets_1 (x)
5418 register rtx reg = SET_DEST (x);
5420 /* Find the register that's set/clobbered. */
5421 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5422 || GET_CODE (reg) == SIGN_EXTRACT
5423 || GET_CODE (reg) == STRICT_LOW_PART)
5424 reg = XEXP (reg, 0);
5426 if (GET_CODE (reg) == PARALLEL
5427 && GET_MODE (reg) == BLKmode)
5430 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5431 count_reg_sets_1 (XVECEXP (reg, 0, i));
5435 if (GET_CODE (reg) == REG)
5437 regno = REGNO (reg);
5438 if (regno >= FIRST_PSEUDO_REGISTER)
5440 /* Count (weighted) references, stores, etc. This counts a
5441 register twice if it is modified, but that is correct. */
5442 REG_N_SETS (regno)++;
5443 REG_N_REFS (regno) += loop_depth + 1;
5448 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5449 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5455 register RTX_CODE code = GET_CODE (x);
5457 if (code == SET || code == CLOBBER)
5458 count_reg_sets_1 (x);
5459 else if (code == PARALLEL)
5462 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5464 code = GET_CODE (XVECEXP (x, 0, i));
5465 if (code == SET || code == CLOBBER)
5466 count_reg_sets_1 (XVECEXP (x, 0, i));
5471 /* Increment REG_N_REFS by the current loop depth for each register reference found in X. */
5475 count_reg_references (x)
5478 register RTX_CODE code;
5481 code = GET_CODE (x);
5501 /* If we are clobbering a MEM, mark any registers inside the address
5503 if (GET_CODE (XEXP (x, 0)) == MEM)
5504 count_reg_references (XEXP (XEXP (x, 0), 0));
5508 /* While we're here, optimize this case. */
5511 /* If the SUBREG is not of a register, don't optimize. */
5512 if (GET_CODE (x) != REG)
5514 count_reg_references (x);
5518 /* ... fall through ... */
5521 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5522 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5527 register rtx testreg = SET_DEST (x);
5530 /* If storing into MEM, don't show it as being used. But do
5531 show the address as being used. */
5532 if (GET_CODE (testreg) == MEM)
5534 count_reg_references (XEXP (testreg, 0));
5535 count_reg_references (SET_SRC (x));
5539 /* Storing in STRICT_LOW_PART is like storing in a reg
5540 in that this SET might be dead, so ignore it in TESTREG,
5541 but in some other ways it is like using the reg.
5543 Storing in a SUBREG or a bit field is like storing the entire
5544 register in that if the register's value is not used
5545 then this SET is not needed. */
5546 while (GET_CODE (testreg) == STRICT_LOW_PART
5547 || GET_CODE (testreg) == ZERO_EXTRACT
5548 || GET_CODE (testreg) == SIGN_EXTRACT
5549 || GET_CODE (testreg) == SUBREG)
5551 /* Modifying a single register in an alternate mode
5552 does not use any of the old value. But these other
5553 ways of storing in a register do use the old value. */
5554 if (GET_CODE (testreg) == SUBREG
5555 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5560 testreg = XEXP (testreg, 0);
5563 /* If this is a store into a register,
5564 recursively scan the value being stored. */
5566 if ((GET_CODE (testreg) == PARALLEL
5567 && GET_MODE (testreg) == BLKmode)
5568 || GET_CODE (testreg) == REG)
5570 count_reg_references (SET_SRC (x));
5572 count_reg_references (SET_DEST (x));
5582 /* Recursively scan the operands of this expression. */
5585 register const char *fmt = GET_RTX_FORMAT (code);
5588 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5592 /* Tail recursive case: save a function call level. */
5598 count_reg_references (XEXP (x, i));
5600 else if (fmt[i] == 'E')
5603 for (j = 0; j < XVECLEN (x, i); j++)
5604 count_reg_references (XVECEXP (x, i, j));
5610 /* Recompute register set/reference counts immediately prior to register allocation.
5613 This avoids problems with set/reference counts changing to/from values
5614 which have special meanings to the register allocators.
5616 Additionally, the reference counts are the primary component used by the
5617 register allocators to prioritize pseudos for allocation to hard regs.
5618 More accurate reference counts generally lead to better register allocation.
5620 F is the first insn to be scanned.
5622 LOOP_STEP denotes how much loop_depth should be incremented per
5623 loop nesting level in order to increase the ref count more for
5624 references in a loop.
5626 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5627 possibly other information which is used by the register allocators. */
5630 recompute_reg_usage (f, loop_step)
5631 rtx f ATTRIBUTE_UNUSED;
5632 int loop_step ATTRIBUTE_UNUSED;
5638 /* Clear out the old data. */
5639 max_reg = max_reg_num ();
5640 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5646 /* Scan each insn in the chain and count how many times each register is set/used. */
5648 for (index = 0; index < n_basic_blocks; index++)
5650 basic_block bb = BASIC_BLOCK (index);
5651 loop_depth = bb->loop_depth;
5652 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5654 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5658 /* This call will increment REG_N_SETS for each SET or CLOBBER
5659 of a register in INSN. It will also increment REG_N_REFS
5660 by the loop depth for each set of a register in INSN. */
5661 count_reg_sets (PATTERN (insn));
5663 /* count_reg_sets does not detect autoincrement address modes, so
5664 detect them here by looking at the notes attached to INSN. */
5665 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5667 if (REG_NOTE_KIND (links) == REG_INC)
5668 /* Count (weighted) references, stores, etc. This counts a
5669 register twice if it is modified, but that is correct. */
5670 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5673 /* This call will increment REG_N_REFS by the current loop depth for
5674 each reference to a register in INSN. */
5675 count_reg_references (PATTERN (insn));
5677 /* count_reg_references will not include counts for arguments to
5678 function calls, so detect them here by examining the
5679 CALL_INSN_FUNCTION_USAGE data. */
5680 if (GET_CODE (insn) == CALL_INSN)
5684 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5686 note = XEXP (note, 1))
5687 if (GET_CODE (XEXP (note, 0)) == USE)
5688 count_reg_references (XEXP (XEXP (note, 0), 0));
5691 if (insn == bb->end)
5697 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5698 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5699 of the number of registers that died. */
5702 count_or_remove_death_notes (blocks, kill)
5708 for (i = n_basic_blocks - 1; i >= 0; --i)
5713 if (blocks && ! TEST_BIT (blocks, i))
5716 bb = BASIC_BLOCK (i);
5718 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5720 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5722 rtx *pprev = &REG_NOTES (insn);
5727 switch (REG_NOTE_KIND (link))
5730 if (GET_CODE (XEXP (link, 0)) == REG)
5732 rtx reg = XEXP (link, 0);
5735 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5738 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5746 rtx next = XEXP (link, 1);
5747 free_EXPR_LIST_node (link);
5748 *pprev = link = next;
5754 pprev = &XEXP (link, 1);
5761 if (insn == bb->end)
5769 /* Record INSN's block as BB. */
5772 set_block_for_insn (insn, bb)
5776 size_t uid = INSN_UID (insn);
5777 if (uid >= basic_block_for_insn->num_elements)
5781 /* Add one-eighth the size so we don't keep calling xrealloc. */
5782 new_size = uid + (uid + 7) / 8;
5784 VARRAY_GROW (basic_block_for_insn, new_size);
5786 VARRAY_BB (basic_block_for_insn, uid) = bb;
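/* For instance (a hedged arithmetic sketch): an insn with uid 100
   arriving when the array is smaller grows it to
   100 + (100 + 7) / 8 = 113 elements, leaving roughly one-eighth of
   slack so the next few insns need no further reallocation.  */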
5789 /* Record INSN's block number as BB. */
5790 /* ??? This has got to go. */
5793 set_block_num (insn, bb)
5797 set_block_for_insn (insn, BASIC_BLOCK (bb));
5800 /* Verify the CFG consistency. This function checks some CFG invariants and
5801 aborts when something is wrong. The hope is that it will help to
5802 convert many optimization passes to preserve a consistent CFG.
5804 Currently it does the following checks:
5806 - test head/end pointers
5807 - overlapping of basic blocks
5808 - edge list correctness
5809 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5810 - tails of basic blocks (ensure that boundary is necessary)
5811 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5812 and NOTE_INSN_BASIC_BLOCK
5813 - check that all insns are in the basic blocks
5814 (except the switch handling code, barriers and notes)
5815 - check that all returns are followed by barriers
5817 In the future it can be extended to check a lot of other stuff as well
5818 (reachability of basic blocks, life information, etc. etc.). */
5823 const int max_uid = get_max_uid ();
5824 const rtx rtx_first = get_insns ();
5825 basic_block *bb_info;
5829 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5831 /* First pass check head/end pointers and set bb_info array used by later passes. */
5833 for (i = n_basic_blocks - 1; i >= 0; i--)
5835 basic_block bb = BASIC_BLOCK (i);
5837 /* Check the head pointer and make sure that it is pointing into the insn list. */
5839 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5844 error ("Head insn %d for block %d not found in the insn stream.",
5845 INSN_UID (bb->head), bb->index);
5849 /* Check the end pointer and make sure that it is pointing into the insn list. */
5851 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5853 if (bb_info[INSN_UID (x)] != NULL)
5855 error ("Insn %d is in multiple basic blocks (%d and %d)",
5856 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5859 bb_info[INSN_UID (x)] = bb;
5866 error ("End insn %d for block %d not found in the insn stream.",
5867 INSN_UID (bb->end), bb->index);
5872 /* Now check the basic blocks (boundaries etc.) */
5873 for (i = n_basic_blocks - 1; i >= 0; i--)
5875 basic_block bb = BASIC_BLOCK (i);
5876 /* Check correctness of edge lists. */
5884 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5886 fprintf (stderr, "Predecessor: ");
5887 dump_edge_info (stderr, e, 0);
5888 fprintf (stderr, "\nSuccessor: ");
5889 dump_edge_info (stderr, e, 1);
5893 if (e->dest != EXIT_BLOCK_PTR)
5895 edge e2 = e->dest->pred;
5896 while (e2 && e2 != e)
5900 error ("Basic block %i edge lists are corrupted", bb->index);
5912 error ("Basic block %d pred edge is corrupted", bb->index);
5913 fputs ("Predecessor: ", stderr);
5914 dump_edge_info (stderr, e, 0);
5915 fputs ("\nSuccessor: ", stderr);
5916 dump_edge_info (stderr, e, 1);
5917 fputc ('\n', stderr);
5920 if (e->src != ENTRY_BLOCK_PTR)
5922 edge e2 = e->src->succ;
5923 while (e2 && e2 != e)
5927 error ("Basic block %i edge lists are corrupted", bb->index);
5934 /* OK, pointers are correct. Now check the header of the basic
5935 block. It ought to contain an optional CODE_LABEL followed
5936 by the NOTE_INSN_BASIC_BLOCK note. */
5938 if (GET_CODE (x) == CODE_LABEL)
5942 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5948 if (GET_CODE (x) != NOTE
5949 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5950 || NOTE_BASIC_BLOCK (x) != bb)
5952 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5959 /* Do checks for empty blocks here. */
5966 if (GET_CODE (x) == NOTE
5967 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5969 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5970 INSN_UID (x), bb->index);
5977 if (GET_CODE (x) == JUMP_INSN
5978 || GET_CODE (x) == CODE_LABEL
5979 || GET_CODE (x) == BARRIER)
5981 error ("In basic block %d:", bb->index);
5982 fatal_insn ("Flow control insn inside a basic block", x);
5993 if (!bb_info[INSN_UID (x)])
5995 switch (GET_CODE (x))
6002 /* An addr_vec is placed outside any basic block. */
6004 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6005 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6006 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6011 /* But in any case, non-deletable labels can appear anywhere. */
6015 fatal_insn ("Insn outside basic block", x);
6019 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6020 && GET_CODE (x) == JUMP_INSN
6021 && returnjump_p (x) && ! condjump_p (x)
6022 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6023 fatal_insn ("Return not followed by barrier", x);
6035 /* Functions to access an edge list with a vector representation.
6036 Enough data is kept such that given an index number, the
6037 pred and succ that edge represents can be determined, or
6038 given a pred and a succ, its index number can be returned.
6039 This allows algorithms which consume a lot of memory to
6040 represent the normally full matrix of edge (pred,succ) with a
6041 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6042 wasted space in the client code due to sparse flow graphs. */
6044 /* This function initializes the edge list. Basically the entire
6045 flowgraph is processed, and all edges are assigned a number,
6046 and the data structure is filled in. */
6050 struct edge_list *elist;
6056 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6060 /* Determine the number of edges in the flow graph by counting successor
6061 edges on each basic block. */
6062 for (x = 0; x < n_basic_blocks; x++)
6064 basic_block bb = BASIC_BLOCK (x);
6066 for (e = bb->succ; e; e = e->succ_next)
6069 /* Don't forget successors of the entry block. */
6070 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6073 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6074 elist->num_blocks = block_count;
6075 elist->num_edges = num_edges;
6076 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6080 /* Follow successors of the entry block, and register these edges. */
6081 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6083 elist->index_to_edge[num_edges] = e;
6087 for (x = 0; x < n_basic_blocks; x++)
6089 basic_block bb = BASIC_BLOCK (x);
6091 /* Follow all successors of blocks, and register these edges. */
6092 for (e = bb->succ; e; e = e->succ_next)
6094 elist->index_to_edge[num_edges] = e;
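#if 0
/* Hypothetical usage sketch, using only the accessors that appear in
   this file: build the list, look up one edge, then release it.  */
struct edge_list *elist = create_edge_list ();
int index = EDGE_INDEX (elist, BASIC_BLOCK (0), BASIC_BLOCK (1));
if (index != EDGE_INDEX_NO_EDGE)
  {
    edge e = elist->index_to_edge[index];
    /* ... use E ... */
  }
free_edge_list (elist);
#endif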
6101 /* This function frees the memory associated with an edge list. */
6103 free_edge_list (elist)
6104 struct edge_list *elist;
6108 free (elist->index_to_edge);
6113 /* This function provides debug output showing an edge list. */
6115 print_edge_list (f, elist)
6117 struct edge_list *elist;
6120 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6121 elist->num_blocks - 2, elist->num_edges);
6123 for (x = 0; x < elist->num_edges; x++)
6125 fprintf (f, " %-4d - edge(", x);
6126 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6127 fprintf (f,"entry,");
6129 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6131 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6132 fprintf (f,"exit)\n");
6134 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6138 /* This function provides an internal consistency check of an edge list,
6139 verifying that all edges are present, and that there are no extra edges. */
6142 verify_edge_list (f, elist)
6144 struct edge_list *elist;
6146 int x, pred, succ, index;
6149 for (x = 0; x < n_basic_blocks; x++)
6151 basic_block bb = BASIC_BLOCK (x);
6153 for (e = bb->succ; e; e = e->succ_next)
6155 pred = e->src->index;
6156 succ = e->dest->index;
6157 index = EDGE_INDEX (elist, e->src, e->dest);
6158 if (index == EDGE_INDEX_NO_EDGE)
6160 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6163 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6164 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6165 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6166 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6167 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6168 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6171 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6173 pred = e->src->index;
6174 succ = e->dest->index;
6175 index = EDGE_INDEX (elist, e->src, e->dest);
6176 if (index == EDGE_INDEX_NO_EDGE)
6178 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6181 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6182 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6183 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6184 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6185 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6186 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6188 /* We've verified that all the edges are in the list; now let's make sure
6189 there are no spurious edges in the list. */
6191 for (pred = 0 ; pred < n_basic_blocks; pred++)
6192 for (succ = 0 ; succ < n_basic_blocks; succ++)
6194 basic_block p = BASIC_BLOCK (pred);
6195 basic_block s = BASIC_BLOCK (succ);
6199 for (e = p->succ; e; e = e->succ_next)
6205 for (e = s->pred; e; e = e->pred_next)
6211 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6212 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6213 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6215 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6216 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6217 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6218 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6219 BASIC_BLOCK (succ)));
6221 for (succ = 0 ; succ < n_basic_blocks; succ++)
6223 basic_block p = ENTRY_BLOCK_PTR;
6224 basic_block s = BASIC_BLOCK (succ);
6228 for (e = p->succ; e; e = e->succ_next)
6234 for (e = s->pred; e; e = e->pred_next)
6240 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6241 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6242 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6244 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6245 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6246 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6247 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6248 BASIC_BLOCK (succ)));
6250 for (pred = 0 ; pred < n_basic_blocks; pred++)
6252 basic_block p = BASIC_BLOCK (pred);
6253 basic_block s = EXIT_BLOCK_PTR;
6257 for (e = p->succ; e; e = e->succ_next)
6263 for (e = s->pred; e; e = e->pred_next)
6269 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6270 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6271 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6273 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6274 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6275 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6276 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6281 /* This routine will determine what, if any, edge there is between
6282 a specified predecessor and successor. */
6285 find_edge_index (edge_list, pred, succ)
6286 struct edge_list *edge_list;
6287 basic_block pred, succ;
6290 for (x = 0; x < NUM_EDGES (edge_list); x++)
6292 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6293 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6296 return (EDGE_INDEX_NO_EDGE);
6299 /* This function will remove an edge from the flow graph. */
6304 edge last_pred = NULL;
6305 edge last_succ = NULL;
6307 basic_block src, dest;
6310 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6316 last_succ->succ_next = e->succ_next;
6318 src->succ = e->succ_next;
6320 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6326 last_pred->pred_next = e->pred_next;
6328 dest->pred = e->pred_next;
6334 /* This routine will remove any fake successor edges for a basic block.
6335 When the edge is removed, it is also removed from whatever predecessor list it is in. */
6338 remove_fake_successors (bb)
6342 for (e = bb->succ; e ; )
6346 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6351 /* This routine will remove all fake edges from the flow graph. If
6352 we remove all fake successors, it will automatically remove all
6353 fake predecessors. */
6355 remove_fake_edges ()
6359 for (x = 0; x < n_basic_blocks; x++)
6360 remove_fake_successors (BASIC_BLOCK (x));
6362 /* We've handled all successors except the entry block's. */
6363 remove_fake_successors (ENTRY_BLOCK_PTR);
6366 /* This function will add a fake edge between any block which has no
6367 successors, and the exit block. Some data flow equations require these edges to exist. */
6370 add_noreturn_fake_exit_edges ()
6374 for (x = 0; x < n_basic_blocks; x++)
6375 if (BASIC_BLOCK (x)->succ == NULL)
6376 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6379 /* Dump the list of basic blocks in the bitmap NODES. */
6381 flow_nodes_print (str, nodes, file)
6383 const sbitmap nodes;
6388 fprintf (file, "%s { ", str);
6389 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6390 fputs ("}\n", file);
6394 /* Dump the list of exiting edges in the array EDGES. */
6396 flow_exits_print (str, edges, num_edges, file)
6404 fprintf (file, "%s { ", str);
6405 for (i = 0; i < num_edges; i++)
6406 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6407 fputs ("}\n", file);
6411 /* Dump loop related CFG information. */
6413 flow_loops_cfg_dump (loops, file)
6414 const struct loops *loops;
6419 if (! loops->num || ! file || ! loops->cfg.dom)
6422 for (i = 0; i < n_basic_blocks; i++)
6426 fprintf (file, ";; %d succs { ", i);
6427 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6428 fprintf (file, "%d ", succ->dest->index);
6429 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6433 /* Dump the DFS node order. */
6434 if (loops->cfg.dfs_order)
6436 fputs (";; DFS order: ", file);
6437 for (i = 0; i < n_basic_blocks; i++)
6438 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6444 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6446 flow_loop_nested_p (outer, loop)
6450 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6454 /* Dump the loop information specified by LOOPS to the stream FILE. */
6456 flow_loops_dump (loops, file, verbose)
6457 const struct loops *loops;
6464 num_loops = loops->num;
6465 if (! num_loops || ! file)
6468 fprintf (file, ";; %d loops found, %d levels\n",
6469 num_loops, loops->levels);
6471 for (i = 0; i < num_loops; i++)
6473 struct loop *loop = &loops->array[i];
6475 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6476 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6477 loop->header->index, loop->latch->index,
6478 loop->pre_header ? loop->pre_header->index : -1,
6479 loop->depth, loop->level,
6480 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6481 fprintf (file, ";; %d", loop->num_nodes);
6482 flow_nodes_print (" nodes", loop->nodes, file);
6483 fprintf (file, ";; %d", loop->num_exits);
6484 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6490 for (j = 0; j < i; j++)
6492 struct loop *oloop = &loops->array[j];
6494 if (loop->header == oloop->header)
6499 smaller = loop->num_nodes < oloop->num_nodes;
6501 /* If the union of LOOP and OLOOP is different than
6502 the larger of LOOP and OLOOP then LOOP and OLOOP
6503 must be disjoint. */
6504 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6505 smaller ? oloop : loop);
6506 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6507 loop->header->index, i, j,
6508 disjoint ? "disjoint" : "nested");
6515 /* Print diagnostics to compare our concept of a loop with
6516 what the loop notes say. */
6517 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6518 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6519 != NOTE_INSN_LOOP_BEG)
6520 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6521 INSN_UID (PREV_INSN (loop->first->head)));
6522 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6523 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6524 != NOTE_INSN_LOOP_END)
6525 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6526 INSN_UID (NEXT_INSN (loop->last->end)));
6531 flow_loops_cfg_dump (loops, file);
6535 /* Free all the memory allocated for LOOPS. */
6537 flow_loops_free (loops)
6538 struct loops *loops;
6547 /* Free the loop descriptors. */
6548 for (i = 0; i < loops->num; i++)
6550 struct loop *loop = &loops->array[i];
6553 sbitmap_free (loop->nodes);
6557 free (loops->array);
6558 loops->array = NULL;
6561 sbitmap_vector_free (loops->cfg.dom);
6562 if (loops->cfg.dfs_order)
6563 free (loops->cfg.dfs_order);
6565 sbitmap_free (loops->shared_headers);
6570 /* Find the exits from the loop using the bitmap of loop nodes NODES
6571 and store in EXITS array. Return the number of exits from the loop. */
6574 flow_loop_exits_find (nodes, exits)
6575 const sbitmap nodes;
6584 /* Check all nodes within the loop to see if there are any
6585 successors not in the loop. Note that a node may have multiple exiting edges. */
6588 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6589 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6591 basic_block dest = e->dest;
6593 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6601 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6603 /* Store all exiting edges into an array. */
6605 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6606 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6608 basic_block dest = e->dest;
6610 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6611 (*exits)[num_exits++] = e;
6619 /* Find the nodes contained within the loop with header HEADER and
6620 latch LATCH and store in NODES. Return the number of nodes within the loop. */
6623 flow_loop_nodes_find (header, latch, nodes)
6632 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6635 /* Start with only the loop header in the set of loop nodes. */
6636 sbitmap_zero (nodes);
6637 SET_BIT (nodes, header->index);
6639 header->loop_depth++;
6641 /* Push the loop latch on to the stack. */
6642 if (! TEST_BIT (nodes, latch->index))
6644 SET_BIT (nodes, latch->index);
6645 latch->loop_depth++;
6647 stack[sp++] = latch;
6656 for (e = node->pred; e; e = e->pred_next)
6658 basic_block ancestor = e->src;
6660 /* If this ancestor has not been marked as part of the loop, add it
6661 to the set of loop nodes and push it on to the stack. */
6662 if (ancestor != ENTRY_BLOCK_PTR
6663 && ! TEST_BIT (nodes, ancestor->index))
6665 SET_BIT (nodes, ancestor->index);
6666 ancestor->loop_depth++;
6668 stack[sp++] = ancestor;
6677 /* Compute the depth first search order and store in the array
6678 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6679 number of nodes visited. */
6681 flow_depth_first_order_compute (dfs_order)
6690 /* Allocate stack for back-tracking up CFG. */
6691 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6694 /* Allocate bitmap to track nodes that have been visited. */
6695 visited = sbitmap_alloc (n_basic_blocks);
6697 /* None of the nodes in the CFG have been visited yet. */
6698 sbitmap_zero (visited);
6700 /* Start with the first successor edge from the entry block. */
6701 e = ENTRY_BLOCK_PTR->succ;
6704 basic_block src = e->src;
6705 basic_block dest = e->dest;
6707 /* Mark that we have visited this node. */
6708 if (src != ENTRY_BLOCK_PTR)
6709 SET_BIT (visited, src->index);
6711 /* If this node has not been visited before, push the current
6712 edge on to the stack and proceed with the first successor
6713 edge of this node. */
6714 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6722 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6725 /* DEST has no successors (for example, a non-returning
6726 function is called) so do not push the current edge
6727 but carry on with its next successor. */
6728 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6729 SET_BIT (visited, dest->index);
6732 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6734 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6736 /* Pop edge off stack. */
6744 sbitmap_free (visited);
6746 /* The number of nodes visited should not be greater than n_basic_blocks. */
6748 if (dfsnum > n_basic_blocks)
6751 /* There are some nodes left in the CFG that are unreachable. */
6752 if (dfsnum < n_basic_blocks)
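/* A worked example (hedged; assumes block 1 is reached before block 2):
   for the diamond CFG 0->1, 0->2, 1->3, 2->3 the completion order is
   3, 1, 2, 0, so the reverse post-order numbers stored are
   dfs_order[0] = 0, dfs_order[1] = 2, dfs_order[2] = 1 and
   dfs_order[3] = 3.  */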
6758 /* Return the block for the pre-header of the loop with header
6759 HEADER where DOM specifies the dominator information. Return NULL if
6760 there is no pre-header. */
6762 flow_loop_pre_header_find (header, dom)
6766 basic_block pre_header;
6769 /* If block p is a predecessor of the header and is the only block
6770 that the header does not dominate, then it is the pre-header. */
6772 for (e = header->pred; e; e = e->pred_next)
6774 basic_block node = e->src;
6776 if (node != ENTRY_BLOCK_PTR
6777 && ! TEST_BIT (dom[node->index], header->index))
6779 if (pre_header == NULL)
6783 /* There are multiple edges into the header from outside
6784 the loop so there is no pre-header block. */
6794 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6795 previously added. The insertion algorithm assumes that the loops
6796 are added in the order found by a depth first search of the CFG. */
6798 flow_loop_tree_node_add (prevloop, loop)
6799 struct loop *prevloop;
6803 if (flow_loop_nested_p (prevloop, loop))
6805 prevloop->inner = loop;
6806 loop->outer = prevloop;
6810 while (prevloop->outer)
6812 if (flow_loop_nested_p (prevloop->outer, loop))
6814 prevloop->next = loop;
6815 loop->outer = prevloop->outer;
6818 prevloop = prevloop->outer;
6821 prevloop->next = loop;
6826 /* Build the loop hierarchy tree for LOOPS. */
6828 flow_loops_tree_build (loops)
6829 struct loops *loops;
6834 num_loops = loops->num;
6838 /* Root the loop hierarchy tree with the first loop found.
6839 Since we used a depth first search this should be the outermost loop. */
6841 loops->tree = &loops->array[0];
6842 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6844 /* Add the remaining loops to the tree. */
6845 for (i = 1; i < num_loops; i++)
6846 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6850 /* Helper function to compute loop nesting depth and enclosed loop level
6851 for the natural loop specified by LOOP at the loop depth DEPTH.
6852 Returns the loop level. */
6854 flow_loop_level_compute (loop, depth)
6864 /* Traverse loop tree assigning depth and computing level as the
6865 maximum level of all the inner loops of this loop. The loop
6866 level is equivalent to the height of the loop in the loop tree
6867 and corresponds to the number of enclosed loop levels (including
6869 for (inner = loop->inner; inner; inner = inner->next)
6873 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6878 loop->level = level;
6879 loop->depth = depth;
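/* For example (hedged): in a two-deep nest the innermost loop gets
   depth 2 and level 1, while its enclosing loop gets depth 1 and
   level 2 -- one more than the maximum level of its inner loops.  */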
6884 /* Compute the loop nesting depth and enclosed loop level for the loop
6885 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
6889 flow_loops_level_compute (loops)
6890 struct loops *loops;
6896 /* Traverse all the outer level loops. */
6897 for (loop = loops->tree; loop; loop = loop->next)
6899 level = flow_loop_level_compute (loop, 1);
6907 /* Find all the natural loops in the function and save in LOOPS structure
6908 and recalculate loop_depth information in basic block structures.
6909 Return the number of natural loops found. */
6912 flow_loops_find (loops)
6913 struct loops *loops;
6924 loops->array = NULL;
6928 /* Taking care of this degenerate case makes the rest of
6929 this code simpler. */
6930 if (n_basic_blocks == 0)
6933 /* Compute the dominators. */
6934 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6935 compute_flow_dominators (dom, NULL);
6937 /* Count the number of loop edges (back edges). This should be the
6938 same as the number of natural loops. Also clear the loop_depth
6939 and as we work from inner->outer in a loop nest we call
6940 flow_loop_nodes_find which will increment loop_depth for nodes
6941 within the current loop, which happens to enclose inner loops. */
6944 for (b = 0; b < n_basic_blocks; b++)
6946 BASIC_BLOCK (b)->loop_depth = 0;
6947 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6949 basic_block latch = e->src;
6951 /* Look for back edges where a predecessor is dominated
6952 by this block. A natural loop has a single entry
6953 node (header) that dominates all the nodes in the
6954 loop. It also has a single back edge to the header
6955 from a latch node. Note that multiple natural loops
6956 may share the same header. */
6957 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6964 /* Compute depth first search order of the CFG so that outer
6965 natural loops will be found before inner natural loops. */
6966 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6967 flow_depth_first_order_compute (dfs_order);
6969 /* Allocate loop structures. */
6971 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6973 headers = sbitmap_alloc (n_basic_blocks);
6974 sbitmap_zero (headers);
6976 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6977 sbitmap_zero (loops->shared_headers);
6979 /* Find and record information about all the natural loops in the CFG. */
6982 for (b = 0; b < n_basic_blocks; b++)
6986 /* Search the nodes of the CFG in DFS order so that we can find
6987 outer loops first. */
6988 header = BASIC_BLOCK (dfs_order[b]);
6990 /* Look for all the possible latch blocks for this header. */
6991 for (e = header->pred; e; e = e->pred_next)
6993 basic_block latch = e->src;
6995 /* Look for back edges where a predecessor is dominated
6996 by this block. A natural loop has a single entry
6997 node (header) that dominates all the nodes in the
6998 loop. It also has a single back edge to the header
6999 from a latch node. Note that multiple natural loops
7000 may share the same header. */
7001 if (latch != ENTRY_BLOCK_PTR
7002 && TEST_BIT (dom[latch->index], header->index))
7006 loop = loops->array + num_loops;
7008 loop->header = header;
7009 loop->latch = latch;
7011 /* Keep track of blocks that are loop headers so
7012 that we can tell which loops should be merged. */
7013 if (TEST_BIT (headers, header->index))
7014 SET_BIT (loops->shared_headers, header->index);
7015 SET_BIT (headers, header->index);
7017 /* Find nodes contained within the loop. */
7018 loop->nodes = sbitmap_alloc (n_basic_blocks);
7020 = flow_loop_nodes_find (header, latch, loop->nodes);
7022 /* Compute first and last blocks within the loop.
7023 These are often the same as the loop header and
7024 loop latch respectively, but this is not always the case. */
7027 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7029 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7031 /* Find edges which exit the loop. Note that a node
7032 may have several exit edges. */
7034 = flow_loop_exits_find (loop->nodes, &loop->exits);
7036 /* Look to see if the loop has a pre-header node. */
7038 = flow_loop_pre_header_find (header, dom);
7045 /* Natural loops with shared headers may either be disjoint or
7046 nested. Disjoint loops with shared headers cannot be inner
7047 loops and should be merged. For now just mark loops that share headers. */
7049 for (i = 0; i < num_loops; i++)
7050 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7051 loops->array[i].shared = 1;
7053 sbitmap_free (headers);
7056 loops->num = num_loops;
7058 /* Save CFG derived information to avoid recomputing it. */
7059 loops->cfg.dom = dom;
7060 loops->cfg.dfs_order = dfs_order;
7062 /* Build the loop hierarchy tree. */
7063 flow_loops_tree_build (loops);
7065 /* Assign the loop nesting depth and enclosed loop level for each loop. */
7067 loops->levels = flow_loops_level_compute (loops);
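#if 0
/* Minimal usage sketch (hypothetical caller; rtl_dump_file is assumed
   to be an open dump stream):  */
struct loops loops;
if (flow_loops_find (&loops) > 0)
  flow_loops_dump (&loops, rtl_dump_file, 1);
flow_loops_free (&loops);
#endif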
7073 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7075 flow_loop_outside_edge_p (loop, e)
7076 const struct loop *loop;
7079 if (e->dest != loop->header)
7081 return (e->src == ENTRY_BLOCK_PTR)
7082 || ! TEST_BIT (loop->nodes, e->src->index);
7086 /* Clear LOG_LINKS fields of insns in a chain. */
7088 clear_log_links (insns)
7092 for (i = insns; i; i = NEXT_INSN (i))
7093 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')