1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial-register
dead problem.
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value
that is never used.
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation
127 #include "basic-block.h"
128 #include "insn-config.h"
130 #include "hard-reg-set.h"
133 #include "function.h"
137 #include "insn-flags.h"
/* Allocation hooks required by obstack.h: chunks come from xmalloc/free.  */
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
/* NOTE(review): this listing is truncated -- the matching #endif lines
   (and any text between these conditionals) are missing; confirm against
   the original file before editing.  */
/* Provide zero defaults for target capabilities the MD may not define.  */
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
/* File-scope state shared by the CFG-construction and life-analysis
   routines below.  NOTE(review): this listing is truncated; several
   declarations that the comments describe are missing and are flagged
   individually.  */
164 /* The contents of the current function definition are allocated
165 in this obstack, and all are freed at the end of the function.
166 For top-level functions, this is temporary_obstack.
167 Separate obstacks are made for nested functions. */
169 extern struct obstack *function_obstack;
171 /* Number of basic blocks in the current function. */
/* NOTE(review): the declaration of n_basic_blocks itself is missing here.  */
175 /* Number of edges in the current function. */
/* NOTE(review): the declaration of n_edges itself is missing here.  */
179 /* The basic block array. */
181 varray_type basic_block_info;
183 /* The special entry and exit blocks. */
/* Statically-allocated descriptors referenced via ENTRY_BLOCK_PTR and
   EXIT_BLOCK_PTR; most initializer lines are missing from this listing.  */
185 struct basic_block_def entry_exit_blocks[2]
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
194 ENTRY_BLOCK, /* index */
196 -1, -1 /* eh_beg, eh_end */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
207 EXIT_BLOCK, /* index */
209 -1, -1 /* eh_beg, eh_end */
213 /* Nonzero if the second flow pass has completed. */
216 /* Maximum register number used in this function, plus one. */
220 /* Indexed by n, giving various register information */
222 varray_type reg_n_info;
224 /* Size of the reg_n_info table. */
226 unsigned int reg_n_max;
228 /* Element N is the next insn that uses (hard or pseudo) register number N
229 within the current basic block; or zero, if there is no such insn.
230 This is valid only during the final backward scan in propagate_block. */
232 static rtx *reg_next_use;
234 /* Size of a regset for the current function,
235 in (1) bytes and (2) elements. */
240 /* Regset of regs live when calls to `setjmp'-like functions happen. */
241 /* ??? Does this exist only for the setjmp-clobbered warning message? */
243 regset regs_live_at_setjmp;
245 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
246 that have to go in the same hard reg.
247 The first two regs in the list are a pair, and the next two
248 are another pair, etc. */
/* NOTE(review): the declaration this comment describes is missing here.  */
251 /* Depth within loops of basic block being scanned for lifetime analysis,
252 plus one. This is the weight attached to references to registers. */
254 static int loop_depth;
256 /* During propagate_block, this is non-zero if the value of CC0 is live. */
/* NOTE(review): the declaration this comment describes is missing here.  */
260 /* During propagate_block, this contains a list of all the MEMs we are
261 tracking for dead store elimination. */
263 static rtx mem_set_list;
265 /* Set of registers that may be eliminable. These are handled specially
266 in updating regs_ever_live. */
268 static HARD_REG_SET elim_reg_set;
270 /* The basic block structure for every insn, indexed by uid. */
272 varray_type basic_block_for_insn;
274 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
275 /* ??? Should probably be using LABEL_NUSES instead. It would take a
276 bit of surgery to be able to use or co-opt the routines in jump. */
278 static rtx label_value_list;
/* Forward declarations of the static helpers defined later in this file.
   NOTE(review): this listing is truncated -- several prototypes are missing
   their continuation lines (e.g. make_label_edge, propagate_block,
   mark_set_regs, mark_set_1, the merge_blocks_move_* pair); confirm the
   full PARAMS lists against the original file.  */
280 /* Forward declarations */
281 static int count_basic_blocks PARAMS ((rtx));
282 static rtx find_basic_blocks_1 PARAMS ((rtx));
283 static void clear_edges PARAMS ((void));
284 static void make_edges PARAMS ((rtx));
285 static void make_label_edge PARAMS ((sbitmap *, basic_block,
287 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
288 basic_block, rtx, int));
289 static void mark_critical_edges PARAMS ((void));
290 static void move_stray_eh_region_notes PARAMS ((void));
291 static void record_active_eh_regions PARAMS ((rtx));
293 static void commit_one_edge_insertion PARAMS ((edge));
295 static void delete_unreachable_blocks PARAMS ((void));
296 static void delete_eh_regions PARAMS ((void));
297 static int can_delete_note_p PARAMS ((rtx));
298 static int delete_block PARAMS ((basic_block));
299 static void expunge_block PARAMS ((basic_block));
300 static int can_delete_label_p PARAMS ((rtx));
301 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
303 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
305 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
306 static void try_merge_blocks PARAMS ((void));
307 static void tidy_fallthru_edges PARAMS ((void));
308 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
309 static void verify_wide_reg PARAMS ((int, rtx, rtx));
310 static void verify_local_live_at_start PARAMS ((regset, basic_block));
311 static int set_noop_p PARAMS ((rtx));
312 static int noop_move_p PARAMS ((rtx));
313 static void delete_noop_moves PARAMS ((rtx));
314 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
315 static void notice_stack_pointer_modification PARAMS ((rtx));
316 static void mark_reg PARAMS ((rtx, void *));
317 static void mark_regs_live_at_end PARAMS ((regset));
318 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
319 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
320 static void propagate_block PARAMS ((basic_block, regset,
322 static int insn_dead_p PARAMS ((rtx, regset, int, rtx));
323 static int libcall_dead_p PARAMS ((rtx, regset, rtx, rtx));
324 static void mark_set_regs PARAMS ((regset, regset, rtx,
326 static void mark_set_1 PARAMS ((regset, regset, rtx,
329 static void find_auto_inc PARAMS ((regset, rtx, rtx));
330 static int try_pre_increment_1 PARAMS ((rtx));
331 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
333 static void mark_used_regs PARAMS ((regset, regset, rtx, int, rtx));
334 void dump_flow_info PARAMS ((FILE *));
335 void debug_flow_info PARAMS ((void));
336 static void dump_edge_info PARAMS ((FILE *, edge, int));
338 static void count_reg_sets_1 PARAMS ((rtx));
339 static void count_reg_sets PARAMS ((rtx));
340 static void count_reg_references PARAMS ((rtx));
341 static void invalidate_mems_from_autoinc PARAMS ((rtx));
342 static void remove_fake_successors PARAMS ((basic_block));
343 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
344 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
345 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
346 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
347 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
348 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
349 static int flow_depth_first_order_compute PARAMS ((int *));
350 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
351 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
352 static void flow_loops_tree_build PARAMS ((struct loops *));
353 static int flow_loop_level_compute PARAMS ((struct loop *, int));
354 static int flow_loops_level_compute PARAMS ((struct loops *));
356 /* Find basic blocks of the current function.
357 F is the first insn of the function and NREGS the number of register
/* Top-level driver: (re)builds basic_block_info, the insn->block map, and
   the CFG edges for the current function.
   NOTE(review): this listing is truncated -- the rest of this comment, the
   return type line, the opening/closing braces, and the local declarations
   (at least `int i' and `int max_uid') are missing.  */
361 find_basic_blocks (f, nregs, file)
363 int nregs ATTRIBUTE_UNUSED;
364 FILE *file ATTRIBUTE_UNUSED;
368 /* Flush out existing data. */
369 if (basic_block_info != NULL)
375 /* Clear bb->aux on all extant basic blocks. We'll use this as a
376 tag for reuse during create_basic_block, just in case some pass
377 copies around basic block notes improperly. */
378 for (i = 0; i < n_basic_blocks; ++i)
379 BASIC_BLOCK (i)->aux = NULL;
381 VARRAY_FREE (basic_block_info);
384 n_basic_blocks = count_basic_blocks (f);
386 /* Size the basic block table. The actual structures will be allocated
387 by find_basic_blocks_1, since we want to keep the structure pointers
388 stable across calls to find_basic_blocks. */
389 /* ??? This whole issue would be much simpler if we called find_basic_blocks
390 exactly once, and thereafter we don't have a single long chain of
391 instructions at all until close to the end of compilation when we
392 actually lay them out. */
394 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
396 label_value_list = find_basic_blocks_1 (f);
398 /* Record the block to which an insn belongs. */
399 /* ??? This should be done another way, by which (perhaps) a label is
400 tagged directly with the basic block that it starts. It is used for
401 more than that currently, but IMO that is the only valid use. */
403 max_uid = get_max_uid ();
405 /* Leave space for insns life_analysis makes in some cases for auto-inc.
406 These cases are rare, so we don't need too much space. */
407 max_uid += max_uid / 10;
410 compute_bb_for_insn (max_uid);
412 /* Discover the edges of our cfg. */
413 record_active_eh_regions (f);
414 make_edges (label_value_list);
416 /* Do very simple cleanup now, for the benefit of code that runs between
417 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
418 tidy_fallthru_edges ();
420 mark_critical_edges ();
/* NOTE(review): the body of this ENABLE_CHECKING section (and the #endif
   and the function's closing brace) is missing from this listing.  */
422 #ifdef ENABLE_CHECKING
427 /* Count the basic blocks of the function. */
/* Returns the number of basic blocks, by scanning the insn chain and
   counting block starts (labels, and real insns that follow a jump,
   a barrier, or a call that created an abnormal edge).
   NOTE(review): this listing is truncated -- the return type line, braces,
   several locals (e.g. `insn', `eh_region'), the count increment, the NOTE
   handling bodies, and the final `return count;' are missing.  */
430 count_basic_blocks (f)
434 register RTX_CODE prev_code;
435 register int count = 0;
437 int call_had_abnormal_edge = 0;
438 rtx prev_call = NULL_RTX;
440 prev_code = JUMP_INSN;
441 for (insn = f; insn; insn = NEXT_INSN (insn))
443 register RTX_CODE code = GET_CODE (insn);
/* A new block starts at a label, or at a real insn preceded by a
   control-flow break.  */
445 if (code == CODE_LABEL
446 || (GET_RTX_CLASS (code) == 'i'
447 && (prev_code == JUMP_INSN
448 || prev_code == BARRIER
449 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
454 /* Record whether this call created an edge. */
455 if (code == CALL_INSN)
457 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
458 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
460 call_had_abnormal_edge = 0;
462 /* If there is an EH region or rethrow, we have an edge. */
463 if ((eh_region && region > 0)
464 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
465 call_had_abnormal_edge = 1;
468 /* If there is a nonlocal goto label and the specified
469 region number isn't -1, we have an edge. (0 means
470 no throw, but might have a nonlocal goto). */
471 if (nonlocal_goto_handler_labels && region >= 0)
472 call_had_abnormal_edge = 1;
475 else if (code != NOTE)
476 prev_call = NULL_RTX;
/* NOTE(review): the conditions pairing with these else-if arms (and the
   eh_region bookkeeping they perform) are missing from this listing.  */
480 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
482 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
487 /* The rest of the compiler works a bit smoother when we don't have to
488 check for the edge case of do-nothing functions with no basic blocks. */
491 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
498 /* Find all basic blocks of the function whose first insn is F.
500 Collect and return a list of labels whose addresses are taken. This
501 will be used in make_edges for use with computed gotos. */
/* Walks the insn chain once, calling create_basic_block for each block
   found, and accumulating address-taken labels in label_value_list.
   NOTE(review): this listing is heavily truncated -- the return type line,
   braces, several locals (`head', `end', `i', `note', `t'), the switch
   statement over `code', the new_bb_inclusive/new_bb_exclusive labels, and
   many statement bodies are missing.  Do not edit without the original.  */
504 find_basic_blocks_1 (f)
507 register rtx insn, next;
508 int call_has_abnormal_edge = 0;
510 rtx bb_note = NULL_RTX;
511 rtx eh_list = NULL_RTX;
512 rtx label_value_list = NULL_RTX;
516 /* We process the instructions in a slightly different way than we did
517 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
518 closed out the previous block, so that it gets attached at the proper
519 place. Since this form should be equivalent to the previous,
520 count_basic_blocks continues to use the old form as a check. */
522 for (insn = f; insn; insn = next)
524 enum rtx_code code = GET_CODE (insn);
526 next = NEXT_INSN (insn);
528 if (code == CALL_INSN)
530 /* Record whether this call created an edge. */
531 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
532 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
533 call_has_abnormal_edge = 0;
535 /* If there is an EH region or rethrow, we have an edge. */
536 if ((eh_list && region > 0)
537 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
538 call_has_abnormal_edge = 1;
541 /* If there is a nonlocal goto label and the specified
542 region number isn't -1, we have an edge. (0 means
543 no throw, but might have a nonlocal goto). */
544 if (nonlocal_goto_handler_labels && region >= 0)
545 call_has_abnormal_edge = 1;
/* NOTE(review): the switch over `code' and the NOTE case header are
   missing; `kind' below classifies a NOTE insn.  */
553 int kind = NOTE_LINE_NUMBER (insn);
555 /* Keep a LIFO list of the currently active exception notes. */
556 if (kind == NOTE_INSN_EH_REGION_BEG)
557 eh_list = alloc_INSN_LIST (insn, eh_list);
558 else if (kind == NOTE_INSN_EH_REGION_END)
561 eh_list = XEXP (eh_list, 1);
562 free_INSN_LIST_node (t);
565 /* Look for basic block notes with which to keep the
566 basic_block_info pointers stable. Unthread the note now;
567 we'll put it back at the right place in create_basic_block.
568 Or not at all if we've already found a note in this block. */
569 else if (kind == NOTE_INSN_BASIC_BLOCK)
571 if (bb_note == NULL_RTX)
573 next = flow_delete_insn (insn);
580 /* A basic block starts at a label. If we've closed one off due
581 to a barrier or some such, no need to do it again. */
582 if (head != NULL_RTX)
584 /* While we now have edge lists with which other portions of
585 the compiler might determine a call ending a basic block
586 does not imply an abnormal edge, it will be a bit before
587 everything can be updated. So continue to emit a noop at
588 the end of such a block. */
589 if (GET_CODE (end) == CALL_INSN
590 && ! SIBLING_CALL_P (end))
592 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
593 end = emit_insn_after (nop, end);
596 create_basic_block (i++, head, end, bb_note);
603 /* A basic block ends at a jump. */
604 if (head == NULL_RTX)
608 /* ??? Make a special check for table jumps. The way this
609 happens is truly and amazingly gross. We are about to
610 create a basic block that contains just a code label and
611 an addr*vec jump insn. Worse, an addr_diff_vec creates
612 its own natural loop.
614 Prevent this bit of brain damage, pasting things together
615 correctly in make_edges.
617 The correct solution involves emitting the table directly
618 on the tablejump instruction as a note, or JUMP_LABEL. */
620 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
621 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
629 goto new_bb_inclusive;
632 /* A basic block ends at a barrier. It may be that an unconditional
633 jump already closed the basic block -- no need to do it again. */
634 if (head == NULL_RTX)
637 /* While we now have edge lists with which other portions of the
638 compiler might determine a call ending a basic block does not
639 imply an abnormal edge, it will be a bit before everything can
640 be updated. So continue to emit a noop at the end of such a
642 if (GET_CODE (end) == CALL_INSN
643 && ! SIBLING_CALL_P (end))
645 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
646 end = emit_insn_after (nop, end);
648 goto new_bb_exclusive;
651 /* A basic block ends at a call that can either throw or
652 do a non-local goto. */
653 if (call_has_abnormal_edge)
656 if (head == NULL_RTX)
/* NOTE(review): the new_bb_inclusive/new_bb_exclusive label targets are
   missing from this listing; the lines below are their shared tail.  */
661 create_basic_block (i++, head, end, bb_note);
662 head = end = NULL_RTX;
669 if (GET_RTX_CLASS (code) == 'i')
671 if (head == NULL_RTX)
678 if (GET_RTX_CLASS (code) == 'i')
682 /* Make a list of all labels referred to other than by jumps
683 (which just don't have the REG_LABEL notes).
685 Make a special exception for labels followed by an ADDR*VEC,
686 as this would be a part of the tablejump setup code.
688 Make a special exception for the eh_return_stub_label, which
689 we know isn't part of any otherwise visible control flow. */
691 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
692 if (REG_NOTE_KIND (note) == REG_LABEL)
694 rtx lab = XEXP (note, 0), next;
696 if (lab == eh_return_stub_label)
698 else if ((next = next_nonnote_insn (lab)) != NULL
699 && GET_CODE (next) == JUMP_INSN
700 && (GET_CODE (PATTERN (next)) == ADDR_VEC
701 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
705 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
/* Close off the final block, if any insns remain unattached.  */
710 if (head != NULL_RTX)
711 create_basic_block (i++, head, end, bb_note);
/* Sanity check against the count computed by count_basic_blocks; the
   abort this guards is missing from the listing.  */
713 if (i != n_basic_blocks)
716 return label_value_list;
719 /* Tidy the CFG by deleting unreachable code and whatnot. */
/* NOTE(review): the function header (name, parameter `f', braces) and the
   calls preceding these lines are missing from this listing; this appears
   to be the tail of cleanup_cfg.  Confirm against the original file.  */
725 delete_unreachable_blocks ();
726 move_stray_eh_region_notes ();
727 record_active_eh_regions (f);
729 mark_critical_edges ();
731 /* Kill the data we won't maintain. */
732 label_value_list = NULL_RTX;
735 /* Create a new basic block consisting of the instructions between
736 HEAD and END inclusive. Reuses the note and basic block struct
737 in BB_NOTE, if any. */
/* NOTE(review): this listing is truncated -- the return type line, braces,
   the local `basic_block bb', the full reuse-test condition, the else
   branches, and the statements that set bb->head/bb->end and tag bb->aux
   are missing.  */
740 create_basic_block (index, head, end, bb_note)
742 rtx head, end, bb_note;
747 && ! RTX_INTEGRATED_P (bb_note)
748 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
751 /* If we found an existing note, thread it back onto the chain. */
753 if (GET_CODE (head) == CODE_LABEL)
754 add_insn_after (bb_note, head)
757 add_insn_before (bb_note, head);
763 /* Otherwise we must create a note and a basic block structure.
764 Since we allow basic block structs in rtl, give the struct
765 the same lifetime by allocating it off the function obstack
766 rather than using malloc. */
768 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
769 memset (bb, 0, sizeof (*bb));
771 if (GET_CODE (head) == CODE_LABEL)
772 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
775 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
778 NOTE_BASIC_BLOCK (bb_note) = bb;
781 /* Always include the bb note in the block. */
782 if (NEXT_INSN (end) == bb_note)
788 BASIC_BLOCK (index) = bb;
790 /* Tag the block so that we know it has been used when considering
791 other basic block notes. */
795 /* Records the basic block struct in BB_FOR_INSN, for every instruction
796 indexed by INSN_UID. MAX is the size of the array. */
/* NOTE(review): the return type line, braces, the declaration of `i', the
   inner loop header walking bb->head..bb->end, and the loop-termination
   test are missing from this listing.  */
799 compute_bb_for_insn (max)
804 if (basic_block_for_insn)
805 VARRAY_FREE (basic_block_for_insn);
806 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
808 for (i = 0; i < n_basic_blocks; ++i)
810 basic_block bb = BASIC_BLOCK (i);
817 int uid = INSN_UID (insn);
819 VARRAY_BB (basic_block_for_insn, uid) = bb;
822 insn = NEXT_INSN (insn);
827 /* Free the memory associated with the edge structures. */
/* NOTE(review): the function header (clear_edges), braces, the locals
   (`i', `e', `n'), and the statements freeing each edge are missing from
   this listing.  */
835 for (i = 0; i < n_basic_blocks; ++i)
837 basic_block bb = BASIC_BLOCK (i);
839 for (e = bb->succ; e ; e = n)
/* Also release the edges hanging off the synthetic entry block, then
   clear both sentinel pointers.  */
849 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
855 ENTRY_BLOCK_PTR->succ = 0;
856 EXIT_BLOCK_PTR->pred = 0;
861 /* Identify the edges between basic blocks.
863 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
864 that are otherwise unreachable may be reachable with a non-local goto.
866 BB_EH_END is an array indexed by basic block number in which we record
867 the list of exception regions active at the end of the basic block. */
/* For each block, inspects the last insn to add jump, call, EH, computed
   goto, return, and fallthru edges.
   NOTE(review): this listing is heavily truncated -- the return type line,
   braces, locals (`i', `insn', `x', `code', `vec', `j', `tmp'), several
   condition/else lines, and the CASE_DROPS_THROUGH body are missing.  */
870 make_edges (label_value_list)
871 rtx label_value_list;
874 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
875 sbitmap *edge_cache = NULL;
877 /* Assume no computed jump; revise as we create edges. */
878 current_function_has_computed_jump = 0;
880 /* Heavy use of computed goto in machine-generated code can lead to
881 nearly fully-connected CFGs. In that case we spend a significant
882 amount of time searching the edge lists for duplicates. */
883 if (forced_labels || label_value_list)
885 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
886 sbitmap_vector_zero (edge_cache, n_basic_blocks);
889 /* By nature of the way these get numbered, block 0 is always the entry. */
890 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
892 for (i = 0; i < n_basic_blocks; ++i)
894 basic_block bb = BASIC_BLOCK (i);
897 int force_fallthru = 0;
899 /* Examine the last instruction of the block, and discover the
900 ways we can leave the block. */
903 code = GET_CODE (insn);
906 if (code == JUMP_INSN)
910 /* ??? Recognize a tablejump and do the right thing. */
911 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
912 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
913 && GET_CODE (tmp) == JUMP_INSN
914 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
915 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
/* A dispatch table: make an edge to every label it mentions.  */
920 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
921 vec = XVEC (PATTERN (tmp), 0);
923 vec = XVEC (PATTERN (tmp), 1);
925 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
926 make_label_edge (edge_cache, bb,
927 XEXP (RTVEC_ELT (vec, j), 0), 0);
929 /* Some targets (eg, ARM) emit a conditional jump that also
930 contains the out-of-range target. Scan for these and
931 add an edge if necessary. */
932 if ((tmp = single_set (insn)) != NULL
933 && SET_DEST (tmp) == pc_rtx
934 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
935 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
936 make_label_edge (edge_cache, bb,
937 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
939 #ifdef CASE_DROPS_THROUGH
940 /* Silly VAXen. The ADDR_VEC is going to be in the way of
941 us naturally detecting fallthru into the next block. */
/* NOTE(review): the force_fallthru assignment and #endif for this
   conditional are missing from this listing.  */
946 /* If this is a computed jump, then mark it as reaching
947 everything on the label_value_list and forced_labels list. */
948 else if (computed_jump_p (insn))
950 current_function_has_computed_jump = 1;
952 for (x = label_value_list; x; x = XEXP (x, 1))
953 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
955 for (x = forced_labels; x; x = XEXP (x, 1))
956 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
959 /* Returns create an exit out. */
960 else if (returnjump_p (insn))
961 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
963 /* Otherwise, we have a plain conditional or unconditional jump. */
966 if (! JUMP_LABEL (insn))
968 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
972 /* If this is a sibling call insn, then this is in effect a
973 combined call and return, and so we need an edge to the
974 exit block. No need to worry about EH edges, since we
975 wouldn't have created the sibling call in the first place. */
977 if (code == CALL_INSN && SIBLING_CALL_P (insn))
978 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
981 /* If this is a CALL_INSN, then mark it as reaching the active EH
982 handler for this CALL_INSN. If we're handling asynchronous
983 exceptions then any insn can reach any of the active handlers.
985 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
987 if (code == CALL_INSN || asynchronous_exceptions)
989 /* Add any appropriate EH edges. We do this unconditionally
990 since there may be a REG_EH_REGION or REG_EH_RETHROW note
991 on the call, and this needn't be within an EH region. */
992 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
994 /* If we have asynchronous exceptions, do the same for *all*
995 exception regions active in the block. */
996 if (asynchronous_exceptions
997 && bb->eh_beg != bb->eh_end)
1000 make_eh_edge (edge_cache, eh_nest_info, bb,
1001 NULL_RTX, bb->eh_beg);
1003 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1004 if (GET_CODE (x) == NOTE
1005 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1006 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1008 int region = NOTE_EH_HANDLER (x);
1009 make_eh_edge (edge_cache, eh_nest_info, bb,
/* NOTE(review): the remaining arguments of this call are missing.  */
1014 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1016 /* ??? This could be made smarter: in some cases it's possible
1017 to tell that certain calls will not do a nonlocal goto.
1019 For example, if the nested functions that do the nonlocal
1020 gotos do not have their addresses taken, then only calls to
1021 those functions or to other nested functions that use them
1022 could possibly do nonlocal gotos. */
1023 /* We do know that a REG_EH_REGION note with a value less
1024 than 0 is guaranteed not to perform a non-local goto. */
1025 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1026 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1027 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1028 make_label_edge (edge_cache, bb, XEXP (x, 0),
1029 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1033 /* We know something about the structure of the function __throw in
1034 libgcc2.c. It is the only function that ever contains eh_stub
1035 labels. It modifies its return address so that the last block
1036 returns to one of the eh_stub labels within it. So we have to
1037 make additional edges in the flow graph. */
1038 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1039 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1041 /* Find out if we can drop through to the next block. */
1042 insn = next_nonnote_insn (insn);
1043 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1044 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1045 else if (i + 1 < n_basic_blocks)
1047 rtx tmp = BLOCK_HEAD (i + 1);
1048 if (GET_CODE (tmp) == NOTE)
1049 tmp = next_nonnote_insn (tmp);
1050 if (force_fallthru || insn == tmp)
1051 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1055 free_eh_nesting_info (eh_nest_info);
1057 sbitmap_vector_free (edge_cache);
1060 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1061 about the edge that is accumulated between calls. */
/* NOTE(review): the return type line, braces, the `flags' parameter
   declaration, the locals (`e', `use_edge_cache'), the duplicate-edge
   early return inside the scan loop, and the src/dst/flags field
   assignments are missing from this listing.  */
1064 make_edge (edge_cache, src, dst, flags)
1065 sbitmap *edge_cache;
1066 basic_block src, dst;
1072 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1073 many edges to them, and we didn't allocate memory for it. */
1074 use_edge_cache = (edge_cache
1075 && src != ENTRY_BLOCK_PTR
1076 && dst != EXIT_BLOCK_PTR);
1078 /* Make sure we don't add duplicate edges. */
1079 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1080 for (e = src->succ; e ; e = e->succ_next)
/* Allocate the new edge and splice it onto both adjacency lists.  */
1087 e = (edge) xcalloc (1, sizeof (*e));
1090 e->succ_next = src->succ;
1091 e->pred_next = dst->pred;
/* Remember the edge in the cache so later calls skip the list scan.  */
1100 SET_BIT (edge_cache[src->index], dst->index);
1103 /* Create an edge from a basic block to a label. */
/* Thin wrapper over make_edge that resolves LABEL to its block.
   NOTE(review): the return type line, braces, the `label'/`flags'
   parameter declarations, and the early `return;' statements are missing
   from this listing.  */
1106 make_label_edge (edge_cache, src, label, flags)
1107 sbitmap *edge_cache;
1112 if (GET_CODE (label) != CODE_LABEL)
1115 /* If the label was never emitted, this insn is junk, but avoid a
1116 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1117 as a result of a syntax error and a diagnostic has already been
1120 if (INSN_UID (label) == 0)
1123 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1126 /* Create the edges generated by INSN in REGION. */
/* Adds an abnormal EH edge from SRC to every handler reachable from
   REGION.
   NOTE(review): the return type line, braces, the remaining parameter
   declarations, the `num'/`is_call' locals, and the loop header that
   decrements `num' around the make_label_edge call are missing from this
   listing.  */
1129 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1130 sbitmap *edge_cache;
1131 eh_nesting_info *eh_nest_info;
1136 handler_info **handler_list;
1139 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1140 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1143 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1144 EDGE_ABNORMAL | EDGE_EH | is_call);
1148 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1149 dangerous if we intend to move basic blocks around. Move such notes
1150 into the following block. */
1153 move_stray_eh_region_notes ()
/* With fewer than two blocks there is no between-block gap to scan.  */
1158 if (n_basic_blocks < 2)
/* Walk adjacent block pairs from the end toward the front; B1 always
   immediately precedes B2 in the block array.  */
1161 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1162 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1164 rtx insn, next, list = NULL_RTX;
1166 b1 = BASIC_BLOCK (i);
/* Scan the insns strictly between B1's end and B2's head.  */
1167 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1169 next = NEXT_INSN (insn);
1170 if (GET_CODE (insn) == NOTE
1171 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1172 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1174 /* Unlink from the insn chain. */
1175 NEXT_INSN (PREV_INSN (insn)) = next;
1176 PREV_INSN (next) = PREV_INSN (insn);
/* Collect the unlinked notes on LIST, chained through NEXT_INSN.  */
1179 NEXT_INSN (insn) = list;
/* No stray EH region notes between this pair of blocks.  */
1184 if (list == NULL_RTX)
1187 /* Find where to insert these things. */
/* Skip past a leading CODE_LABEL so the notes land inside B2.  */
1189 if (GET_CODE (insn) == CODE_LABEL)
1190 insn = NEXT_INSN (insn);
/* Re-insert the collected notes one at a time after INSN.  */
1194 next = NEXT_INSN (list);
1195 add_insn_after (list, insn);
1201 /* Recompute eh_beg/eh_end for each basic block. */
1204 record_active_eh_regions (f)
/* EH_LIST is a stack (an INSN_LIST) of the currently-open EH region
   begin notes; the head is the innermost active region.  */
1207 rtx insn, eh_list = NULL_RTX;
1209 basic_block bb = BASIC_BLOCK (0);
/* Single forward walk over the whole insn chain starting at F.  */
1211 for (insn = f; insn ; insn = NEXT_INSN (insn))
/* At a block head, record the innermost open region (or -1 if none).  */
1213 if (bb->head == insn)
1214 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1216 if (GET_CODE (insn) == NOTE)
1218 int kind = NOTE_LINE_NUMBER (insn);
1219 if (kind == NOTE_INSN_EH_REGION_BEG)
/* Region begin: push onto the stack.  */
1220 eh_list = alloc_INSN_LIST (insn, eh_list);
1221 else if (kind == NOTE_INSN_EH_REGION_END)
/* Region end: pop; T is the rest of the stack.  */
1223 rtx t = XEXP (eh_list, 1);
1224 free_INSN_LIST_node (eh_list);
/* At a block end, record the region active there and advance BB.  */
1229 if (bb->end == insn)
1231 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1233 if (i == n_basic_blocks)
1235 bb = BASIC_BLOCK (i);
1240 /* Identify critical edges and set the bits appropriately.
     A critical edge is one whose source has multiple successors AND
     whose destination has multiple predecessors; both tests appear
     below.  */
1243 mark_critical_edges ()
1245 int i, n = n_basic_blocks;
1248 /* We begin with the entry block. This is not terribly important now,
1249 but could be if a front end (Fortran) implemented alternate entry
1251 bb = ENTRY_BLOCK_PTR;
1258 /* (1) Critical edges must have a source with multiple successors. */
1259 if (bb->succ && bb->succ->succ_next)
1261 for (e = bb->succ; e ; e = e->succ_next)
1263 /* (2) Critical edges must have a destination with multiple
1264 predecessors. Note that we know there is at least one
1265 predecessor -- the edge we followed to get here. */
1266 if (e->dest->pred->pred_next)
1267 e->flags |= EDGE_CRITICAL;
/* Destination has a single predecessor: clear the bit.  */
1269 e->flags &= ~EDGE_CRITICAL;
/* Source has at most one successor: none of its edges are critical.  */
1274 for (e = bb->succ; e ; e = e->succ_next)
1275 e->flags &= ~EDGE_CRITICAL;
/* Advance to the next block by index.  */
1280 bb = BASIC_BLOCK (i);
1284 /* Split a (typically critical) edge. Return the new block.
1285 Abort on abnormal edges.
1287 ??? The code generally expects to be called on critical edges.
1288 The case of a block ending in an unconditional jump to a
1289 block with multiple predecessors is not handled optimally. */
1292 split_edge (edge_in)
1295 basic_block old_pred, bb, old_succ;
1300 /* Abnormal edges cannot be split. */
1301 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1304 old_pred = edge_in->src;
1305 old_succ = edge_in->dest;
1307 /* Remove the existing edge from the destination's pred list. */
1310 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1312 *pp = edge_in->pred_next;
1313 edge_in->pred_next = NULL;
1316 /* Create the new structures. */
/* The block lives on the function obstack; the edge is heap-allocated
   and zero-initialized by xcalloc.  */
1317 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1318 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1321 memset (bb, 0, sizeof (*bb));
1322 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1323 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1325 /* ??? This info is likely going to be out of date very soon. */
/* Seed the new block's liveness from the successor's live-at-start.  */
1326 if (old_succ->global_live_at_start)
1328 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1329 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1333 CLEAR_REG_SET (bb->global_live_at_start);
1334 CLEAR_REG_SET (bb->global_live_at_end);
/* Wire up the new block: its single successor edge EDGE_OUT falls
   through to OLD_SUCC with certainty (REG_BR_PROB_BASE).  */
1339 bb->succ = edge_out;
1342 edge_in->flags &= ~EDGE_CRITICAL;
1344 edge_out->pred_next = old_succ->pred;
1345 edge_out->succ_next = NULL;
1347 edge_out->dest = old_succ;
1348 edge_out->flags = EDGE_FALLTHRU;
1349 edge_out->probability = REG_BR_PROB_BASE;
1351 old_succ->pred = edge_out;
1353 /* Tricky case -- if there existed a fallthru into the successor
1354 (and we're not it) we must add a new unconditional jump around
1355 the new block we're actually interested in.
1357 Further, if that edge is critical, this means a second new basic
1358 block must be created to hold it. In order to simplify correct
1359 insn placement, do this before we touch the existing basic block
1360 ordering for the block we were really wanting. */
1361 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
/* Look for some other fallthru predecessor of OLD_SUCC.  */
1364 for (e = edge_out->pred_next; e ; e = e->pred_next)
1365 if (e->flags & EDGE_FALLTHRU)
1370 basic_block jump_block;
1373 if ((e->flags & EDGE_CRITICAL) == 0
1374 && e->src != ENTRY_BLOCK_PTR)
1376 /* Non critical -- we can simply add a jump to the end
1377 of the existing predecessor. */
1378 jump_block = e->src;
1382 /* We need a new block to hold the jump. The simplest
1383 way to do the bulk of the work here is to recursively
1385 jump_block = split_edge (e);
1386 e = jump_block->succ;
1389 /* Now add the jump insn ... */
1390 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1392 jump_block->end = pos;
1393 if (basic_block_for_insn)
1394 set_block_for_insn (pos, jump_block);
1395 emit_barrier_after (pos);
1397 /* ... let jump know that label is in use, ... */
1398 JUMP_LABEL (pos) = old_succ->head;
1399 ++LABEL_NUSES (old_succ->head);
1401 /* ... and clear fallthru on the outgoing edge. */
1402 e->flags &= ~EDGE_FALLTHRU;
1404 /* Continue splitting the interesting edge. */
1408 /* Place the new block just in front of the successor. */
/* Grow the block array by one and shift blocks up to make room.  */
1409 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1410 if (old_succ == EXIT_BLOCK_PTR)
1411 j = n_basic_blocks - 1;
1413 j = old_succ->index;
1414 for (i = n_basic_blocks - 1; i > j; --i)
1416 basic_block tmp = BASIC_BLOCK (i - 1);
1417 BASIC_BLOCK (i) = tmp;
1420 BASIC_BLOCK (i) = bb;
1423 /* Create the basic block note.
1425 Where we place the note can have a noticable impact on the generated
1426 code. Consider this cfg:
1437 If we need to insert an insn on the edge from block 0 to block 1,
1438 we want to ensure the instructions we insert are outside of any
1439 loop notes that physically sit between block 0 and block 1. Otherwise
1440 we confuse the loop optimizer into thinking the loop is a phony. */
1441 if (old_succ != EXIT_BLOCK_PTR
1442 && PREV_INSN (old_succ->head)
1443 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1444 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1445 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1446 PREV_INSN (old_succ->head));
1447 else if (old_succ != EXIT_BLOCK_PTR)
1448 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1450 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1451 NOTE_BASIC_BLOCK (bb_note) = bb;
1452 bb->head = bb->end = bb_note;
1454 /* Not quite simple -- for non-fallthru edges, we must adjust the
1455 predecessor's jump instruction to target our new block. */
1456 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1458 rtx tmp, insn = old_pred->end;
1459 rtx old_label = old_succ->head;
1460 rtx new_label = gen_label_rtx ();
1462 if (GET_CODE (insn) != JUMP_INSN)
1465 /* ??? Recognize a tablejump and adjust all matching cases. */
/* A tablejump is a jump to a label immediately followed by an
   ADDR_VEC/ADDR_DIFF_VEC dispatch table.  */
1466 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1467 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1468 && GET_CODE (tmp) == JUMP_INSN
1469 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1470 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1475 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1476 vec = XVEC (PATTERN (tmp), 0);
1478 vec = XVEC (PATTERN (tmp), 1);
/* Retarget every table entry that pointed at the old label,
   keeping the label use counts balanced.  */
1480 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1481 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1483 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1484 --LABEL_NUSES (old_label);
1485 ++LABEL_NUSES (new_label);
1488 /* Handle casesi dispatch insns */
1489 if ((tmp = single_set (insn)) != NULL
1490 && SET_DEST (tmp) == pc_rtx
1491 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1492 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1493 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1495 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1497 --LABEL_NUSES (old_label);
1498 ++LABEL_NUSES (new_label);
1503 /* This would have indicated an abnormal edge. */
1504 if (computed_jump_p (insn))
1507 /* A return instruction can't be redirected. */
1508 if (returnjump_p (insn))
1511 /* If the insn doesn't go where we think, we're confused. */
1512 if (JUMP_LABEL (insn) != old_label)
1515 redirect_jump (insn, new_label);
/* Give the new block its own label so the redirected jump has a
   target; it becomes the block's new head.  */
1518 emit_label_before (new_label, bb_note);
1519 bb->head = new_label;
1525 /* Queue instructions for insertion on an edge between two basic blocks.
1526 The new instructions and basic blocks (if any) will not appear in the
1527 CFG until commit_edge_insertions is called. */
1530 insert_insn_on_edge (pattern, e)
1534 /* We cannot insert instructions on an abnormal critical edge.
1535 It will be easier to find the culprit if we die now. */
1536 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1537 == (EDGE_ABNORMAL|EDGE_CRITICAL))
/* Append PATTERN to the edge's pending sequence, starting a fresh
   sequence if this is the first insertion on E.  */
1540 if (e->insns == NULL_RTX)
1543 push_to_sequence (e->insns);
1545 emit_insn (pattern);
/* Save the (possibly grown) sequence back on the edge.  */
1547 e->insns = get_insns ();
1551 /* Update the CFG for the instructions queued on edge E. */
1554 commit_one_edge_insertion (e)
1557 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1560 /* Pull the insns off the edge now since the edge might go away. */
1562 e->insns = NULL_RTX;
1564 /* Figure out where to put these things. If the destination has
1565 one predecessor, insert there. Except for the exit block. */
1566 if (e->dest->pred->pred_next == NULL
1567 && e->dest != EXIT_BLOCK_PTR)
1571 /* Get the location correct wrt a code label, and "nice" wrt
1572 a basic block note, and before everything else. */
/* Skip past the block's CODE_LABEL and BASIC_BLOCK note so the queued
   insns land inside the destination block proper.  */
1574 if (GET_CODE (tmp) == CODE_LABEL)
1575 tmp = NEXT_INSN (tmp);
1576 if (GET_CODE (tmp) == NOTE
1577 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1578 tmp = NEXT_INSN (tmp);
1579 if (tmp == bb->head)
1582 after = PREV_INSN (tmp);
1585 /* If the source has one successor and the edge is not abnormal,
1586 insert there. Except for the entry block. */
1587 else if ((e->flags & EDGE_ABNORMAL) == 0
1588 && e->src->succ->succ_next == NULL
1589 && e->src != ENTRY_BLOCK_PTR)
1592 /* It is possible to have a non-simple jump here. Consider a target
1593 where some forms of unconditional jumps clobber a register. This
1594 happens on the fr30 for example.
1596 We know this block has a single successor, so we can just emit
1597 the queued insns before the jump. */
1598 if (GET_CODE (bb->end) == JUMP_INSN)
1604 /* We'd better be fallthru, or we've lost track of what's what. */
1605 if ((e->flags & EDGE_FALLTHRU) == 0)
1612 /* Otherwise we must split the edge. */
/* split_edge creates a fresh block on E; insert into that block.  */
1615 bb = split_edge (e);
1619 /* Now that we've found the spot, do the insertion. */
1621 /* Set the new block number for these insns, if structure is allocated. */
1622 if (basic_block_for_insn)
1625 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1626 set_block_for_insn (i, bb);
/* Emit either before BEFORE or after AFTER, fixing up the block's
   head/end pointers when the insertion point was a boundary.  */
1631 emit_insns_before (insns, before);
1632 if (before == bb->head)
1637 rtx last = emit_insns_after (insns, after);
1638 if (after == bb->end)
1642 if (GET_CODE (last) == JUMP_INSN)
1644 if (returnjump_p (last))
1646 /* ??? Remove all outgoing edges from BB and add one
1647 for EXIT. This is not currently a problem because
1648 this only happens for the (single) epilogue, which
1649 already has a fallthru edge to EXIT. */
/* A return insn only makes sense on a lone fallthru edge to EXIT;
   anything else means the CFG bookkeeping went wrong.  */
1652 if (e->dest != EXIT_BLOCK_PTR
1653 || e->succ_next != NULL
1654 || (e->flags & EDGE_FALLTHRU) == 0)
1656 e->flags &= ~EDGE_FALLTHRU;
1658 emit_barrier_after (last);
1667 /* Update the CFG for all queued instructions. */
1670 commit_edge_insertions ()
1675 #ifdef ENABLE_CHECKING
1676 verify_flow_info ();
/* Walk every block starting from ENTRY; NEXT is captured first because
   committing an insertion may restructure the successor list.  */
1680 bb = ENTRY_BLOCK_PTR;
1685 for (e = bb->succ; e ; e = next)
1687 next = e->succ_next;
1689 commit_one_edge_insertion (e);
/* Advance to the next block by index until all blocks are done.  */
1692 if (++i >= n_basic_blocks)
1694 bb = BASIC_BLOCK (i);
1698 /* Delete all unreachable basic blocks.
     Standard worklist reachability: mark everything reachable from
     ENTRY via bb->aux, then delete every unmarked block.  */
1701 delete_unreachable_blocks ()
1703 basic_block *worklist, *tos;
1704 int deleted_handler;
/* TOS is the worklist stack pointer; at most N blocks can be queued.  */
1709 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1711 /* Use basic_block->aux as a marker. Clear them all. */
1713 for (i = 0; i < n; ++i)
1714 BASIC_BLOCK (i)->aux = NULL;
1716 /* Add our starting points to the worklist. Almost always there will
1717 be only one. It isn't inconcievable that we might one day directly
1718 support Fortran alternate entry points. */
1720 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1724 /* Mark the block with a handy non-null value. */
1728 /* Iterate: find everything reachable from what we've already seen. */
1730 while (tos != worklist)
1732 basic_block b = *--tos;
1734 for (e = b->succ; e ; e = e->succ_next)
1742 /* Delete all unreachable basic blocks. Count down so that we don't
1743 interfere with the block renumbering that happens in delete_block. */
1745 deleted_handler = 0;
1747 for (i = n - 1; i >= 0; --i)
1749 basic_block b = BASIC_BLOCK (i);
1752 /* This block was found. Tidy up the mark. */
/* delete_block returns nonzero when it removed an EH handler.  */
1755 deleted_handler |= delete_block (b);
1758 tidy_fallthru_edges ();
1760 /* If we deleted an exception handler, we may have EH region begin/end
1761 blocks to remove as well. */
1762 if (deleted_handler)
1763 delete_eh_regions ();
1768 /* Find EH regions for which there is no longer a handler, and delete them. */
1771 delete_eh_regions ()
/* Refresh rethrow bookkeeping before testing rethrow_used below.  */
1775 update_rethrow_references ();
1777 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1778 if (GET_CODE (insn) == NOTE)
1780 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1781 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1783 int num = NOTE_EH_HANDLER (insn);
1784 /* A NULL handler indicates a region is no longer needed,
1785 as long as its rethrow label isn't used. */
1786 if (get_first_handler (num) == NULL && ! rethrow_used (num))
/* Degrade the note to a deleted note rather than unlinking it.  */
1788 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1789 NOTE_SOURCE_FILE (insn) = 0;
1795 /* Return true if NOTE is not one of the ones that must be kept paired,
1796 so that we may simply delete them. */
1799 can_delete_note_p (note)
/* Only already-deleted notes and basic-block notes are safe to drop;
   paired notes (e.g. EH region begin/end) must survive.  */
1802 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1803 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1806 /* Unlink a chain of insns between START and FINISH, leaving notes
1807 that must be paired. */
1810 flow_delete_insn_chain (start, finish)
1813 /* Unchain the insns one by one. It would be quicker to delete all
1814 of these with a single unchaining, rather than one at a time, but
1815 we need to keep the NOTE's. */
1821 next = NEXT_INSN (start);
/* Notes that must stay paired and undeletable labels are skipped;
   everything else is physically removed via flow_delete_insn.  */
1822 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1824 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1827 next = flow_delete_insn (start);
/* FINISH is inclusive: stop after processing it.  */
1829 if (start == finish)
1835 /* Delete the insns in a (non-live) block. We physically delete every
1836 non-deleted-note insn, and update the flow graph appropriately.
1838 Return nonzero if we deleted an exception handler. */
1840 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1841 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1847 int deleted_handler = 0;
1850 /* If the head of this block is a CODE_LABEL, then it might be the
1851 label for an exception handler which can't be reached.
1853 We need to remove the label from the exception_handler_label list
1854 and remove the associated NOTE_INSN_EH_REGION_BEG and
1855 NOTE_INSN_EH_REGION_END notes. */
/* Warn the user about code that can never execute.  */
1859 never_reached_warning (insn);
1861 if (GET_CODE (insn) == CODE_LABEL)
/* PREV trails the list pointer so a match can be spliced out.  */
1863 rtx x, *prev = &exception_handler_labels;
1865 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1867 if (XEXP (x, 0) == insn)
1869 /* Found a match, splice this label out of the EH label list. */
1870 *prev = XEXP (x, 1);
1871 XEXP (x, 1) = NULL_RTX;
1872 XEXP (x, 0) = NULL_RTX;
1874 /* Remove the handler from all regions */
1875 remove_handler (insn);
1876 deleted_handler = 1;
1879 prev = &XEXP (x, 1);
1882 /* This label may be referenced by code solely for its value, or
1883 referenced by static data, or something. We have determined
1884 that it is not reachable, but cannot delete the label itself.
1885 Save code space and continue to delete the balance of the block,
1886 along with properly updating the cfg. */
1887 if (!can_delete_label_p (insn))
1889 /* If we've only got one of these, skip the whole deleting
1892 goto no_delete_insns;
1893 insn = NEXT_INSN (insn);
1897 /* Include any jump table following the basic block. */
/* A tablejump's dispatch table lives just past the block; delete it
   along with the block's insns.  */
1899 if (GET_CODE (end) == JUMP_INSN
1900 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1901 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1902 && GET_CODE (tmp) == JUMP_INSN
1903 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1904 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1907 /* Include any barrier that may follow the basic block. */
1908 tmp = next_nonnote_insn (end);
1909 if (tmp && GET_CODE (tmp) == BARRIER)
1912 /* Selectively delete the entire chain. */
1913 flow_delete_insn_chain (insn, end);
1917 /* Remove the edges into and out of this block. Note that there may
1918 indeed be edges in, if we are removing an unreachable loop. */
/* For each edge, unlink it from the other endpoint's list as well.  */
1922 for (e = b->pred; e ; e = next)
1924 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1927 next = e->pred_next;
1931 for (e = b->succ; e ; e = next)
1933 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1936 next = e->succ_next;
1945 /* Remove the basic block from the array, and compact behind it. */
1948 return deleted_handler;
1951 /* Remove block B from the basic block array and compact behind it. */
/* NOTE(review): the function signature line is elided in this view;
   judging from the comment it takes the block B to remove.  */
1957 int i, n = n_basic_blocks;
/* Shift every later block down one slot (reindexing happens in the
   elided body of the loop).  */
1959 for (i = b->index; i + 1 < n; ++i)
1961 basic_block x = BASIC_BLOCK (i + 1);
1962 BASIC_BLOCK (i) = x;
/* Shrink the varray's logical length to match.  */
1966 basic_block_info->num_elements--;
1970 /* Delete INSN by patching it out. Return the next insn. */
1973 flow_delete_insn (insn)
1976 rtx prev = PREV_INSN (insn);
1977 rtx next = NEXT_INSN (insn);
/* Detach INSN completely from the doubly-linked insn chain.  */
1980 PREV_INSN (insn) = NULL_RTX;
1981 NEXT_INSN (insn) = NULL_RTX;
1984 NEXT_INSN (prev) = next;
1986 PREV_INSN (next) = prev;
/* Deleting the last insn: update the chain's tail pointer.  */
1988 set_last_insn (prev);
/* A deleted label must also leave the nonlocal-goto handler list.  */
1990 if (GET_CODE (insn) == CODE_LABEL)
1991 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1993 /* If deleting a jump, decrement the use count of the label. Deleting
1994 the label itself should happen in the normal course of block merging. */
1995 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1996 LABEL_NUSES (JUMP_LABEL (insn))--;
1998 /* Also if deleting an insn that references a label. */
1999 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2000 LABEL_NUSES (XEXP (note, 0))--;
2005 /* True if a given label can be deleted. */
2008 can_delete_label_p (label)
/* Explicitly preserved labels can never be deleted.  */
2013 if (LABEL_PRESERVE_P (label))
/* Nor can labels referenced from forced_labels, label_value_list, or
   the exception handler label list.  */
2016 for (x = forced_labels; x ; x = XEXP (x, 1))
2017 if (label == XEXP (x, 0))
2019 for (x = label_value_list; x ; x = XEXP (x, 1))
2020 if (label == XEXP (x, 0))
2022 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2023 if (label == XEXP (x, 0))
2026 /* User declared labels must be preserved. */
2027 if (LABEL_NAME (label) != 0)
2033 /* Blocks A and B are to be merged into a single block A. The insns
2034 are already contiguous, hence `nomove'. */
2037 merge_blocks_nomove (a, b)
2041 rtx b_head, b_end, a_end;
/* DEL_FIRST..DEL_LAST delimits the insns to be deleted (B's label,
   its basic-block note, and A's trailing jump).  */
2042 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2045 /* If there was a CODE_LABEL beginning B, delete it. */
2048 if (GET_CODE (b_head) == CODE_LABEL)
2050 /* Detect basic blocks with nothing but a label. This can happen
2051 in particular at the end of a function. */
2052 if (b_head == b_end)
2054 del_first = del_last = b_head;
2055 b_head = NEXT_INSN (b_head);
2058 /* Delete the basic block note. */
2059 if (GET_CODE (b_head) == NOTE
2060 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2062 if (b_head == b_end)
2067 b_head = NEXT_INSN (b_head);
2070 /* If there was a jump out of A, delete it. */
2072 if (GET_CODE (a_end) == JUMP_INSN)
2076 prev = prev_nonnote_insn (a_end);
2083 /* If this was a conditional jump, we need to also delete
2084 the insn that set cc0. */
2085 if (prev && sets_cc0_p (prev))
2088 prev = prev_nonnote_insn (prev);
2098 /* Delete everything marked above as well as crap that might be
2099 hanging out between the two blocks. */
2100 flow_delete_insn_chain (del_first, del_last);
2102 /* Normally there should only be one successor of A and that is B, but
2103 partway though the merge of blocks for conditional_execution we'll
2104 be merging a TEST block with THEN and ELSE successors. Free the
2105 whole lot of them and hope the caller knows what they're doing. */
2107 remove_edge (a->succ);
2109 /* Adjust the edges out of B for the new owner. */
2110 for (e = b->succ; e ; e = e->succ_next)
2114 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2115 b->pred = b->succ = NULL;
2117 /* Reassociate the insns of B with A. */
2120 if (basic_block_for_insn)
/* Walk B's insns and point their block-of-insn records at A.  */
2122 BLOCK_FOR_INSN (b_head) = a;
2123 while (b_head != b_end)
2125 b_head = NEXT_INSN (b_head);
2126 BLOCK_FOR_INSN (b_head) = a;
2136 /* Blocks A and B are to be merged into a single block. A has no incoming
2137 fallthru edge, so it can be moved before B without adding or modifying
2138 any jumps (aside from the jump from A to B). */
2141 merge_blocks_move_predecessor_nojumps (a, b)
2144 rtx start, end, barrier;
2150 /* We want to delete the BARRIER after the end of the insns we are
2151 going to move. If we don't find a BARRIER, then do nothing. This
2152 can happen in some cases if we have labels we can not delete.
2154 Similarly, do nothing if we can not delete the label at the start
2155 of the target block. */
2156 barrier = next_nonnote_insn (end);
2157 if (GET_CODE (barrier) != BARRIER
2158 || (GET_CODE (b->head) == CODE_LABEL
2159 && ! can_delete_label_p (b->head)))
2162 flow_delete_insn (barrier);
2164 /* Move block and loop notes out of the chain so that we do not
2165 disturb their order.
2167 ??? A better solution would be to squeeze out all the non-nested notes
2168 and adjust the block trees appropriately. Even better would be to have
2169 a tighter connection between block trees and rtl so that this is not
2171 start = squeeze_notes (start, end);
2173 /* Scramble the insn chain. */
/* Physically move A's insns so they immediately precede B's head,
   unless they are already in place.  */
2174 if (end != PREV_INSN (b->head))
2175 reorder_insns (start, end, PREV_INSN (b->head))
2179 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2180 a->index, b->index);
2183 /* Swap the records for the two blocks around. Although we are deleting B,
2184 A is now where B was and we want to compact the BB array from where
2186 BASIC_BLOCK(a->index) = b;
2187 BASIC_BLOCK(b->index) = a;
2189 a->index = b->index;
2192 /* Now blocks A and B are contiguous. Merge them. */
2193 merge_blocks_nomove (a, b);
2198 /* Blocks A and B are to be merged into a single block. B has no outgoing
2199 fallthru edge, so it can be moved after A without adding or modifying
2200 any jumps (aside from the jump from A to B). */
2203 merge_blocks_move_successor_nojumps (a, b)
2206 rtx start, end, barrier;
2211 /* We want to delete the BARRIER after the end of the insns we are
2212 going to move. If we don't find a BARRIER, then do nothing. This
2213 can happen in some cases if we have labels we can not delete.
2215 Similarly, do nothing if we can not delete the label at the start
2216 of the target block. */
2217 barrier = next_nonnote_insn (end);
2218 if (GET_CODE (barrier) != BARRIER
2219 || (GET_CODE (b->head) == CODE_LABEL
2220 && ! can_delete_label_p (b->head)))
2223 flow_delete_insn (barrier);
2225 /* Move block and loop notes out of the chain so that we do not
2226 disturb their order.
2228 ??? A better solution would be to squeeze out all the non-nested notes
2229 and adjust the block trees appropriately. Even better would be to have
2230 a tighter connection between block trees and rtl so that this is not
2232 start = squeeze_notes (start, end);
2234 /* Scramble the insn chain. */
/* Physically move B's insns so they immediately follow A's end.  */
2235 reorder_insns (start, end, a->end);
2237 /* Now blocks A and B are contiguous. Merge them. */
2238 merge_blocks_nomove (a, b);
2242 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2243 b->index, a->index);
2249 /* Attempt to merge basic blocks that are potentially non-adjacent.
2250 Return true iff the attempt succeeded. */
2253 merge_blocks (e, b, c)
2257 /* If B has a fallthru edge to C, no need to move anything. */
2258 if (e->flags & EDGE_FALLTHRU)
2260 /* If a label still appears somewhere and we cannot delete the label,
2261 then we cannot merge the blocks. The edge was tidied already. */
/* Scan the insns between B's end and C's head for an undeletable
   label that would block the merge.  */
2263 rtx insn, stop = NEXT_INSN (c->head);
2264 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2265 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2268 merge_blocks_nomove (b, c);
2272 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2273 b->index, c->index);
2282 int c_has_outgoing_fallthru;
2283 int b_has_incoming_fallthru;
2285 /* We must make sure to not munge nesting of exception regions,
2286 lexical blocks, and loop notes.
2288 The first is taken care of by requiring that the active eh
2289 region at the end of one block always matches the active eh
2290 region at the beginning of the next block.
2292 The later two are taken care of by squeezing out all the notes. */
2294 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2295 executed and we may want to treat blocks which have two out
2296 edges, one normal, one abnormal as only having one edge for
2297 block merging purposes. */
/* Determine which of the two movement strategies is viable: a block
   with a fallthru on the relevant side cannot be moved freely.  */
2299 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2300 if (tmp_edge->flags & EDGE_FALLTHRU)
2302 c_has_outgoing_fallthru = (tmp_edge != NULL);
2304 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2305 if (tmp_edge->flags & EDGE_FALLTHRU)
2307 b_has_incoming_fallthru = (tmp_edge != NULL);
2309 /* If B does not have an incoming fallthru, and the exception regions
2310 match, then it can be moved immediately before C without introducing
2313 C can not be the first block, so we do not have to worry about
2314 accessing a non-existent block. */
2315 d = BASIC_BLOCK (c->index - 1);
2316 if (! b_has_incoming_fallthru
2317 && d->eh_end == b->eh_beg
2318 && b->eh_end == c->eh_beg)
2319 return merge_blocks_move_predecessor_nojumps (b, c);
2321 /* Otherwise, we're going to try to move C after B. Make sure the
2322 exception regions match.
2324 If B is the last basic block, then we must not try to access the
2325 block structure for block B + 1. Luckily in that case we do not
2326 need to worry about matching exception regions. */
2327 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2328 if (b->eh_end == c->eh_beg
2329 && (d == NULL || c->eh_end == d->eh_beg)
2331 /* If C does not have an outgoing fallthru, then it can be moved
2332 immediately after B without introducing or modifying jumps. */
2333 if (! c_has_outgoing_fallthru)
2334 return merge_blocks_move_successor_nojumps (b, c);
2336 /* Otherwise, we'll need to insert an extra jump, and possibly
2337 a new block to contain it. */
2338 /* ??? Not implemented yet. */
2345 /* Top level driver for merge_blocks. */
/* NOTE(review): the function signature line is elided in this view.  */
2352 /* Attempt to merge blocks as made possible by edge removal. If a block
2353 has only one successor, and the successor has only one predecessor,
2354 they may be combined. */
/* I is deliberately not advanced in the for-header: after a successful
   merge the same index is re-examined (blocks shift down).  */
2356 for (i = 0; i < n_basic_blocks; )
2358 basic_block c, b = BASIC_BLOCK (i);
2361 /* A loop because chains of blocks might be combineable. */
2362 while ((s = b->succ) != NULL
2363 && s->succ_next == NULL
2364 && (s->flags & EDGE_EH) == 0
2365 && (c = s->dest) != EXIT_BLOCK_PTR
2366 && c->pred->pred_next == NULL
2367 /* If the jump insn has side effects, we can't kill the edge. */
2368 && (GET_CODE (b->end) != JUMP_INSN
2369 || onlyjump_p (b->end))
2370 && merge_blocks (s, b, c))
2373 /* Don't get confused by the index shift caused by deleting blocks. */
2378 /* The given edge should potentially be a fallthru edge. If that is in
2379 fact true, delete the jump and barriers that are in the way. */
2382 tidy_fallthru_edge (e, b, c)
2388 /* ??? In a late-running flow pass, other folks may have deleted basic
2389 blocks by nopping out blocks, leaving multiple BARRIERs between here
2390 and the target label. They ought to be chastized and fixed.
2392 We can also wind up with a sequence of undeletable labels between
2393 one block and the next.
2395 So search through a sequence of barriers, labels, and notes for
2396 the head of block C and assert that we really do fall through. */
2398 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2401 /* Remove what will soon cease being the jump insn from the source block.
2402 If block B consisted only of this single jump, turn it into a deleted
2405 if (GET_CODE (q) == JUMP_INSN)
2408 /* If this was a conditional jump, we need to also delete
2409 the insn that set cc0. */
2410 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
/* Degrade the jump to a deleted note rather than unlinking it.  */
2417 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2418 NOTE_SOURCE_FILE (q) = 0;
/* B now ends just before the (former) jump.  */
2421 b->end = q = PREV_INSN (q);
2424 /* Selectively unlink the sequence. */
/* Delete the barriers/labels/notes sitting between B and C.  */
2425 if (q != PREV_INSN (c->head))
2426 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
/* The edge is now a genuine fallthru.  */
2428 e->flags |= EDGE_FALLTHRU;
2431 /* Fix up edges that now fall through, or rather should now fall through
2432 but previously required a jump around now deleted blocks. Simplify
2433 the search by only examining blocks numerically adjacent, since this
2434 is how find_basic_blocks created them. */
2437 tidy_fallthru_edges ()
/* Examine each pair of numerically adjacent blocks (B, C).  */
2441 for (i = 1; i < n_basic_blocks; ++i)
2443 basic_block b = BASIC_BLOCK (i - 1);
2444 basic_block c = BASIC_BLOCK (i);
2447 /* We care about simple conditional or unconditional jumps with
2450 If we had a conditional branch to the next instruction when
2451 find_basic_blocks was called, then there will only be one
2452 out edge for the block which ended with the conditional
2453 branch (since we do not create duplicate edges).
2455 Furthermore, the edge will be marked as a fallthru because we
2456 merge the flags for the duplicate edges. So we do not want to
2457 check that the edge is not a FALLTHRU edge. */
2458 if ((s = b->succ) != NULL
2459 && s->succ_next == NULL
2461 /* If the jump insn has side effects, we can't tidy the edge. */
2462 && (GET_CODE (b->end) != JUMP_INSN
2463 || onlyjump_p (b->end)))
2464 tidy_fallthru_edge (s, b, c);
2468 /* Discover and record the loop depth at the head of each basic block. */
2471 calculate_loop_depth (dump)
2476 /* The loop infrastructure does the real job for us. */
2477 flow_loops_find (&loops);
/* Optionally dump the discovered loops, then release them; only the
   per-block depth information recorded by flow_loops_find survives.  */
2480 flow_loops_dump (&loops, dump, 0);
2482 flow_loops_free (&loops);
2485 /* Perform data flow analysis.
2486 F is the first insn of the function and NREGS the number of register numbers
/* FILE, when non-null, receives a dump of the computed flow information.
   REMOVE_DEAD_CODE nonzero requests dead-code elimination as part of
   the analysis.  */
2490 life_analysis (f, nregs, file, remove_dead_code)
2494 int remove_dead_code;
2496 #ifdef ELIMINABLE_REGS
/* Target-supplied table of register eliminations; the "from" members
   are folded into elim_reg_set below.  */
2498 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2503 /* Dead code elimination changes basic block structure and therefore
2504 breaks the SSA phi representation. Particularly, a phi node
2505 can have an alternative value for each incoming block, referenced
2506 by the block number. Removing dead code can bump entire blocks
2507 and therefore cause blocks to be renumbered, invalidating the
2508 numbering of phi alternatives. */
2509 if (remove_dead_code && in_ssa_form)
2512 /* Record which registers will be eliminated. We use this in
2515 CLEAR_HARD_REG_SET (elim_reg_set);
2517 #ifdef ELIMINABLE_REGS
2518 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2519 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from)
2521 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2524 /* We want alias analysis information for local dead store elimination. */
2525 init_alias_analysis ();
/* Base property flags; PROP_SCAN/KILL_DEAD_CODE are presumably ORed in
   between these lines (not visible here) and stripped just below when
   dead-code removal was not requested.  */
2528 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2532 if (! remove_dead_code)
2533 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2536 /* The post-reload life analysis must have (on a global basis) the same
2537 registers live as was computed by reload itself; otherwise eliminations,
2538 offsets and such may be incorrect.
2540 Reload will mark some registers as live even though they do not
2541 appear in the rtl. */
2542 if (reload_completed)
2543 flags &= ~PROP_REG_INFO;
2547 /* Always remove no-op moves. Do this before other processing so
2548 that we don't have to keep re-scanning them. */
2549 delete_noop_moves (f);
2551 /* Some targets can emit simpler epilogues if they know that sp was
2552 not ever modified during the function. After reload, of course,
2553 we've already emitted the epilogue so there's no sense searching. */
2554 if (! reload_completed)
2555 notice_stack_pointer_modification (f);
2557 /* Allocate and zero out data structures that will record the
2558 data from lifetime analysis. */
2559 allocate_reg_life_data ();
2560 allocate_bb_life_data ();
2561 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2562 all_blocks = sbitmap_alloc (n_basic_blocks);
2563 sbitmap_ones (all_blocks);
2565 /* Find the set of registers live on function exit. */
2566 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2568 /* "Update" life info from zero. It'd be nice to begin the
2569 relaxation with just the exit and noreturn blocks, but that set
2570 is not immediately handy. */
2572 if (flags & PROP_REG_INFO)
2573 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2574 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
/* Clean up the scratch structures used for the relaxation.  */
2577 sbitmap_free (all_blocks);
2578 free (reg_next_use);
2579 reg_next_use = NULL;
2580 end_alias_analysis ();
/* NOTE(review): the call below is presumably guarded by `if (file)' on
   an elided line -- confirm before relying on a null FILE here.  */
2583 dump_flow_info (file);
2585 free_basic_block_vars (1);
2588 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2589 Search for REGNO. If found, abort if it is not wider than word_mode. */
/* PX points at the subexpression being walked; PREGNO points at the
   register number to look for (passed as void * by for_each_rtx).  */
2592 verify_wide_reg_1 (px, pregno)
2597 unsigned int regno = *(int *) pregno;
2599 if (GET_CODE (x) == REG && REGNO (x) == regno)
/* Found the register: a regset change for a word-sized-or-smaller reg
   during a local update means live info was corrupted.  */
2601 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2608 /* A subroutine of verify_local_live_at_start. Search through insns
2609 between HEAD and END looking for register REGNO. */
/* Walks the insns from HEAD to END, applying verify_wide_reg_1 to each
   real insn's pattern; verify_wide_reg_1 aborts if the register it finds
   is not wider than a word.  Falling off the end (register never found)
   is also a fatal inconsistency.  */
2612 verify_wide_reg (regno, head, end)
2618 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2619 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno)
2623 head = NEXT_INSN (head);
2626 /* We didn't find the register at all. Something's way screwy. */
2630 /* A subroutine of update_life_info. Verify that there are no untoward
2631 changes in live_at_start during a local update. */
/* NEW_LIVE_AT_START is the recomputed live-at-start set for BB; it is
   compared (and destructively XORed) against the recorded
   bb->global_live_at_start.  Any disagreement aborts, except that a
   register newly live is tolerated if it is wider than a word (see the
   multi-word-subreg caveat documented on update_life_info).  */
2634 verify_local_live_at_start (new_live_at_start, bb)
2635 regset new_live_at_start;
2638 if (reload_completed)
2640 /* After reload, there are no pseudos, nor subregs of multi-word
2641 registers. The regsets should exactly match. */
2642 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2649 /* Find the set of changed registers. */
2650 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2652 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2654 /* No registers should die. */
2655 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2657 /* Verify that the now-live register is wider than word_mode. */
2658 verify_wide_reg (i, bb->head, bb->end);
2663 /* Updates life information starting with the basic blocks set in BLOCKS.
2665 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2666 expecting local modifications to basic blocks. If we find extra
2667 registers live at the beginning of a block, then we either killed
2668 useful data, or we have a broken split that wants data not provided.
2669 If we find registers removed from live_at_start, that means we have
2670 a broken peephole that is killing a register it shouldn't.
2672 ??? This is not true in one situation -- when a pre-reload splitter
2673 generates subregs of a multi-word pseudo, current life analysis will
2674 lose the kill. So we _can_ have a pseudo go live. How irritating.
2676 BLOCK_FOR_INSN is assumed to be correct.
2678 PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
2679 up reg_next_use. Including PROP_REG_INFO does not properly refresh
2680 regs_ever_live unless the caller resets it to zero. */
2683 update_life_info (blocks, extent, prop_flags)
2685 enum update_life_extent extent;
/* TMP is a scratch regset used to re-derive each block's live_at_start
   without clobbering the recorded live_at_end.  */
2689 regset_head tmp_head;
2692 tmp = INITIALIZE_REG_SET (tmp_head);
2694 /* For a global update, we go through the relaxation process again. */
2695 if (extent != UPDATE_LIFE_LOCAL)
2697 calculate_global_regs_live (blocks, blocks,
2698 prop_flags & PROP_SCAN_DEAD_CODE);
2700 /* If asked, remove notes from the blocks we'll update. */
2701 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2702 count_or_remove_death_notes (blocks, 1);
/* Re-run the local (per-insn) propagation for every requested block,
   regenerating death notes and related info.  */
2705 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2707 basic_block bb = BASIC_BLOCK (i);
2709 COPY_REG_SET (tmp, bb->global_live_at_end);
2710 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2712 if (extent == UPDATE_LIFE_LOCAL)
2713 verify_local_live_at_start (tmp, bb);
2718 if (prop_flags & PROP_REG_INFO)
2720 /* The only pseudos that are live at the beginning of the function
2721 are those that were not set anywhere in the function. local-alloc
2722 doesn't know how to handle these correctly, so mark them as not
2723 local to any one basic block. */
2724 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2725 FIRST_PSEUDO_REGISTER, i,
2726 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2728 /* We have a problem with any pseudoreg that lives across the setjmp.
2729 ANSI says that if a user variable does not change in value between
2730 the setjmp and the longjmp, then the longjmp preserves it. This
2731 includes longjmp from a place where the pseudo appears dead.
2732 (In principle, the value still exists if it is in scope.)
2733 If the pseudo goes in a hard reg, some other value may occupy
2734 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2735 Conclusion: such a pseudo must not go in a hard reg. */
2736 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2737 FIRST_PSEUDO_REGISTER, i,
2739 if (regno_reg_rtx[i] != 0)
/* REG_LIVE_LENGTH == -1 tells the allocator this pseudo
   must never be put in a hard register.  */
2741 REG_LIVE_LENGTH (i) = -1;
2742 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2748 /* Free the variables allocated by find_basic_blocks.
2750 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2753 free_basic_block_vars (keep_head_end_p)
2754 int keep_head_end_p;
/* The insn -> block map is always released.  */
2756 if (basic_block_for_insn)
2758 VARRAY_FREE (basic_block_for_insn);
2759 basic_block_for_insn = NULL;
2762 if (! keep_head_end_p)
2765 VARRAY_FREE (basic_block_info);
/* Detach the fake entry/exit blocks from data that is (or is about
   to be) freed, so nothing dangles.  */
2768 ENTRY_BLOCK_PTR->aux = NULL;
2769 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2770 EXIT_BLOCK_PTR->aux = NULL;
2771 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2775 /* Return nonzero if the destination of SET equals the source. */
/* Peels matching SUBREGs off both sides (requiring equal subword
   offsets), then requires both sides to be the same register.  */
2780 rtx src = SET_SRC (set);
2781 rtx dst = SET_DEST (set);
2783 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
/* Different words of the same register are NOT a no-op move.  */
2785 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2787 src = SUBREG_REG (src);
2788 dst = SUBREG_REG (dst);
2791 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2792 && REGNO (src) == REGNO (dst));
2795 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2801 rtx pat = PATTERN (insn);
2803 /* Insns carrying these notes are useful later on. */
2804 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2807 if (GET_CODE (pat) == SET && set_noop_p (pat))
2810 if (GET_CODE (pat) == PARALLEL)
2813 /* If nothing but SETs of registers to themselves,
2814 this insn can also be deleted. */
2815 for (i = 0; i < XVECLEN (pat, 0); i++)
2817 rtx tem = XVECEXP (pat, 0, i);
/* USE and CLOBBER elements do not disqualify the insn.  */
2819 if (GET_CODE (tem) == USE
2820 || GET_CODE (tem) == CLOBBER)
2823 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2832 /* Delete any insns that copy a register to itself. */
/* F is the first insn of the function.  Rather than unlinking, each
   no-op move is turned into a NOTE_INSN_DELETED note in place, which
   is safe even for the first/last insn of a basic block.  */
2835 delete_noop_moves (f)
2839 for (insn = f; insn; insn = NEXT_INSN (insn))
2841 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2843 PUT_CODE (insn, NOTE);
2844 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2845 NOTE_SOURCE_FILE (insn) = 0;
2850 /* Determine if the stack pointer is constant over the life of the function.
2851 Only useful before prologues have been emitted. */
/* note_stores callback: X is the location being stored to.  Any store
   to the stack pointer, or any pre/post inc/dec memory reference whose
   address is the stack pointer (i.e. a push/pop), clears the
   current_function_sp_is_unchanging flag.  */
2854 notice_stack_pointer_modification_1 (x, pat, data)
2856 rtx pat ATTRIBUTE_UNUSED;
2857 void *data ATTRIBUTE_UNUSED;
2859 if (x == stack_pointer_rtx
2860 /* The stack pointer is only modified indirectly as the result
2861 of a push until later in flow. See the comments in rtl.texi
2862 regarding Embedded Side-Effects on Addresses. */
2863 || (GET_CODE (x) == MEM
2864 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2865 || GET_CODE (XEXP (x, 0)) == PRE_INC
2866 || GET_CODE (XEXP (x, 0)) == POST_DEC
2867 || GET_CODE (XEXP (x, 0)) == POST_INC)
2868 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2869 current_function_sp_is_unchanging = 0;
/* Scan all insns from F onward and clear
   current_function_sp_is_unchanging at the first store that modifies
   the stack pointer (including pushes/pops).  */
2873 notice_stack_pointer_modification (f)
2878 /* Assume that the stack pointer is unchanging if alloca hasn't
2880 current_function_sp_is_unchanging = !current_function_calls_alloca;
/* If alloca already ruled it out, there is nothing left to learn.  */
2881 if (! current_function_sp_is_unchanging)
2884 for (insn = f; insn; insn = NEXT_INSN (insn))
2886 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2888 /* Check if insn modifies the stack pointer. */
2889 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
/* Once the flag is cleared there is no point scanning further.  */
2891 if (! current_function_sp_is_unchanging)
2897 /* Mark a register in SET. Hard registers in large modes get all
2898 of their component registers set as well. */
/* REG is a REG rtx; XSET is really a regset passed as void *.  */
2900 mark_reg (reg, xset)
2904 regset set = (regset) xset;
2905 int regno = REGNO (reg);
/* BLKmode has no meaningful register count.  */
2907 if (GET_MODE (reg) == BLKmode)
2910 SET_REGNO_REG_SET (set, regno);
2911 if (regno < FIRST_PSEUDO_REGISTER)
/* Also mark every hard register the mode occupies beyond the first.  */
2913 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2915 SET_REGNO_REG_SET (set, regno + n);
2919 /* Mark those regs which are needed at the end of the function as live
2920 at the end of the last basic block. */
/* SET is the regset to populate (normally the exit block's
   global_live_at_start).  */
2922 mark_regs_live_at_end (set)
2927 /* If exiting needs the right stack value, consider the stack pointer
2928 live at the end of the function. */
2929 if ((HAVE_epilogue && reload_completed)
2930 || ! EXIT_IGNORE_STACK
2931 || (! FRAME_POINTER_REQUIRED
2932 && ! current_function_calls_alloca
2933 && flag_omit_frame_pointer)
2934 || current_function_sp_is_unchanging)
2936 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2939 /* Mark the frame pointer if needed at the end of the function. If
2940 we end up eliminating it, it will be removed from the live list
2941 of each basic block by reload. */
2943 if (! reload_completed || frame_pointer_needed)
2945 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2946 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2947 /* If they are different, also mark the hard frame pointer as live */
2948 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2952 #ifdef PIC_OFFSET_TABLE_REGNUM
2953 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2954 /* Many architectures have a GP register even without flag_pic.
2955 Assume the pic register is not in use, or will be handled by
2956 other means, if it is not fixed. */
2957 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2958 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2962 /* Mark all global registers, and all registers used by the epilogue
2963 as being live at the end of the function since they may be
2964 referenced by our caller. */
2965 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2967 #ifdef EPILOGUE_USES
2968 || EPILOGUE_USES (i)
2971 SET_REGNO_REG_SET (set, i);
2973 /* Mark all call-saved registers that we actually used. */
2974 if (HAVE_epilogue && reload_completed)
2976 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2977 if (! call_used_regs[i] && regs_ever_live[i])
2978 SET_REGNO_REG_SET (set, i);
2981 /* Mark function return value. */
2982 diddle_return_value (mark_reg, set);
2985 /* Callback function for for_each_successor_phi. DATA is a regset.
2986 Sets the SRC_REGNO, the regno of the phi alternative for phi node
2987 INSN, in the regset. */
2990 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2991 rtx insn ATTRIBUTE_UNUSED;
2992 int dest_regno ATTRIBUTE_UNUSED;
/* Only the source regno matters here; it becomes live along the edge.  */
2996 regset live = (regset) data;
2997 SET_REGNO_REG_SET (live, src_regno);
3001 /* Propagate global life info around the graph of basic blocks. Begin
3002 considering blocks with their corresponding bit set in BLOCKS_IN.
3003 BLOCKS_OUT is set for every block that was changed. */
/* Classic backwards iterative dataflow: a worklist of blocks is
   processed until live-at-start sets reach a fixed point, requeueing
   predecessors whenever a block's live-at-start changes.  */
3006 calculate_global_regs_live (blocks_in, blocks_out, flags)
3007 sbitmap blocks_in, blocks_out;
3010 basic_block *queue, *qhead, *qtail, *qend;
3011 regset tmp, new_live_at_end;
3012 regset_head tmp_head;
3013 regset_head new_live_at_end_head;
3016 tmp = INITIALIZE_REG_SET (tmp_head);
3017 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3019 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3020 because the `head == tail' style test for an empty queue doesn't
3021 work with a full queue. */
3022 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3024 qhead = qend = queue + n_basic_blocks + 2;
3026 /* Clear out the garbage that might be hanging out in bb->aux. */
3027 for (i = n_basic_blocks - 1; i >= 0; --i)
3028 BASIC_BLOCK (i)->aux = NULL;
3030 /* Queue the blocks set in the initial mask. Do this in reverse block
3031 number order so that we are more likely for the first round to do
3032 useful work. We use AUX non-null to flag that the block is queued. */
3033 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3035 basic_block bb = BASIC_BLOCK (i);
3040 sbitmap_zero (blocks_out);
/* Main relaxation loop: dequeue a block, recompute its live sets, and
   requeue predecessors when live_at_start changed.  */
3042 while (qhead != qtail)
3044 int rescan, changed;
3053 /* Begin by propagating live_at_start from the successor blocks. */
3054 CLEAR_REG_SET (new_live_at_end);
3055 for (e = bb->succ; e ; e = e->succ_next)
3057 basic_block sb = e->dest;
3058 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3061 /* Regs used in phi nodes are not included in
3062 global_live_at_start, since they are live only along a
3063 particular edge. Set those regs that are live because of a
3064 phi node alternative corresponding to this particular block. */
3065 for_each_successor_phi (bb->index, &set_phi_alternative_reg,
/* The entry block only records live_at_end; it has no insns to scan.  */
3068 if (bb == ENTRY_BLOCK_PTR)
3070 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3074 /* On our first pass through this block, we'll go ahead and continue.
3075 Recognize first pass by local_set NULL. On subsequent passes, we
3076 get to skip out early if live_at_end wouldn't have changed. */
3078 if (bb->local_set == NULL)
3080 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3085 /* If any bits were removed from live_at_end, we'll have to
3086 rescan the block. This wouldn't be necessary if we had
3087 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3088 local_live is really dependent on live_at_end. */
3089 CLEAR_REG_SET (tmp);
3090 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3091 new_live_at_end, BITMAP_AND_COMPL);
3095 /* Find the set of changed bits. Take this opportunity
3096 to notice that this set is empty and early out. */
3097 CLEAR_REG_SET (tmp);
3098 changed = bitmap_operation (tmp, bb->global_live_at_end,
3099 new_live_at_end, BITMAP_XOR);
3103 /* If any of the changed bits overlap with local_set,
3104 we'll have to rescan the block. Detect overlap by
3105 the AND with ~local_set turning off bits. */
3106 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3111 /* Let our caller know that BB changed enough to require its
3112 death notes updated. */
3113 SET_BIT (blocks_out, bb->index);
3117 /* Add to live_at_start the set of all registers in
3118 new_live_at_end that aren't in the old live_at_end. */
3120 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3122 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3124 changed = bitmap_operation (bb->global_live_at_start,
3125 bb->global_live_at_start,
3132 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3134 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3135 into live_at_start. */
3136 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3138 /* If live_at start didn't change, no need to go farther. */
3139 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3142 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3145 /* Queue all predecessors of BB so that we may re-examine
3146 their live_at_end. */
3147 for (e = bb->pred; e ; e = e->pred_next)
3149 basic_block pb = e->src;
3150 if (pb->aux == NULL)
/* Release the scratch regsets and each block's local_set scratch.  */
3161 FREE_REG_SET (new_live_at_end);
3163 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3165 basic_block bb = BASIC_BLOCK (i);
3166 FREE_REG_SET (bb->local_set);
3172 /* Subroutines of life analysis. */
3174 /* Allocate the permanent data structures that represent the results
3175 of life analysis. Not static since used also for stupid life analysis. */
3178 allocate_bb_life_data ()
3182 for (i = 0; i < n_basic_blocks; i++)
3184 basic_block bb = BASIC_BLOCK (i);
3186 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3187 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
/* The fake entry/exit blocks each get the one regset they use.  */
3190 ENTRY_BLOCK_PTR->global_live_at_end
3191 = OBSTACK_ALLOC_REG_SET (function_obstack);
3192 EXIT_BLOCK_PTR->global_live_at_start
3193 = OBSTACK_ALLOC_REG_SET (function_obstack);
3195 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
/* (Re)allocate and reset the per-register statistics gathered by
   propagate_block (deaths, calls crossed, live length, home block).  */
3199 allocate_reg_life_data ()
3203 /* Recalculate the register space, in case it has grown. Old style
3204 vector oriented regsets would set regset_{size,bytes} here also. */
3205 allocate_reg_info (max_regno, FALSE, FALSE);
3207 /* Reset all the data we'll collect in propagate_block and its
3209 for (i = 0; i < max_regno; i++)
3213 REG_N_DEATHS (i) = 0;
3214 REG_N_CALLS_CROSSED (i) = 0;
3215 REG_LIVE_LENGTH (i) = 0;
3216 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3220 /* Compute the registers live at the beginning of a basic block
3221 from those live at the end.
3223 When called, OLD contains those live at the end.
3224 On return, it contains those live at the beginning.
3225 FIRST and LAST are the first and last insns of the basic block.
3227 FINAL is nonzero if we are doing the final pass which is not
3228 for computing the life info (since that has already been done)
3229 but for acting on it. On this pass, we delete dead stores,
3230 set up the logical links and dead-variables lists of instructions,
3231 and merge instructions for autoincrement and autodecrement addresses.
3233 SIGNIFICANT is nonzero only the first time for each basic block.
3234 If it is nonzero, it points to a regset in which we store
3235 a 1 for each register that is set within the block.
3237 BNUM is the number of the basic block. */
/* NOTE(review): the comment above predates this signature -- the
   current parameters are BB, OLD, SIGNIFICANT and FLAGS (PROP_*
   bits); FIRST/LAST/FINAL/BNUM no longer exist as arguments.  */
3240 propagate_block (bb, old, significant, flags)
3249 regset_head live_head;
3251 regset_head dead_head;
3253 /* Find the loop depth for this block. Ignore loop level changes in the
3254 middle of the basic block -- for register allocation purposes, the
3255 important uses will be in the blocks wholly contained within the loop
3256 not in the loop pre-header or post-trailer. */
3257 loop_depth = bb->loop_depth;
3259 dead = INITIALIZE_REG_SET (live_head);
3260 live = INITIALIZE_REG_SET (dead_head);
3264 if (flags & PROP_REG_INFO)
3268 /* Process the regs live at the end of the block.
3269 Mark them as not local to any one basic block. */
3270 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3272 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3276 /* Scan the block an insn at a time from end to beginning. */
3278 for (insn = bb->end; ; insn = prev)
3280 prev = PREV_INSN (insn);
3282 if (GET_CODE (insn) == NOTE)
3284 /* If this is a call to `setjmp' et al,
3285 warn if any non-volatile datum is live. */
3287 if ((flags & PROP_REG_INFO)
3288 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3289 IOR_REG_SET (regs_live_at_setjmp, old);
3292 /* Update the life-status of regs for this insn.
3293 First DEAD gets which regs are set in this insn
3294 then LIVE gets which regs are used in this insn.
3295 Then the regs live before the insn
3296 are those live after, with DEAD regs turned off,
3297 and then LIVE regs turned on. */
3299 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3302 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3303 int insn_is_dead = 0;
3304 int libcall_is_dead = 0;
3306 if (flags & PROP_SCAN_DEAD_CODE)
3308 insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
3310 libcall_is_dead = (insn_is_dead && note != 0
3311 && libcall_dead_p (PATTERN (insn), old,
3315 /* We almost certainly don't want to delete prologue or epilogue
3316 instructions. Warn about probable compiler losage. */
3319 && (((HAVE_epilogue || HAVE_prologue)
3320 && prologue_epilogue_contains (insn))
3321 || (HAVE_sibcall_epilogue
3322 && sibcall_epilogue_contains (insn))))
3324 if (flags & PROP_KILL_DEAD_CODE)
3326 warning ("ICE: would have deleted prologue/epilogue insn");
3327 if (!inhibit_warnings)
/* Keep such insns alive rather than deleting them.  */
3330 libcall_is_dead = insn_is_dead = 0;
3333 /* If an instruction consists of just dead store(s) on final pass,
3334 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3335 We could really delete it with delete_insn, but that
3336 can cause trouble for first or last insn in a basic block. */
3337 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3340 /* If the insn referred to a label, note that the label is
3342 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3344 if (REG_NOTE_KIND (inote) == REG_LABEL)
3346 rtx label = XEXP (inote, 0);
3348 LABEL_NUSES (label)--;
3350 /* If this label was attached to an ADDR_VEC, it's
3351 safe to delete the ADDR_VEC. In fact, it's pretty
3352 much mandatory to delete it, because the ADDR_VEC may
3353 be referencing labels that no longer exist. */
3354 if (LABEL_NUSES (label) == 0
3355 && (next = next_nonnote_insn (label)) != NULL
3356 && GET_CODE (next) == JUMP_INSN
3357 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3358 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3360 rtx pat = PATTERN (next);
3361 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3362 int len = XVECLEN (pat, diff_vec_p);
/* Drop the use counts of every label the vector refers to,
   then turn the vector insn itself into a deleted note.  */
3364 for (i = 0; i < len; i++)
3365 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3366 PUT_CODE (next, NOTE);
3367 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3368 NOTE_SOURCE_FILE (next) = 0;
/* A BARRIER after the now-dead vector is dead too.  */
3370 if ((next = next_nonnote_insn (label)) != NULL
3371 && GET_CODE (next) == BARRIER)
3373 PUT_CODE (next, NOTE);
3374 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3375 NOTE_SOURCE_FILE (next) = 0;
/* Finally "delete" the dead insn itself, in place.  */
3381 PUT_CODE (insn, NOTE);
3382 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3383 NOTE_SOURCE_FILE (insn) = 0;
3385 /* CC0 is now known to be dead. Either this insn used it,
3386 in which case it doesn't anymore, or clobbered it,
3387 so the next insn can't use it. */
3390 /* If this insn is copying the return value from a library call,
3391 delete the entire library call. */
3392 if (libcall_is_dead)
3394 rtx first = XEXP (note, 0);
/* Skip over insns of the libcall sequence already deleted.  */
3396 while (INSN_DELETED_P (first))
3397 first = NEXT_INSN (first);
3402 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3403 NOTE_SOURCE_FILE (p) = 0;
/* Reset the per-insn scratch sets before scanning this insn.  */
3409 CLEAR_REG_SET (dead);
3410 CLEAR_REG_SET (live);
3412 /* See if this is an increment or decrement that can be
3413 merged into a following memory address. */
3416 register rtx x = single_set (insn);
3418 /* Does this instruction increment or decrement a register? */
3419 if (!reload_completed
3420 && (flags & PROP_AUTOINC)
3422 && GET_CODE (SET_DEST (x)) == REG
3423 && (GET_CODE (SET_SRC (x)) == PLUS
3424 || GET_CODE (SET_SRC (x)) == MINUS)
3425 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3426 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3427 /* Ok, look for a following memory ref we can combine with.
3428 If one is found, change the memory ref to a PRE_INC
3429 or PRE_DEC, cancel this insn, and return 1.
3430 Return 0 if nothing has been done. */
3431 && try_pre_increment_1 (insn))
3434 #endif /* AUTO_INC_DEC */
3436 /* If this is not the final pass, and this insn is copying the
3437 value of a library call and it's dead, don't scan the
3438 insns that perform the library call, so that the call's
3439 arguments are not marked live. */
3440 if (libcall_is_dead)
3442 /* Mark the dest reg as `significant'. */
3443 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3444 significant, flags);
/* Jump over the whole libcall sequence in the backward scan.  */
3446 insn = XEXP (note, 0);
3447 prev = PREV_INSN (insn);
3449 else if (GET_CODE (PATTERN (insn)) == SET
3450 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3451 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3452 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3453 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3454 /* We have an insn to pop a constant amount off the stack.
3455 (Such insns use PLUS regardless of the direction of the stack,
3456 and any insn to adjust the stack by a constant is always a pop.)
3457 These insns, if not dead stores, have no effect on life. */
3461 /* Any regs live at the time of a call instruction
3462 must not go in a register clobbered by calls.
3463 Find all regs now live and record this for them. */
3465 if (GET_CODE (insn) == CALL_INSN
3466 && (flags & PROP_REG_INFO))
3467 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3469 REG_N_CALLS_CROSSED (i)++;
3472 /* LIVE gets the regs used in INSN;
3473 DEAD gets those set by it. Dead insns don't make anything
3476 mark_set_regs (old, dead, PATTERN (insn),
3477 insn, significant, flags);
3479 /* If an insn doesn't use CC0, it becomes dead since we
3480 assume that every insn clobbers it. So show it dead here;
3481 mark_used_regs will set it live if it is referenced. */
3485 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3487 /* Sometimes we may have inserted something before INSN (such as
3488 a move) when we make an auto-inc. So ensure we will scan
3491 prev = PREV_INSN (insn);
3494 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
/* Any USE inside CALL_INSN_FUNCTION_USAGE marks argument
   registers live across the call.  */
3500 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3502 note = XEXP (note, 1))
3503 if (GET_CODE (XEXP (note, 0)) == USE)
3504 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3507 /* Each call clobbers all call-clobbered regs that are not
3508 global or fixed. Note that the function-value reg is a
3509 call-clobbered reg, and mark_set_regs has already had
3510 a chance to handle it. */
3512 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3513 if (call_used_regs[i] && ! global_regs[i]
3516 SET_REGNO_REG_SET (dead, i);
3518 SET_REGNO_REG_SET (significant, i);
3521 /* The stack ptr is used (honorarily) by a CALL insn. */
3522 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3524 /* Calls may also reference any of the global registers,
3525 so they are made live. */
3526 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3528 mark_used_regs (old, live,
3529 gen_rtx_REG (reg_raw_mode[i], i),
3532 /* Calls also clobber memory. */
3533 free_EXPR_LIST_list (&mem_set_list);
3536 /* Update OLD for the registers used or set. */
3537 AND_COMPL_REG_SET (old, dead);
3538 IOR_REG_SET (old, live);
3542 /* On final pass, update counts of how many insns in which
3543 each reg is live. */
3544 if (flags & PROP_REG_INFO)
3545 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
/* Stop once the block's first insn has been processed.  */
3548 if (insn == bb->head)
3552 FREE_REG_SET (dead);
3553 FREE_REG_SET (live);
3554 free_EXPR_LIST_list (&mem_set_list);
3557 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3558 (SET expressions whose destinations are registers dead after the insn).
3559 NEEDED is the regset that says which regs are alive after the insn.
3561 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3563 If X is the entire body of an insn, NOTES contains the reg notes
3564 pertaining to the insn. */
3567 insn_dead_p (x, needed, call_ok, notes)
3571 rtx notes ATTRIBUTE_UNUSED;
3573 enum rtx_code code = GET_CODE (x);
3576 /* If flow is invoked after reload, we must take existing AUTO_INC
3577 expressions into account. */
3578 if (reload_completed)
3580 for ( ; notes; notes = XEXP (notes, 1))
3582 if (REG_NOTE_KIND (notes) == REG_INC)
3584 int regno = REGNO (XEXP (notes, 0));
3586 /* Don't delete insns to set global regs. */
3587 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3588 || REGNO_REG_SET_P (needed, regno))
3595 /* If setting something that's a reg or part of one,
3596 see if that register's altered value will be live. */
3600 rtx r = SET_DEST (x);
/* Setting CC0 is presumed dead here; a real user would have kept
   the insn alive via the needed set of the following insn.  */
3603 if (GET_CODE (r) == CC0)
3607 /* A SET that is a subroutine call cannot be dead. */
3608 if (GET_CODE (SET_SRC (x)) == CALL)
3614 /* Don't eliminate loads from volatile memory or volatile asms. */
3615 else if (volatile_refs_p (SET_SRC (x)))
3618 if (GET_CODE (r) == MEM)
3622 if (MEM_VOLATILE_P (r))
3625 /* Walk the set of memory locations we are currently tracking
3626 and see if one is an identical match to this memory location.
3627 If so, this memory write is dead (remember, we're walking
3628 backwards from the end of the block to the start. */
3629 temp = mem_set_list;
3632 if (rtx_equal_p (XEXP (temp, 0), r))
3634 temp = XEXP (temp, 1);
/* Strip wrappers to reach the underlying register, if any.  */
3639 while (GET_CODE (r) == SUBREG
3640 || GET_CODE (r) == STRICT_LOW_PART
3641 || GET_CODE (r) == ZERO_EXTRACT)
3644 if (GET_CODE (r) == REG)
3646 int regno = REGNO (r);
/* The destination is live afterward, so the store is needed.  */
3649 if (REGNO_REG_SET_P (needed, regno))
3652 /* If this is a hard register, verify that subsequent
3653 words are not needed. */
3654 if (regno < FIRST_PSEUDO_REGISTER)
3656 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3659 if (REGNO_REG_SET_P (needed, regno+n))
3663 /* Don't delete insns to set global regs. */
3664 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3667 /* Make sure insns to set the stack pointer aren't deleted. */
3668 if (regno == STACK_POINTER_REGNUM)
3671 /* Make sure insns to set the frame pointer aren't deleted. */
3672 if (regno == FRAME_POINTER_REGNUM
3673 && (! reload_completed || frame_pointer_needed))
3675 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3676 if (regno == HARD_FRAME_POINTER_REGNUM
3677 && (! reload_completed || frame_pointer_needed))
3681 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3682 /* Make sure insns to set arg pointer are never deleted
3683 (if the arg pointer isn't fixed, there will be a USE
3684 for it, so we can treat it normally). */
3685 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3689 /* Otherwise, the set is dead. */
3695 /* If performing several activities, insn is dead if each activity
3696 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3697 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3699 else if (code == PARALLEL)
3701 int i = XVECLEN (x, 0);
3703 for (i--; i >= 0; i--)
3704 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3705 && GET_CODE (XVECEXP (x, 0, i)) != USE
3706 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3712 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3713 is not necessarily true for hard registers. */
3714 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3715 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3716 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3719 /* We do not check other CLOBBER or USE here. An insn consisting of just
3720 a CLOBBER or just a USE should not be deleted. */
3724 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3725 return 1 if the entire library call is dead.
3726 This is true if X copies a register (hard or pseudo)
3727 and if the hard return reg of the call insn is dead.
3728 (The caller should have tested the destination of X already for death.)
3730 If this insn doesn't just copy a register, then we don't
3731 have an ordinary libcall. In that case, cse could not have
3732 managed to substitute the source for the dest later on,
3733 so we can assume the libcall is dead.
3735 NEEDED is the bit vector of pseudoregs live before this insn.
3736 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
/* NOTE(review): elided listing -- the leading number on each line is the
   original file's line number; intermediate lines (declarations, braces,
   returns) are missing.  Code tokens kept byte-for-byte; comments only.  */
3739 libcall_dead_p (x, needed, note, insn)
3745 register RTX_CODE code = GET_CODE (x);
/* Only a plain register-to-register copy can end an ordinary libcall
   sequence; anything else means the libcall is dead (see header comment).  */
3749 register rtx r = SET_SRC (x);
3750 if (GET_CODE (r) == REG)
3752 rtx call = XEXP (note, 0);
3756 /* Find the call insn.  Walk forward from the insn named in the
3757 REG_RETVAL note until the CALL_INSN (or INSN itself) is reached. */
3757 while (call != insn && GET_CODE (call) != CALL_INSN)
3758 call = NEXT_INSN (call);
3760 /* If there is none, do nothing special,
3761 since ordinary death handling can understand these insns. */
3765 /* See if the hard reg holding the value is dead.
3766 If this is a PARALLEL, find the call within it. */
3767 call_pat = PATTERN (call);
3768 if (GET_CODE (call_pat) == PARALLEL)
3770 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3771 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3772 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3775 /* This may be a library call that is returning a value
3776 via invisible pointer. Do nothing special, since
3777 ordinary death handling can understand these insns. */
3781 call_pat = XVECEXP (call_pat, 0, i)
/* Delegate: the libcall is dead iff the call's own SET (of the hard
   return register) is dead given NEEDED.  call_ok == 1 here.  */
3784 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3790 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3791 live at function entry. Don't count global register variables, variables
3792 in registers that can be used for function arg passing, or variables in
3793 fixed hard registers. */
/* NOTE(review): elided listing -- leading numbers are the original file's
   line numbers; some lines (e.g. the early "return 0;", presumably line
   3804-3805) are missing.  Tokens kept byte-for-byte; comments only.  */
3796 regno_uninitialized (regno)
/* Guard: with no CFG, or for hard regs that are global, fixed, or usable
   for argument passing, report "initialized" (the elided early return).  */
3799 if (n_basic_blocks == 0
3800 || (regno < FIRST_PSEUDO_REGISTER
3801 && (global_regs[regno]
3802 || fixed_regs[regno]
3803 || FUNCTION_ARG_REGNO_P (regno))))
/* REGNO is used-before-set iff it is live on entry to the first block.  */
3806 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3809 /* 1 if register REGNO was alive at a place where `setjmp' was called
3810 and was set more than once or is an argument.
3811 Such regs may be clobbered by `longjmp'. */
/* NOTE(review): elided listing -- leading numbers are original line numbers;
   the early-return body for the n_basic_blocks == 0 case is missing.
   Tokens kept byte-for-byte; comments only.  */
3814 regno_clobbered_at_setjmp (regno)
3817 if (n_basic_blocks == 0)
/* Clobbered-by-longjmp: reg was set more than once (or is live at function
   entry, i.e. an argument) AND was live where some setjmp was called.  */
3820 return ((REG_N_SETS (regno) > 1
3821 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3822 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3825 /* INSN references memory, possibly using autoincrement addressing modes.
3826 Find any entries on the mem_set_list that need to be invalidated due
3827 to an address change. */
/* NOTE(review): elided listing -- leading numbers are original line numbers;
   loop braces and the inner while-header are missing.  Tokens kept
   byte-for-byte; comments only.  */
3829 invalidate_mems_from_autoinc (insn)
/* NOTE(review): this initializer is immediately overwritten by the
   for-init on the next line -- redundant (harmless).  */
3832 rtx note = REG_NOTES (insn);
/* Scan INSN's notes for REG_INC (autoincrement side effects).  */
3833 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3835 if (REG_NOTE_KIND (note) == REG_INC)
/* Walk mem_set_list and splice out every recorded store whose address
   mentions the autoincremented register.  */
3837 rtx temp = mem_set_list;
3838 rtx prev = NULL_RTX;
3843 next = XEXP (temp, 1);
3844 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3846 /* Splice temp out of list. */
3848 XEXP (prev, 1) = next;
3850 mem_set_list = next;
3851 free_EXPR_LIST_node (temp);
3861 /* Process the registers that are set within X. Their bits are set to
3862 1 in the regset DEAD, because they are dead prior to this insn.
3864 If INSN is nonzero, it is the insn being processed.
3866 FLAGS is the set of operations to perform. */
/* NOTE(review): elided listing -- leading numbers are original line numbers;
   braces/declarations are missing.  Tokens kept byte-for-byte; comments only.
   Purpose: dispatch each SET or CLOBBER in X (either directly or inside a
   PARALLEL) to mark_set_1.  */
3869 mark_set_regs (needed, dead, x, insn, significant, flags)
3877 register RTX_CODE code = GET_CODE (x);
3879 if (code == SET || code == CLOBBER)
3880 mark_set_1 (needed, dead, x, insn, significant, flags);
3881 else if (code == PARALLEL)
/* Iterate the PARALLEL's vector backwards, handling each SET/CLOBBER.  */
3884 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3886 code = GET_CODE (XVECEXP (x, 0, i));
3887 if (code == SET || code == CLOBBER)
3888 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3889 significant, flags);
3894 /* Process a single SET rtx, X. */
/* NOTE(review): elided listing -- leading numbers are the original file's
   line numbers and many intermediate lines are missing; several comments
   below are left unterminated by the elision (their closing lines are among
   the missing ones), so no text may be inserted inside those regions.
   Code tokens kept byte-for-byte; comments only added/corrected.  */
3897 mark_set_1 (needed, dead, x, insn, significant, flags)
3905 register int regno = -1;
3906 register rtx reg = SET_DEST (x);
3908 /* Some targets place small structures in registers for
3909 return values of functions. We have to detect this
3910 case specially here to get correct flow information. */
3911 if (GET_CODE (reg) == PARALLEL
3912 && GET_MODE (reg) == BLKmode)
3916 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3917 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3918 significant, flags);
3922 /* Modifying just one hardware register of a multi-reg value
3923 or just a byte field of a register
3924 does not mean the value from before this insn is now dead.
3925 But it does mean liveness of that register at the end of the block
3928 Within mark_set_1, however, we treat it as if the register is
3929 indeed modified. mark_used_regs will, however, also treat this
3930 register as being used. Thus, we treat these insns as setting a
3931 new value for the register as a function of its old value. This
3932 causes LOG_LINKS to be made appropriately and this will help combine. */
3934 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3935 || GET_CODE (reg) == SIGN_EXTRACT
3936 || GET_CODE (reg) == STRICT_LOW_PART)
3937 reg = XEXP (reg, 0);
3939 /* If this set is a MEM, then it kills any aliased writes.
3940 If this set is a REG, then it kills any MEMs which use the reg. */
3941 if (flags & PROP_SCAN_DEAD_CODE)
3943 if (GET_CODE (reg) == MEM
3944 || GET_CODE (reg) == REG)
3946 rtx temp = mem_set_list;
3947 rtx prev = NULL_RTX;
3952 next = XEXP (temp, 1);
3953 if ((GET_CODE (reg) == MEM
3954 && output_dependence (XEXP (temp, 0), reg))
3955 || (GET_CODE (reg) == REG
3956 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3958 /* Splice this entry out of the list. */
3960 XEXP (prev, 1) = next;
3962 mem_set_list = next;
3963 free_EXPR_LIST_node (temp);
3971 /* If the memory reference had embedded side effects (autoincrement
3972 address modes. Then we may need to kill some entries on the
3974 if (insn && GET_CODE (reg) == MEM)
3975 invalidate_mems_from_autoinc (insn);
3977 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3978 /* We do not know the size of a BLKmode store, so we do not track
3979 them for redundant store elimination. */
3980 && GET_MODE (reg) != BLKmode
3981 /* There are no REG_INC notes for SP, so we can't assume we'll see
3982 everything that invalidates it. To be safe, don't eliminate any
3983 stores through SP; none of them should be redundant anyway. */
3984 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3985 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
/* Main case: the destination is a REG that is not a frame/arg pointer
   still needed for elimination, nor a global hard register.  */
3988 if (GET_CODE (reg) == REG
3989 && (regno = REGNO (reg),
3990 ! (regno == FRAME_POINTER_REGNUM
3991 && (! reload_completed || frame_pointer_needed)))
3992 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3993 && ! (regno == HARD_FRAME_POINTER_REGNUM
3994 && (! reload_completed || frame_pointer_needed))
3996 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3997 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3999 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
4000 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
4002 int some_needed = REGNO_REG_SET_P (needed, regno);
4003 int some_not_needed = ! some_needed;
4005 /* Mark it as a significant register for this basic block. */
4007 SET_REGNO_REG_SET (significant, regno);
4009 /* Mark it as dead before this insn. */
4010 SET_REGNO_REG_SET (dead, regno);
4012 /* A hard reg in a wide mode may really be multiple registers.
4013 If so, mark all of them just like the first. */
4014 if (regno < FIRST_PSEUDO_REGISTER)
4018 /* Nothing below is needed for the stack pointer; get out asap.
4019 Eg, log links aren't needed, since combine won't use them. */
4020 if (regno == STACK_POINTER_REGNUM)
4023 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4026 int regno_n = regno + n;
4027 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4029 SET_REGNO_REG_SET (significant, regno_n);
4031 SET_REGNO_REG_SET (dead, regno_n);
4032 some_needed |= needed_regno;
4033 some_not_needed |= ! needed_regno;
4037 /* Additional data to record if this is the final pass. */
4038 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4039 | PROP_DEATH_NOTES | PROP_AUTOINC))
4042 register int blocknum = BLOCK_NUM (insn);
4045 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4046 y = reg_next_use[regno];
4048 /* If this is a hard reg, record this function uses the reg. */
4050 if (regno < FIRST_PSEUDO_REGISTER)
4053 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4055 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4056 for (i = regno; i < endregno; i++)
4058 /* The next use is no longer "next", since a store
4060 reg_next_use[i] = 0;
4063 if (flags & PROP_REG_INFO)
4064 for (i = regno; i < endregno; i++)
4066 regs_ever_live[i] = 1;
4072 /* The next use is no longer "next", since a store
4074 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4075 reg_next_use[regno] = 0;
4077 /* Keep track of which basic blocks each reg appears in. */
4079 if (flags & PROP_REG_INFO)
4081 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4082 REG_BASIC_BLOCK (regno) = blocknum;
4083 else if (REG_BASIC_BLOCK (regno) != blocknum)
4084 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4086 /* Count (weighted) references, stores, etc. This counts a
4087 register twice if it is modified, but that is correct. */
4088 REG_N_SETS (regno)++;
4089 REG_N_REFS (regno) += loop_depth + 1;
4091 /* The insns where a reg is live are normally counted
4092 elsewhere, but we want the count to include the insn
4093 where the reg is set, and the normal counting mechanism
4094 would not count it. */
4095 REG_LIVE_LENGTH (regno)++;
4099 if (! some_not_needed)
4101 if (flags & PROP_LOG_LINKS)
4103 /* Make a logical link from the next following insn
4104 that uses this register, back to this insn.
4105 The following insns have already been processed.
4107 We don't build a LOG_LINK for hard registers containing
4108 in ASM_OPERANDs. If these registers get replaced,
4109 we might wind up changing the semantics of the insn,
4110 even if reload can make what appear to be valid
4111 assignments later. */
4112 if (y && (BLOCK_NUM (y) == blocknum)
4113 && (regno >= FIRST_PSEUDO_REGISTER
4114 || asm_noperands (PATTERN (y)) < 0))
4115 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4118 else if (! some_needed)
4120 if (flags & PROP_REG_INFO)
4121 REG_N_DEATHS (REGNO (reg))++;
4123 if (flags & PROP_DEATH_NOTES)
4125 /* Note that dead stores have already been deleted
4126 when possible. If we get here, we have found a
4127 dead store that cannot be eliminated (because the
4128 same insn does something useful). Indicate this
4129 by marking the reg being set as dying here. */
4131 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4136 if (flags & PROP_DEATH_NOTES)
4138 /* This is a case where we have a multi-word hard register
4139 and some, but not all, of the words of the register are
4140 needed in subsequent insns. Write REG_UNUSED notes
4141 for those parts that were not needed. This case should
4146 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4148 if (!REGNO_REG_SET_P (needed, regno + i))
4152 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4158 else if (GET_CODE (reg) == REG)
4160 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4161 reg_next_use[regno] = 0;
4164 /* If this is the last pass and this is a SCRATCH, show it will be dying
4165 here and count it. */
4166 else if (GET_CODE (reg) == SCRATCH)
4168 if (flags & PROP_DEATH_NOTES)
4170 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
/* NOTE(review): elided listing -- leading numbers are the original file's
   line numbers and many lines are missing.  The doc comment opened on the
   next line is unterminated by the elision (its closing line is among the
   missing ones) and swallows the following lines up to the next closing
   comment; nothing may be inserted inside that region.  Code tokens kept
   byte-for-byte; comments only added in safe regions.  */
4176 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4180 find_auto_inc (needed, x, insn)
4185 rtx addr = XEXP (x, 0);
4186 HOST_WIDE_INT offset = 0;
4189 /* Here we detect use of an index register which might be good for
4190 postincrement, postdecrement, preincrement, or predecrement. */
/* If the address is reg+const, remember the constant and strip to the reg.  */
4192 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4193 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4195 if (GET_CODE (addr) == REG)
4198 register int size = GET_MODE_SIZE (GET_MODE (x));
4201 int regno = REGNO (addr);
4203 /* Is the next use an increment that might make auto-increment? */
4204 if ((incr = reg_next_use[regno]) != 0
4205 && (set = single_set (incr)) != 0
4206 && GET_CODE (set) == SET
4207 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4208 /* Can't add side effects to jumps; if reg is spilled and
4209 reloaded, there's no way to store back the altered value. */
4210 && GET_CODE (insn) != JUMP_INSN
4211 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4212 && XEXP (y, 0) == addr
4213 && GET_CODE (XEXP (y, 1)) == CONST_INT
4214 && ((HAVE_POST_INCREMENT
4215 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4216 || (HAVE_POST_DECREMENT
4217 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4218 || (HAVE_PRE_INCREMENT
4219 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4220 || (HAVE_PRE_DECREMENT
4221 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4222 /* Make sure this reg appears only once in this insn. */
4223 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4224 use != 0 && use != (rtx) 1))
4226 rtx q = SET_DEST (set);
/* Positive increment + nonzero offset => pre-inc; etc.  */
4227 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4228 ? (offset ? PRE_INC : POST_INC)
4229 : (offset ? PRE_DEC : POST_DEC));
4231 if (dead_or_set_p (incr, addr))
4233 /* This is the simple case. Try to make the auto-inc. If
4234 we can't, we are done. Otherwise, we will do any
4235 needed updates below. */
4236 if (! validate_change (insn, &XEXP (x, 0),
4237 gen_rtx_fmt_e (inc_code, Pmode, addr),
4241 else if (GET_CODE (q) == REG
4242 /* PREV_INSN used here to check the semi-open interval
4244 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4245 /* We must also check for sets of q as q may be
4246 a call clobbered hard register and there may
4247 be a call between PREV_INSN (insn) and incr. */
4248 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4250 /* We have *p followed sometime later by q = p+size.
4251 Both p and q must be live afterward,
4252 and q is not used between INSN and its assignment.
4253 Change it to q = p, ...*q..., q = q+size.
4254 Then fall into the usual case. */
4259 emit_move_insn (q, addr);
4260 insns = get_insns ();
/* Give the freshly emitted copy insns the same basic block as INSN.  */
4263 bb = BLOCK_FOR_INSN (insn);
4264 for (temp = insns; temp; temp = NEXT_INSN (temp))
4265 set_block_for_insn (temp, bb);
4267 /* If we can't make the auto-inc, or can't make the
4268 replacement into Y, exit. There's no point in making
4269 the change below if we can't do the auto-inc and doing
4270 so is not correct in the pre-inc case. */
4272 validate_change (insn, &XEXP (x, 0),
4273 gen_rtx_fmt_e (inc_code, Pmode, q),
4275 validate_change (incr, &XEXP (y, 0), q, 1);
4276 if (! apply_change_group ())
4279 /* We now know we'll be doing this change, so emit the
4280 new insn(s) and do the updates. */
4281 emit_insns_before (insns, insn);
4283 if (BLOCK_FOR_INSN (insn)->head == insn)
4284 BLOCK_FOR_INSN (insn)->head = insns;
4286 /* INCR will become a NOTE and INSN won't contain a
4287 use of ADDR. If a use of ADDR was just placed in
4288 the insn before INSN, make that the next use.
4289 Otherwise, invalidate it. */
4290 if (GET_CODE (PREV_INSN (insn)) == INSN
4291 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4292 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4293 reg_next_use[regno] = PREV_INSN (insn);
4295 reg_next_use[regno] = 0;
4300 /* REGNO is now used in INCR which is below INSN, but
4301 it previously wasn't live here. If we don't mark
4302 it as needed, we'll put a REG_DEAD note for it
4303 on this insn, which is incorrect. */
4304 SET_REGNO_REG_SET (needed, regno);
4306 /* If there are any calls between INSN and INCR, show
4307 that REGNO now crosses them. */
4308 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4309 if (GET_CODE (temp) == CALL_INSN)
4310 REG_N_CALLS_CROSSED (regno)++;
4315 /* If we haven't returned, it means we were able to make the
4316 auto-inc, so update the status. First, record that this insn
4317 has an implicit side effect. */
4320 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4322 /* Modify the old increment-insn to simply copy
4323 the already-incremented value of our register. */
4324 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4327 /* If that makes it a no-op (copying the register into itself) delete
4328 it so it won't appear to be a "use" and a "set" of this
4330 if (SET_DEST (set) == addr)
4332 PUT_CODE (incr, NOTE);
4333 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4334 NOTE_SOURCE_FILE (incr) = 0;
4337 if (regno >= FIRST_PSEUDO_REGISTER)
4339 /* Count an extra reference to the reg. When a reg is
4340 incremented, spilling it is worse, so we want to make
4341 that less likely. */
4342 REG_N_REFS (regno) += loop_depth + 1;
4344 /* Count the increment as a setting of the register,
4345 even though it isn't a SET in rtl. */
4346 REG_N_SETS (regno)++;
4351 #endif /* AUTO_INC_DEC */
/* NOTE(review): elided listing -- leading numbers are the original file's
   line numbers and many lines (case labels, braces, declarations) are
   missing.  The doc comment opened on the next line is unterminated by the
   elision and swallows the following lines up to the next closing comment;
   several other interior comments are likewise unterminated, so insertions
   are made only in demonstrably-safe regions.  Code tokens kept
   byte-for-byte; comments only added.  */
4353 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4354 This is done assuming the registers needed from X
4355 are those that have 1-bits in NEEDED.
4357 FLAGS is the set of enabled operations.
4359 INSN is the containing instruction. If INSN is dead, this function is not
4363 mark_used_regs (needed, live, x, flags, insn)
4370 register RTX_CODE code;
4375 code = GET_CODE (x);
4395 /* If we are clobbering a MEM, mark any registers inside the address
4397 if (GET_CODE (XEXP (x, 0)) == MEM)
4398 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4402 /* Don't bother watching stores to mems if this is not the
4403 final pass. We'll not be deleting dead stores this round. */
4404 if (flags & PROP_SCAN_DEAD_CODE)
4406 /* Invalidate the data for the last MEM stored, but only if MEM is
4407 something that can be stored into. */
4408 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4409 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4410 ; /* needn't clear the memory set list */
/* Otherwise walk mem_set_list and splice out entries this MEM use
   conflicts with (anti-dependence: a read after a recorded write).  */
4413 rtx temp = mem_set_list;
4414 rtx prev = NULL_RTX;
4419 next = XEXP (temp, 1);
4420 if (anti_dependence (XEXP (temp, 0), x))
4422 /* Splice temp out of the list. */
4424 XEXP (prev, 1) = next;
4426 mem_set_list = next;
4427 free_EXPR_LIST_node (temp);
4435 /* If the memory reference had embedded side effects (autoincrement
4436 address modes. Then we may need to kill some entries on the
4439 invalidate_mems_from_autoinc (insn);
4443 if (flags & PROP_AUTOINC)
4444 find_auto_inc (needed, x, insn);
/* SUBREG of a pseudo with a different size: record the size change.  */
4449 if (GET_CODE (SUBREG_REG (x)) == REG
4450 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4451 && (GET_MODE_SIZE (GET_MODE (x))
4452 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4453 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4455 /* While we're here, optimize this case. */
4458 /* In case the SUBREG is not of a register, don't optimize */
4459 if (GET_CODE (x) != REG)
4461 mark_used_regs (needed, live, x, flags, insn);
4465 /* ... fall through ... */
4468 /* See a register other than being set
4469 => mark it as needed. */
4473 int some_needed = REGNO_REG_SET_P (needed, regno);
4474 int some_not_needed = ! some_needed;
4476 SET_REGNO_REG_SET (live, regno);
4478 /* A hard reg in a wide mode may really be multiple registers.
4479 If so, mark all of them just like the first. */
4480 if (regno < FIRST_PSEUDO_REGISTER)
4484 /* For stack ptr or fixed arg pointer,
4485 nothing below can be necessary, so waste no more time. */
4486 if (regno == STACK_POINTER_REGNUM
4487 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4488 || (regno == HARD_FRAME_POINTER_REGNUM
4489 && (! reload_completed || frame_pointer_needed))
4491 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4492 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4494 || (regno == FRAME_POINTER_REGNUM
4495 && (! reload_completed || frame_pointer_needed)))
4497 /* If this is a register we are going to try to eliminate,
4498 don't mark it live here. If we are successful in
4499 eliminating it, it need not be live unless it is used for
4500 pseudos, in which case it will have been set live when
4501 it was allocated to the pseudos. If the register will not
4502 be eliminated, reload will set it live at that point. */
4504 if ((flags & PROP_REG_INFO)
4505 && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4506 regs_ever_live[regno] = 1;
4509 /* No death notes for global register variables;
4510 their values are live after this function exits. */
4511 if (global_regs[regno])
4513 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4514 reg_next_use[regno] = insn;
4518 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4521 int regno_n = regno + n;
4522 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4524 SET_REGNO_REG_SET (live, regno_n);
4525 some_needed |= needed_regno;
4526 some_not_needed |= ! needed_regno;
4530 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4532 /* Record where each reg is used, so when the reg
4533 is set we know the next insn that uses it. */
4535 reg_next_use[regno] = insn;
4537 if (flags & PROP_REG_INFO)
4539 if (regno < FIRST_PSEUDO_REGISTER)
4541 /* If a hard reg is being used,
4542 record that this function does use it. */
4544 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4548 regs_ever_live[regno + --i] = 1;
4553 /* Keep track of which basic block each reg appears in. */
4555 register int blocknum = BLOCK_NUM (insn);
4557 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4558 REG_BASIC_BLOCK (regno) = blocknum;
4559 else if (REG_BASIC_BLOCK (regno) != blocknum)
4560 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4562 /* Count (weighted) number of uses of each reg. */
4564 REG_N_REFS (regno) += loop_depth + 1;
4568 /* Record and count the insns in which a reg dies.
4569 If it is used in this insn and was dead below the insn
4570 then it dies in this insn. If it was set in this insn,
4571 we do not make a REG_DEAD note; likewise if we already
4572 made such a note. */
4574 if (flags & PROP_DEATH_NOTES)
4577 && ! dead_or_set_p (insn, x)
4579 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4583 /* Check for the case where the register dying partially
4584 overlaps the register set by this insn. */
4585 if (regno < FIRST_PSEUDO_REGISTER
4586 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4588 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4590 some_needed |= dead_or_set_regno_p (insn, regno + n);
4593 /* If none of the words in X is needed, make a REG_DEAD
4594 note. Otherwise, we must make partial REG_DEAD notes. */
4598 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4599 REG_N_DEATHS (regno)++;
4605 /* Don't make a REG_DEAD note for a part of a register
4606 that is set in the insn. */
4608 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4610 if (!REGNO_REG_SET_P (needed, regno + i)
4611 && ! dead_or_set_regno_p (insn, regno + i))
4614 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4625 register rtx testreg = SET_DEST (x);
4628 /* If storing into MEM, don't show it as being used. But do
4629 show the address as being used. */
4630 if (GET_CODE (testreg) == MEM)
4633 if (flags & PROP_AUTOINC)
4634 find_auto_inc (needed, testreg, insn);
4636 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4637 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4641 /* Storing in STRICT_LOW_PART is like storing in a reg
4642 in that this SET might be dead, so ignore it in TESTREG.
4643 but in some other ways it is like using the reg.
4645 Storing in a SUBREG or a bit field is like storing the entire
4646 register in that if the register's value is not used
4647 then this SET is not needed. */
4648 while (GET_CODE (testreg) == STRICT_LOW_PART
4649 || GET_CODE (testreg) == ZERO_EXTRACT
4650 || GET_CODE (testreg) == SIGN_EXTRACT
4651 || GET_CODE (testreg) == SUBREG)
4653 if (GET_CODE (testreg) == SUBREG
4654 && GET_CODE (SUBREG_REG (testreg)) == REG
4655 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4656 && (GET_MODE_SIZE (GET_MODE (testreg))
4657 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4658 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4660 /* Modifying a single register in an alternate mode
4661 does not use any of the old value. But these other
4662 ways of storing in a register do use the old value. */
4663 if (GET_CODE (testreg) == SUBREG
4664 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4669 testreg = XEXP (testreg, 0);
4672 /* If this is a store into a register,
4673 recursively scan the value being stored. */
4675 if ((GET_CODE (testreg) == PARALLEL
4676 && GET_MODE (testreg) == BLKmode)
4677 || (GET_CODE (testreg) == REG
4678 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4679 && (! reload_completed || frame_pointer_needed)))
4680 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4681 && ! (regno == HARD_FRAME_POINTER_REGNUM
4682 && (! reload_completed || frame_pointer_needed))
4684 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4685 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4688 /* We used to exclude global_regs here, but that seems wrong.
4689 Storing in them is like storing in mem. */
4691 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4693 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4700 case UNSPEC_VOLATILE:
4704 /* Traditional and volatile asm instructions must be considered to use
4705 and clobber all hard registers, all pseudo-registers and all of
4706 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4708 Consider for instance a volatile asm that changes the fpu rounding
4709 mode. An insn should not be moved across this even if it only uses
4710 pseudo-regs because it might give an incorrectly rounded result.
4712 ?!? Unfortunately, marking all hard registers as live causes massive
4713 problems for the register allocator and marking all pseudos as live
4714 creates mountains of uninitialized variable warnings.
4716 So for now, just clear the memory set list and mark any regs
4717 we can find in ASM_OPERANDS as used. */
4718 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4719 free_EXPR_LIST_list (&mem_set_list);
4721 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4722 We can not just fall through here since then we would be confused
4723 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4724 traditional asms unlike their normal usage. */
4725 if (code == ASM_OPERANDS)
4729 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4730 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4737 /* We _do_not_ want to scan operands of phi nodes. Operands of
4738 a phi function are evaluated only when control reaches this
4739 block along a particular edge. Therefore, regs that appear
4740 as arguments to phi should not be added to the global live at
4748 /* Recursively scan the operands of this expression. */
4751 register const char *fmt = GET_RTX_FORMAT (code);
4754 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4758 /* Tail recursive case: save a function call level. */
4764 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4766 else if (fmt[i] == 'E')
4769 for (j = 0; j < XVECLEN (x, i); j++)
4770 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
/* NOTE(review): elided listing -- leading numbers are the original file's
   line numbers; braces, declarations and parts of comments are missing.
   The comment opened at original line 4802 below is unterminated by the
   elision and swallows the remaining lines of this fragment; nothing is
   inserted past it.  Code tokens kept byte-for-byte; comments only.  */
4779 try_pre_increment_1 (insn)
4782 /* Find the next use of this reg. If in same basic block,
4783 make it do pre-increment or pre-decrement if appropriate. */
4784 rtx x = single_set (insn);
/* Signed increment amount: +const for PLUS, -const otherwise (MINUS).  */
4785 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4786 * INTVAL (XEXP (SET_SRC (x), 1)));
4787 int regno = REGNO (SET_DEST (x));
4788 rtx y = reg_next_use[regno];
4790 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4791 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4792 mode would be better. */
4793 && ! dead_or_set_p (y, SET_DEST (x))
4794 && try_pre_increment (y, SET_DEST (x), amount))
4796 /* We have found a suitable auto-increment
4797 and already changed insn Y to do it.
4798 So flush this increment-instruction. */
4799 PUT_CODE (insn, NOTE);
4800 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4801 NOTE_SOURCE_FILE (insn) = 0;
4802 /* Count a reference to this reg for the increment
4803 insn we are deleting. When a reg is incremented,
4804 spilling it is worse, so we want to make that
4806 if (regno >= FIRST_PSEUDO_REGISTER)
4808 REG_N_REFS (regno) += loop_depth + 1;
4809 REG_N_SETS (regno)++;
4816 /* Try to change INSN so that it does pre-increment or pre-decrement
4817 addressing on register REG in order to add AMOUNT to REG.
4818 AMOUNT is negative for pre-decrement.
4819 Returns 1 if the change could be made.
4820 This checks all about the validity of the result of modifying INSN. */
/* NOTE(review): elided listing -- leading numbers are the original file's
   line numbers; the return type, some parameter declarations, braces and
   many statements (including the "return 0;"/"return 1;" lines) are
   missing.  Code tokens kept byte-for-byte; comments only added.  */
4823 try_pre_increment (insn, reg, amount)
4825 HOST_WIDE_INT amount;
4829 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4830 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4832 /* Nonzero if we can try to make a post-increment or post-decrement.
4833 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4834 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4835 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4838 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4841 /* From the sign of increment, see which possibilities are conceivable
4842 on this target machine. */
4843 if (HAVE_PRE_INCREMENT && amount > 0)
4845 if (HAVE_POST_INCREMENT && amount > 0)
4848 if (HAVE_PRE_DECREMENT && amount < 0)
4850 if (HAVE_POST_DECREMENT && amount < 0)
4853 if (! (pre_ok || post_ok))
4856 /* It is not safe to add a side effect to a jump insn
4857 because if the incremented register is spilled and must be reloaded
4858 there would be no way to store the incremented value back in memory. */
4860 if (GET_CODE (insn) == JUMP_INSN)
/* Look for REG used as a plain address (post case) or as reg-amount
   (pre case).  find_use_as_address returns the MEM, 0, or (rtx) 1 when
   REG is used more than once / in a non-address context.  */
4865 use = find_use_as_address (PATTERN (insn), reg, 0);
4866 if (post_ok && (use == 0 || use == (rtx) 1))
4868 use = find_use_as_address (PATTERN (insn), reg, -amount);
4872 if (use == 0 || use == (rtx) 1)
/* The access size must match the increment magnitude exactly.  */
4875 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4878 /* See if this combination of instruction and addressing mode exists. */
4879 if (! validate_change (insn, &XEXP (use, 0),
4880 gen_rtx_fmt_e (amount > 0
4881 ? (do_post ? POST_INC : PRE_INC)
4882 : (do_post ? POST_DEC : PRE_DEC),
4886 /* Record that this insn now has an implicit side effect on X. */
4887 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4891 #endif /* AUTO_INC_DEC */
/* NOTE(review): elided listing -- leading numbers are the original file's
   line numbers and several lines are missing.  The doc comment opened on
   the next line is unterminated by the elision and swallows the following
   lines up to the next closing comment; nothing is inserted inside that
   region.  Code tokens kept byte-for-byte; comments only added.  */
4893 /* Find the place in the rtx X where REG is used as a memory address.
4894 Return the MEM rtx that so uses it.
4895 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4896 (plus REG (const_int PLUSCONST)).
4898 If such an address does not appear, return 0.
4899 If REG appears more than once, or is used other than in such an address,
4903 find_use_as_address (x, reg, plusconst)
4906 HOST_WIDE_INT plusconst;
4908 enum rtx_code code = GET_CODE (x);
4909 const char *fmt = GET_RTX_FORMAT (code);
4911 register rtx value = 0;
4914 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4917 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4918 && XEXP (XEXP (x, 0), 0) == reg
4919 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4920 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4923 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4925 /* If REG occurs inside a MEM used in a bit-field reference,
4926 that is unacceptable. */
4927 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4928 return (rtx) (HOST_WIDE_INT) 1;
/* (rtx) 1 is the sentinel for "REG used unacceptably / more than once".  */
4932 return (rtx) (HOST_WIDE_INT) 1;
/* Recurse over all operands, accumulating at most one acceptable use.  */
4934 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4938 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4942 return (rtx) (HOST_WIDE_INT) 1;
4944 else if (fmt[i] == 'E')
4947 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4949 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4953 return (rtx) (HOST_WIDE_INT) 1;
4961 /* Write information about registers and basic blocks into FILE.
4962 This is part of making a debugging dump. */
/* NOTE(review): elided listing -- leading numbers are the original file's
   line numbers; braces and declarations are missing.  Tokens kept
   byte-for-byte; comments only.  Prints regset R to stream OUTF, with the
   hard-register name in brackets for hard regs.  */
4965 dump_regset (r, outf)
4972 fputs (" (nil)", outf);
/* Iterate every regno set in R, printing its number (and name if hard).  */
4976 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4978 fprintf (outf, " %d", i);
4979 if (i < FIRST_PSEUDO_REGISTER)
4980 fprintf (outf, " [%s]",
/* NOTE(review): the two lines below appear to be the body of a separate
   debugger helper (presumably debug_regset -- its header line is among the
   elided lines); it dumps R to stderr.  TODO confirm against full source.  */
4989 dump_regset (r, stderr);
4990 putc ('\n', stderr);
/* Dump per-pseudo-register usage statistics and the basic block /
   edge structure of the current function to FILE, for debugging.  */
4994 dump_flow_info (file)
4998 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5000 fprintf (file, "%d registers.\n", max_regno);
/* For each pseudo register, report reference/set/death counts, calls
   crossed, size in bytes, and preferred/alternate register classes.  */
5001 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5004 enum reg_class class, altclass;
5005 fprintf (file, "\nRegister %d used %d times across %d insns",
5006 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5007 if (REG_BASIC_BLOCK (i) >= 0)
5008 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5010 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5011 (REG_N_SETS (i) == 1) ? "" : "s");
5012 if (REG_USERVAR_P (regno_reg_rtx[i]))
5013 fprintf (file, "; user var");
5014 if (REG_N_DEATHS (i) != 1)
5015 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5016 if (REG_N_CALLS_CROSSED (i) == 1)
5017 fprintf (file, "; crosses 1 call");
5018 else if (REG_N_CALLS_CROSSED (i))
5019 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5020 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5021 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5022 class = reg_preferred_class (i);
5023 altclass = reg_alternate_class (i);
/* Only mention register classes when they differ from the defaults
   (GENERAL_REGS preferred, ALL_REGS alternate).  */
5024 if (class != GENERAL_REGS || altclass != ALL_REGS)
5026 if (altclass == ALL_REGS || class == ALL_REGS)
5027 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5028 else if (altclass == NO_REGS)
5029 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5031 fprintf (file, "; pref %s, else %s",
5032 reg_class_names[(int) class],
5033 reg_class_names[(int) altclass]);
5035 if (REGNO_POINTER_FLAG (i))
5036 fprintf (file, "; pointer");
5037 fprintf (file, ".\n");
/* Now dump each basic block: insn range, loop depth, predecessor and
   successor edges, and live register sets at the block boundaries.  */
5040 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5041 for (i = 0; i < n_basic_blocks; i++)
5043 register basic_block bb = BASIC_BLOCK (i);
5046 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5047 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5049 fprintf (file, "Predecessors: ");
5050 for (e = bb->pred; e ; e = e->pred_next)
5051 dump_edge_info (file, e, 0);
5053 fprintf (file, "\nSuccessors: ");
5054 for (e = bb->succ; e ; e = e->succ_next)
5055 dump_edge_info (file, e, 1);
5057 fprintf (file, "\nRegisters live at start:");
5058 dump_regset (bb->global_live_at_start, file);
5060 fprintf (file, "\nRegisters live at end:");
5061 dump_regset (bb->global_live_at_end, file);
5072 dump_flow_info (stderr);
/* Print one edge E to FILE.  DO_SUCC nonzero means E was reached as a
   successor edge, so print its destination; zero means print its source.
   Any edge flags are printed in parentheses after the block number.  */
5076 dump_edge_info (file, e, do_succ)
5081 basic_block side = (do_succ ? e->dest : e->src);
5083 if (side == ENTRY_BLOCK_PTR)
5084 fputs (" ENTRY", file);
5085 else if (side == EXIT_BLOCK_PTR)
5086 fputs (" EXIT", file);
5088 fprintf (file, " %d", side->index);
/* Names for the edge flag bits, in bit order (fallthru = bit 0, etc.).  */
5092 static const char * const bitnames[] = {
5093 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5096 int i, flags = e->flags;
/* Walk the set bits of FLAGS; bits beyond the named ones are printed
   as their raw bit index.  */
5100 for (i = 0; flags; i++)
5101 if (flags & (1 << i))
5107 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5108 fputs (bitnames[i], file);
5110 fprintf (file, "%d", i);
5118 /* Print out one basic block with live information at start and end. */
5128 fprintf (outf, ";; Basic block %d, loop depth %d",
5129 bb->index, bb->loop_depth - 1);
/* -1 means no exception-handling region boundary in this block.  */
5130 if (bb->eh_beg != -1 || bb->eh_end != -1)
5131 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5134 fputs (";; Predecessors: ", outf);
5135 for (e = bb->pred; e ; e = e->pred_next)
5136 dump_edge_info (outf, e, 0);
5139 fputs (";; Registers live at start:", outf);
5140 dump_regset (bb->global_live_at_start, outf);
/* Print every insn of the block, from head up to (and including) end.  */
5143 for (insn = bb->head, last = NEXT_INSN (bb->end);
5145 insn = NEXT_INSN (insn))
5146 print_rtl_single (outf, insn);
5148 fputs (";; Registers live at end:", outf);
5149 dump_regset (bb->global_live_at_end, outf);
5152 fputs (";; Successors: ", outf);
5153 for (e = bb->succ; e; e = e->succ_next)
5154 dump_edge_info (outf, e, 1);
/* Debugging entry points: dump block BB, or block number N, to stderr.  */
5162 dump_bb (bb, stderr);
5169 dump_bb (BASIC_BLOCK(n), stderr);
5172 /* Like print_rtl, but also print out live information for the start of each
   basic block.  */
5176 print_rtl_with_bb (outf, rtx_first)
5180 register rtx tmp_rtx;
5183 fprintf (outf, "(nil)\n");
5187 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5188 int max_uid = get_max_uid ();
/* START[uid] / END[uid] map an insn uid to the block it begins/ends;
   IN_BB_P[uid] tracks whether the insn belongs to zero, one, or
   several basic blocks.  All are indexed by INSN_UID.  */
5189 basic_block *start = (basic_block *)
5190 xcalloc (max_uid, sizeof (basic_block));
5191 basic_block *end = (basic_block *)
5192 xcalloc (max_uid, sizeof (basic_block));
5193 enum bb_state *in_bb_p = (enum bb_state *)
5194 xcalloc (max_uid, sizeof (enum bb_state));
/* First pass: record block membership for every insn of every block.  */
5196 for (i = n_basic_blocks - 1; i >= 0; i--)
5198 basic_block bb = BASIC_BLOCK (i);
5201 start[INSN_UID (bb->head)] = bb;
5202 end[INSN_UID (bb->end)] = bb;
5203 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5205 enum bb_state state = IN_MULTIPLE_BB;
5206 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5208 in_bb_p[INSN_UID(x)] = state;
/* Second pass: walk the whole insn chain, annotating block starts,
   ends, and insns that fall outside (or inside multiple) blocks.  */
5215 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5220 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5222 fprintf (outf, ";; Start of basic block %d, registers live:",
5224 dump_regset (bb->global_live_at_start, outf);
/* Notes and barriers legitimately live outside basic blocks, so only
   complain about other insn kinds.  */
5228 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5229 && GET_CODE (tmp_rtx) != NOTE
5230 && GET_CODE (tmp_rtx) != BARRIER)
5231 fprintf (outf, ";; Insn is not within a basic block\n");
5232 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5233 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5235 did_output = print_rtl_single (outf, tmp_rtx);
5237 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5238 fprintf (outf, ";; End of basic block %d\n", bb->index);
/* Epilogue delay-slot insns are not on the main chain; print them too.  */
5249 if (current_function_epilogue_delay_list != 0)
5251 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5252 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5253 tmp_rtx = XEXP (tmp_rtx, 1))
5254 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5258 /* Compute dominator relationships using new flow graph structures.
   DOMINATORS and POST_DOMINATORS are sbitmap vectors indexed by block
   number; either may be null to skip that computation.  Both problems
   are solved by iterative intersection over a worklist (circular
   queue) until a fixed point is reached.  */
5260 compute_flow_dominators (dominators, post_dominators)
5261 sbitmap *dominators;
5262 sbitmap *post_dominators;
5265 sbitmap *temp_bitmap;
5267 basic_block *worklist, *workend, *qin, *qout;
5270 /* Allocate a worklist array/queue. Entries are only added to the
5271 list if they were not already on the list. So the size is
5272 bounded by the number of basic blocks. */
5273 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5274 workend = &worklist[n_basic_blocks];
5276 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5277 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5281 /* The optimistic setting of dominators requires us to put every
5282 block on the work list initially. */
5283 qin = qout = worklist;
5284 for (bb = 0; bb < n_basic_blocks; bb++)
5286 *qin++ = BASIC_BLOCK (bb);
/* The AUX field doubles as the "on the worklist" marker.  */
5287 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5289 qlen = n_basic_blocks;
5292 /* We want a maximal solution, so initially assume everything dominates
   everything else.  */
5294 sbitmap_vector_ones (dominators, n_basic_blocks);
5296 /* Mark successors of the entry block so we can identify them below. */
5297 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5298 e->dest->aux = ENTRY_BLOCK_PTR;
5300 /* Iterate until the worklist is empty. */
5303 /* Take the first entry off the worklist. */
5304 basic_block b = *qout++;
/* Wrap the circular queue's read pointer.
   NOTE(review): BB is presumably set to b->index on a line not
   visible in this excerpt — the code below indexes bitmaps by BB.  */
5305 if (qout >= workend)
5311 /* Compute the intersection of the dominators of all the
   predecessor blocks.
5314 If one of the predecessor blocks is the ENTRY block, then the
5315 intersection of the dominators of the predecessor blocks is
5316 defined as the null set. We can identify such blocks by the
5317 special value in the AUX field in the block structure. */
5318 if (b->aux == ENTRY_BLOCK_PTR)
5320 /* Do not clear the aux field for blocks which are
5321 successors of the ENTRY block. That way we never
5322 add them to the worklist again.
5324 The intersect of dominators of the preds of this block is
5325 defined as the null set. */
5326 sbitmap_zero (temp_bitmap[bb]);
5330 /* Clear the aux field of this block so it can be added to
5331 the worklist again if necessary. */
5333 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5336 /* Make sure each block always dominates itself. */
5337 SET_BIT (temp_bitmap[bb], bb);
5339 /* If the out state of this block changed, then we need to
5340 add the successors of this block to the worklist if they
5341 are not already on the worklist. */
5342 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5344 for (e = b->succ; e; e = e->succ_next)
5346 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
/* The post-dominator problem is the mirror image: intersect over
   successors, seeded from the EXIT block.  */
5360 if (post_dominators)
5362 /* The optimistic setting of dominators requires us to put every
5363 block on the work list initially. */
5364 qin = qout = worklist;
5365 for (bb = 0; bb < n_basic_blocks; bb++)
5367 *qin++ = BASIC_BLOCK (bb);
5368 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5370 qlen = n_basic_blocks;
5373 /* We want a maximal solution, so initially assume everything post
5374 dominates everything else. */
5375 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5377 /* Mark predecessors of the exit block so we can identify them below. */
5378 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5379 e->src->aux = EXIT_BLOCK_PTR;
5381 /* Iterate until the worklist is empty. */
5384 /* Take the first entry off the worklist. */
5385 basic_block b = *qout++;
5386 if (qout >= workend)
5392 /* Compute the intersection of the post dominators of all the
   successor blocks.
5395 If one of the successor blocks is the EXIT block, then the
5396 intersection of the dominators of the successor blocks is
5397 defined as the null set. We can identify such blocks by the
5398 special value in the AUX field in the block structure. */
5399 if (b->aux == EXIT_BLOCK_PTR)
5401 /* Do not clear the aux field for blocks which are
5402 predecessors of the EXIT block. That way we never
5403 add them to the worklist again.
5405 The intersect of dominators of the succs of this block is
5406 defined as the null set. */
5407 sbitmap_zero (temp_bitmap[bb]);
5411 /* Clear the aux field of this block so it can be added to
5412 the worklist again if necessary. */
5414 sbitmap_intersection_of_succs (temp_bitmap[bb],
5415 post_dominators, bb);
5418 /* Make sure each block always post dominates itself. */
5419 SET_BIT (temp_bitmap[bb], bb);
5421 /* If the out state of this block changed, then we need to
5422 add the successors of this block to the worklist if they
5423 are not already on the worklist. */
5424 if (sbitmap_a_and_b (post_dominators[bb],
5425 post_dominators[bb],
5428 for (e = b->pred; e; e = e->pred_next)
5430 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5448 /* Given DOMINATORS, compute the immediate dominators into IDOM.
   For each block B, start from dom(B) minus B itself, then subtract the
   dominators of every remaining dominator; exactly one bit survives —
   the immediate dominator — which is stored into IDOM[B].  */
5451 compute_immediate_dominators (idom, dominators)
5453 sbitmap *dominators;
5458 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5460 /* Begin with tmp(n) = dom(n) - { n }. */
5461 for (b = n_basic_blocks; --b >= 0; )
5463 sbitmap_copy (tmp[b], dominators[b]);
5464 RESET_BIT (tmp[b], b);
5467 /* Subtract out all of our dominator's dominators. */
5468 for (b = n_basic_blocks; --b >= 0; )
5470 sbitmap tmp_b = tmp[b];
5473 for (s = n_basic_blocks; --s >= 0; )
5474 if (TEST_BIT (tmp_b, s))
5475 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5478 /* Find the one bit set in the bitmap and put it in the output array. */
5479 for (b = n_basic_blocks; --b >= 0; )
5482 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5485 sbitmap_vector_free (tmp);
5488 /* Count for a single SET rtx, X.  Bumps REG_N_SETS and REG_N_REFS
   (weighted by loop depth) for the pseudo register set by X.  */
5491 count_reg_sets_1 (x)
5495 register rtx reg = SET_DEST (x);
5497 /* Find the register that's set/clobbered. */
5498 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5499 || GET_CODE (reg) == SIGN_EXTRACT
5500 || GET_CODE (reg) == STRICT_LOW_PART)
5501 reg = XEXP (reg, 0);
/* A BLKmode PARALLEL destination sets several registers at once;
   recurse on each element.  */
5503 if (GET_CODE (reg) == PARALLEL
5504 && GET_MODE (reg) == BLKmode)
5507 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5508 count_reg_sets_1 (XVECEXP (reg, 0, i));
/* Only pseudo registers are tracked; hard regs are ignored here.  */
5512 if (GET_CODE (reg) == REG)
5514 regno = REGNO (reg);
5515 if (regno >= FIRST_PSEUDO_REGISTER)
5517 /* Count (weighted) references, stores, etc. This counts a
5518 register twice if it is modified, but that is correct. */
5519 REG_N_SETS (regno)++;
5520 REG_N_REFS (regno) += loop_depth + 1;
5525 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5526 REG_N_REFS by the current loop depth for each SET or CLOBBER found.
   X is an insn pattern; a PARALLEL is scanned one element at a time.  */
5532 register RTX_CODE code = GET_CODE (x);
5534 if (code == SET || code == CLOBBER)
5535 count_reg_sets_1 (x);
5536 else if (code == PARALLEL)
5539 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5541 code = GET_CODE (XVECEXP (x, 0, i));
5542 if (code == SET || code == CLOBBER)
5543 count_reg_sets_1 (XVECEXP (x, 0, i));
5548 /* Increment REG_N_REFS by the current loop depth each register reference
   found in X.  A stripped-down analogue of mark_used_regs that only
   updates reference counts.  */
5552 count_reg_references (x)
5555 register RTX_CODE code;
5558 code = GET_CODE (x);
5578 /* If we are clobbering a MEM, mark any registers inside the address
   as being used.  */
5580 if (GET_CODE (XEXP (x, 0)) == MEM)
5581 count_reg_references (XEXP (XEXP (x, 0), 0));
5585 /* While we're here, optimize this case. */
5588 /* In case the SUBREG is not of a register, don't optimize */
5589 if (GET_CODE (x) != REG)
5591 count_reg_references (x);
5595 /* ... fall through ... */
/* Count one (loop-depth-weighted) reference to a pseudo register.  */
5598 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5599 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5604 register rtx testreg = SET_DEST (x);
5607 /* If storing into MEM, don't show it as being used. But do
5608 show the address as being used. */
5609 if (GET_CODE (testreg) == MEM)
5611 count_reg_references (XEXP (testreg, 0));
5612 count_reg_references (SET_SRC (x));
5616 /* Storing in STRICT_LOW_PART is like storing in a reg
5617 in that this SET might be dead, so ignore it in TESTREG.
5618 but in some other ways it is like using the reg.
5620 Storing in a SUBREG or a bit field is like storing the entire
5621 register in that if the register's value is not used
5622 then this SET is not needed. */
5623 while (GET_CODE (testreg) == STRICT_LOW_PART
5624 || GET_CODE (testreg) == ZERO_EXTRACT
5625 || GET_CODE (testreg) == SIGN_EXTRACT
5626 || GET_CODE (testreg) == SUBREG)
5628 /* Modifying a single register in an alternate mode
5629 does not use any of the old value. But these other
5630 ways of storing in a register do use the old value. */
5631 if (GET_CODE (testreg) == SUBREG
5632 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5637 testreg = XEXP (testreg, 0);
5640 /* If this is a store into a register,
5641 recursively scan the value being stored. */
5643 if ((GET_CODE (testreg) == PARALLEL
5644 && GET_MODE (testreg) == BLKmode)
5645 || GET_CODE (testreg) == REG)
5647 count_reg_references (SET_SRC (x));
5649 count_reg_references (SET_DEST (x));
5659 /* Recursively scan the operands of this expression. */
5662 register const char *fmt = GET_RTX_FORMAT (code);
5665 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5669 /* Tail recursive case: save a function call level. */
5675 count_reg_references (XEXP (x, i));
5677 else if (fmt[i] == 'E')
5680 for (j = 0; j < XVECLEN (x, i); j++)
5681 count_reg_references (XVECEXP (x, i, j));
5687 /* Recompute register set/reference counts immediately prior to register
   allocation.
5690 This avoids problems with set/reference counts changing to/from values
5691 which have special meanings to the register allocators.
5693 Additionally, the reference counts are the primary component used by the
5694 register allocators to prioritize pseudos for allocation to hard regs.
5695 More accurate reference counts generally lead to better register allocation.
5697 F is the first insn to be scanned.
5699 LOOP_STEP denotes how much loop_depth should be incremented per
5700 loop nesting level in order to increase the ref count more for
5701 references in a loop.
5703 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5704 possibly other information which is used by the register allocators. */
5707 recompute_reg_usage (f, loop_step)
5708 rtx f ATTRIBUTE_UNUSED;
5709 int loop_step ATTRIBUTE_UNUSED;
5715 /* Clear out the old data. */
5716 max_reg = max_reg_num ();
5717 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5723 /* Scan each insn in the chain and count how many times each register is
   set/used.  */
5725 for (index = 0; index < n_basic_blocks; index++)
5727 basic_block bb = BASIC_BLOCK (index);
/* The per-block loop depth weights references inside loops heavier.  */
5728 loop_depth = bb->loop_depth;
5729 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5731 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5735 /* This call will increment REG_N_SETS for each SET or CLOBBER
5736 of a register in INSN. It will also increment REG_N_REFS
5737 by the loop depth for each set of a register in INSN. */
5738 count_reg_sets (PATTERN (insn));
5740 /* count_reg_sets does not detect autoincrement address modes, so
5741 detect them here by looking at the notes attached to INSN. */
5742 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5744 if (REG_NOTE_KIND (links) == REG_INC)
5745 /* Count (weighted) references, stores, etc. This counts a
5746 register twice if it is modified, but that is correct. */
5747 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5750 /* This call will increment REG_N_REFS by the current loop depth for
5751 each reference to a register in INSN. */
5752 count_reg_references (PATTERN (insn));
5754 /* count_reg_references will not include counts for arguments to
5755 function calls, so detect them here by examining the
5756 CALL_INSN_FUNCTION_USAGE data. */
5757 if (GET_CODE (insn) == CALL_INSN)
5761 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5763 note = XEXP (note, 1))
5764 if (GET_CODE (XEXP (note, 0)) == USE)
5765 count_reg_references (XEXP (XEXP (note, 0), 0));
/* Stop after the block's last insn.  */
5768 if (insn == bb->end)
5774 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5775 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5776 of the number of registers that died.  When KILL is nonzero the notes
   are unlinked and freed; otherwise they are only counted.  */
5779 count_or_remove_death_notes (blocks, kill)
5785 for (i = n_basic_blocks - 1; i >= 0; --i)
/* Skip blocks not selected by BLOCKS.  */
5790 if (blocks && ! TEST_BIT (blocks, i))
5793 bb = BASIC_BLOCK (i);
5795 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5797 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
/* PPREV trails LINK so a removed note can be unlinked in place.
   (Fixed: "&REG_NOTES" had been mangled into the "registered sign"
   character by an HTML-entity corruption.)  */
5799 rtx *pprev = &REG_NOTES (insn);
5804 switch (REG_NOTE_KIND (link))
5807 if (GET_CODE (XEXP (link, 0)) == REG)
5809 rtx reg = XEXP (link, 0);
/* A pseudo counts as one death; a hard reg counts one per word.  */
5812 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5815 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
/* KILL mode: splice the note out of the list and free it.  */
5823 rtx next = XEXP (link, 1);
5824 free_EXPR_LIST_node (link);
5825 *pprev = link = next;
5831 pprev = &XEXP (link, 1);
5838 if (insn == bb->end)
5846 /* Record INSN's block as BB, growing the insn-to-block map on demand.  */
5849 set_block_for_insn (insn, bb)
5853 size_t uid = INSN_UID (insn);
5854 if (uid >= basic_block_for_insn->num_elements)
5858 /* Add one-eighth the size so we don't keep calling xrealloc. */
5859 new_size = uid + (uid + 7) / 8;
5861 VARRAY_GROW (basic_block_for_insn, new_size);
5863 VARRAY_BB (basic_block_for_insn, uid) = bb;
5866 /* Record INSN's block number as BB. */
5867 /* ??? This has got to go. */
5870 set_block_num (insn, bb)
/* Thin wrapper: translate the block number to its structure.  */
5874 set_block_for_insn (insn, BASIC_BLOCK (bb));
5877 /* Verify the CFG consistency. This function check some CFG invariants and
5878 aborts when something is wrong. Hope that this function will help to
5879 convert many optimization passes to preserve CFG consistency.
5881 Currently it does following checks:
5883 - test head/end pointers
5884 - overlapping of basic blocks
5885 - edge list correctness
5886 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5887 - tails of basic blocks (ensure that boundary is necessary)
5888 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5889 and NOTE_INSN_BASIC_BLOCK
5890 - check that all insns are in the basic blocks
5891 (except the switch handling code, barriers and notes)
5892 - check that all returns are followed by barriers
5894 In future it can be extended check a lot of other stuff as well
5895 (reachability of basic blocks, life information, etc. etc.). */
5900 const int max_uid = get_max_uid ();
5901 const rtx rtx_first = get_insns ();
5902 basic_block *bb_info;
/* BB_INFO maps insn uid -> containing block; filled in the first pass
   and consulted by the later "insn outside any block" check.  */
5906 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5908 /* First pass check head/end pointers and set bb_info array used by
   later passes.  */
5910 for (i = n_basic_blocks - 1; i >= 0; i--)
5912 basic_block bb = BASIC_BLOCK (i);
5914 /* Check the head pointer and make sure that it is pointing into
   the insn stream.  */
5916 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5921 error ("Head insn %d for block %d not found in the insn stream.",
5922 INSN_UID (bb->head), bb->index);
5926 /* Check the end pointer and make sure that it is pointing into
   the insn stream; also detect insns claimed by two blocks.  */
5928 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5930 if (bb_info[INSN_UID (x)] != NULL)
5932 error ("Insn %d is in multiple basic blocks (%d and %d)",
5933 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5936 bb_info[INSN_UID (x)] = bb;
5943 error ("End insn %d for block %d not found in the insn stream.",
5944 INSN_UID (bb->end), bb->index);
5949 /* Now check the basic blocks (boundaries etc.) */
5950 for (i = n_basic_blocks - 1; i >= 0; i--)
5952 basic_block bb = BASIC_BLOCK (i);
5953 /* Check correctness of edge lists: every successor edge must name
   this block as its source, and appear on its dest's pred list.  */
5961 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5963 fprintf (stderr, "Predecessor: ");
5964 dump_edge_info (stderr, e, 0);
5965 fprintf (stderr, "\nSuccessor: ");
5966 dump_edge_info (stderr, e, 1);
5970 if (e->dest != EXIT_BLOCK_PTR)
5972 edge e2 = e->dest->pred;
5973 while (e2 && e2 != e)
5977 error ("Basic block %i edge lists are corrupted", bb->index);
/* Symmetric check for the predecessor edges.  */
5989 error ("Basic block %d pred edge is corrupted", bb->index);
5990 fputs ("Predecessor: ", stderr);
5991 dump_edge_info (stderr, e, 0);
5992 fputs ("\nSuccessor: ", stderr);
5993 dump_edge_info (stderr, e, 1);
5994 fputc ('\n', stderr);
5997 if (e->src != ENTRY_BLOCK_PTR)
5999 edge e2 = e->src->succ;
6000 while (e2 && e2 != e)
6004 error ("Basic block %i edge lists are corrupted", bb->index);
6011 /* OK pointers are correct. Now check the header of basic
6012 block. It ought to contain optional CODE_LABEL followed
6013 by NOTE_BASIC_BLOCK. */
6015 if (GET_CODE (x) == CODE_LABEL)
6019 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6025 if (GET_CODE (x) != NOTE
6026 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6027 || NOTE_BASIC_BLOCK (x) != bb)
6029 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6036 /* Do checks for empty blocks here */
/* Scan the body: a basic-block note or a flow-control insn in the
   middle of a block indicates corruption.  */
6043 if (GET_CODE (x) == NOTE
6044 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6046 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6047 INSN_UID (x), bb->index);
6054 if (GET_CODE (x) == JUMP_INSN
6055 || GET_CODE (x) == CODE_LABEL
6056 || GET_CODE (x) == BARRIER)
6058 error ("In basic block %d:", bb->index);
6059 fatal_insn ("Flow control insn inside a basic block", x);
/* Finally, check insns that belong to no basic block.  */
6070 if (!bb_info[INSN_UID (x)])
6072 switch (GET_CODE (x))
6079 /* An addr_vec is placed outside any basic block. */
6081 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6082 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6083 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6088 /* But in any case, non-deletable labels can appear anywhere. */
6092 fatal_insn ("Insn outside basic block", x);
6096 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6097 && GET_CODE (x) == JUMP_INSN
6098 && returnjump_p (x) && ! condjump_p (x)
6099 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6100 fatal_insn ("Return not followed by barrier", x);
6112 /* Functions to access an edge list with a vector representation.
6113 Enough data is kept such that given an index number, the
6114 pred and succ that edge represents can be determined, or
6115 given a pred and a succ, its index number can be returned.
6116 This allows algorithms which consume a lot of memory to
6117 represent the normally full matrix of edge (pred,succ) with a
6118 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6119 wasted space in the client code due to sparse flow graphs. */
6121 /* This function initializes the edge list. Basically the entire
6122 flowgraph is processed, and all edges are assigned a number,
6123 and the data structure is filled in. */
6127 struct edge_list *elist;
/* +2 accounts for the ENTRY and EXIT pseudo-blocks.  */
6133 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6137 /* Determine the number of edges in the flow graph by counting successor
6138 edges on each basic block. */
6139 for (x = 0; x < n_basic_blocks; x++)
6141 basic_block bb = BASIC_BLOCK (x);
6143 for (e = bb->succ; e; e = e->succ_next)
6146 /* Don't forget successors of the entry block. */
6147 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6150 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6151 elist->num_blocks = block_count;
6152 elist->num_edges = num_edges;
6153 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6157 /* Follow successors of the entry block, and register these edges. */
6158 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6160 elist->index_to_edge[num_edges] = e;
6164 for (x = 0; x < n_basic_blocks; x++)
6166 basic_block bb = BASIC_BLOCK (x);
6168 /* Follow all successors of blocks, and register these edges. */
6169 for (e = bb->succ; e; e = e->succ_next)
6171 elist->index_to_edge[num_edges] = e;
6178 /* This function frees memory associated with an edge list.  */
6180 free_edge_list (elist)
6181 struct edge_list *elist;
6185 free (elist->index_to_edge);
6190 /* This function provides debug output showing an edge list.
   Each edge is printed as "index - edge(pred,succ)" with the entry and
   exit blocks spelled out by name.  */
6192 print_edge_list (f, elist)
6194 struct edge_list *elist;
6197 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6198 elist->num_blocks - 2, elist->num_edges);
6200 for (x = 0; x < elist->num_edges; x++)
6202 fprintf (f, " %-4d - edge(", x);
6203 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6204 fprintf (f,"entry,");
6206 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6208 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6209 fprintf (f,"exit)\n");
6211 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6215 /* This function provides an internal consistency check of an edge list,
6216 verifying that all edges are present, and that there are no
   spurious edges.  Problems are reported to stream F.  */
6219 verify_edge_list (f, elist)
6221 struct edge_list *elist;
6223 int x, pred, succ, index;
/* Forward check: every CFG edge must have an index, and the index must
   map back to the same (pred, succ) pair.  */
6226 for (x = 0; x < n_basic_blocks; x++)
6228 basic_block bb = BASIC_BLOCK (x);
6230 for (e = bb->succ; e; e = e->succ_next)
6232 pred = e->src->index;
6233 succ = e->dest->index;
6234 index = EDGE_INDEX (elist, e->src, e->dest);
6235 if (index == EDGE_INDEX_NO_EDGE)
6237 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6240 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6241 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6242 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6243 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6244 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6245 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
/* Same check for edges leaving the ENTRY block.  */
6248 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6250 pred = e->src->index;
6251 succ = e->dest->index;
6252 index = EDGE_INDEX (elist, e->src, e->dest);
6253 if (index == EDGE_INDEX_NO_EDGE)
6255 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6258 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6259 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6260 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6261 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6262 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6263 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6265 /* We've verified that all the edges are in the list, now let's make sure
6266 there are no spurious edges in the list. */
/* Reverse check over every (pred, succ) pair: the list must have an
   index exactly when the CFG actually contains that edge.  */
6268 for (pred = 0 ; pred < n_basic_blocks; pred++)
6269 for (succ = 0 ; succ < n_basic_blocks; succ++)
6271 basic_block p = BASIC_BLOCK (pred);
6272 basic_block s = BASIC_BLOCK (succ);
6276 for (e = p->succ; e; e = e->succ_next)
6282 for (e = s->pred; e; e = e->pred_next)
6288 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6289 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6290 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6292 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6293 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6294 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6295 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6296 BASIC_BLOCK (succ)));
/* Pairs with the ENTRY block as predecessor.  */
6298 for (succ = 0 ; succ < n_basic_blocks; succ++)
6300 basic_block p = ENTRY_BLOCK_PTR;
6301 basic_block s = BASIC_BLOCK (succ);
6305 for (e = p->succ; e; e = e->succ_next)
6311 for (e = s->pred; e; e = e->pred_next)
6317 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6318 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6319 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6321 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6322 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6323 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6324 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6325 BASIC_BLOCK (succ)));
/* Pairs with the EXIT block as successor.  */
6327 for (pred = 0 ; pred < n_basic_blocks; pred++)
6329 basic_block p = BASIC_BLOCK (pred);
6330 basic_block s = EXIT_BLOCK_PTR;
6334 for (e = p->succ; e; e = e->succ_next)
6340 for (e = s->pred; e; e = e->pred_next)
6346 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6347 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6348 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6350 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6351 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6352 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6353 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6358 /* This routine will determine what, if any, edge there is between
6359 a specified predecessor and successor.
   Linear scan of the edge list; returns EDGE_INDEX_NO_EDGE if absent.  */
6362 find_edge_index (edge_list, pred, succ)
6363 struct edge_list *edge_list;
6364 basic_block pred, succ;
6367 for (x = 0; x < NUM_EDGES (edge_list); x++)
6369 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6370 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6373 return (EDGE_INDEX_NO_EDGE);
6376 /* This function will remove an edge from the flow graph, unlinking it
   from both its source's successor list and its dest's predecessor
   list.  */
6381 edge last_pred = NULL;
6382 edge last_succ = NULL;
6384 basic_block src, dest;
/* Find E's position in the source's successor list ...  */
6387 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6393 last_succ->succ_next = e->succ_next;
6395 src->succ = e->succ_next;
/* ... and in the destination's predecessor list.  */
6397 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6403 last_pred->pred_next = e->pred_next;
6405 dest->pred = e->pred_next;
6411 /* This routine will remove any fake successor edges for a basic block.
6412 When the edge is removed, it is also removed from whatever predecessor
   list it is on.  */
6415 remove_fake_successors (bb)
6419 for (e = bb->succ; e ; )
6423 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6428 /* This routine will remove all fake edges from the flow graph. If
6429 we remove all fake successors, it will automatically remove all
6430 fake predecessors. */
6432 remove_fake_edges ()
6436 for (x = 0; x < n_basic_blocks; x++)
6437 remove_fake_successors (BASIC_BLOCK (x));
6439 /* We've handled all successors except the entry block's. */
6440 remove_fake_successors (ENTRY_BLOCK_PTR);
6443 /* This function will add a fake edge between any block which has no
6444 successors, and the exit block. Some data flow equations require these
   edges to exist.  */
6447 add_noreturn_fake_exit_edges ()
6451 for (x = 0; x < n_basic_blocks; x++)
6452 if (BASIC_BLOCK (x)->succ == NULL)
6453 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6456 /* Dump the list of basic blocks in the bitmap NODES to FILE, prefixed
   by the string STR and enclosed in braces.  */
6458 flow_nodes_print (str, nodes, file)
6460 const sbitmap nodes;
6465 fprintf (file, "%s { ", str);
6466 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6467 fputs ("}\n", file);
6471 /* Dump the list of exiting edges in the array EDGES (NUM_EDGES long)
   to FILE as "src->dest" pairs, prefixed by STR.  */
6473 flow_exits_print (str, edges, num_edges, file)
6481 fprintf (file, "%s { ", str);
6482 for (i = 0; i < num_edges; i++)
6483 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6484 fputs ("}\n", file);
6488 /* Dump loop related CFG information: each block's successors and
   dominator set, plus the DFS node order, from LOOPS to FILE.  */
6490 flow_loops_cfg_dump (loops, file)
6491 const struct loops *loops;
/* Nothing to print without loops, a stream, or dominator info.  */
6496 if (! loops->num || ! file || ! loops->cfg.dom)
6499 for (i = 0; i < n_basic_blocks; i++)
6503 fprintf (file, ";; %d succs { ", i);
6504 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6505 fprintf (file, "%d ", succ->dest->index);
6506 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6510 /* Dump the DFS node order. */
6511 if (loops->cfg.dfs_order)
6513 fputs (";; DFS order: ", file);
6514 for (i = 0; i < n_basic_blocks; i++)
6515 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6521 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6523 flow_loop_nested_p (outer, loop)
/* Nesting reduces to a bitmap subset test on the loops' node sets.  */
6527 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6531 /* Dump the loop information specified by LOOPS to the stream FILE. */
6533 flow_loops_dump (loops, file, verbose)
6534 const struct loops *loops;
6541 num_loops = loops->num;
6542 if (! num_loops || ! file)
6545 fprintf (file, ";; %d loops found, %d levels\n",
6546 num_loops, loops->levels);
/* Per-loop summary: header/latch insns and indices, pre-header (or -1),
   depth, level, and the index of the enclosing loop (-1 if outermost).  */
6548 for (i = 0; i < num_loops; i++)
6550 struct loop *loop = &loops->array[i];
6552 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6553 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6554 loop->header->index, loop->latch->index,
6555 loop->pre_header ? loop->pre_header->index : -1,
6556 loop->depth, loop->level,
6557 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6558 fprintf (file, ";; %d", loop->num_nodes);
6559 flow_nodes_print (" nodes", loop->nodes, file);
6560 fprintf (file, ";; %d", loop->num_exits);
6561 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
/* Compare this loop against all previously printed loops to report
   shared headers.  */
6567 for (j = 0; j < i; j++)
6569 struct loop *oloop = &loops->array[j];
6571 if (loop->header == oloop->header)
6576 smaller = loop->num_nodes < oloop->num_nodes;
6578 /* If the union of LOOP and OLOOP is different than
6579 the larger of LOOP and OLOOP then LOOP and OLOOP
6580 must be disjoint. */
/* Test whether the smaller loop's nodes are contained in the larger's;
   if not, the two loops sharing a header are disjoint.  */
6581 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6582 smaller ? oloop : loop);
6583 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6584 loop->header->index, i, j,
6585 disjoint ? "disjoint" : "nested");
6592 /* Print diagnostics to compare our concept of a loop with
6593 what the loop notes say. */
/* A well-formed loop should be bracketed by NOTE_INSN_LOOP_BEG just
   before its first block and NOTE_INSN_LOOP_END just after its last.  */
6594 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6595 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6596 != NOTE_INSN_LOOP_BEG
6597 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6598 INSN_UID (PREV_INSN (loop->first->head)));
6599 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6600 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6601 != NOTE_INSN_LOOP_END
6602 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6603 INSN_UID (NEXT_INSN (loop->last->end)));
/* Finish with the CFG-level dump (successors, dominators, DFS order).  */
6608 flow_loops_cfg_dump (loops, file);
6612 /* Free all the memory allocated for LOOPS. */
6614 flow_loops_free (loops)
6615 struct loops *loops;
6624 /* Free the loop descriptors. */
6625 for (i = 0; i < loops->num; i++)
6627 struct loop *loop = &loops->array[i];
/* Each loop owns its node bitmap (and, presumably, its exits array —
   the corresponding free is not visible in this extraction).  */
6630 sbitmap_free (loop->nodes);
6634 free (loops->array);
6635 loops->array = NULL;
/* Release the cached CFG-derived data saved by flow_loops_find.  */
6638 sbitmap_vector_free (loops->cfg.dom);
6639 if (loops->cfg.dfs_order)
6640 free (loops->cfg.dfs_order);
6642 sbitmap_free (loops->shared_headers);
6647 /* Find the exits from the loop using the bitmap of loop nodes NODES
6648 and store in EXITS array. Return the number of exits from the
6651 flow_loop_exits_find (nodes, exits)
6652 const sbitmap nodes;
6661 /* Check all nodes within the loop to see if there are any
6662 successors not in the loop. Note that a node may have multiple
/* First pass: count exiting edges (an edge exits if its destination is
   EXIT_BLOCK_PTR or a block outside NODES).  */
6665 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6666 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6668 basic_block dest = e->dest;
6670 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
/* NOTE(review): sizeof (edge *) is used here; this is only correct
   because `edge' is itself a pointer typedef, so the sizes coincide —
   `sizeof (edge)' would state the intent.  Confirm against edge's
   declaration.  */
6678 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6680 /* Store all exiting edges into an array. */
/* Second pass repeats the same test and records the edges.  NUM_EXITS
   is presumably reset to zero before this pass (reset not visible in
   this extraction) and is re-counted as the array index.  */
6682 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6683 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6685 basic_block dest = e->dest;
6687 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6688 (*exits)[num_exits++] = e;
6696 /* Find the nodes contained within the loop with header HEADER and
6697 latch LATCH and store in NODES. Return the number of nodes within
6700 flow_loop_nodes_find (header, latch, nodes)
/* Worklist for the backward reachability walk from the latch; bounded
   by the number of basic blocks.  */
6709 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6712 /* Start with only the loop header in the set of loop nodes. */
6713 sbitmap_zero (nodes);
6714 SET_BIT (nodes, header->index);
/* Side effect: loop_depth of every block placed in NODES is bumped;
   callers rely on this to accumulate nesting depth across loops.  */
6716 header->loop_depth++;
6718 /* Push the loop latch on to the stack. */
/* The test guards the degenerate self-loop case where latch == header
   (already marked above).  */
6719 if (! TEST_BIT (nodes, latch->index))
6721 SET_BIT (nodes, latch->index);
6722 latch->loop_depth++;
6724 stack[sp++] = latch;
/* Drain the stack: every predecessor of a loop node that is not the
   ENTRY block and not yet marked also belongs to the loop.  */
6733 for (e = node->pred; e; e = e->pred_next)
6735 basic_block ancestor = e->src;
6737 /* If each ancestor not marked as part of loop, add to set of
6738 loop nodes and push on to stack. */
6739 if (ancestor != ENTRY_BLOCK_PTR
6740 && ! TEST_BIT (nodes, ancestor->index))
6742 SET_BIT (nodes, ancestor->index);
6743 ancestor->loop_depth++;
6745 stack[sp++] = ancestor;
6754 /* Compute the depth first search order and store in the array
6755 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6756 number of nodes visited. */
6758 flow_depth_first_order_compute (dfs_order)
6767 /* Allocate stack for back-tracking up CFG. */
6768 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6771 /* Allocate bitmap to track nodes that have been visited. */
6772 visited = sbitmap_alloc (n_basic_blocks);
6774 /* None of the nodes in the CFG have been visited yet. */
6775 sbitmap_zero (visited);
6777 /* Start with the first successor edge from the entry block. */
6778 e = ENTRY_BLOCK_PTR->succ;
6781 basic_block src = e->src;
6782 basic_block dest = e->dest;
6784 /* Mark that we have visited this node. */
6785 if (src != ENTRY_BLOCK_PTR)
6786 SET_BIT (visited, src->index);
6788 /* If this node has not been visited before, push the current
6789 edge on to the stack and proceed with the first successor
6790 edge of this node. */
6791 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
/* NOTE(review): this condition appears twice; the extraction dropped
   the distinguishing continuation (presumably one branch requires DEST
   to have successors and the other handles the successor-less case
   below) — confirm against the full source.  */
6799 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6802 /* DEST has no successors (for example, a non-returning
6803 function is called) so do not push the current edge
6804 but carry on with its next successor. */
/* Numbers are assigned at node-completion time counting down from
   n_basic_blocks, so dfs_order holds a reverse-completion ordering.  */
6805 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6806 SET_BIT (visited, dest->index);
/* Backtrack: when SRC has no further successor edges, number SRC and
   pop the saved edge off the stack.  */
6809 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6811 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6813 /* Pop edge off stack. */
6821 sbitmap_free (visited);
6823 /* The number of nodes visited should not be greater than
6825 if (dfsnum > n_basic_blocks)
6828 /* There are some nodes left in the CFG that are unreachable. */
6829 if (dfsnum < n_basic_blocks)
6835 /* Return the block for the pre-header of the loop with header
6836 HEADER where DOM specifies the dominator information. Return NULL if
6837 there is no pre-header. */
6839 flow_loop_pre_header_find (header, dom)
6843 basic_block pre_header;
6846 /* If block p is a predecessor of the header and is the only block
6847 that the header does not dominate, then it is the pre-header. */
/* Scan header's predecessors for edges entering from outside the loop
   (i.e. from blocks the header does not dominate).  */
6849 for (e = header->pred; e; e = e->pred_next)
6851 basic_block node = e->src;
6853 if (node != ENTRY_BLOCK_PTR
6854 && ! TEST_BIT (dom[node->index], header->index))
/* First outside predecessor found becomes the candidate pre-header;
   a second one disqualifies the loop from having a pre-header.  */
6856 if (pre_header == NULL)
6860 /* There are multiple edges into the header from outside
6861 the loop so there is no pre-header block. */
6871 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6872 previously added. The insertion algorithm assumes that the loops
6873 are added in the order found by a depth first search of the CFG. */
6875 flow_loop_tree_node_add (prevloop, loop)
6876 struct loop *prevloop;
/* Case 1: LOOP nests inside the previously added loop — it becomes
   PREVLOOP's first inner child.  */
6880 if (flow_loop_nested_p (prevloop, loop))
6882 prevloop->inner = loop;
6883 loop->outer = prevloop;
/* Case 2: climb outward from PREVLOOP until an ancestor encloses LOOP;
   LOOP becomes a sibling at that level.  */
6887 while (prevloop->outer)
6889 if (flow_loop_nested_p (prevloop->outer, loop))
6891 prevloop->next = loop;
6892 loop->outer = prevloop->outer;
6895 prevloop = prevloop->outer;
/* Case 3: no enclosing ancestor — LOOP is a new top-level sibling.  */
6898 prevloop->next = loop;
6903 /* Build the loop hierarchy tree for LOOPS. */
6905 flow_loops_tree_build (loops)
6906 struct loops *loops;
6911 num_loops = loops->num;
6915 /* Root the loop hierarchy tree with the first loop found.
6916 Since we used a depth first search this should be the
6918 loops->tree = &loops->array[0];
6919 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6921 /* Add the remaining loops to the tree. */
/* Insertion order matters: flow_loop_tree_node_add assumes loops are
   added in the DFS discovery order (see its header comment).  */
6922 for (i = 1; i < num_loops; i++)
6923 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6927 /* Helper function to compute loop nesting depth and enclosed loop level
6928 for the natural loop specified by LOOP at the loop depth DEPTH.
6929 Returns the loop level. */
6931 flow_loop_level_compute (loop, depth)
6941 /* Traverse loop tree assigning depth and computing level as the
6942 maximum level of all the inner loops of this loop. The loop
6943 level is equivalent to the height of the loop in the loop tree
6944 and corresponds to the number of enclosed loop levels (including
/* Recurse over direct children; each child contributes its own level
   plus one for the extra nesting step.  */
6946 for (inner = loop->inner; inner; inner = inner->next)
6950 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
/* Record the results on the loop descriptor itself.  */
6955 loop->level = level;
6956 loop->depth = depth;
6961 /* Compute the loop nesting depth and enclosed loop level for the loop
6962 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
6966 flow_loops_level_compute (loops)
6967 struct loops *loops;
6973 /* Traverse all the outer level loops. */
/* Top-level loops start at depth 1; LEVEL presumably accumulates the
   maximum over the siblings (accumulation line not visible here).  */
6974 for (loop = loops->tree; loop; loop = loop->next)
6976 level = flow_loop_level_compute (loop, 1);
6984 /* Find all the natural loops in the function and save in LOOPS structure
6985 and recalculate loop_depth information in basic block structures.
6986 Return the number of natural loops found. */
6989 flow_loops_find (loops)
6990 struct loops *loops;
7001 loops->array = NULL;
7005 /* Taking care of this degenerate case makes the rest of
7006 this code simpler. */
7007 if (n_basic_blocks == 0)
7010 /* Compute the dominators. */
7011 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7012 compute_flow_dominators (dom, NULL);
7014 /* Count the number of loop edges (back edges). This should be the
7015 same as the number of natural loops. Also clear the loop_depth
7016 and as we work from inner->outer in a loop nest we call
7017 find_loop_nodes_find which will increment loop_depth for nodes
7018 within the current loop, which happens to enclose inner loops. */
7021 for (b = 0; b < n_basic_blocks; b++)
7023 BASIC_BLOCK (b)->loop_depth = 0;
7024 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7026 basic_block latch = e->src;
7028 /* Look for back edges where a predecessor is dominated
7029 by this block. A natural loop has a single entry
7030 node (header) that dominates all the nodes in the
7031 loop. It also has single back edge to the header
7032 from a latch node. Note that multiple natural loops
7033 may share the same header. */
7034 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7041 /* Compute depth first search order of the CFG so that outer
7042 natural loops will be found before inner natural loops. */
7043 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7044 flow_depth_first_order_compute (dfs_order);
7046 /* Allocate loop structures. */
/* xcalloc zero-fills, so all loop fields start out NULL/0.  */
7048 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
/* HEADERS marks blocks already seen as a loop header; a second hit
   marks the block in SHARED_HEADERS.  */
7050 headers = sbitmap_alloc (n_basic_blocks);
7051 sbitmap_zero (headers);
7053 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7054 sbitmap_zero (loops->shared_headers);
7056 /* Find and record information about all the natural loops
7059 for (b = 0; b < n_basic_blocks; b++)
7063 /* Search the nodes of the CFG in DFS order that we can find
7064 outer loops first. */
7065 header = BASIC_BLOCK (dfs_order[b]);
7067 /* Look for all the possible latch blocks for this header. */
7068 for (e = header->pred; e; e = e->pred_next)
7070 basic_block latch = e->src;
7072 /* Look for back edges where a predecessor is dominated
7073 by this block. A natural loop has a single entry
7074 node (header) that dominates all the nodes in the
7075 loop. It also has single back edge to the header
7076 from a latch node. Note that multiple natural loops
7077 may share the same header. */
7078 if (latch != ENTRY_BLOCK_PTR
7079 && TEST_BIT (dom[latch->index], header->index))
/* One loop descriptor per back edge; NUM_LOOPS indexes the next
   free slot (its increment is not visible in this extraction).  */
7083 loop = loops->array + num_loops;
7085 loop->header = header;
7086 loop->latch = latch;
7088 /* Keep track of blocks that are loop headers so
7089 that we can tell which loops should be merged. */
7090 if (TEST_BIT (headers, header->index))
7091 SET_BIT (loops->shared_headers, header->index);
7092 SET_BIT (headers, header->index);
7094 /* Find nodes contained within the loop. */
7095 loop->nodes = sbitmap_alloc (n_basic_blocks)
7097 = flow_loop_nodes_find (header, latch, loop->nodes);
7099 /* Compute first and last blocks within the loop.
7100 These are often the same as the loop header and
7101 loop latch respectively, but this is not always
7104 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7106 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7108 /* Find edges which exit the loop. Note that a node
7109 may have several exit edges. */
7111 = flow_loop_exits_find (loop->nodes, &loop->exits);
7113 /* Look to see if the loop has a pre-header node. */
7115 = flow_loop_pre_header_find (header, dom);
7122 /* Natural loops with shared headers may either be disjoint or
7123 nested. Disjoint loops with shared headers cannot be inner
7124 loops and should be merged. For now just mark loops that share
7126 for (i = 0; i < num_loops; i++)
7127 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7128 loops->array[i].shared = 1;
7130 sbitmap_free (headers);
7133 loops->num = num_loops;
7135 /* Save CFG derived information to avoid recomputing it. */
/* Ownership of DOM and DFS_ORDER transfers to LOOPS; they are released
   later by flow_loops_free.  */
7136 loops->cfg.dom = dom;
7137 loops->cfg.dfs_order = dfs_order;
7139 /* Build the loop hierarchy tree. */
7140 flow_loops_tree_build (loops);
7142 /* Assign the loop nesting depth and enclosed loop level for each
7144 loops->levels = flow_loops_level_compute (loops);
7150 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7152 flow_loop_outside_edge_p (loop, e)
7153 const struct loop *loop;
/* Visible test: E must target the loop header; presumably a non-header
   destination aborts or returns 0 (the consequent line is not visible
   in this extraction).  */
7156 if (e->dest != loop->header)
/* An edge is "outside" when it comes from ENTRY or from a block not in
   the loop's node set.  */
7158 return (e->src == ENTRY_BLOCK_PTR)
7159 || ! TEST_BIT (loop->nodes, e->src->index);
7163 /* Clear LOG_LINKS fields of insns in a chain. */
7165 clear_log_links (insns)
7169 for (i = insns; i; i = NEXT_INSN (i))
7170 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')