/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.
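
   For illustration only (not part of the pass), a minimal sketch of
   walking that representation, using the BASIC_BLOCK accessor and the
   edge fields from basic-block.h; process_edge is a hypothetical
   callback standing in for whatever the walker wants to do:

	int i;
	edge e;

	for (i = 0; i < n_basic_blocks; i++)
	  {
	    basic_block bb = BASIC_BLOCK (i);

	    /* Visit every control flow successor of block BB.  */
	    for (e = bb->succ; e; e = e->succ_next)
	      process_edge (bb, e->dest);
	  }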

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
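
   As a minimal sketch (illustrative only, assuming the regset macros
   from basic-block.h such as REGNO_REG_SET_P), a pass could ask whether
   register REGNO carries a value into block BB like so:

	if (REGNO_REG_SET_P (BB->global_live_at_start, REGNO))
	  /* REGNO is live at the start of BB.  */ ;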

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
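
   A minimal sketch of that scan, for illustration only -- it assumes the
   regset macros and note accessors declared in basic-block.h and rtl.h
   (COPY_REG_SET, CLEAR_REGNO_REG_SET, SET_REGNO_REG_SET, find_reg_note,
   single_set), uses a scratch regset LIVE supplied by the caller, and
   ignores the many cases (multi-word registers, conditional execution,
   memory) that propagate_block itself must handle:

	rtx insn, note, set;

	COPY_REG_SET (live, bb->global_live_at_start);
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	      {
		/* Registers named in REG_DEAD notes die at this insn.  */
		for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
		  if (REG_NOTE_KIND (note) == REG_DEAD)
		    CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));

		/* A register set by the insn becomes live, unless a
		   REG_UNUSED note says the value is never needed.  */
		if ((set = single_set (insn)) != NULL
		    && GET_CODE (SET_DEST (set)) == REG
		    && ! find_reg_note (insn, REG_UNUSED, SET_DEST (set)))
		  SET_REGNO_REG_SET (live, REGNO (SET_DEST (set)));
	      }
	    if (insn == bb->end)
	      break;
	  }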

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
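
   A consumer can recover the auto-modified registers from those notes;
   for illustration (mark_reg_modified is a hypothetical callback):

	rtx note;

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_INC)
	    /* XEXP (note, 0) is a register changed by a PRE_INC, PRE_DEC,
	       POST_INC or POST_DEC address within INSN.  */
	    mark_reg_modified (XEXP (note, 0));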

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */

/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
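
/* The global property computation listed above is the usual backward
   liveness problem.  Sketched in regset terms (notation only; the
   routine that actually implements it here is calculate_global_regs_live,
   iterating over the CFG with a worklist):

	live_at_end (bb)   = union over every successor S of bb
			     of live_at_start (S)
	live_at_start (bb) = local_live (bb)
			     | (live_at_end (bb) & ~local_set (bb))

   repeated until no live_at_start set changes.  */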

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"
#include "obstack.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
  = {{
       NULL,			/* local_set */
       NULL,			/* global_live_at_start */
       NULL,			/* global_live_at_end */
       ENTRY_BLOCK,		/* index */
       -1, -1			/* eh_beg, eh_end */
     },
     {
       NULL,			/* local_set */
       NULL,			/* global_live_at_start */
       NULL,			/* global_live_at_end */
       EXIT_BLOCK,		/* index */
       -1, -1			/* eh_beg, eh_end */
     }};

/* Nonzero if the second flow pass has completed.  */

int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information.  */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */

rtx regs_may_share;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;

/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */

  /* Bit N is set if register N is conditionally or unconditionally live.  */

  /* Bit N is set if register N is unconditionally dead this insn.  */

  /* Bit N is set if register N is live this insn.  */

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */

  /* If non-null, record the set of registers set in the basic block.  */

  /* Non-zero if the value of CC0 is live.  */

  /* Flags controlling the set of information propagate_block collects.  */
};

/* Forward declarations */
static int count_basic_blocks PARAMS ((rtx));
static rtx find_basic_blocks_1 PARAMS ((rtx));
static void clear_edges PARAMS ((void));
static void make_edges PARAMS ((rtx));
static void make_label_edge PARAMS ((sbitmap *, basic_block,
				     rtx, int));
static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
				  basic_block, rtx, int));
static void mark_critical_edges PARAMS ((void));
static void move_stray_eh_region_notes PARAMS ((void));
static void record_active_eh_regions PARAMS ((rtx));
static void commit_one_edge_insertion PARAMS ((edge));
static void delete_unreachable_blocks PARAMS ((void));
static void delete_eh_regions PARAMS ((void));
static int can_delete_note_p PARAMS ((rtx));
static void expunge_block PARAMS ((basic_block));
static int can_delete_label_p PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							 basic_block));
static int merge_blocks PARAMS ((edge, basic_block, basic_block));
static void try_merge_blocks PARAMS ((void));
static void tidy_fallthru_edges PARAMS ((void));
static int verify_wide_reg_1 PARAMS ((rtx *, void *));
static void verify_wide_reg PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start PARAMS ((regset, basic_block));
static int set_noop_p PARAMS ((rtx));
static int noop_move_p PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg PARAMS ((rtx, void *));
static void mark_regs_live_at_end PARAMS ((regset));
static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p PARAMS ((struct propagate_block_info *,
static int libcall_dead_p PARAMS ((struct propagate_block_info *,
static void mark_set_regs PARAMS ((struct propagate_block_info *,
static void mark_set_1 PARAMS ((struct propagate_block_info *,
				enum rtx_code, rtx, rtx,
static void find_auto_inc PARAMS ((struct propagate_block_info *,
static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
static void mark_used_reg PARAMS ((struct propagate_block_info *,
static void mark_used_regs PARAMS ((struct propagate_block_info *,
void dump_flow_info PARAMS ((FILE *));
void debug_flow_info PARAMS ((void));
static void dump_edge_info PARAMS ((FILE *, edge, int));
static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void remove_fake_successors PARAMS ((basic_block));
static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build PARAMS ((struct loops *));
static int flow_loop_level_compute PARAMS ((struct loop *, int));
static int flow_loops_level_compute PARAMS ((struct loops *));

/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of
   register numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();

  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else if (nonlocal_goto_handler_labels && region >= 0)
	    /* If there is a nonlocal goto label and the specified
	       region number isn't -1, we have an edge.  (0 means
	       no throw, but might have a nonlocal goto).  */
	    call_had_abnormal_edge = 1;
	}

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}

/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;
		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		break;
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	    int call_has_abnormal_edge = 0;

	    /* If there is an EH region or rethrow, we have an edge.  */
	    if ((eh_list && region > 0)
		|| find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	      call_has_abnormal_edge = 1;
	    else if (nonlocal_goto_handler_labels && region >= 0)
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      call_has_abnormal_edge = 1;

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if (call_has_abnormal_edge)
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);

  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}

/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;
}

/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note != NULL_RTX
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      if (GET_CODE (head) == CODE_LABEL)
	add_insn_after (bb_note, head);
      else
	add_insn_before (bb_note, head);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}

/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn = bb->head;

      while (1)
	{
	  int uid = INSN_UID (insn);

	  VARRAY_BB (basic_block_for_insn, uid) = bb;

	  if (insn == bb->end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}

/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge e, n;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}

/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an exit edge.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* Add any appropriate EH edges.  We do this unconditionally
	     since there may be a REG_EH_REGION or REG_EH_RETHROW note
	     on the call, and this needn't be within an EH region.  */
	  make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}

/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		     EDGE_ABNORMAL | EDGE_EH | is_call);
}

/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}

/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}

/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}

/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call split_edge.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns.  */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}

/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
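
/* A typical use of the routine above, sketched for illustration; the
   edge E, and the registers DEST_REG and SRC_REG, stand for whatever
   the calling pass wants to place on the edge:

	insert_insn_on_edge (gen_move_insn (dest_reg, src_reg), e);
	...
	commit_edge_insertions ();

   The insns stay queued on E until commit_edge_insertions either places
   them in an existing block or splits the edge and creates a new block
   to hold them.  */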

/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	before = bb->end;
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;
    }
  else
    {
      rtx last = emit_insns_after (insns, after);
      if (after == bb->end)
	bb->end = last;

      if (GET_CODE (last) == JUMP_INSN)
	{
	  if (returnjump_p (last))
	    {
	      /* ??? Remove all outgoing edges from BB and add one
		 for EXIT.  This is not currently a problem because
		 this only happens for the (single) epilogue, which
		 already has a fallthru edge to EXIT.  */

	      e = bb->succ;
	      if (e->dest != EXIT_BLOCK_PTR
		  || e->succ_next != NULL
		  || (e->flags & EDGE_FALLTHRU) == 0)
		abort ();
	      e->flags &= ~EDGE_FALLTHRU;

	      emit_barrier_after (last);
	    }
	  else
	    abort ();
	}
    }
}

/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}

/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in flow_delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= flow_delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}

/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTEs.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

int
flow_delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions.  */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}

      /* This label may be referenced by code solely for its value, or
	 referenced by static data, or something.  We have determined
	 that it is not reachable, but cannot delete the label itself.
	 Save code space and continue to delete the balance of the block,
	 along with properly updating the cfg.  */
      if (!can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     of insns.  */
	  if (insn == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (GET_CODE (end) == JUMP_INSN
      && (tmp = JUMP_LABEL (end)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  flow_delete_insn_chain (insn, end);

 no_delete_insns:

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e ; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e ; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}

/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      BASIC_BLOCK (i) = x;
      x->index = i;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}

/* Delete INSN by patching it out.  Return the next insn.  */

rtx
flow_delete_insn (insn)
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  rtx next = NEXT_INSN (insn);
  rtx note;

  PREV_INSN (insn) = NULL_RTX;
  NEXT_INSN (insn) = NULL_RTX;

  if (prev)
    NEXT_INSN (prev) = next;
  if (next)
    PREV_INSN (next) = prev;
  else
    set_last_insn (prev);

  if (GET_CODE (insn) == CODE_LABEL)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
    LABEL_NUSES (XEXP (note, 0))--;

  return next;
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  rtx x;

  if (LABEL_PRESERVE_P (label))
    return 0;

  for (x = forced_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = label_value_list; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;

  /* User declared labels must be preserved.  */
  if (LABEL_NAME (label) != 0)
    return 0;

  return 1;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous, hence `nomove'.  */

void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  edge e;
  rtx b_head, b_end, a_end;
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  b_head = b->head;
  b_end = b->end;
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;
      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note.  */
  if (GET_CODE (b_head) == NOTE
      && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_first)
	del_first = b_head;
      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  a_end = a->end;
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      prev = prev_nonnote_insn (a_end);
      if (!prev)
	prev = a->head;
      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (prev && sets_cc0_p (prev))
	{
	  rtx tmp = prev;
	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  del_first = tmp;
	}
#endif

      a_end = prev;
    }

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  flow_delete_insn_chain (del_first, del_last);

  /* Normally there should only be one successor of A and that is B, but
     partway through the merge of blocks for conditional_execution we'll
     be merging a TEST block with THEN and ELSE successors.  Free the
     whole lot of them and hope the caller knows what they're doing.  */
  while (a->succ)
    remove_edge (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e ; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;

  /* B hasn't quite yet ceased to exist.  Attempt to prevent mishap.  */
  b->pred = b->succ = NULL;

  /* Reassociate the insns of B with A.  */
  if (! b_empty)
    {
      if (basic_block_for_insn)
	{
	  BLOCK_FOR_INSN (b_head) = a;
	  while (b_head != b_end)
	    {
	      b_head = NEXT_INSN (b_head);
	      BLOCK_FOR_INSN (b_head) = a;
	    }
	}
      a_end = b_end;
    }
  a->end = a_end;

  expunge_block (b);
}

/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_predecessor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;
  int index;

  start = a->head;
  end = a->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  if (end != PREV_INSN (b->head))
    reorder_insns (start, end, PREV_INSN (b->head));

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
	       a->index, b->index);
    }

  /* Swap the records for the two blocks around.  Although we are deleting B,
     A is now where B was and we want to compact the BB array from where
     A used to be.  */
  BASIC_BLOCK (a->index) = b;
  BASIC_BLOCK (b->index) = a;
  index = a->index;
  a->index = b->index;
  b->index = index;

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  return 1;
}

/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_successor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;

  start = b->head;
  end = b->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  reorder_insns (start, end, a->end);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
	       b->index, a->index);
    }

  return 1;
}
2251 /* Attempt to merge basic blocks that are potentially non-adjacent.
2252 Return true iff the attempt succeeded. */
2255 merge_blocks (e, b, c)
2259 /* If B has a fallthru edge to C, no need to move anything. */
2260 if (e->flags & EDGE_FALLTHRU)
2262 /* If a label still appears somewhere and we cannot delete the label,
2263 then we cannot merge the blocks. The edge was tidied already. */
2265 rtx insn, stop = NEXT_INSN (c->head);
2266 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2267 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2270 merge_blocks_nomove (b, c);
2274 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2275 b->index, c->index);
2284 int c_has_outgoing_fallthru;
2285 int b_has_incoming_fallthru;
2287 /* We must make sure to not munge nesting of exception regions,
2288 lexical blocks, and loop notes.
2290 The first is taken care of by requiring that the active eh
2291 region at the end of one block always matches the active eh
2292 region at the beginning of the next block.
2294 The latter two are taken care of by squeezing out all the notes.  */
2296 /* ??? A throw/catch edge (or any abnormal edge) should rarely be
2297 executed, and we may want to treat blocks which have two out
2298 edges, one normal and one abnormal, as having only one edge for
2299 block-merging purposes.  */
2301 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2302 if (tmp_edge->flags & EDGE_FALLTHRU)
2304 c_has_outgoing_fallthru = (tmp_edge != NULL);
2306 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2307 if (tmp_edge->flags & EDGE_FALLTHRU)
2309 b_has_incoming_fallthru = (tmp_edge != NULL);
2311 /* If B does not have an incoming fallthru, and the exception regions
2312 match, then it can be moved immediately before C without introducing
     or modifying jumps.
2315 C can not be the first block, so we do not have to worry about
2316 accessing a non-existent block. */
2317 d = BASIC_BLOCK (c->index - 1);
2318 if (! b_has_incoming_fallthru
2319 && d->eh_end == b->eh_beg
2320 && b->eh_end == c->eh_beg)
2321 return merge_blocks_move_predecessor_nojumps (b, c);
2323 /* Otherwise, we're going to try to move C after B. Make sure the
2324 exception regions match.
2326 If B is the last basic block, then we must not try to access the
2327 block structure for block B + 1. Luckily in that case we do not
2328 need to worry about matching exception regions. */
2329 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2330 if (b->eh_end == c->eh_beg
2331 && (d == NULL || c->eh_end == d->eh_beg))
2333 /* If C does not have an outgoing fallthru, then it can be moved
2334 immediately after B without introducing or modifying jumps. */
2335 if (! c_has_outgoing_fallthru)
2336 return merge_blocks_move_successor_nojumps (b, c);
2338 /* Otherwise, we'll need to insert an extra jump, and possibly
2339 a new block to contain it. */
2340 /* ??? Not implemented yet. */
2347 /* Top level driver for merge_blocks. */
2354 /* Attempt to merge blocks as made possible by edge removal. If a block
2355 has only one successor, and the successor has only one predecessor,
2356 they may be combined. */
2358 for (i = 0; i < n_basic_blocks; )
2360 basic_block c, b = BASIC_BLOCK (i);
2363 /* A loop because chains of blocks might be combinable.  */
2364 while ((s = b->succ) != NULL
2365 && s->succ_next == NULL
2366 && (s->flags & EDGE_EH) == 0
2367 && (c = s->dest) != EXIT_BLOCK_PTR
2368 && c->pred->pred_next == NULL
2369 /* If the jump insn has side effects, we can't kill the edge. */
2370 && (GET_CODE (b->end) != JUMP_INSN
2371 || onlyjump_p (b->end))
2372 && merge_blocks (s, b, c))
2375 /* Don't get confused by the index shift caused by deleting blocks. */
2380 /* The given edge should potentially be a fallthru edge. If that is in
2381 fact true, delete the jump and barriers that are in the way. */
2384 tidy_fallthru_edge (e, b, c)
2390 /* ??? In a late-running flow pass, other folks may have deleted basic
2391 blocks by nopping out blocks, leaving multiple BARRIERs between here
2392 and the target label.  They ought to be chastised and fixed.
2394 We can also wind up with a sequence of undeletable labels between
2395 one block and the next.
2397 So search through a sequence of barriers, labels, and notes for
2398 the head of block C and assert that we really do fall through. */
2400 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2403 /* Remove what will soon cease being the jump insn from the source block.
2404 If block B consisted only of this single jump, turn it into a deleted
     note.  */
2407 if (GET_CODE (q) == JUMP_INSN
2408 && (simplejump_p (q)
2409 || (b->succ == e && e->succ_next == NULL)))
2412 /* If this was a conditional jump, we need to also delete
2413 the insn that set cc0. */
2414 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2421 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2422 NOTE_SOURCE_FILE (q) = 0;
2425 b->end = q = PREV_INSN (q);
2428 /* Selectively unlink the sequence. */
2429 if (q != PREV_INSN (c->head))
2430 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2432 e->flags |= EDGE_FALLTHRU;
2435 /* Fix up edges that now fall through, or rather should now fall through
2436 but previously required a jump around now deleted blocks. Simplify
2437 the search by only examining blocks numerically adjacent, since this
2438 is how find_basic_blocks created them. */
2441 tidy_fallthru_edges ()
2445 for (i = 1; i < n_basic_blocks; ++i)
2447 basic_block b = BASIC_BLOCK (i - 1);
2448 basic_block c = BASIC_BLOCK (i);
2451 /* We care about simple conditional or unconditional jumps with
     a single successor.
2454 If we had a conditional branch to the next instruction when
2455 find_basic_blocks was called, then there will only be one
2456 out edge for the block which ended with the conditional
2457 branch (since we do not create duplicate edges).
2459 Furthermore, the edge will be marked as a fallthru because we
2460 merge the flags for the duplicate edges. So we do not want to
2461 check that the edge is not a FALLTHRU edge. */
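      /* For illustration only (register and label numbers here are
	 arbitrary): a conditional jump whose target is the very next insn,

	     (jump_insn (set (pc) (if_then_else (eq (reg 60) (const_int 0))
						(label_ref L1)
						(pc))))
	     L1:

	 sends both arms to the same block, so B keeps a single outgoing
	 edge and tidy_fallthru_edge can delete the jump.  */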
2462 if ((s = b->succ) != NULL
2463 && s->succ_next == NULL
2465 /* If the jump insn has side effects, we can't tidy the edge. */
2466 && (GET_CODE (b->end) != JUMP_INSN
2467 || onlyjump_p (b->end)))
2468 tidy_fallthru_edge (s, b, c);
2472 /* Perform data flow analysis.
2473 F is the first insn of the function; FLAGS is a set of PROP_* flags
2474 to be used in accumulating flow info. */
2477 life_analysis (f, file, flags)
2482 #ifdef ELIMINABLE_REGS
2484 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2487 /* Record which registers will be eliminated. We use this in
2490 CLEAR_HARD_REG_SET (elim_reg_set);
2492 #ifdef ELIMINABLE_REGS
2493 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2494 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2496 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2500 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2502 /* The post-reload life analysis has (on a global basis) the same
2503 registers live as reload itself computed; otherwise elimination
2504 offsets and such may be incorrect.
2506 Reload will mark some registers as live even though they do not
2507 appear in the rtl.  */
2508 if (reload_completed)
2509 flags &= ~PROP_REG_INFO;
2511 /* We want alias analysis information for local dead store elimination. */
2512 if (flags & PROP_SCAN_DEAD_CODE)
2513 init_alias_analysis ();
2515 /* Always remove no-op moves. Do this before other processing so
2516 that we don't have to keep re-scanning them. */
2517 delete_noop_moves (f);
2519 /* Some targets can emit simpler epilogues if they know that sp was
2520 not ever modified during the function. After reload, of course,
2521 we've already emitted the epilogue so there's no sense searching. */
2522 if (! reload_completed)
2523 notice_stack_pointer_modification (f);
2525 /* Allocate and zero out data structures that will record the
2526 data from lifetime analysis. */
2527 allocate_reg_life_data ();
2528 allocate_bb_life_data ();
2530 /* Find the set of registers live on function exit. */
2531 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2533 /* "Update" life info from zero. It'd be nice to begin the
2534 relaxation with just the exit and noreturn blocks, but that set
2535 is not immediately handy. */
2537 if (flags & PROP_REG_INFO)
2538 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2539 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2542 if (flags & PROP_SCAN_DEAD_CODE)
2543 end_alias_analysis ();
2546 dump_flow_info (file);
2548 free_basic_block_vars (1);
2551 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2552 Search for REGNO. If found, abort if it is not wider than word_mode. */
2555 verify_wide_reg_1 (px, pregno)
2560 unsigned int regno = *(int *) pregno;
2562 if (GET_CODE (x) == REG && REGNO (x) == regno)
2564 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2571 /* A subroutine of verify_local_live_at_start. Search through insns
2572 between HEAD and END looking for register REGNO. */
2575 verify_wide_reg (regno, head, end)
2581 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
3582 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2586 head = NEXT_INSN (head);
2589 /* We didn't find the register at all. Something's way screwy. */
2593 /* A subroutine of update_life_info. Verify that there are no untoward
2594 changes in live_at_start during a local update. */
2597 verify_local_live_at_start (new_live_at_start, bb)
2598 regset new_live_at_start;
2601 if (reload_completed)
2603 /* After reload, there are no pseudos, nor subregs of multi-word
2604 registers. The regsets should exactly match. */
2605 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2612 /* Find the set of changed registers. */
2613 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2615 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2617 /* No registers should die. */
2618 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2620 /* Verify that the now-live register is wider than word_mode. */
2621 verify_wide_reg (i, bb->head, bb->end);
2626 /* Updates life information starting with the basic blocks set in BLOCKS.
2627 If BLOCKS is null, consider it to be the universal set.
2629 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
2630 we are only expecting local modifications to basic blocks. If we find
2631 extra registers live at the beginning of a block, then we either killed
2632 useful data, or we have a broken split that wants data not provided.
2633 If we find registers removed from live_at_start, that means we have
2634 a broken peephole that is killing a register it shouldn't.
2636 ??? This is not true in one situation -- when a pre-reload splitter
2637 generates subregs of a multi-word pseudo, current life analysis will
2638 lose the kill. So we _can_ have a pseudo go live. How irritating.
2640 Including PROP_REG_INFO does not properly refresh regs_ever_live
2641 unless the caller resets it to zero. */
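/* A rough illustration of the splitter caveat above (the pseudo numbers
   and modes are arbitrary): splitting

	(set (reg:DI 70) (reg:DI 71))

   into two word-sized pieces

	(set (subreg:SI (reg:DI 70) 0) (subreg:SI (reg:DI 71) 0))
	(set (subreg:SI (reg:DI 70) 1) (subreg:SI (reg:DI 71) 1))

   leaves no single insn that kills all of pseudo 70, so a local update
   can miss the kill and the pseudo may appear to go live.  */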
2644 update_life_info (blocks, extent, prop_flags)
2646 enum update_life_extent extent;
2650 regset_head tmp_head;
2653 tmp = INITIALIZE_REG_SET (tmp_head);
2655 /* For a global update, we go through the relaxation process again. */
2656 if (extent != UPDATE_LIFE_LOCAL)
2658 calculate_global_regs_live (blocks, blocks,
2659 prop_flags & PROP_SCAN_DEAD_CODE);
2661 /* If asked, remove notes from the blocks we'll update. */
2662 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2663 count_or_remove_death_notes (blocks, 1);
2668 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2670 basic_block bb = BASIC_BLOCK (i);
2672 COPY_REG_SET (tmp, bb->global_live_at_end);
2673 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2675 if (extent == UPDATE_LIFE_LOCAL)
2676 verify_local_live_at_start (tmp, bb);
2681 for (i = n_basic_blocks - 1; i >= 0; --i)
2683 basic_block bb = BASIC_BLOCK (i);
2685 COPY_REG_SET (tmp, bb->global_live_at_end);
2686 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2688 if (extent == UPDATE_LIFE_LOCAL)
2689 verify_local_live_at_start (tmp, bb);
2695 if (prop_flags & PROP_REG_INFO)
2697 /* The only pseudos that are live at the beginning of the function
2698 are those that were not set anywhere in the function. local-alloc
2699 doesn't know how to handle these correctly, so mark them as not
2700 local to any one basic block. */
2701 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2702 FIRST_PSEUDO_REGISTER, i,
2703 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2705 /* We have a problem with any pseudoreg that lives across the setjmp.
2706 ANSI says that if a user variable does not change in value between
2707 the setjmp and the longjmp, then the longjmp preserves it. This
2708 includes longjmp from a place where the pseudo appears dead.
2709 (In principle, the value still exists if it is in scope.)
2710 If the pseudo goes in a hard reg, some other value may occupy
2711 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2712 Conclusion: such a pseudo must not go in a hard reg. */
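      /* A purely illustrative source-level case of the above:

	     int v = f ();
	     if (setjmp (buf) != 0)
	       return v;		-- V must survive the longjmp
	     g ();			-- does not mention V; may longjmp

	 Along the path through g, V appears dead, so a hard reg holding
	 it could be reused there; hence such pseudos are kept out of
	 hard regs altogether.  */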
2713 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2714 FIRST_PSEUDO_REGISTER, i,
2716 if (regno_reg_rtx[i] != 0)
2718 REG_LIVE_LENGTH (i) = -1;
2719 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2725 /* Free the variables allocated by find_basic_blocks.
2727 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2730 free_basic_block_vars (keep_head_end_p)
2731 int keep_head_end_p;
2733 if (basic_block_for_insn)
2735 VARRAY_FREE (basic_block_for_insn);
2736 basic_block_for_insn = NULL;
2739 if (! keep_head_end_p)
2742 VARRAY_FREE (basic_block_info);
2745 ENTRY_BLOCK_PTR->aux = NULL;
2746 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2747 EXIT_BLOCK_PTR->aux = NULL;
2748 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2752 /* Return nonzero if the destination of SET equals the source. */
2757 rtx src = SET_SRC (set);
2758 rtx dst = SET_DEST (set);
2760 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2762 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2764 src = SUBREG_REG (src);
2765 dst = SUBREG_REG (dst);
2768 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2769 && REGNO (src) == REGNO (dst));
2772 /* Return nonzero if an insn consists only of SETs, each of which only sets a
     value to itself.  */
2778 rtx pat = PATTERN (insn);
2780 /* Insns carrying these notes are useful later on. */
2781 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2784 if (GET_CODE (pat) == SET && set_noop_p (pat))
2787 if (GET_CODE (pat) == PARALLEL)
2790 /* If nothing but SETs of registers to themselves,
2791 this insn can also be deleted. */
2792 for (i = 0; i < XVECLEN (pat, 0); i++)
2794 rtx tem = XVECEXP (pat, 0, i);
2796 if (GET_CODE (tem) == USE
2797 || GET_CODE (tem) == CLOBBER)
2800 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2809 /* Delete any insns that copy a register to itself. */
2812 delete_noop_moves (f)
2816 for (insn = f; insn; insn = NEXT_INSN (insn))
2818 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2820 PUT_CODE (insn, NOTE);
2821 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2822 NOTE_SOURCE_FILE (insn) = 0;
2827 /* Determine if the stack pointer is constant over the life of the function.
2828 Only useful before prologues have been emitted. */
2831 notice_stack_pointer_modification_1 (x, pat, data)
2833 rtx pat ATTRIBUTE_UNUSED;
2834 void *data ATTRIBUTE_UNUSED;
2836 if (x == stack_pointer_rtx
2837 /* The stack pointer is only modified indirectly as the result
2838 of a push until later in flow. See the comments in rtl.texi
2839 regarding Embedded Side-Effects on Addresses. */
2840 || (GET_CODE (x) == MEM
2841 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2842 || GET_CODE (XEXP (x, 0)) == PRE_INC
2843 || GET_CODE (XEXP (x, 0)) == POST_DEC
2844 || GET_CODE (XEXP (x, 0)) == POST_INC)
2845 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2846 current_function_sp_is_unchanging = 0;
2850 notice_stack_pointer_modification (f)
2855 /* Assume that the stack pointer is unchanging if alloca hasn't
     been used.  */
2857 current_function_sp_is_unchanging = !current_function_calls_alloca;
2858 if (! current_function_sp_is_unchanging)
2861 for (insn = f; insn; insn = NEXT_INSN (insn))
2863 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2865 /* Check if insn modifies the stack pointer. */
2866 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2868 if (! current_function_sp_is_unchanging)
2874 /* Mark a register in SET. Hard registers in large modes get all
2875 of their component registers set as well. */
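/* For example (a hypothetical 32-bit target): marking a DImode value held
   in hard reg 4 sets bits 4 and 5 of SET, since HARD_REGNO_NREGS would
   return 2 for that register and mode.  */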
2877 mark_reg (reg, xset)
2881 regset set = (regset) xset;
2882 int regno = REGNO (reg);
2884 if (GET_MODE (reg) == BLKmode)
2887 SET_REGNO_REG_SET (set, regno);
2888 if (regno < FIRST_PSEUDO_REGISTER)
2890 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2892 SET_REGNO_REG_SET (set, regno + n);
2896 /* Mark those regs which are needed at the end of the function as live
2897 at the end of the last basic block. */
2899 mark_regs_live_at_end (set)
2904 /* If exiting needs the right stack value, consider the stack pointer
2905 live at the end of the function. */
2906 if ((HAVE_epilogue && reload_completed)
2907 || ! EXIT_IGNORE_STACK
2908 || (! FRAME_POINTER_REQUIRED
2909 && ! current_function_calls_alloca
2910 && flag_omit_frame_pointer)
2911 || current_function_sp_is_unchanging)
2913 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2916 /* Mark the frame pointer if needed at the end of the function. If
2917 we end up eliminating it, it will be removed from the live list
2918 of each basic block by reload. */
2920 if (! reload_completed || frame_pointer_needed)
2922 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2923 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2924 /* If they are different, also mark the hard frame pointer as live */
2925 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2929 #ifdef PIC_OFFSET_TABLE_REGNUM
2930 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2931 /* Many architectures have a GP register even without flag_pic.
2932 Assume the pic register is not in use, or will be handled by
2933 other means, if it is not fixed. */
2934 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2935 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2939 /* Mark all global registers, and all registers used by the epilogue
2940 as being live at the end of the function since they may be
2941 referenced by our caller. */
2942 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2944 #ifdef EPILOGUE_USES
2945 || EPILOGUE_USES (i)
2948 SET_REGNO_REG_SET (set, i);
2950 /* Mark all call-saved registers that we actually used.  */
2951 if (HAVE_epilogue && reload_completed)
2953 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2954 if (! call_used_regs[i] && regs_ever_live[i])
2955 SET_REGNO_REG_SET (set, i);
2958 /* Mark function return value. */
2959 diddle_return_value (mark_reg, set);
2962 /* Callback function for for_each_successor_phi. DATA is a regset.
2963 Sets the SRC_REGNO, the regno of the phi alternative for phi node
2964 INSN, in the regset. */
2967 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2968 rtx insn ATTRIBUTE_UNUSED;
2969 int dest_regno ATTRIBUTE_UNUSED;
2973 regset live = (regset) data;
2974 SET_REGNO_REG_SET (live, src_regno);
2978 /* Propagate global life info around the graph of basic blocks. Begin
2979 considering blocks with their corresponding bit set in BLOCKS_IN.
2980 If BLOCKS_IN is null, consider it the universal set.
2982 BLOCKS_OUT is set for every block that was changed. */
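/* As a sketch of what the relaxation below computes (LOCAL_SET plays the
   role of the DEF set; the USE set is implicit in propagate_block):

	live_at_end (B)   = union of live_at_start (S) over successors S
	live_at_start (B) = USE (B) | (live_at_end (B) & ~DEF (B))

   iterated over the worklist until no live_at_start set changes.  */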
2985 calculate_global_regs_live (blocks_in, blocks_out, flags)
2986 sbitmap blocks_in, blocks_out;
2989 basic_block *queue, *qhead, *qtail, *qend;
2990 regset tmp, new_live_at_end;
2991 regset_head tmp_head;
2992 regset_head new_live_at_end_head;
2995 tmp = INITIALIZE_REG_SET (tmp_head);
2996 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
2998 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
2999 because the `head == tail' style test for an empty queue doesn't
3000 work with a full queue. */
3001 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3003 qhead = qend = queue + n_basic_blocks + 2;
3005 /* Clear out the garbage that might be hanging out in bb->aux. */
3006 for (i = n_basic_blocks - 1; i >= 0; --i)
3007 BASIC_BLOCK (i)->aux = NULL;
3009 /* Queue the blocks set in the initial mask. Do this in reverse block
3010 number order so that the first round is more likely to do
3011 useful work.  We use AUX non-null to flag that the block is queued.  */
3014 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3016 basic_block bb = BASIC_BLOCK (i);
3023 for (i = 0; i < n_basic_blocks; ++i)
3025 basic_block bb = BASIC_BLOCK (i);
3032 sbitmap_zero (blocks_out);
3034 while (qhead != qtail)
3036 int rescan, changed;
3045 /* Begin by propagating live_at_start from the successor blocks.  */
3046 CLEAR_REG_SET (new_live_at_end);
3047 for (e = bb->succ; e ; e = e->succ_next)
3049 basic_block sb = e->dest;
3050 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3053 /* Regs used in phi nodes are not included in
3054 global_live_at_start, since they are live only along a
3055 particular edge. Set those regs that are live because of a
3056 phi node alternative corresponding to this particular block. */
3057 for_each_successor_phi (bb, &set_phi_alternative_reg,
3060 if (bb == ENTRY_BLOCK_PTR)
3062 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3066 /* On our first pass through this block, we'll go ahead and continue.
3067 Recognize first pass by local_set NULL. On subsequent passes, we
3068 get to skip out early if live_at_end wouldn't have changed. */
3070 if (bb->local_set == NULL)
3072 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3077 /* If any bits were removed from live_at_end, we'll have to
3078 rescan the block. This wouldn't be necessary if we had
3079 precalculated local_live; however, with PROP_SCAN_DEAD_CODE
3080 local_live is really dependent on live_at_end.
3081 CLEAR_REG_SET (tmp);
3082 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3083 new_live_at_end, BITMAP_AND_COMPL);
3087 /* Find the set of changed bits.  Take this opportunity
3088 to notice when this set is empty and exit early.  */
3089 CLEAR_REG_SET (tmp);
3090 changed = bitmap_operation (tmp, bb->global_live_at_end,
3091 new_live_at_end, BITMAP_XOR);
3095 /* If any of the changed bits overlap with local_set,
3096 we'll have to rescan the block. Detect overlap by
3097 the AND with ~local_set turning off bits. */
3098 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3103 /* Let our caller know that BB changed enough to require its
3104 death notes updated. */
3106 SET_BIT (blocks_out, bb->index);
3110 /* Add to live_at_start the set of all registers in
3111 new_live_at_end that aren't in the old live_at_end. */
3113 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3115 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3117 changed = bitmap_operation (bb->global_live_at_start,
3118 bb->global_live_at_start,
3125 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3127 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3128 into live_at_start. */
3129 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3131 /* If live_at_start didn't change, no need to go farther.  */
3132 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3135 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3138 /* Queue all predecessors of BB so that we may re-examine
3139 their live_at_end. */
3140 for (e = bb->pred; e ; e = e->pred_next)
3142 basic_block pb = e->src;
3143 if (pb->aux == NULL)
3154 FREE_REG_SET (new_live_at_end);
3158 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3160 basic_block bb = BASIC_BLOCK (i);
3161 FREE_REG_SET (bb->local_set);
3166 for (i = n_basic_blocks - 1; i >= 0; --i)
3168 basic_block bb = BASIC_BLOCK (i);
3169 FREE_REG_SET (bb->local_set);
3176 /* Subroutines of life analysis. */
3178 /* Allocate the permanent data structures that represent the results
3179 of life analysis. Not static since used also for stupid life analysis. */
3182 allocate_bb_life_data ()
3186 for (i = 0; i < n_basic_blocks; i++)
3188 basic_block bb = BASIC_BLOCK (i);
3190 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3191 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3194 ENTRY_BLOCK_PTR->global_live_at_end
3195 = OBSTACK_ALLOC_REG_SET (function_obstack);
3196 EXIT_BLOCK_PTR->global_live_at_start
3197 = OBSTACK_ALLOC_REG_SET (function_obstack);
3199 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3203 allocate_reg_life_data ()
3207 max_regno = max_reg_num ();
3209 /* Recalculate the register space, in case it has grown. Old style
3210 vector oriented regsets would set regset_{size,bytes} here also. */
3211 allocate_reg_info (max_regno, FALSE, FALSE);
3213 /* Reset all the data we'll collect in propagate_block and its
     subroutines.  */
3215 for (i = 0; i < max_regno; i++)
3219 REG_N_DEATHS (i) = 0;
3220 REG_N_CALLS_CROSSED (i) = 0;
3221 REG_LIVE_LENGTH (i) = 0;
3222 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3226 /* Delete dead instructions for propagate_block. */
3229 propagate_block_delete_insn (bb, insn)
3233 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3235 /* If the insn referred to a label, and that label was attached to
3236 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3237 pretty much mandatory to delete it, because the ADDR_VEC may be
3238 referencing labels that no longer exist. */
3242 rtx label = XEXP (inote, 0);
3245 if (LABEL_NUSES (label) == 1
3246 && (next = next_nonnote_insn (label)) != NULL
3247 && GET_CODE (next) == JUMP_INSN
3248 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3249 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3251 rtx pat = PATTERN (next);
3252 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3253 int len = XVECLEN (pat, diff_vec_p);
3256 for (i = 0; i < len; i++)
3257 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3259 flow_delete_insn (next);
3263 if (bb->end == insn)
3264 bb->end = PREV_INSN (insn);
3265 flow_delete_insn (insn);
3268 /* Delete dead libcalls for propagate_block. Return the insn
3269 before the libcall. */
3272 propagate_block_delete_libcall (bb, insn, note)
3276 rtx first = XEXP (note, 0);
3277 rtx before = PREV_INSN (first);
3279 if (insn == bb->end)
3282 flow_delete_insn_chain (first, insn);
3286 /* Update the life-status of regs for one insn. Return the previous insn. */
3289 propagate_one_insn (pbi, insn)
3290 struct propagate_block_info *pbi;
3293 rtx prev = PREV_INSN (insn);
3294 int flags = pbi->flags;
3295 int insn_is_dead = 0;
3296 int libcall_is_dead = 0;
3300 if (! INSN_P (insn))
3303 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3304 if (flags & PROP_SCAN_DEAD_CODE)
3306 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3308 libcall_is_dead = (insn_is_dead && note != 0
3309 && libcall_dead_p (pbi, PATTERN (insn),
3313 /* We almost certainly don't want to delete prologue or epilogue
3314 instructions.  Warn about probable compiler lossage.  */
3317 && (((HAVE_epilogue || HAVE_prologue)
3318 && prologue_epilogue_contains (insn))
3319 || (HAVE_sibcall_epilogue
3320 && sibcall_epilogue_contains (insn))))
3322 if (flags & PROP_KILL_DEAD_CODE)
3324 warning ("ICE: would have deleted prologue/epilogue insn");
3325 if (!inhibit_warnings)
3328 libcall_is_dead = insn_is_dead = 0;
3331 /* If an instruction consists of just dead store(s) on final pass,
     delete it.  */
3333 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3335 if (libcall_is_dead)
3337 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3338 insn = NEXT_INSN (prev);
3341 propagate_block_delete_insn (pbi->bb, insn);
3343 /* CC0 is now known to be dead. Either this insn used it,
3344 in which case it doesn't anymore, or clobbered it,
3345 so the next insn can't use it. */
3351 /* See if this is an increment or decrement that can be merged into
3352 a following memory address. */
3355 register rtx x = single_set (insn);
3357 /* Does this instruction increment or decrement a register? */
3358 if (!reload_completed
3359 && (flags & PROP_AUTOINC)
3361 && GET_CODE (SET_DEST (x)) == REG
3362 && (GET_CODE (SET_SRC (x)) == PLUS
3363 || GET_CODE (SET_SRC (x)) == MINUS)
3364 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3365 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3366 /* Ok, look for a following memory ref we can combine with.
3367 If one is found, change the memory ref to a PRE_INC
3368 or PRE_DEC, cancel this insn, and return 1.
3369 Return 0 if nothing has been done. */
3370 && try_pre_increment_1 (pbi, insn))
3373 #endif /* AUTO_INC_DEC */
3375 CLEAR_REG_SET (pbi->new_live);
3376 CLEAR_REG_SET (pbi->new_dead);
3378 /* If this is not the final pass, and this insn is copying the value of
3379 a library call and it's dead, don't scan the insns that perform the
3380 library call, so that the call's arguments are not marked live. */
3381 if (libcall_is_dead)
3383 /* Record the death of the dest reg. */
3384 mark_set_regs (pbi, PATTERN (insn), insn);
3386 insn = XEXP (note, 0);
3387 return PREV_INSN (insn);
3389 else if (GET_CODE (PATTERN (insn)) == SET
3390 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3391 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3392 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3393 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3394 /* We have an insn to pop a constant amount off the stack.
3395 (Such insns use PLUS regardless of the direction of the stack,
3396 and any insn to adjust the stack by a constant is always a pop.)
3397 These insns, if not dead stores, have no effect on life. */
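    /* E.g. an insn of the rough form

	   (set (reg SP) (plus (reg SP) (const_int 8)))

       matches the test above: it merely adjusts the stack pointer by a
       constant and, unless it is itself a dead store, leaves register
       lifetimes untouched.  */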
3401 /* Any regs live at the time of a call instruction must not go
3402 in a register clobbered by calls. Find all regs now live and
3403 record this for them. */
3405 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3406 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3407 { REG_N_CALLS_CROSSED (i)++; });
3409 /* Record sets. Do this even for dead instructions, since they
3410 would have killed the values if they hadn't been deleted. */
3411 mark_set_regs (pbi, PATTERN (insn), insn);
3413 if (GET_CODE (insn) == CALL_INSN)
3419 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3420 cond = COND_EXEC_TEST (PATTERN (insn));
3422 /* Non-constant calls clobber memory. */
3423 if (! CONST_CALL_P (insn))
3424 free_EXPR_LIST_list (&pbi->mem_set_list);
3426 /* There may be extra registers to be clobbered. */
3427 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3429 note = XEXP (note, 1))
3430 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3431 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3432 cond, insn, pbi->flags);
3434 /* Calls change all call-used and global registers. */
3435 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3436 if (call_used_regs[i] && ! global_regs[i]
3439 /* We do not want REG_UNUSED notes for these registers. */
3440 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3441 cond, insn, pbi->flags & ~PROP_DEATH_NOTES);
3445 /* If an insn doesn't use CC0, it becomes dead since we assume
3446 that every insn clobbers it. So show it dead here;
3447 mark_used_regs will set it live if it is referenced. */
3452 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3454 /* Sometimes we may have inserted something before INSN (such as a move)
3455 when we make an auto-inc. So ensure we will scan those insns. */
3457 prev = PREV_INSN (insn);
3460 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3466 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3467 cond = COND_EXEC_TEST (PATTERN (insn));
3469 /* Calls use their arguments. */
3470 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3472 note = XEXP (note, 1))
3473 if (GET_CODE (XEXP (note, 0)) == USE)
3474 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3477 /* The stack ptr is used (honorarily) by a CALL insn. */
3478 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3480 /* Calls may also reference any of the global registers,
3481 so they are made live. */
3482 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3484 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3489 /* Update reg_live for the registers killed and used. */
3490 AND_COMPL_REG_SET (pbi->reg_live, pbi->new_dead);
3491 IOR_REG_SET (pbi->reg_live, pbi->new_live);
3493 /* On final pass, update counts of how many insns in which each reg
     is live.  */
3495 if (flags & PROP_REG_INFO)
3496 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3497 { REG_LIVE_LENGTH (i)++; });
3502 /* Initialize a propagate_block_info struct for public consumption.
3503 Note that the structure itself is opaque to this file, but that
3504 the user can use the regsets provided here. */
3506 struct propagate_block_info *
3507 init_propagate_block_info (bb, live, local_set, flags)
3513 struct propagate_block_info *pbi = xmalloc (sizeof(*pbi));
3516 pbi->reg_live = live;
3517 pbi->mem_set_list = NULL_RTX;
3518 pbi->local_set = local_set;
3522 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3523 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3525 pbi->reg_next_use = NULL;
3527 pbi->new_live = BITMAP_XMALLOC ();
3528 pbi->new_dead = BITMAP_XMALLOC ();
3533 /* Release a propagate_block_info struct. */
3536 free_propagate_block_info (pbi)
3537 struct propagate_block_info *pbi;
3539 free_EXPR_LIST_list (&pbi->mem_set_list);
3541 BITMAP_XFREE (pbi->new_live);
3542 BITMAP_XFREE (pbi->new_dead);
3544 if (pbi->reg_next_use)
3545 free (pbi->reg_next_use);
3550 /* Compute the registers live at the beginning of a basic block BB from
3551 those live at the end.
3553 When called, REG_LIVE contains those live at the end. On return, it
3554 contains those live at the beginning.
3556 LOCAL_SET, if non-null, will be set with all registers killed by
3557 this basic block. */
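/* Informally, the scan below applies, for each insn I from bb->end back
   to bb->head,

	LIVE = (LIVE - regs set by I) | regs used by I

   so that LIVE ends up describing the registers live before bb->head.  */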
3560 propagate_block (bb, live, local_set, flags)
3566 struct propagate_block_info *pbi;
3569 pbi = init_propagate_block_info (bb, live, local_set, flags);
3571 if (flags & PROP_REG_INFO)
3575 /* Process the regs live at the end of the block.
3576 Mark them as not local to any one basic block. */
3577 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3578 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3581 /* Scan the block an insn at a time from end to beginning. */
3583 for (insn = bb->end; ; insn = prev)
3585 /* If this is a call to `setjmp' et al, warn if any
3586 non-volatile datum is live. */
3587 if ((flags & PROP_REG_INFO)
3588 && GET_CODE (insn) == NOTE
3589 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3590 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3592 prev = propagate_one_insn (pbi, insn);
3594 if (insn == bb->head)
3598 free_propagate_block_info (pbi);
3601 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3602 (SET expressions whose destinations are registers dead after the insn).
3603 NEEDED is the regset that says which regs are alive after the insn.
3605 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3607 If X is the entire body of an insn, NOTES contains the reg notes
3608 pertaining to the insn. */
3611 insn_dead_p (pbi, x, call_ok, notes)
3612 struct propagate_block_info *pbi;
3615 rtx notes ATTRIBUTE_UNUSED;
3617 enum rtx_code code = GET_CODE (x);
3620 /* If flow is invoked after reload, we must take existing AUTO_INC
3621 expressions into account.  */
3622 if (reload_completed)
3624 for ( ; notes; notes = XEXP (notes, 1))
3626 if (REG_NOTE_KIND (notes) == REG_INC)
3628 int regno = REGNO (XEXP (notes, 0));
3630 /* Don't delete insns to set global regs. */
3631 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3632 || REGNO_REG_SET_P (pbi->reg_live, regno))
3639 /* If setting something that's a reg or part of one,
3640 see if that register's altered value will be live. */
3644 rtx r = SET_DEST (x);
3647 if (GET_CODE (r) == CC0)
3648 return ! pbi->cc0_live;
3651 /* A SET that is a subroutine call cannot be dead. */
3652 if (GET_CODE (SET_SRC (x)) == CALL)
3658 /* Don't eliminate loads from volatile memory or volatile asms. */
3659 else if (volatile_refs_p (SET_SRC (x)))
3662 if (GET_CODE (r) == MEM)
3666 if (MEM_VOLATILE_P (r))
3669 /* Walk the set of memory locations we are currently tracking
3670 and see if one is an identical match to this memory location.
3671 If so, this memory write is dead (remember, we're walking
3672 backwards from the end of the block to the start).  */
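	  /* For instance (pseudo numbers are arbitrary): given

		 (set (mem:SI (reg 65)) (reg 66))	<- candidate store
		 ... no reads of that location, reg 65 unchanged ...
		 (set (mem:SI (reg 65)) (reg 67))	<- already recorded

	     the later store to the identical address is on mem_set_list,
	     so the earlier store is dead and may be deleted.  */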
3673 temp = pbi->mem_set_list;
3676 if (rtx_equal_p (XEXP (temp, 0), r))
3678 temp = XEXP (temp, 1);
3683 while (GET_CODE (r) == SUBREG
3684 || GET_CODE (r) == STRICT_LOW_PART
3685 || GET_CODE (r) == ZERO_EXTRACT)
3688 if (GET_CODE (r) == REG)
3690 int regno = REGNO (r);
3693 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3696 /* If this is a hard register, verify that subsequent
3697 words are not needed. */
3698 if (regno < FIRST_PSEUDO_REGISTER)
3700 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3703 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3707 /* Don't delete insns to set global regs. */
3708 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3711 /* Make sure insns to set the stack pointer aren't deleted. */
3712 if (regno == STACK_POINTER_REGNUM)
3715 /* Make sure insns to set the frame pointer aren't deleted. */
3716 if (regno == FRAME_POINTER_REGNUM
3717 && (! reload_completed || frame_pointer_needed))
3719 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3720 if (regno == HARD_FRAME_POINTER_REGNUM
3721 && (! reload_completed || frame_pointer_needed))
3725 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3726 /* Make sure insns to set arg pointer are never deleted
3727 (if the arg pointer isn't fixed, there will be a USE
3728 for it, so we can treat it normally). */
3729 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3733 /* Otherwise, the set is dead. */
3739 /* If performing several activities, insn is dead if each activity
3740 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3741 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
     needed.  */
3743 else if (code == PARALLEL)
3745 int i = XVECLEN (x, 0);
3747 for (i--; i >= 0; i--)
3748 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3749 && GET_CODE (XVECEXP (x, 0, i)) != USE
3750 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3756 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3757 is not necessarily true for hard registers. */
3758 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3759 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3760 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3763 /* We do not check other CLOBBER or USE here. An insn consisting of just
3764 a CLOBBER or just a USE should not be deleted. */
3768 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3769 return 1 if the entire library call is dead.
3770 This is true if X copies a register (hard or pseudo)
3771 and if the hard return reg of the call insn is dead.
3772 (The caller should have tested the destination of X already for death.)
3774 If this insn doesn't just copy a register, then we don't
3775 have an ordinary libcall. In that case, cse could not have
3776 managed to substitute the source for the dest later on,
3777 so we can assume the libcall is dead.
3779 NEEDED is the bit vector of pseudoregs live before this insn.
3780 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3783 libcall_dead_p (pbi, x, note, insn)
3784 struct propagate_block_info *pbi;
3789 register RTX_CODE code = GET_CODE (x);
3793 register rtx r = SET_SRC (x);
3794 if (GET_CODE (r) == REG)
3796 rtx call = XEXP (note, 0);
3800 /* Find the call insn. */
3801 while (call != insn && GET_CODE (call) != CALL_INSN)
3802 call = NEXT_INSN (call);
3804 /* If there is none, do nothing special,
3805 since ordinary death handling can understand these insns. */
3809 /* See if the hard reg holding the value is dead.
3810 If this is a PARALLEL, find the call within it. */
3811 call_pat = PATTERN (call);
3812 if (GET_CODE (call_pat) == PARALLEL)
3814 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3815 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3816 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3819 /* This may be a library call that is returning a value
3820 via invisible pointer. Do nothing special, since
3821 ordinary death handling can understand these insns. */
3825 call_pat = XVECEXP (call_pat, 0, i);
3828 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3834 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3835 live at function entry. Don't count global register variables, variables
3836 in registers that can be used for function arg passing, or variables in
3837 fixed hard registers. */
3840 regno_uninitialized (regno)
3843 if (n_basic_blocks == 0
3844 || (regno < FIRST_PSEUDO_REGISTER
3845 && (global_regs[regno]
3846 || fixed_regs[regno]
3847 || FUNCTION_ARG_REGNO_P (regno))))
3850 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3853 /* 1 if register REGNO was alive at a place where `setjmp' was called
3854 and was set more than once or is an argument.
3855 Such regs may be clobbered by `longjmp'. */
3858 regno_clobbered_at_setjmp (regno)
3861 if (n_basic_blocks == 0)
3864 return ((REG_N_SETS (regno) > 1
3865 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3866 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3869 /* INSN references memory, possibly using autoincrement addressing modes.
3870 Find any entries on the mem_set_list that need to be invalidated due
3871 to an address change. */
3873 invalidate_mems_from_autoinc (pbi, insn)
3874 struct propagate_block_info *pbi;
3877 rtx note = REG_NOTES (insn);
3878 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3880 if (REG_NOTE_KIND (note) == REG_INC)
3882 rtx temp = pbi->mem_set_list;
3883 rtx prev = NULL_RTX;
3888 next = XEXP (temp, 1);
3889 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3891 /* Splice temp out of list. */
3893 XEXP (prev, 1) = next;
3895 pbi->mem_set_list = next;
3896 free_EXPR_LIST_node (temp);
3906 /* Process the registers that are set within X. Their bits are set to
3907 1 in the regset DEAD, because they are dead prior to this insn.
3909 If INSN is nonzero, it is the insn being processed.
3911 FLAGS is the set of operations to perform. */
3914 mark_set_regs (pbi, x, insn)
3915 struct propagate_block_info *pbi;
3918 rtx cond = NULL_RTX;
3922 switch (code = GET_CODE (x))
3926 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
3930 cond = COND_EXEC_TEST (x);
3931 x = COND_EXEC_CODE (x);
3937 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3939 rtx sub = XVECEXP (x, 0, i);
3940 switch (code = GET_CODE (sub))
3943 if (cond != NULL_RTX)
3946 cond = COND_EXEC_TEST (sub);
3947 sub = COND_EXEC_CODE (sub);
3948 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
3954 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
3969 /* Process a single SET rtx, X. */
3972 mark_set_1 (pbi, code, reg, cond, insn, flags)
3973 struct propagate_block_info *pbi;
3975 rtx reg, cond, insn;
3978 int regno_first = -1, regno_last = -1;
3981 /* Some targets place small structures in registers for
3982 return values of functions. We have to detect this
3983 case specially here to get correct flow information. */
3984 if (GET_CODE (reg) == PARALLEL
3985 && GET_MODE (reg) == BLKmode)
3987 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3988 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
3992 /* Modifying just one hardware register of a multi-reg value or just a
3993 byte field of a register does not mean the value from before this insn
3994 is now dead. But it does mean liveness of that register at the end of
3995 the block is significant.
3997 Within mark_set_1, however, we treat it as if the register is indeed
3998 modified. mark_used_regs will, however, also treat this register as
3999 being used. Thus, we treat these insns as setting a new value for the
4000 register as a function of its old value.  This causes LOG_LINKS to be
4001 made appropriately and this will help combine.
4003 ??? This is all done incorrectly. We should not be setting bits in
4004 new_dead for these registers, since, as we just explained, they are
4005 not dead. We should be setting bits in local_set, and updating
4006 LOG_LINKS, but that is different. */
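  /* A hypothetical example of such a partial store:

	 (set (strict_low_part (subreg:QI (reg:SI 70) 0)) (reg:QI 71))

     writes only the low byte of pseudo 70; the other bytes keep their
     old value, so the insn both uses and sets reg 70.  */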
4008 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4009 || GET_CODE (reg) == SIGN_EXTRACT
4010 || GET_CODE (reg) == STRICT_LOW_PART)
4011 reg = XEXP (reg, 0);
4013 /* If this set is a MEM, then it kills any aliased writes.
4014 If this set is a REG, then it kills any MEMs which use the reg. */
4015 if (flags & PROP_SCAN_DEAD_CODE)
4017 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4019 rtx temp = pbi->mem_set_list;
4020 rtx prev = NULL_RTX;
4025 next = XEXP (temp, 1);
4026 if ((GET_CODE (reg) == MEM
4027 && output_dependence (XEXP (temp, 0), reg))
4028 || (GET_CODE (reg) == REG
4029 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4031 /* Splice this entry out of the list. */
4033 XEXP (prev, 1) = next;
4035 pbi->mem_set_list = next;
4036 free_EXPR_LIST_node (temp);
4044 /* If the memory reference had embedded side effects (autoincrement
4045 address modes), then we may need to kill some entries on the
     memory set list.  */
4047 if (insn && GET_CODE (reg) == MEM)
4048 invalidate_mems_from_autoinc (pbi, insn);
4050 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4051 /* We do not know the size of a BLKmode store, so we do not track
4052 them for redundant store elimination. */
4053 && GET_MODE (reg) != BLKmode
4054 /* There are no REG_INC notes for SP, so we can't assume we'll see
4055 everything that invalidates it. To be safe, don't eliminate any
4056 stores through SP; none of them should be redundant anyway.  */
4057 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4058 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4061 if (GET_CODE (reg) == REG
4062 && (regno_first = REGNO (reg),
4063 ! (regno_first == FRAME_POINTER_REGNUM
4064 && (! reload_completed || frame_pointer_needed)))
4065 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4066 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4067 && (! reload_completed || frame_pointer_needed))
4069 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4070 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4074 int some_was_live = 0, some_was_dead = 0;
4076 if (regno_first < FIRST_PSEUDO_REGISTER)
4077 regno_last = (regno_first
4078 + HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1);
4080 regno_last = regno_first;
4082 for (i = regno_first; i <= regno_last; ++i)
4084 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4086 SET_REGNO_REG_SET (pbi->local_set, i);
4088 some_was_live |= needed_regno;
4089 some_was_dead |= ! needed_regno;
4092 /* Additional data to record if this is the final pass. */
4093 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4094 | PROP_DEATH_NOTES | PROP_AUTOINC))
4097 register int blocknum = pbi->bb->index;
4100 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4102 y = pbi->reg_next_use[regno_first];
4104 /* The next use is no longer next, since a store intervenes. */
4105 for (i = regno_first; i <= regno_last; ++i)
4106 pbi->reg_next_use[i] = 0;
4109 if (flags & PROP_REG_INFO)
4111 for (i = regno_first; i <= regno_last; ++i)
4113 /* Count (weighted) references, stores, etc. This counts a
4114 register twice if it is modified, but that is correct. */
4115 REG_N_SETS (i) += 1;
4116 REG_N_REFS (i) += (optimize_size ? 1
4117 : pbi->bb->loop_depth + 1);
4119 /* The insns where a reg is live are normally counted
4120 elsewhere, but we want the count to include the insn
4121 where the reg is set, and the normal counting mechanism
4122 would not count it. */
4123 REG_LIVE_LENGTH (i) += 1;
4126 /* If this is a hard reg, record this function uses the reg. */
4127 if (regno_first < FIRST_PSEUDO_REGISTER)
4129 for (i = regno_first; i <= regno_last; i++)
4130 regs_ever_live[i] = 1;
4134 /* Keep track of which basic blocks each reg appears in. */
4135 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4136 REG_BASIC_BLOCK (regno_first) = blocknum;
4137 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4138 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4142 if (! some_was_dead)
4144 if (flags & PROP_LOG_LINKS)
4146 /* Make a logical link from the next following insn
4147 that uses this register, back to this insn.
4148 The following insns have already been processed.
4150 We don't build a LOG_LINK for hard registers contained
4151 in ASM_OPERANDs. If these registers get replaced,
4152 we might wind up changing the semantics of the insn,
4153 even if reload can make what appear to be valid
4154 assignments later. */
4155 if (y && (BLOCK_NUM (y) == blocknum)
4156 && (regno_first >= FIRST_PSEUDO_REGISTER
4157 || asm_noperands (PATTERN (y)) < 0))
4158 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4161 else if (! some_was_live)
4163 if (flags & PROP_REG_INFO)
4164 REG_N_DEATHS (regno_first) += 1;
4166 if (flags & PROP_DEATH_NOTES)
4168 /* Note that dead stores have already been deleted
4169 when possible. If we get here, we have found a
4170 dead store that cannot be eliminated (because the
4171 same insn does something useful). Indicate this
4172 by marking the reg being set as dying here. */
4174 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4179 if (flags & PROP_DEATH_NOTES)
4181 /* This is a case where we have a multi-word hard register
4182 and some, but not all, of the words of the register are
4183 needed in subsequent insns. Write REG_UNUSED notes
4184 for those parts that were not needed.  This case should
     be rare.  */
4187 for (i = regno_first; i <= regno_last; ++i)
4188 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4190 = alloc_EXPR_LIST (REG_UNUSED,
4191 gen_rtx_REG (reg_raw_mode[i], i),
4197 /* Mark the register as being dead. */
4199 /* The stack pointer is never dead. Well, not strictly true,
4200 but it's very difficult to tell from here. Hopefully
4201 combine_stack_adjustments will fix up the most egregious
     errors.  */
4203 && regno_first != STACK_POINTER_REGNUM)
4205 for (i = regno_first; i <= regno_last; ++i)
4206 SET_REGNO_REG_SET (pbi->new_dead, i);
4209 else if (GET_CODE (reg) == REG)
4211 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4212 pbi->reg_next_use[regno_first] = 0;
4215 /* If this is the last pass and this is a SCRATCH, show it will be dying
4216 here and count it. */
4217 else if (GET_CODE (reg) == SCRATCH)
4219 if (flags & PROP_DEATH_NOTES)
4221 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4227 /* X is a MEM found in INSN.  See if we can convert it into an auto-increment
     address.  */
4231 find_auto_inc (pbi, x, insn)
4232 struct propagate_block_info *pbi;
4236 rtx addr = XEXP (x, 0);
4237 HOST_WIDE_INT offset = 0;
4240 /* Here we detect use of an index register which might be good for
4241 postincrement, postdecrement, preincrement, or predecrement. */
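  /* The tests below pair the constant already folded into the address
     (OFFSET) with the increment amount to pick the addressing form,
     roughly as follows (SIZE is the width of the access):

	increment ==  SIZE, offset ==  0     -> POST_INC
	increment == -SIZE, offset ==  0     -> POST_DEC
	increment ==  SIZE, offset ==  SIZE  -> PRE_INC
	increment == -SIZE, offset == -SIZE  -> PRE_DEC  */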
4243 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4244 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4246 if (GET_CODE (addr) == REG)
4249 register int size = GET_MODE_SIZE (GET_MODE (x));
4252 int regno = REGNO (addr);
4254 /* Is the next use an increment that might make auto-increment? */
4255 if ((incr = pbi->reg_next_use[regno]) != 0
4256 && (set = single_set (incr)) != 0
4257 && GET_CODE (set) == SET
4258 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4259 /* Can't add side effects to jumps; if reg is spilled and
4260 reloaded, there's no way to store back the altered value. */
4261 && GET_CODE (insn) != JUMP_INSN
4262 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4263 && XEXP (y, 0) == addr
4264 && GET_CODE (XEXP (y, 1)) == CONST_INT
4265 && ((HAVE_POST_INCREMENT
4266 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4267 || (HAVE_POST_DECREMENT
4268 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4269 || (HAVE_PRE_INCREMENT
4270 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4271 || (HAVE_PRE_DECREMENT
4272 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4273 /* Make sure this reg appears only once in this insn. */
4274 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4275 use != 0 && use != (rtx) 1))
4277 rtx q = SET_DEST (set);
4278 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4279 ? (offset ? PRE_INC : POST_INC)
4280 : (offset ? PRE_DEC : POST_DEC));
4282 if (dead_or_set_p (incr, addr)
4283 /* Mustn't autoinc an eliminable register. */
4284 && (regno >= FIRST_PSEUDO_REGISTER
4285 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4287 /* This is the simple case. Try to make the auto-inc. If
4288 we can't, we are done. Otherwise, we will do any
4289 needed updates below. */
4290 if (! validate_change (insn, &XEXP (x, 0),
4291 gen_rtx_fmt_e (inc_code, Pmode, addr),
4295 else if (GET_CODE (q) == REG
4296 /* PREV_INSN used here to check the semi-open interval
     [insn, incr).  */
4298 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4299 /* We must also check for sets of q as q may be
4300 a call clobbered hard register and there may
4301 be a call between PREV_INSN (insn) and incr. */
4302 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4304 /* We have *p followed sometime later by q = p+size.
4305 Both p and q must be live afterward,
4306 and q is not used between INSN and its assignment.
4307 Change it to q = p, ...*q..., q = q+size.
4308 Then fall into the usual case. */
4312 emit_move_insn (q, addr);
4313 insns = get_insns ();
4316 if (basic_block_for_insn)
4317 for (temp = insns; temp; temp = NEXT_INSN (temp))
4318 set_block_for_insn (temp, pbi->bb);
4320 /* If we can't make the auto-inc, or can't make the
4321 replacement into Y, exit. There's no point in making
4322 the change below if we can't do the auto-inc and doing
4323 so is not correct in the pre-inc case. */
4325 validate_change (insn, &XEXP (x, 0),
4326 gen_rtx_fmt_e (inc_code, Pmode, q),
4328 validate_change (incr, &XEXP (y, 0), q, 1);
4329 if (! apply_change_group ())
4332 /* We now know we'll be doing this change, so emit the
4333 new insn(s) and do the updates. */
4334 emit_insns_before (insns, insn);
4336 if (pbi->bb->head == insn)
4337 pbi->bb->head = insns;
4339 /* INCR will become a NOTE and INSN won't contain a
4340 use of ADDR. If a use of ADDR was just placed in
4341 the insn before INSN, make that the next use.
4342 Otherwise, invalidate it. */
4343 if (GET_CODE (PREV_INSN (insn)) == INSN
4344 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4345 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4346 pbi->reg_next_use[regno] = PREV_INSN (insn);
4348 pbi->reg_next_use[regno] = 0;
4353 /* REGNO is now used in INCR which is below INSN, but it
4354 previously wasn't live here. If we don't mark it as
4355 live, we'll put a REG_DEAD note for it on this insn,
4356 which is incorrect. */
4357 SET_REGNO_REG_SET (pbi->reg_live, regno);
4359 /* If there are any calls between INSN and INCR, show
4360 that REGNO now crosses them. */
4361 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4362 if (GET_CODE (temp) == CALL_INSN)
4363 REG_N_CALLS_CROSSED (regno)++;
4368 /* If we haven't returned, it means we were able to make the
4369 auto-inc, so update the status. First, record that this insn
4370 has an implicit side effect. */
4373 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4375 /* Modify the old increment-insn to simply copy
4376 the already-incremented value of our register. */
4377 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4380 /* If that makes it a no-op (copying the register into itself) delete
4381 it so it won't appear to be a "use" and a "set" of this
4383 if (SET_DEST (set) == addr)
4385 /* If the original source was dead, it's dead now. */
4386 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4387 if (note && XEXP (note, 0) != addr)
4388 SET_REGNO_REG_SET (pbi->new_dead, REGNO (XEXP (note, 0)));
4390 PUT_CODE (incr, NOTE);
4391 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4392 NOTE_SOURCE_FILE (incr) = 0;
4395 if (regno >= FIRST_PSEUDO_REGISTER)
4397 /* Count an extra reference to the reg. When a reg is
4398 incremented, spilling it is worse, so we want to make
4399 that less likely. */
4400 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4402 /* Count the increment as a setting of the register,
4403 even though it isn't a SET in rtl. */
4404 REG_N_SETS (regno)++;
4409 #endif /* AUTO_INC_DEC */
4412 mark_used_reg (pbi, reg, cond, insn)
4413 struct propagate_block_info *pbi;
4415 rtx cond ATTRIBUTE_UNUSED;
4418 int regno = REGNO (reg);
4419 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4420 int some_was_dead = ! some_was_live;
4422 SET_REGNO_REG_SET (pbi->new_live, regno);
4424 /* A hard reg in a wide mode may really be multiple registers.
4425 If so, mark all of them just like the first. */
4426 if (regno < FIRST_PSEUDO_REGISTER)
4428 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4431 int regno_n = regno + n;
4432 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4434 SET_REGNO_REG_SET (pbi->new_live, regno_n);
4435 some_was_live |= needed_regno;
4436 some_was_dead |= ! needed_regno;
4440 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4442 /* Record where each reg is used, so when the reg is set we know
4443 the next insn that uses it. */
4444 pbi->reg_next_use[regno] = insn;
4447 if (pbi->flags & PROP_REG_INFO)
4449 if (regno < FIRST_PSEUDO_REGISTER)
4451 /* If this is a register we are going to try to eliminate,
4452 don't mark it live here. If we are successful in
4453 eliminating it, it need not be live unless it is used for
4454 pseudos, in which case it will have been set live when it
4455 was allocated to the pseudos. If the register will not
4456 be eliminated, reload will set it live at that point.
4458 Otherwise, record that this function uses this register. */
4459 /* ??? The PPC backend tries to "eliminate" on the pic
4460 register to itself. This should be fixed. In the
4461 meantime, hack around it. */
4463 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
4464 && (regno == FRAME_POINTER_REGNUM
4465 || regno == ARG_POINTER_REGNUM)))
4467 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4469 regs_ever_live[regno + --n] = 1;
4475 /* Keep track of which basic block each reg appears in. */
4477 register int blocknum = pbi->bb->index;
4478 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4479 REG_BASIC_BLOCK (regno) = blocknum;
4480 else if (REG_BASIC_BLOCK (regno) != blocknum)
4481 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4483 /* Count (weighted) number of uses of each reg. */
4484 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4488 /* Record and count the insns in which a reg dies. If it is used in
4489 this insn and was dead below the insn then it dies in this insn.
4490 If it was set in this insn, we do not make a REG_DEAD note;
4491 likewise if we already made such a note.
4493 ??? This could be done better. In new_dead we have a record of
4494 which registers are set or clobbered this insn (which in itself is
4495 slightly incorrect, see the commentary near strict_low_part in
4496 mark_set_1), which should be the set of registers that we do not
4497 wish to create death notes for under the above rule. Note that
4498 we have not yet processed the call-clobbered/call-used registers,
4499 and they do not quite follow the above rule, since we do want death
4500 notes for call-clobbered register arguments. Which begs the whole
4501 question of whether we should in fact have death notes for registers
4502 used and clobbered (but not set) in the same insn. The only useful
4503 thing we ought to be getting from dead_or_set_p is detection of
4504 duplicate death notes. */
4506 if ((pbi->flags & PROP_DEATH_NOTES)
4508 && ! dead_or_set_p (insn, reg))
4512 /* Check for the case where the register dying partially
4513 overlaps the register set by this insn. */
4514 if (regno < FIRST_PSEUDO_REGISTER
4515 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4517 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4519 some_was_live |= dead_or_set_regno_p (insn, regno + n);
4522 /* If none of the words in X is needed, make a REG_DEAD note.
4523 Otherwise, we must make partial REG_DEAD notes. */
4524 if (! some_was_live)
4527 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4528 REG_N_DEATHS (regno)++;
4532 /* Don't make a REG_DEAD note for a part of a register
4533 that is set in the insn. */
4535 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4536 for (; n >= regno; n--)
4537 if (!REGNO_REG_SET_P (pbi->reg_live, n)
4538 && ! dead_or_set_regno_p (insn, n))
4540 = alloc_EXPR_LIST (REG_DEAD,
4541 gen_rtx_REG (reg_raw_mode[n], n),
4547 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
4548 This is done assuming the registers needed from X are those that
4549 have 1-bits in PBI->REG_LIVE.
4551 INSN is the containing instruction. If INSN is dead, this function
4555 mark_used_regs (pbi, x, cond, insn)
4556 struct propagate_block_info *pbi;
4559 register RTX_CODE code;
4561 int flags = pbi->flags;
4564 code = GET_CODE (x);
4584 /* If we are clobbering a MEM, mark any registers inside the address
4586 if (GET_CODE (XEXP (x, 0)) == MEM)
4587 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
4591 /* Don't bother watching stores to mems if this is not the
4592 final pass. We'll not be deleting dead stores this round. */
4593 if (flags & PROP_SCAN_DEAD_CODE)
4595 /* Invalidate the data for the last MEM stored, but only if MEM is
4596 something that can be stored into. */
4597 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4598 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4599 ; /* needn't clear the memory set list */
4602 rtx temp = pbi->mem_set_list;
4603 rtx prev = NULL_RTX;
4608 next = XEXP (temp, 1);
4609 if (anti_dependence (XEXP (temp, 0), x))
4611 /* Splice temp out of the list. */
4613 XEXP (prev, 1) = next;
4615 pbi->mem_set_list = next;
4616 free_EXPR_LIST_node (temp);
4624 /* If the memory reference had embedded side effects (autoincrement
4625 address modes), then we may need to kill some entries on the memory set list. */
4628 invalidate_mems_from_autoinc (pbi, insn);
4632 if (flags & PROP_AUTOINC)
4633 find_auto_inc (pbi, x, insn);
4638 if (GET_CODE (SUBREG_REG (x)) == REG
4639 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4640 && (GET_MODE_SIZE (GET_MODE (x))
4641 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4642 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4644 /* While we're here, optimize this case. */
4646 if (GET_CODE (x) != REG)
4651 /* A register appearing anywhere other than being set => mark it as needed. */
4652 mark_used_reg (pbi, x, cond, insn);
4657 register rtx testreg = SET_DEST (x);
4660 /* If storing into MEM, don't show it as being used. But do
4661 show the address as being used. */
4662 if (GET_CODE (testreg) == MEM)
4665 if (flags & PROP_AUTOINC)
4666 find_auto_inc (pbi, testreg, insn);
4668 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
4669 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4673 /* Storing in STRICT_LOW_PART is like storing in a reg
4674 in that this SET might be dead, so ignore it in TESTREG,
4675 but in some other ways it is like using the reg.
4677 Storing in a SUBREG or a bit field is like storing the entire
4678 register in that if the register's value is not used
4679 then this SET is not needed. */
4680 while (GET_CODE (testreg) == STRICT_LOW_PART
4681 || GET_CODE (testreg) == ZERO_EXTRACT
4682 || GET_CODE (testreg) == SIGN_EXTRACT
4683 || GET_CODE (testreg) == SUBREG)
4685 if (GET_CODE (testreg) == SUBREG
4686 && GET_CODE (SUBREG_REG (testreg)) == REG
4687 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4688 && (GET_MODE_SIZE (GET_MODE (testreg))
4689 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4690 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4692 /* Modifying a single register in an alternate mode
4693 does not use any of the old value. But these other
4694 ways of storing in a register do use the old value. */
4695 if (GET_CODE (testreg) == SUBREG
4696 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4701 testreg = XEXP (testreg, 0);
4704 /* If this is a store into a register, recursively scan the
4705 value being stored. */
4707 if ((GET_CODE (testreg) == PARALLEL
4708 && GET_MODE (testreg) == BLKmode)
4709 || (GET_CODE (testreg) == REG
4710 && (regno = REGNO (testreg),
4711 ! (regno == FRAME_POINTER_REGNUM
4712 && (! reload_completed || frame_pointer_needed)))
4713 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4714 && ! (regno == HARD_FRAME_POINTER_REGNUM
4715 && (! reload_completed || frame_pointer_needed))
4717 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4718 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4723 mark_used_regs (pbi, SET_DEST (x), cond, insn);
4724 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4731 case UNSPEC_VOLATILE:
4735 /* Traditional and volatile asm instructions must be considered to use
4736 and clobber all hard registers, all pseudo-registers and all of
4737 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4739 Consider for instance a volatile asm that changes the fpu rounding
4740 mode. An insn should not be moved across this even if it only uses
4741 pseudo-regs because it might give an incorrectly rounded result.
4743 ?!? Unfortunately, marking all hard registers as live causes massive
4744 problems for the register allocator and marking all pseudos as live
4745 creates mountains of uninitialized variable warnings.
4747 So for now, just clear the memory set list and mark any regs
4748 we can find in ASM_OPERANDS as used. */
4749 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4750 free_EXPR_LIST_list (&pbi->mem_set_list);
4752 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4753 We cannot just fall through here since then we would be confused
4754 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
4755 a traditional asm, unlike its normal usage.
4756 if (code == ASM_OPERANDS)
4760 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4761 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
4767 if (cond != NULL_RTX)
4770 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
4772 cond = COND_EXEC_TEST (x);
4773 x = COND_EXEC_CODE (x);
4777 /* We _do_not_ want to scan operands of phi nodes. Operands of
4778 a phi function are evaluated only when control reaches this
4779 block along a particular edge. Therefore, regs that appear
4780 as arguments to phi should not be added to the global live at
4788 /* Recursively scan the operands of this expression. */
4791 register const char *fmt = GET_RTX_FORMAT (code);
4794 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4798 /* Tail recursive case: save a function call level. */
4804 mark_used_regs (pbi, XEXP (x, i), cond, insn);
4806 else if (fmt[i] == 'E')
4809 for (j = 0; j < XVECLEN (x, i); j++)
4810 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
4819 try_pre_increment_1 (pbi, insn)
4820 struct propagate_block_info *pbi;
4823 /* Find the next use of this reg. If in same basic block,
4824 make it do pre-increment or pre-decrement if appropriate. */
4825 rtx x = single_set (insn);
4826 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4827 * INTVAL (XEXP (SET_SRC (x), 1)));
4828 int regno = REGNO (SET_DEST (x));
4829 rtx y = pbi->reg_next_use[regno];
4831 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4832 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4833 mode would be better. */
4834 && ! dead_or_set_p (y, SET_DEST (x))
4835 && try_pre_increment (y, SET_DEST (x), amount))
4837 /* We have found a suitable auto-increment
4838 and already changed insn Y to do it.
4839 So flush this increment-instruction. */
4840 PUT_CODE (insn, NOTE);
4841 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4842 NOTE_SOURCE_FILE (insn) = 0;
4843 /* Count a reference to this reg for the increment
4844 insn we are deleting. When a reg is incremented,
4845 spilling it is worse, so we want to make that
4847 if (regno >= FIRST_PSEUDO_REGISTER)
4849 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4850 REG_N_SETS (regno)++;
4857 /* Try to change INSN so that it does pre-increment or pre-decrement
4858 addressing on register REG in order to add AMOUNT to REG.
4859 AMOUNT is negative for pre-decrement.
4860 Returns 1 if the change could be made.
4861 This checks all about the validity of the result of modifying INSN. */
4864 try_pre_increment (insn, reg, amount)
4866 HOST_WIDE_INT amount;
4870 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4871 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4873 /* Nonzero if we can try to make a post-increment or post-decrement.
4874 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4875 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4876 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4879 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4882 /* From the sign of increment, see which possibilities are conceivable
4883 on this target machine. */
4884 if (HAVE_PRE_INCREMENT && amount > 0)
4886 if (HAVE_POST_INCREMENT && amount > 0)
4889 if (HAVE_PRE_DECREMENT && amount < 0)
4891 if (HAVE_POST_DECREMENT && amount < 0)
4894 if (! (pre_ok || post_ok))
4897 /* It is not safe to add a side effect to a jump insn
4898 because if the incremented register is spilled and must be reloaded
4899 there would be no way to store the incremented value back in memory. */
4901 if (GET_CODE (insn) == JUMP_INSN)
4906 use = find_use_as_address (PATTERN (insn), reg, 0);
4907 if (post_ok && (use == 0 || use == (rtx) 1))
4909 use = find_use_as_address (PATTERN (insn), reg, -amount);
4913 if (use == 0 || use == (rtx) 1)
4916 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4919 /* See if this combination of instruction and addressing mode exists. */
4920 if (! validate_change (insn, &XEXP (use, 0),
4921 gen_rtx_fmt_e (amount > 0
4922 ? (do_post ? POST_INC : PRE_INC)
4923 : (do_post ? POST_DEC : PRE_DEC),
4927 /* Record that this insn now has an implicit side effect on X. */
4928 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4932 #endif /* AUTO_INC_DEC */
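/* A minimal, self-contained sketch (illustrative only, not part of the
   pass) of the first decision try_pre_increment makes above: from the
   sign of AMOUNT and the addressing forms a hypothetical target offers,
   decide whether any side-effect form is even worth trying.  The have_*
   parameters are stand-ins for the HAVE_PRE_INCREMENT et al. macros.  */

static int
example_autoinc_candidate_p (amount, have_pre_inc, have_post_inc,
                             have_pre_dec, have_post_dec)
     HOST_WIDE_INT amount;
     int have_pre_inc, have_post_inc, have_pre_dec, have_post_dec;
{
  int pre_ok = 0, post_ok = 0;

  if (amount > 0)
    {
      pre_ok = have_pre_inc;
      post_ok = have_post_inc;
    }
  else if (amount < 0)
    {
      pre_ok = have_pre_dec;
      post_ok = have_post_dec;
    }

  /* Nonzero means at least one form is conceivable; try_pre_increment
     then checks the actual insn with validate_change.  */
  return pre_ok || post_ok;
}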
4934 /* Find the place in the rtx X where REG is used as a memory address.
4935 Return the MEM rtx that so uses it.
4936 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4937 (plus REG (const_int PLUSCONST)).
4939 If such an address does not appear, return 0.
4940 If REG appears more than once, or is used other than in such an address,
4944 find_use_as_address (x, reg, plusconst)
4947 HOST_WIDE_INT plusconst;
4949 enum rtx_code code = GET_CODE (x);
4950 const char *fmt = GET_RTX_FORMAT (code);
4952 register rtx value = 0;
4955 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4958 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4959 && XEXP (XEXP (x, 0), 0) == reg
4960 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4961 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4964 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4966 /* If REG occurs inside a MEM used in a bit-field reference,
4967 that is unacceptable. */
4968 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4969 return (rtx) (HOST_WIDE_INT) 1;
4973 return (rtx) (HOST_WIDE_INT) 1;
4975 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4979 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4983 return (rtx) (HOST_WIDE_INT) 1;
4985 else if (fmt[i] == 'E')
4988 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4990 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4994 return (rtx) (HOST_WIDE_INT) 1;
5002 /* Write information about registers and basic blocks into FILE.
5003 This is part of making a debugging dump. */
5006 dump_regset (r, outf)
5013 fputs (" (nil)", outf);
5017 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5019 fprintf (outf, " %d", i);
5020 if (i < FIRST_PSEUDO_REGISTER)
5021 fprintf (outf, " [%s]",
5030 dump_regset (r, stderr);
5031 putc ('\n', stderr);
5035 dump_flow_info (file)
5039 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5041 fprintf (file, "%d registers.\n", max_regno);
5042 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5045 enum reg_class class, altclass;
5046 fprintf (file, "\nRegister %d used %d times across %d insns",
5047 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5048 if (REG_BASIC_BLOCK (i) >= 0)
5049 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5051 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5052 (REG_N_SETS (i) == 1) ? "" : "s");
5053 if (REG_USERVAR_P (regno_reg_rtx[i]))
5054 fprintf (file, "; user var");
5055 if (REG_N_DEATHS (i) != 1)
5056 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5057 if (REG_N_CALLS_CROSSED (i) == 1)
5058 fprintf (file, "; crosses 1 call");
5059 else if (REG_N_CALLS_CROSSED (i))
5060 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5061 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5062 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5063 class = reg_preferred_class (i);
5064 altclass = reg_alternate_class (i);
5065 if (class != GENERAL_REGS || altclass != ALL_REGS)
5067 if (altclass == ALL_REGS || class == ALL_REGS)
5068 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5069 else if (altclass == NO_REGS)
5070 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5072 fprintf (file, "; pref %s, else %s",
5073 reg_class_names[(int) class],
5074 reg_class_names[(int) altclass]);
5076 if (REGNO_POINTER_FLAG (i))
5077 fprintf (file, "; pointer");
5078 fprintf (file, ".\n");
5081 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5082 for (i = 0; i < n_basic_blocks; i++)
5084 register basic_block bb = BASIC_BLOCK (i);
5087 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5088 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5090 fprintf (file, "Predecessors: ");
5091 for (e = bb->pred; e ; e = e->pred_next)
5092 dump_edge_info (file, e, 0);
5094 fprintf (file, "\nSuccessors: ");
5095 for (e = bb->succ; e ; e = e->succ_next)
5096 dump_edge_info (file, e, 1);
5098 fprintf (file, "\nRegisters live at start:");
5099 dump_regset (bb->global_live_at_start, file);
5101 fprintf (file, "\nRegisters live at end:");
5102 dump_regset (bb->global_live_at_end, file);
5113 dump_flow_info (stderr);
5117 dump_edge_info (file, e, do_succ)
5122 basic_block side = (do_succ ? e->dest : e->src);
5124 if (side == ENTRY_BLOCK_PTR)
5125 fputs (" ENTRY", file);
5126 else if (side == EXIT_BLOCK_PTR)
5127 fputs (" EXIT", file);
5129 fprintf (file, " %d", side->index);
5133 static const char * const bitnames[] = {
5134 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5137 int i, flags = e->flags;
5141 for (i = 0; flags; i++)
5142 if (flags & (1 << i))
5148 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5149 fputs (bitnames[i], file);
5151 fprintf (file, "%d", i);
5159 /* Print out one basic block with live information at start and end. */
5169 fprintf (outf, ";; Basic block %d, loop depth %d",
5170 bb->index, bb->loop_depth);
5171 if (bb->eh_beg != -1 || bb->eh_end != -1)
5172 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5175 fputs (";; Predecessors: ", outf);
5176 for (e = bb->pred; e ; e = e->pred_next)
5177 dump_edge_info (outf, e, 0);
5180 fputs (";; Registers live at start:", outf);
5181 dump_regset (bb->global_live_at_start, outf);
5184 for (insn = bb->head, last = NEXT_INSN (bb->end);
5186 insn = NEXT_INSN (insn))
5187 print_rtl_single (outf, insn);
5189 fputs (";; Registers live at end:", outf);
5190 dump_regset (bb->global_live_at_end, outf);
5193 fputs (";; Successors: ", outf);
5194 for (e = bb->succ; e; e = e->succ_next)
5195 dump_edge_info (outf, e, 1);
5203 dump_bb (bb, stderr);
5210 dump_bb (BASIC_BLOCK(n), stderr);
5213 /* Like print_rtl, but also print out live information for the start of each
5217 print_rtl_with_bb (outf, rtx_first)
5221 register rtx tmp_rtx;
5224 fprintf (outf, "(nil)\n");
5228 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5229 int max_uid = get_max_uid ();
5230 basic_block *start = (basic_block *)
5231 xcalloc (max_uid, sizeof (basic_block));
5232 basic_block *end = (basic_block *)
5233 xcalloc (max_uid, sizeof (basic_block));
5234 enum bb_state *in_bb_p = (enum bb_state *)
5235 xcalloc (max_uid, sizeof (enum bb_state));
5237 for (i = n_basic_blocks - 1; i >= 0; i--)
5239 basic_block bb = BASIC_BLOCK (i);
5242 start[INSN_UID (bb->head)] = bb;
5243 end[INSN_UID (bb->end)] = bb;
5244 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5246 enum bb_state state = IN_MULTIPLE_BB;
5247 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5249 in_bb_p[INSN_UID(x)] = state;
5256 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5261 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5263 fprintf (outf, ";; Start of basic block %d, registers live:",
5265 dump_regset (bb->global_live_at_start, outf);
5269 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5270 && GET_CODE (tmp_rtx) != NOTE
5271 && GET_CODE (tmp_rtx) != BARRIER)
5272 fprintf (outf, ";; Insn is not within a basic block\n");
5273 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5274 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5276 did_output = print_rtl_single (outf, tmp_rtx);
5278 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5280 fprintf (outf, ";; End of basic block %d, registers live:\n",
5282 dump_regset (bb->global_live_at_end, outf);
5295 if (current_function_epilogue_delay_list != 0)
5297 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5298 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5299 tmp_rtx = XEXP (tmp_rtx, 1))
5300 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5304 /* Compute dominator relationships using new flow graph structures. */
5306 compute_flow_dominators (dominators, post_dominators)
5307 sbitmap *dominators;
5308 sbitmap *post_dominators;
5311 sbitmap *temp_bitmap;
5313 basic_block *worklist, *workend, *qin, *qout;
5316 /* Allocate a worklist array/queue. Entries are only added to the
5317 list if they were not already on the list. So the size is
5318 bounded by the number of basic blocks. */
5319 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5320 workend = &worklist[n_basic_blocks];
5322 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5323 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5327 /* The optimistic setting of dominators requires us to put every
5328 block on the work list initially. */
5329 qin = qout = worklist;
5330 for (bb = 0; bb < n_basic_blocks; bb++)
5332 *qin++ = BASIC_BLOCK (bb);
5333 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5335 qlen = n_basic_blocks;
5338 /* We want a maximal solution, so initially assume everything dominates
5340 sbitmap_vector_ones (dominators, n_basic_blocks);
5342 /* Mark successors of the entry block so we can identify them below. */
5343 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5344 e->dest->aux = ENTRY_BLOCK_PTR;
5346 /* Iterate until the worklist is empty. */
5349 /* Take the first entry off the worklist. */
5350 basic_block b = *qout++;
5351 if (qout >= workend)
5357 /* Compute the intersection of the dominators of all the
5360 If one of the predecessor blocks is the ENTRY block, then the
5361 intersection of the dominators of the predecessor blocks is
5362 defined as the null set. We can identify such blocks by the
5363 special value in the AUX field in the block structure. */
5364 if (b->aux == ENTRY_BLOCK_PTR)
5366 /* Do not clear the aux field for blocks which are
5367 successors of the ENTRY block. That way we never
5368 add them to the worklist again.
5370 The intersection of the dominators of the preds of this block is
5371 defined as the null set. */
5372 sbitmap_zero (temp_bitmap[bb]);
5376 /* Clear the aux field of this block so it can be added to
5377 the worklist again if necessary. */
5379 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5382 /* Make sure each block always dominates itself. */
5383 SET_BIT (temp_bitmap[bb], bb);
5385 /* If the out state of this block changed, then we need to
5386 add the successors of this block to the worklist if they
5387 are not already on the worklist. */
5388 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5390 for (e = b->succ; e; e = e->succ_next)
5392 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5406 if (post_dominators)
5408 /* The optimistic setting of dominators requires us to put every
5409 block on the work list initially. */
5410 qin = qout = worklist;
5411 for (bb = 0; bb < n_basic_blocks; bb++)
5413 *qin++ = BASIC_BLOCK (bb);
5414 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5416 qlen = n_basic_blocks;
5419 /* We want a maximal solution, so initially assume everything post
5420 dominates everything else. */
5421 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5423 /* Mark predecessors of the exit block so we can identify them below. */
5424 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5425 e->src->aux = EXIT_BLOCK_PTR;
5427 /* Iterate until the worklist is empty. */
5430 /* Take the first entry off the worklist. */
5431 basic_block b = *qout++;
5432 if (qout >= workend)
5438 /* Compute the intersection of the post dominators of all the
5441 If one of the successor blocks is the EXIT block, then the
5442 intersection of the dominators of the successor blocks is
5443 defined as the null set. We can identify such blocks by the
5444 special value in the AUX field in the block structure. */
5445 if (b->aux == EXIT_BLOCK_PTR)
5447 /* Do not clear the aux field for blocks which are
5448 predecessors of the EXIT block. That way we never
5449 add them to the worklist again.
5451 The intersection of the post dominators of the succs of this block is
5452 defined as the null set. */
5453 sbitmap_zero (temp_bitmap[bb]);
5457 /* Clear the aux field of this block so it can be added to
5458 the worklist again if necessary. */
5460 sbitmap_intersection_of_succs (temp_bitmap[bb],
5461 post_dominators, bb);
5464 /* Make sure each block always post dominates itself. */
5465 SET_BIT (temp_bitmap[bb], bb);
5467 /* If the out state of this block changed, then we need to
5468 add the predecessors of this block to the worklist if they
5469 are not already on the worklist. */
5470 if (sbitmap_a_and_b (post_dominators[bb],
5471 post_dominators[bb],
5474 for (e = b->pred; e; e = e->pred_next)
5476 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
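/* A self-contained sketch (illustrative only) of the dataflow being solved
   above, on a CFG small enough that one unsigned long bitmask per block can
   stand in for an sbitmap.  PREDS[i] is a bitmask of the predecessors of
   block i and block 0 plays the role of the entry block.  The real code
   above solves the same equations but drives them with a worklist instead
   of rescanning every block on every pass.  */

static void
example_dominators_sketch (preds, dom, n)
     unsigned long *preds;
     unsigned long *dom;
     int n;
{
  int i, changed;

  /* Optimistic (maximal) initial solution: everything dominates
     everything, except that the entry block is dominated only by itself.  */
  for (i = 0; i < n; i++)
    dom[i] = ~0UL;
  dom[0] = 1UL << 0;

  /* Iterate dom(b) = {b} union (intersection of dom(p) over preds p of b)
     until nothing changes.  */
  do
    {
      changed = 0;
      for (i = 1; i < n; i++)
        {
          unsigned long d = ~0UL;
          int p;

          for (p = 0; p < n; p++)
            if (preds[i] & (1UL << p))
              d &= dom[p];
          d |= 1UL << i;

          if (d != dom[i])
            dom[i] = d, changed = 1;
        }
    }
  while (changed);
}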
5494 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5497 compute_immediate_dominators (idom, dominators)
5499 sbitmap *dominators;
5504 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5506 /* Begin with tmp(n) = dom(n) - { n }. */
5507 for (b = n_basic_blocks; --b >= 0; )
5509 sbitmap_copy (tmp[b], dominators[b]);
5510 RESET_BIT (tmp[b], b);
5513 /* Subtract out all of our dominator's dominators. */
5514 for (b = n_basic_blocks; --b >= 0; )
5516 sbitmap tmp_b = tmp[b];
5519 for (s = n_basic_blocks; --s >= 0; )
5520 if (TEST_BIT (tmp_b, s))
5521 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5524 /* Find the one bit set in the bitmap and put it in the output array. */
5525 for (b = n_basic_blocks; --b >= 0; )
5528 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5531 sbitmap_vector_free (tmp);
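/* Illustrative companion to the routine above, using the same bitmask
   convention as the dominator sketch earlier: DOM[i] is a bitmask of the
   dominators of block i.  Starting from dom(b) - {b} and subtracting each
   remaining dominator's own strict dominators leaves exactly one bit,
   the immediate dominator of b.  */

static int
example_immediate_dominator_sketch (dom, n, b)
     unsigned long *dom;
     int n;
     int b;
{
  unsigned long tmp = dom[b] & ~(1UL << b);
  int s;

  for (s = 0; s < n; s++)
    if (tmp & (1UL << s))
      tmp &= ~(dom[s] & ~(1UL << s));

  for (s = 0; s < n; s++)
    if (tmp == (1UL << s))
      return s;

  /* No strict dominator is left; B is the entry block (or unreachable).  */
  return -1;
}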
5534 /* Recompute register set/reference counts immediately prior to register
5537 This avoids problems with set/reference counts changing to/from values
5538 which have special meanings to the register allocators.
5540 Additionally, the reference counts are the primary component used by the
5541 register allocators to prioritize pseudos for allocation to hard regs.
5542 More accurate reference counts generally lead to better register allocation.
5544 F is the first insn to be scanned.
5546 LOOP_STEP denotes how much loop_depth should be incremented per
5547 loop nesting level in order to increase the ref count more for
5548 references in a loop.
5550 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5551 possibly other information which is used by the register allocators. */
5554 recompute_reg_usage (f, loop_step)
5555 rtx f ATTRIBUTE_UNUSED;
5556 int loop_step ATTRIBUTE_UNUSED;
5558 allocate_reg_life_data ();
5559 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
5562 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5563 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5564 of the number of registers that died. */
5567 count_or_remove_death_notes (blocks, kill)
5573 for (i = n_basic_blocks - 1; i >= 0; --i)
5578 if (blocks && ! TEST_BIT (blocks, i))
5581 bb = BASIC_BLOCK (i);
5583 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5585 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5587 rtx *pprev = ®_NOTES (insn);
5592 switch (REG_NOTE_KIND (link))
5595 if (GET_CODE (XEXP (link, 0)) == REG)
5597 rtx reg = XEXP (link, 0);
5600 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5603 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5611 rtx next = XEXP (link, 1);
5612 free_EXPR_LIST_node (link);
5613 *pprev = link = next;
5619 pprev = &XEXP (link, 1);
5626 if (insn == bb->end)
5634 /* Record INSN's block as BB. */
5637 set_block_for_insn (insn, bb)
5641 size_t uid = INSN_UID (insn);
5642 if (uid >= basic_block_for_insn->num_elements)
5646 /* Add one-eighth the size so we don't keep calling xrealloc. */
5647 new_size = uid + (uid + 7) / 8;
5649 VARRAY_GROW (basic_block_for_insn, new_size);
5651 VARRAY_BB (basic_block_for_insn, uid) = bb;
5654 /* Record INSN's block number as BB. */
5655 /* ??? This has got to go. */
5658 set_block_num (insn, bb)
5662 set_block_for_insn (insn, BASIC_BLOCK (bb));
5665 /* Verify the CFG consistency. This function checks some CFG invariants and
5666 aborts when something is wrong. The hope is that it will help to
5667 convert many optimization passes so that they keep the CFG consistent.
5669 Currently it performs the following checks:
5671 - test head/end pointers
5672 - overlapping of basic blocks
5673 - edge list correctness
5674 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5675 - tails of basic blocks (ensure that boundary is necessary)
5676 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5677 and NOTE_INSN_BASIC_BLOCK
5678 - check that all insns are in the basic blocks
5679 (except the switch handling code, barriers and notes)
5680 - check that all returns are followed by barriers
5682 In the future it can be extended to check a lot of other things as well
5683 (reachability of basic blocks, life information, etc. etc.). */
5688 const int max_uid = get_max_uid ();
5689 const rtx rtx_first = get_insns ();
5690 basic_block *bb_info;
5694 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5696 /* First pass check head/end pointers and set bb_info array used by
5698 for (i = n_basic_blocks - 1; i >= 0; i--)
5700 basic_block bb = BASIC_BLOCK (i);
5702 /* Check the head pointer and make sure that it is pointing into
5704 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5709 error ("Head insn %d for block %d not found in the insn stream.",
5710 INSN_UID (bb->head), bb->index);
5714 /* Check the end pointer and make sure that it is pointing into
5716 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5718 if (bb_info[INSN_UID (x)] != NULL)
5720 error ("Insn %d is in multiple basic blocks (%d and %d)",
5721 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5724 bb_info[INSN_UID (x)] = bb;
5731 error ("End insn %d for block %d not found in the insn stream.",
5732 INSN_UID (bb->end), bb->index);
5737 /* Now check the basic blocks (boundaries etc.) */
5738 for (i = n_basic_blocks - 1; i >= 0; i--)
5740 basic_block bb = BASIC_BLOCK (i);
5741 /* Check correctness of edge lists. */
5749 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5751 fprintf (stderr, "Predecessor: ");
5752 dump_edge_info (stderr, e, 0);
5753 fprintf (stderr, "\nSuccessor: ");
5754 dump_edge_info (stderr, e, 1);
5758 if (e->dest != EXIT_BLOCK_PTR)
5760 edge e2 = e->dest->pred;
5761 while (e2 && e2 != e)
5765 error ("Basic block %i edge lists are corrupted", bb->index);
5777 error ("Basic block %d pred edge is corrupted", bb->index);
5778 fputs ("Predecessor: ", stderr);
5779 dump_edge_info (stderr, e, 0);
5780 fputs ("\nSuccessor: ", stderr);
5781 dump_edge_info (stderr, e, 1);
5782 fputc ('\n', stderr);
5785 if (e->src != ENTRY_BLOCK_PTR)
5787 edge e2 = e->src->succ;
5788 while (e2 && e2 != e)
5792 error ("Basic block %i edge lists are corrupted", bb->index);
5799 /* OK, pointers are correct. Now check the header of the basic
5800 block. It ought to contain an optional CODE_LABEL followed
5801 by a NOTE_INSN_BASIC_BLOCK note. */
5803 if (GET_CODE (x) == CODE_LABEL)
5807 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5813 if (GET_CODE (x) != NOTE
5814 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5815 || NOTE_BASIC_BLOCK (x) != bb)
5817 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5824 /* Do checks for empty blocks here */
5831 if (GET_CODE (x) == NOTE
5832 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5834 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5835 INSN_UID (x), bb->index);
5842 if (GET_CODE (x) == JUMP_INSN
5843 || GET_CODE (x) == CODE_LABEL
5844 || GET_CODE (x) == BARRIER)
5846 error ("In basic block %d:", bb->index);
5847 fatal_insn ("Flow control insn inside a basic block", x);
5858 if (!bb_info[INSN_UID (x)])
5860 switch (GET_CODE (x))
5867 /* An addr_vec is placed outside any basic block. */
5869 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5870 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5871 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5876 /* But in any case, non-deletable labels can appear anywhere. */
5880 fatal_insn ("Insn outside basic block", x);
5884 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
5885 && GET_CODE (x) == JUMP_INSN
5886 && returnjump_p (x) && ! condjump_p (x)
5887 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
5888 fatal_insn ("Return not followed by barrier", x);
5900 /* Functions to access an edge list with a vector representation.
5901 Enough data is kept such that given an index number, the
5902 pred and succ that edge represents can be determined, or
5903 given a pred and a succ, its index number can be returned.
5904 This allows algorithms which consume a lot of memory to
5905 represent the normally full matrix of edge (pred,succ) with a
5906 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
5907 wasted space in the client code due to sparse flow graphs. */
5909 /* This function initializes the edge list. Basically the entire
5910 flowgraph is processed, and all edges are assigned a number,
5911 and the data structure is filled in. */
5915 struct edge_list *elist;
5921 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
5925 /* Determine the number of edges in the flow graph by counting successor
5926 edges on each basic block. */
5927 for (x = 0; x < n_basic_blocks; x++)
5929 basic_block bb = BASIC_BLOCK (x);
5931 for (e = bb->succ; e; e = e->succ_next)
5934 /* Don't forget successors of the entry block. */
5935 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5938 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
5939 elist->num_blocks = block_count;
5940 elist->num_edges = num_edges;
5941 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
5945 /* Follow successors of the entry block, and register these edges. */
5946 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5948 elist->index_to_edge[num_edges] = e;
5952 for (x = 0; x < n_basic_blocks; x++)
5954 basic_block bb = BASIC_BLOCK (x);
5956 /* Follow all successors of blocks, and register these edges. */
5957 for (e = bb->succ; e; e = e->succ_next)
5959 elist->index_to_edge[num_edges] = e;
5966 /* This function frees the memory associated with an edge list. */
5968 free_edge_list (elist)
5969 struct edge_list *elist;
5973 free (elist->index_to_edge);
5978 /* This function provides debug output showing an edge list. */
5980 print_edge_list (f, elist)
5982 struct edge_list *elist;
5985 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
5986 elist->num_blocks - 2, elist->num_edges);
5988 for (x = 0; x < elist->num_edges; x++)
5990 fprintf (f, " %-4d - edge(", x);
5991 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
5992 fprintf (f,"entry,");
5994 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
5996 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
5997 fprintf (f,"exit)\n");
5999 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6003 /* This function provides an internal consistency check of an edge list,
6004 verifying that all edges are present, and that there are no
6007 verify_edge_list (f, elist)
6009 struct edge_list *elist;
6011 int x, pred, succ, index;
6014 for (x = 0; x < n_basic_blocks; x++)
6016 basic_block bb = BASIC_BLOCK (x);
6018 for (e = bb->succ; e; e = e->succ_next)
6020 pred = e->src->index;
6021 succ = e->dest->index;
6022 index = EDGE_INDEX (elist, e->src, e->dest);
6023 if (index == EDGE_INDEX_NO_EDGE)
6025 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6028 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6029 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6030 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6031 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6032 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6033 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6036 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6038 pred = e->src->index;
6039 succ = e->dest->index;
6040 index = EDGE_INDEX (elist, e->src, e->dest);
6041 if (index == EDGE_INDEX_NO_EDGE)
6043 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6046 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6047 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6048 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6049 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6050 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6051 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6053 /* We've verified that all the edges are in the list, now let's make sure
6054 there are no spurious edges in the list. */
6056 for (pred = 0 ; pred < n_basic_blocks; pred++)
6057 for (succ = 0 ; succ < n_basic_blocks; succ++)
6059 basic_block p = BASIC_BLOCK (pred);
6060 basic_block s = BASIC_BLOCK (succ);
6064 for (e = p->succ; e; e = e->succ_next)
6070 for (e = s->pred; e; e = e->pred_next)
6076 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6077 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6078 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6080 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6081 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6082 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6083 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6084 BASIC_BLOCK (succ)));
6086 for (succ = 0 ; succ < n_basic_blocks; succ++)
6088 basic_block p = ENTRY_BLOCK_PTR;
6089 basic_block s = BASIC_BLOCK (succ);
6093 for (e = p->succ; e; e = e->succ_next)
6099 for (e = s->pred; e; e = e->pred_next)
6105 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6106 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6107 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6109 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6110 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6111 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6112 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6113 BASIC_BLOCK (succ)));
6115 for (pred = 0 ; pred < n_basic_blocks; pred++)
6117 basic_block p = BASIC_BLOCK (pred);
6118 basic_block s = EXIT_BLOCK_PTR;
6122 for (e = p->succ; e; e = e->succ_next)
6128 for (e = s->pred; e; e = e->pred_next)
6134 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6135 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6136 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6138 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6139 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6140 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6141 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6146 /* This routine will determine what, if any, edge there is between
6147 a specified predecessor and successor. */
6150 find_edge_index (edge_list, pred, succ)
6151 struct edge_list *edge_list;
6152 basic_block pred, succ;
6155 for (x = 0; x < NUM_EDGES (edge_list); x++)
6157 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6158 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6161 return (EDGE_INDEX_NO_EDGE);
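/* A minimal usage sketch (hypothetical caller, not from this file): a pass
   that wants per-edge data can size a flat array by NUM_EDGES and address
   it through the edge indices, instead of keeping a full pred x succ
   matrix.  */

static void
example_edge_flags_sketch (elist)
     struct edge_list *elist;
{
  char *edge_flag = (char *) xcalloc (NUM_EDGES (elist), sizeof (char));
  int i;

  /* For instance, mark every edge that leaves the entry block.  */
  for (i = 0; i < NUM_EDGES (elist); i++)
    if (INDEX_EDGE_PRED_BB (elist, i) == ENTRY_BLOCK_PTR)
      edge_flag[i] = 1;

  free (edge_flag);
}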
6164 /* This function will remove an edge from the flow graph. */
6169 edge last_pred = NULL;
6170 edge last_succ = NULL;
6172 basic_block src, dest;
6175 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6181 last_succ->succ_next = e->succ_next;
6183 src->succ = e->succ_next;
6185 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6191 last_pred->pred_next = e->pred_next;
6193 dest->pred = e->pred_next;
6199 /* This routine will remove any fake successor edges for a basic block.
6200 When the edge is removed, it is also removed from whatever predecessor
6203 remove_fake_successors (bb)
6207 for (e = bb->succ; e ; )
6211 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6216 /* This routine will remove all fake edges from the flow graph. If
6217 we remove all fake successors, it will automatically remove all
6218 fake predecessors. */
6220 remove_fake_edges ()
6224 for (x = 0; x < n_basic_blocks; x++)
6225 remove_fake_successors (BASIC_BLOCK (x));
6227 /* We've handled all successors except the entry block's. */
6228 remove_fake_successors (ENTRY_BLOCK_PTR);
6231 /* This function will add a fake edge between any block which has no
6232 successors, and the exit block. Some data flow equations require these
6235 add_noreturn_fake_exit_edges ()
6239 for (x = 0; x < n_basic_blocks; x++)
6240 if (BASIC_BLOCK (x)->succ == NULL)
6241 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
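/* Typical usage pattern (illustrative, assumed caller): analyses whose
   equations require every block to reach the exit add the fake edges,
   run, and then strip them again so the CFG is left unchanged.  */

static void
example_fake_exit_usage_sketch ()
{
  add_noreturn_fake_exit_edges ();
  /* ... solve the dataflow problem here ...  */
  remove_fake_edges ();
}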
6244 /* Dump the list of basic blocks in the bitmap NODES. */
6246 flow_nodes_print (str, nodes, file)
6248 const sbitmap nodes;
6253 fprintf (file, "%s { ", str);
6254 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6255 fputs ("}\n", file);
6259 /* Dump the list of exiting edges in the array EDGES. */
6261 flow_exits_print (str, edges, num_edges, file)
6269 fprintf (file, "%s { ", str);
6270 for (i = 0; i < num_edges; i++)
6271 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6272 fputs ("}\n", file);
6276 /* Dump loop related CFG information. */
6278 flow_loops_cfg_dump (loops, file)
6279 const struct loops *loops;
6284 if (! loops->num || ! file || ! loops->cfg.dom)
6287 for (i = 0; i < n_basic_blocks; i++)
6291 fprintf (file, ";; %d succs { ", i);
6292 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6293 fprintf (file, "%d ", succ->dest->index);
6294 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6298 /* Dump the DFS node order. */
6299 if (loops->cfg.dfs_order)
6301 fputs (";; DFS order: ", file);
6302 for (i = 0; i < n_basic_blocks; i++)
6303 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6309 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6311 flow_loop_nested_p (outer, loop)
6315 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6319 /* Dump the loop information specified by LOOPS to the stream FILE. */
6321 flow_loops_dump (loops, file, verbose)
6322 const struct loops *loops;
6329 num_loops = loops->num;
6330 if (! num_loops || ! file)
6333 fprintf (file, ";; %d loops found, %d levels\n",
6334 num_loops, loops->levels);
6336 for (i = 0; i < num_loops; i++)
6338 struct loop *loop = &loops->array[i];
6340 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6341 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6342 loop->header->index, loop->latch->index,
6343 loop->pre_header ? loop->pre_header->index : -1,
6344 loop->depth, loop->level,
6345 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6346 fprintf (file, ";; %d", loop->num_nodes);
6347 flow_nodes_print (" nodes", loop->nodes, file);
6348 fprintf (file, ";; %d", loop->num_exits);
6349 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6355 for (j = 0; j < i; j++)
6357 struct loop *oloop = &loops->array[j];
6359 if (loop->header == oloop->header)
6364 smaller = loop->num_nodes < oloop->num_nodes;
6366 /* If the union of LOOP and OLOOP is different from
6367 the larger of LOOP and OLOOP, then LOOP and OLOOP
6368 must be disjoint. */
6369 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6370 smaller ? oloop : loop);
6371 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6372 loop->header->index, i, j,
6373 disjoint ? "disjoint" : "nested");
6380 /* Print diagnostics to compare our concept of a loop with
6381 what the loop notes say. */
6382 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6383 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6384 != NOTE_INSN_LOOP_BEG)
6385 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6386 INSN_UID (PREV_INSN (loop->first->head)));
6387 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6388 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6389 != NOTE_INSN_LOOP_END)
6390 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6391 INSN_UID (NEXT_INSN (loop->last->end)));
6396 flow_loops_cfg_dump (loops, file);
6400 /* Free all the memory allocated for LOOPS. */
6402 flow_loops_free (loops)
6403 struct loops *loops;
6412 /* Free the loop descriptors. */
6413 for (i = 0; i < loops->num; i++)
6415 struct loop *loop = &loops->array[i];
6418 sbitmap_free (loop->nodes);
6422 free (loops->array);
6423 loops->array = NULL;
6426 sbitmap_vector_free (loops->cfg.dom);
6427 if (loops->cfg.dfs_order)
6428 free (loops->cfg.dfs_order);
6430 sbitmap_free (loops->shared_headers);
6435 /* Find the exits from the loop using the bitmap of loop nodes NODES
6436 and store in EXITS array. Return the number of exits from the
6439 flow_loop_exits_find (nodes, exits)
6440 const sbitmap nodes;
6449 /* Check all nodes within the loop to see if there are any
6450 successors not in the loop. Note that a node may have multiple
6453 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6454 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6456 basic_block dest = e->dest;
6458 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6466 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6468 /* Store all exiting edges into an array. */
6470 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6471 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6473 basic_block dest = e->dest;
6475 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6476 (*exits)[num_exits++] = e;
6484 /* Find the nodes contained within the loop with header HEADER and
6485 latch LATCH and store in NODES. Return the number of nodes within
6488 flow_loop_nodes_find (header, latch, nodes)
6497 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6500 /* Start with only the loop header in the set of loop nodes. */
6501 sbitmap_zero (nodes);
6502 SET_BIT (nodes, header->index);
6504 header->loop_depth++;
6506 /* Push the loop latch on to the stack. */
6507 if (! TEST_BIT (nodes, latch->index))
6509 SET_BIT (nodes, latch->index);
6510 latch->loop_depth++;
6512 stack[sp++] = latch;
6521 for (e = node->pred; e; e = e->pred_next)
6523 basic_block ancestor = e->src;
6525 /* If this ancestor has not already been marked as part of the loop,
6526 add it to the set of loop nodes and push it onto the stack. */
6527 if (ancestor != ENTRY_BLOCK_PTR
6528 && ! TEST_BIT (nodes, ancestor->index))
6530 SET_BIT (nodes, ancestor->index);
6531 ancestor->loop_depth++;
6533 stack[sp++] = ancestor;
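/* A self-contained sketch (illustrative only, bitmask CFG as in the
   dominator sketches above, at most 32 blocks) of the backward walk
   performed by flow_loop_nodes_find: every block that can reach the LATCH
   without passing through the HEADER belongs to the natural loop.
   PREDS[i] is a bitmask of the predecessors of block i.  */

static unsigned long
example_loop_nodes_sketch (preds, n, header, latch)
     unsigned long *preds;
     int n;
     int header;
     int latch;
{
  unsigned long nodes = 1UL << header;
  int stack[32];
  int sp = 0;

  if (! (nodes & (1UL << latch)))
    {
      nodes |= 1UL << latch;
      stack[sp++] = latch;
    }

  while (sp > 0)
    {
      int node = stack[--sp];
      int p;

      for (p = 0; p < n; p++)
        if ((preds[node] & (1UL << p)) && ! (nodes & (1UL << p)))
          {
            /* The header is already in NODES, so the walk never
               propagates through it.  */
            nodes |= 1UL << p;
            stack[sp++] = p;
          }
    }

  return nodes;
}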
6542 /* Compute the depth first search order and store in the array
6543 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6544 number of nodes visited. */
6546 flow_depth_first_order_compute (dfs_order)
6555 /* Allocate stack for back-tracking up CFG. */
6556 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6559 /* Allocate bitmap to track nodes that have been visited. */
6560 visited = sbitmap_alloc (n_basic_blocks);
6562 /* None of the nodes in the CFG have been visited yet. */
6563 sbitmap_zero (visited);
6565 /* Start with the first successor edge from the entry block. */
6566 e = ENTRY_BLOCK_PTR->succ;
6569 basic_block src = e->src;
6570 basic_block dest = e->dest;
6572 /* Mark that we have visited this node. */
6573 if (src != ENTRY_BLOCK_PTR)
6574 SET_BIT (visited, src->index);
6576 /* If this node has not been visited before, push the current
6577 edge on to the stack and proceed with the first successor
6578 edge of this node. */
6579 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6587 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6590 /* DEST has no successors (for example, a non-returning
6591 function is called) so do not push the current edge
6592 but carry on with its next successor. */
6593 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6594 SET_BIT (visited, dest->index);
6597 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6599 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6601 /* Pop edge off stack. */
6609 sbitmap_free (visited);
6611 /* The number of nodes visited should not be greater than
6613 if (dfsnum > n_basic_blocks)
6616 /* There are some nodes left in the CFG that are unreachable. */
6617 if (dfsnum < n_basic_blocks)
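/* Illustrative recursive version (bitmask CFG, at most 32 blocks, as in
   the sketches above) of the numbering computed by
   flow_depth_first_order_compute: each block receives N minus its
   completion count, so blocks that finish late in the depth first search
   get small numbers and are visited early by callers that walk DFS_ORDER
   from 0 upwards.  SUCCS[i] is a bitmask of the successors of block i.  */

static void
example_dfs_number_sketch (succs, n, b, visited, counter, dfs_order)
     unsigned long *succs;
     int n;
     int b;
     unsigned long *visited;
     int *counter;
     int *dfs_order;
{
  int s;

  *visited |= 1UL << b;

  for (s = 0; s < n; s++)
    if ((succs[b] & (1UL << s)) && ! (*visited & (1UL << s)))
      example_dfs_number_sketch (succs, n, s, visited, counter, dfs_order);

  /* Number B as it finishes, mirroring the
     "dfs_order[...] = n_basic_blocks - ++dfsnum" assignments above.  */
  dfs_order[b] = n - ++*counter;
}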
6623 /* Return the block for the pre-header of the loop with header
6624 HEADER where DOM specifies the dominator information. Return NULL if
6625 there is no pre-header. */
6627 flow_loop_pre_header_find (header, dom)
6631 basic_block pre_header;
6634 /* If block p is a predecessor of the header and is the only block
6635 that the header does not dominate, then it is the pre-header. */
6637 for (e = header->pred; e; e = e->pred_next)
6639 basic_block node = e->src;
6641 if (node != ENTRY_BLOCK_PTR
6642 && ! TEST_BIT (dom[node->index], header->index))
6644 if (pre_header == NULL)
6648 /* There are multiple edges into the header from outside
6649 the loop so there is no pre-header block. */
6659 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6660 previously added. The insertion algorithm assumes that the loops
6661 are added in the order found by a depth first search of the CFG. */
6663 flow_loop_tree_node_add (prevloop, loop)
6664 struct loop *prevloop;
6668 if (flow_loop_nested_p (prevloop, loop))
6670 prevloop->inner = loop;
6671 loop->outer = prevloop;
6675 while (prevloop->outer)
6677 if (flow_loop_nested_p (prevloop->outer, loop))
6679 prevloop->next = loop;
6680 loop->outer = prevloop->outer;
6683 prevloop = prevloop->outer;
6686 prevloop->next = loop;
6691 /* Build the loop hierarchy tree for LOOPS. */
6693 flow_loops_tree_build (loops)
6694 struct loops *loops;
6699 num_loops = loops->num;
6703 /* Root the loop hierarchy tree with the first loop found.
6704 Since we used a depth first search this should be the
6706 loops->tree = &loops->array[0];
6707 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6709 /* Add the remaining loops to the tree. */
6710 for (i = 1; i < num_loops; i++)
6711 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6715 /* Helper function to compute loop nesting depth and enclosed loop level
6716 for the natural loop specified by LOOP at the loop depth DEPTH.
6717 Returns the loop level. */
6719 flow_loop_level_compute (loop, depth)
6729 /* Traverse loop tree assigning depth and computing level as the
6730 maximum level of all the inner loops of this loop. The loop
6731 level is equivalent to the height of the loop in the loop tree
6732 and corresponds to the number of enclosed loop levels (including
6734 for (inner = loop->inner; inner; inner = inner->next)
6738 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6743 loop->level = level;
6744 loop->depth = depth;
6749 /* Compute the loop nesting depth and enclosed loop level for the loop
6750 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
6754 flow_loops_level_compute (loops)
6755 struct loops *loops;
6761 /* Traverse all the outer level loops. */
6762 for (loop = loops->tree; loop; loop = loop->next)
6764 level = flow_loop_level_compute (loop, 1);
6772 /* Find all the natural loops in the function and save in LOOPS structure
6773 and recalculate loop_depth information in basic block structures.
6774 Return the number of natural loops found. */
6777 flow_loops_find (loops)
6778 struct loops *loops;
6789 loops->array = NULL;
6793 /* Taking care of this degenerate case makes the rest of
6794 this code simpler. */
6795 if (n_basic_blocks == 0)
6798 /* Compute the dominators. */
6799 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6800 compute_flow_dominators (dom, NULL);
6802 /* Count the number of loop edges (back edges). This should be the
6803 same as the number of natural loops. Also clear the loop_depth
6804 fields; as we work from inner to outer loops in a nest we call
6805 flow_loop_nodes_find, which will increment loop_depth for nodes
6806 within the current loop, which happens to enclose inner loops. */
6809 for (b = 0; b < n_basic_blocks; b++)
6811 BASIC_BLOCK (b)->loop_depth = 0;
6812 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6814 basic_block latch = e->src;
6816 /* Look for back edges where a predecessor is dominated
6817 by this block. A natural loop has a single entry
6818 node (header) that dominates all the nodes in the
6819 loop. It also has single back edge to the header
6820 from a latch node. Note that multiple natural loops
6821 may share the same header. */
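	     A minimal illustration (hypothetical block numbers): with
	     edges 0 -> 1, 1 -> 2 and 2 -> 1, block 1 dominates block 2,
	     so the edge 2 -> 1 is a back edge; block 1 is the loop
	     header and block 2 the latch.  That is exactly the TEST_BIT
	     check on the latch's dominator set made below.  */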
6822 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6829 /* Compute depth first search order of the CFG so that outer
6830 natural loops will be found before inner natural loops. */
6831 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6832 flow_depth_first_order_compute (dfs_order);
6834 /* Allocate loop structures. */
6836 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6838 headers = sbitmap_alloc (n_basic_blocks);
6839 sbitmap_zero (headers);
6841 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6842 sbitmap_zero (loops->shared_headers);
6844 /* Find and record information about all the natural loops
6847 for (b = 0; b < n_basic_blocks; b++)
6851 /* Search the nodes of the CFG in DFS order so that we can find
6852 outer loops first. */
6853 header = BASIC_BLOCK (dfs_order[b]);
6855 /* Look for all the possible latch blocks for this header. */
6856 for (e = header->pred; e; e = e->pred_next)
6858 basic_block latch = e->src;
6860 /* Look for back edges where a predecessor is dominated
6861 by this block. A natural loop has a single entry
6862 node (header) that dominates all the nodes in the
6863 loop. It also has single back edge to the header
6864 from a latch node. Note that multiple natural loops
6865 may share the same header. */
6866 if (latch != ENTRY_BLOCK_PTR
6867 && TEST_BIT (dom[latch->index], header->index))
6871 loop = loops->array + num_loops;
6873 loop->header = header;
6874 loop->latch = latch;
6876 /* Keep track of blocks that are loop headers so
6877 that we can tell which loops should be merged. */
6878 if (TEST_BIT (headers, header->index))
6879 SET_BIT (loops->shared_headers, header->index);
6880 SET_BIT (headers, header->index);
6882 /* Find nodes contained within the loop. */
6883 loop->nodes = sbitmap_alloc (n_basic_blocks);
6885 = flow_loop_nodes_find (header, latch, loop->nodes);
6887 /* Compute first and last blocks within the loop.
6888 These are often the same as the loop header and
6889 loop latch respectively, but this is not always
6892 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
6894 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
6896 /* Find edges which exit the loop. Note that a node
6897 may have several exit edges. */
6899 = flow_loop_exits_find (loop->nodes, &loop->exits);
6901 /* Look to see if the loop has a pre-header node. */
6903 = flow_loop_pre_header_find (header, dom);
6910 /* Natural loops with shared headers may either be disjoint or
6911 nested. Disjoint loops with shared headers cannot be inner
6912 loops and should be merged. For now just mark loops that share
6914 for (i = 0; i < num_loops; i++)
6915 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
6916 loops->array[i].shared = 1;
6918 sbitmap_free (headers);
6921 loops->num = num_loops;
6923 /* Save CFG derived information to avoid recomputing it. */
6924 loops->cfg.dom = dom;
6925 loops->cfg.dfs_order = dfs_order;
6927 /* Build the loop hierarchy tree. */
6928 flow_loops_tree_build (loops);
6930 /* Assign the loop nesting depth and enclosed loop level for each
6932 loops->levels = flow_loops_level_compute (loops);
6938 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
6940 flow_loop_outside_edge_p (loop, e)
6941 const struct loop *loop;
6944 if (e->dest != loop->header)
6946 return (e->src == ENTRY_BLOCK_PTR)
6947 || ! TEST_BIT (loop->nodes, e->src->index);
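/* A minimal usage sketch (hypothetical caller): counting how many edges
   enter LOOP from outside it, e.g. to decide whether a pre-header could
   be created, using the predicate defined above.  */

static int
example_count_loop_entries_sketch (loop)
     const struct loop *loop;
{
  edge e;
  int n = 0;

  for (e = loop->header->pred; e; e = e->pred_next)
    if (flow_loop_outside_edge_p (loop, e))
      n++;

  return n;
}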
6951 /* Clear LOG_LINKS fields of insns in a chain. */
6953 clear_log_links (insns)
6957 for (i = insns; i; i = NEXT_INSN (i))
6958 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')