1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
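For example (illustrative only), a function whose body is
"if (p) x = 1; else x = 2; return x;" is divided into four basic
blocks: the test, the two assignments, and the return. The edge
structures record test->then, test->else, then->return and
else->return, plus the edges from the entry block and to the exit
block.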
42 find_basic_blocks also finds any unreachable loops and deletes them.
44 ** life_analysis **

46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
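For example (illustrative numbers only), if the function uses 70 hard
and pseudo registers and the bit-vector is built from 32-bit words,
each block's vector is three words long; register 68 is live at the
start of a block exactly when bit 4 of word 2 of that block's vector
is set, since 68 = 2*32 + 4.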
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial REG_DEAD notes.
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
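For example (an illustrative fragment, not taken from any real function):

    insn 1:  r100 <- r101 + r102        ; r102 not needed afterward
             notes: REG_DEAD (r102)
    insn 2:  r103 <- call foo (r100)    ; r100 and r103 not needed afterward
             notes: REG_DEAD (r100), REG_UNUSED (r103)

Scanning forward from the start of the block, the registers live after
insn 1 include r100 and r101 but not r102; after insn 2, r101 is still
live while r100 and r103 are not.  Insn 2 is not deleted even though its
result is unused, because the call has side effects.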
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value that is never used.
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
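For example (illustrative only), on a target with post-increment
addressing the sequence

    insn 1:  a use of (mem (reg 3))
    insn 2:  (set (reg 3) (plus (reg 3) (const_int 4)))

is rewritten so that insn 1 uses (mem (post_inc (reg 3))); insn 2 is then
deleted and insn 1 receives a REG_INC note for register 3.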
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
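/* A toy, self-contained sketch of the backward dataflow iteration that
   underlies the global liveness computation described above (compare
   calculate_global_regs_live below).  Everything here -- toy_block,
   toy_compute_liveness, the single-word register set -- is hypothetical
   and only illustrates the equations

	live_in (b)  = use (b) | (live_out (b) & ~def (b))
	live_out (b) = union of live_in (s) over all successors s of b.  */

#define TOY_MAX_SUCCS 2

struct toy_block
{
  int succ[TOY_MAX_SUCCS];	/* successor indices, or -1 if unused */
  unsigned int use;		/* regs used before being set in the block */
  unsigned int def;		/* regs set in the block */
  unsigned int live_in;
  unsigned int live_out;
};

static void
toy_compute_liveness (blocks, n)
     struct toy_block *blocks;
     int n;
{
  int changed = 1;

  while (changed)
    {
      int i, j;

      changed = 0;
      /* Visit blocks in reverse order; liveness information flows
	 backward, so this converges quickly.  */
      for (i = n - 1; i >= 0; --i)
	{
	  unsigned int out = 0, in;

	  for (j = 0; j < TOY_MAX_SUCCS; ++j)
	    if (blocks[i].succ[j] >= 0)
	      out |= blocks[blocks[i].succ[j]].live_in;

	  in = blocks[i].use | (out & ~blocks[i].def);
	  if (in != blocks[i].live_in || out != blocks[i].live_out)
	    {
	      blocks[i].live_in = in;
	      blocks[i].live_out = out;
	      changed = 1;
	    }
	}
    }
}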
115 /* TODO: Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation */
127 #include "basic-block.h"
128 #include "insn-config.h"
130 #include "hard-reg-set.h"
133 #include "function.h"
137 #include "insn-flags.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
152 #endif
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
156 #endif
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
159 #endif
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
162 #endif
164 /* The contents of the current function definition are allocated
165 in this obstack, and all are freed at the end of the function.
166 For top-level functions, this is temporary_obstack.
167 Separate obstacks are made for nested functions. */
169 extern struct obstack *function_obstack;
171 /* Number of basic blocks in the current function. */
175 /* Number of edges in the current function. */
179 /* The basic block array. */
181 varray_type basic_block_info;
183 /* The special entry and exit blocks. */
185 struct basic_block_def entry_exit_blocks[2]
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
194 ENTRY_BLOCK, /* index */
196 -1, -1 /* eh_beg, eh_end */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
207 EXIT_BLOCK, /* index */
209 -1, -1 /* eh_beg, eh_end */
213 /* Nonzero if the second flow pass has completed. */
216 /* Maximum register number used in this function, plus one. */
220 /* Indexed by n, giving various register information */
222 varray_type reg_n_info;
224 /* Size of a regset for the current function,
225 in (1) bytes and (2) elements. */
230 /* Regset of regs live when calls to `setjmp'-like functions happen. */
231 /* ??? Does this exist only for the setjmp-clobbered warning message? */
233 regset regs_live_at_setjmp;
235 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
236 that have to go in the same hard reg.
237 The first two regs in the list are a pair, and the next two
238 are another pair, etc. */
241 /* Set of registers that may be eliminable. These are handled specially
242 in updating regs_ever_live. */
244 static HARD_REG_SET elim_reg_set;
246 /* The basic block structure for every insn, indexed by uid. */
248 varray_type basic_block_for_insn;
250 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
251 /* ??? Should probably be using LABEL_NUSES instead. It would take a
252 bit of surgery to be able to use or co-opt the routines in jump. */
254 static rtx label_value_list;
256 /* For use in communicating between propagate_block and its subroutines.
257 Holds all information needed to compute life and def-use information. */
259 struct propagate_block_info
261 /* The basic block we're considering. */
264 /* Bit N is set if register N is conditionally or unconditionally live. */
267 /* Bit N is set if register N is set this insn. */
270 /* Element N is the next insn that uses (hard or pseudo) register N
271 within the current basic block; or zero, if there is no such insn. */
274 /* Contains a list of all the MEMs we are tracking for dead store
278 /* If non-null, record the set of registers set in the basic block. */
281 /* Non-zero if the value of CC0 is live. */
284 /* Flags controlling the set of information propagate_block collects. */
288 /* Forward declarations */
289 static int count_basic_blocks PARAMS ((rtx));
290 static rtx find_basic_blocks_1 PARAMS ((rtx));
291 static void clear_edges PARAMS ((void));
292 static void make_edges PARAMS ((rtx));
293 static void make_label_edge PARAMS ((sbitmap *, basic_block,
295 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
296 basic_block, rtx, int));
297 static void mark_critical_edges PARAMS ((void));
298 static void move_stray_eh_region_notes PARAMS ((void));
299 static void record_active_eh_regions PARAMS ((rtx));
301 static void commit_one_edge_insertion PARAMS ((edge));
303 static void delete_unreachable_blocks PARAMS ((void));
304 static void delete_eh_regions PARAMS ((void));
305 static int can_delete_note_p PARAMS ((rtx));
306 static void expunge_block PARAMS ((basic_block));
307 static int can_delete_label_p PARAMS ((rtx));
308 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
310 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
312 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
313 static void try_merge_blocks PARAMS ((void));
314 static void tidy_fallthru_edges PARAMS ((void));
315 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
316 static void verify_wide_reg PARAMS ((int, rtx, rtx));
317 static void verify_local_live_at_start PARAMS ((regset, basic_block));
318 static int set_noop_p PARAMS ((rtx));
319 static int noop_move_p PARAMS ((rtx));
320 static void delete_noop_moves PARAMS ((rtx));
321 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
322 static void notice_stack_pointer_modification PARAMS ((rtx));
323 static void mark_reg PARAMS ((rtx, void *));
324 static void mark_regs_live_at_end PARAMS ((regset));
325 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
326 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
327 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
328 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
329 static int insn_dead_p PARAMS ((struct propagate_block_info *,
331 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
333 static void mark_set_regs PARAMS ((struct propagate_block_info *,
335 static void mark_set_1 PARAMS ((struct propagate_block_info *,
336 enum rtx_code, rtx, rtx,
339 static void find_auto_inc PARAMS ((struct propagate_block_info *,
341 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
343 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
345 static void mark_used_reg PARAMS ((struct propagate_block_info *,
347 static void mark_used_regs PARAMS ((struct propagate_block_info *,
349 void dump_flow_info PARAMS ((FILE *));
350 void debug_flow_info PARAMS ((void));
351 static void dump_edge_info PARAMS ((FILE *, edge, int));
353 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
355 static void remove_fake_successors PARAMS ((basic_block));
356 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
357 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
358 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
359 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
360 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
361 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
362 static int flow_depth_first_order_compute PARAMS ((int *));
363 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
364 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
365 static void flow_loops_tree_build PARAMS ((struct loops *));
366 static int flow_loop_level_compute PARAMS ((struct loop *, int));
367 static int flow_loops_level_compute PARAMS ((struct loops *));
369 /* Find basic blocks of the current function.
370 F is the first insn of the function and NREGS the number of register
374 find_basic_blocks (f, nregs, file)
376 int nregs ATTRIBUTE_UNUSED;
377 FILE *file ATTRIBUTE_UNUSED;
381 /* Flush out existing data. */
382 if (basic_block_info != NULL)
388 /* Clear bb->aux on all extant basic blocks. We'll use this as a
389 tag for reuse during create_basic_block, just in case some pass
390 copies around basic block notes improperly. */
391 for (i = 0; i < n_basic_blocks; ++i)
392 BASIC_BLOCK (i)->aux = NULL;
394 VARRAY_FREE (basic_block_info);
397 n_basic_blocks = count_basic_blocks (f);
399 /* Size the basic block table. The actual structures will be allocated
400 by find_basic_blocks_1, since we want to keep the structure pointers
401 stable across calls to find_basic_blocks. */
402 /* ??? This whole issue would be much simpler if we called find_basic_blocks
403 exactly once, and thereafter we don't have a single long chain of
404 instructions at all until close to the end of compilation when we
405 actually lay them out. */
407 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
409 label_value_list = find_basic_blocks_1 (f);
411 /* Record the block to which an insn belongs. */
412 /* ??? This should be done another way, by which (perhaps) a label is
413 tagged directly with the basic block that it starts. It is used for
414 more than that currently, but IMO that is the only valid use. */
416 max_uid = get_max_uid ();
418 /* Leave space for insns life_analysis makes in some cases for auto-inc.
419 These cases are rare, so we don't need too much space. */
420 max_uid += max_uid / 10;
423 compute_bb_for_insn (max_uid);
425 /* Discover the edges of our cfg. */
426 record_active_eh_regions (f);
427 make_edges (label_value_list);
429 /* Do very simple cleanup now, for the benefit of code that runs between
430 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
431 tidy_fallthru_edges ();
433 mark_critical_edges ();
435 #ifdef ENABLE_CHECKING
436 verify_flow_info ();
437 #endif
438 }
440 /* Count the basic blocks of the function. */
443 count_basic_blocks (f)
447 register RTX_CODE prev_code;
448 register int count = 0;
450 int call_had_abnormal_edge = 0;
452 prev_code = JUMP_INSN;
453 for (insn = f; insn; insn = NEXT_INSN (insn))
455 register RTX_CODE code = GET_CODE (insn);
457 if (code == CODE_LABEL
458 || (GET_RTX_CLASS (code) == 'i'
459 && (prev_code == JUMP_INSN
460 || prev_code == BARRIER
461 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
462 count++;
464 /* Record whether this call created an edge. */
465 if (code == CALL_INSN)
467 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
468 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
470 call_had_abnormal_edge = 0;
472 /* If there is an EH region or rethrow, we have an edge. */
473 if ((eh_region && region > 0)
474 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
475 call_had_abnormal_edge = 1;
476 else if (nonlocal_goto_handler_labels && region >= 0)
477 /* If there is a nonlocal goto label and the specified
478 region number isn't -1, we have an edge. (0 means
479 no throw, but might have a nonlocal goto). */
480 call_had_abnormal_edge = 1;
485 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
486 ++eh_region;
487 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
488 --eh_region;
491 /* The rest of the compiler works a bit smoother when we don't have to
492 check for the edge case of do-nothing functions with no basic blocks. */
495 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
502 /* Find all basic blocks of the function whose first insn is F.
504 Collect and return a list of labels whose addresses are taken. This
505 will be used in make_edges for use with computed gotos. */
508 find_basic_blocks_1 (f)
511 register rtx insn, next;
513 rtx bb_note = NULL_RTX;
514 rtx eh_list = NULL_RTX;
515 rtx label_value_list = NULL_RTX;
519 /* We process the instructions in a slightly different way than we did
520 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
521 closed out the previous block, so that it gets attached at the proper
522 place. Since this form should be equivalent to the previous,
523 count_basic_blocks continues to use the old form as a check. */
525 for (insn = f; insn; insn = next)
527 enum rtx_code code = GET_CODE (insn);
529 next = NEXT_INSN (insn);
535 int kind = NOTE_LINE_NUMBER (insn);
537 /* Keep a LIFO list of the currently active exception notes. */
538 if (kind == NOTE_INSN_EH_REGION_BEG)
539 eh_list = alloc_INSN_LIST (insn, eh_list);
540 else if (kind == NOTE_INSN_EH_REGION_END)
544 eh_list = XEXP (eh_list, 1);
545 free_INSN_LIST_node (t);
548 /* Look for basic block notes with which to keep the
549 basic_block_info pointers stable. Unthread the note now;
550 we'll put it back at the right place in create_basic_block.
551 Or not at all if we've already found a note in this block. */
552 else if (kind == NOTE_INSN_BASIC_BLOCK)
554 if (bb_note == NULL_RTX)
555 bb_note = insn;
556 else
557 next = flow_delete_insn (insn);
563 /* A basic block starts at a label. If we've closed one off due
564 to a barrier or some such, no need to do it again. */
565 if (head != NULL_RTX)
567 /* While we now have edge lists with which other portions of
568 the compiler might determine a call ending a basic block
569 does not imply an abnormal edge, it will be a bit before
570 everything can be updated. So continue to emit a noop at
571 the end of such a block. */
572 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
574 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
575 end = emit_insn_after (nop, end);
578 create_basic_block (i++, head, end, bb_note);
586 /* A basic block ends at a jump. */
587 if (head == NULL_RTX)
591 /* ??? Make a special check for table jumps. The way this
592 happens is truly and amazingly gross. We are about to
593 create a basic block that contains just a code label and
594 an addr*vec jump insn. Worse, an addr_diff_vec creates
595 its own natural loop.
597 Prevent this bit of brain damage, pasting things together
598 correctly in make_edges.
600 The correct solution involves emitting the table directly
601 on the tablejump instruction as a note, or JUMP_LABEL. */
603 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
604 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
612 goto new_bb_inclusive;
615 /* A basic block ends at a barrier. It may be that an unconditional
616 jump already closed the basic block -- no need to do it again. */
617 if (head == NULL_RTX)
620 /* While we now have edge lists with which other portions of the
621 compiler might determine a call ending a basic block does not
622 imply an abnormal edge, it will be a bit before everything can
623 be updated. So continue to emit a noop at the end of such a block. */
625 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
627 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
628 end = emit_insn_after (nop, end);
630 goto new_bb_exclusive;
634 /* Record whether this call created an edge. */
635 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
636 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
637 int call_has_abnormal_edge = 0;
639 /* If there is an EH region or rethrow, we have an edge. */
640 if ((eh_list && region > 0)
641 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
642 call_has_abnormal_edge = 1;
643 else if (nonlocal_goto_handler_labels && region >= 0)
644 /* If there is a nonlocal goto label and the specified
645 region number isn't -1, we have an edge. (0 means
646 no throw, but might have a nonlocal goto). */
647 call_has_abnormal_edge = 1;
649 /* A basic block ends at a call that can either throw or
650 do a non-local goto. */
651 if (call_has_abnormal_edge)
654 if (head == NULL_RTX)
659 create_basic_block (i++, head, end, bb_note);
660 head = end = NULL_RTX;
668 if (GET_RTX_CLASS (code) == 'i')
670 if (head == NULL_RTX)
677 if (GET_RTX_CLASS (code) == 'i')
681 /* Make a list of all labels referred to other than by jumps
682 (which just don't have the REG_LABEL notes).
684 Make a special exception for labels followed by an ADDR*VEC,
685 as this would be a part of the tablejump setup code.
687 Make a special exception for the eh_return_stub_label, which
688 we know isn't part of any otherwise visible control flow. */
690 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
691 if (REG_NOTE_KIND (note) == REG_LABEL)
693 rtx lab = XEXP (note, 0), next;
695 if (lab == eh_return_stub_label)
697 else if ((next = next_nonnote_insn (lab)) != NULL
698 && GET_CODE (next) == JUMP_INSN
699 && (GET_CODE (PATTERN (next)) == ADDR_VEC
700 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
704 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
709 if (head != NULL_RTX)
710 create_basic_block (i++, head, end, bb_note);
712 if (i != n_basic_blocks)
713 abort ();
715 return label_value_list;
718 /* Tidy the CFG by deleting unreachable code and whatnot. */
724 delete_unreachable_blocks ();
725 move_stray_eh_region_notes ();
726 record_active_eh_regions (f);
728 mark_critical_edges ();
730 /* Kill the data we won't maintain. */
731 label_value_list = NULL_RTX;
734 /* Create a new basic block consisting of the instructions between
735 HEAD and END inclusive. Reuses the note and basic block struct
736 in BB_NOTE, if any. */
739 create_basic_block (index, head, end, bb_note)
741 rtx head, end, bb_note;
746 && ! RTX_INTEGRATED_P (bb_note)
747 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
750 /* If we found an existing note, thread it back onto the chain. */
752 if (GET_CODE (head) == CODE_LABEL)
753 add_insn_after (bb_note, head);
756 add_insn_before (bb_note, head);
762 /* Otherwise we must create a note and a basic block structure.
763 Since we allow basic block structs in rtl, give the struct
764 the same lifetime by allocating it off the function obstack
765 rather than using malloc. */
767 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
768 memset (bb, 0, sizeof (*bb));
770 if (GET_CODE (head) == CODE_LABEL)
771 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
774 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
777 NOTE_BASIC_BLOCK (bb_note) = bb;
780 /* Always include the bb note in the block. */
781 if (NEXT_INSN (end) == bb_note)
782 end = bb_note;
787 BASIC_BLOCK (index) = bb;
789 /* Tag the block so that we know it has been used when considering
790 other basic block notes. */
794 /* Records the basic block struct in BB_FOR_INSN, for every instruction
795 indexed by INSN_UID. MAX is the size of the array. */
798 compute_bb_for_insn (max)
803 if (basic_block_for_insn)
804 VARRAY_FREE (basic_block_for_insn);
805 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
807 for (i = 0; i < n_basic_blocks; ++i)
809 basic_block bb = BASIC_BLOCK (i);
816 int uid = INSN_UID (insn);
818 VARRAY_BB (basic_block_for_insn, uid) = bb;
821 insn = NEXT_INSN (insn);
826 /* Free the memory associated with the edge structures. */
834 for (i = 0; i < n_basic_blocks; ++i)
836 basic_block bb = BASIC_BLOCK (i);
838 for (e = bb->succ; e ; e = n)
848 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
854 ENTRY_BLOCK_PTR->succ = 0;
855 EXIT_BLOCK_PTR->pred = 0;
860 /* Identify the edges between basic blocks.
862 LABEL_VALUE_LIST is a list of labels whose addresses are taken; blocks
863 that are otherwise unreachable may be reachable through such a label
864 via a computed jump, or with a non-local goto.

865 The exception regions active in each basic block were recorded in
866 bb->eh_beg and bb->eh_end by record_active_eh_regions. */
869 make_edges (label_value_list)
870 rtx label_value_list;
873 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
874 sbitmap *edge_cache = NULL;
876 /* Assume no computed jump; revise as we create edges. */
877 current_function_has_computed_jump = 0;
879 /* Heavy use of computed goto in machine-generated code can lead to
880 nearly fully-connected CFGs. In that case we spend a significant
881 amount of time searching the edge lists for duplicates. */
882 if (forced_labels || label_value_list)
884 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
885 sbitmap_vector_zero (edge_cache, n_basic_blocks);
888 /* By nature of the way these get numbered, block 0 is always the entry. */
889 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
891 for (i = 0; i < n_basic_blocks; ++i)
893 basic_block bb = BASIC_BLOCK (i);
896 int force_fallthru = 0;
898 /* Examine the last instruction of the block, and discover the
899 ways we can leave the block. */
902 code = GET_CODE (insn);
905 if (code == JUMP_INSN)
909 /* ??? Recognize a tablejump and do the right thing. */
910 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
911 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
912 && GET_CODE (tmp) == JUMP_INSN
913 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
914 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
919 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
920 vec = XVEC (PATTERN (tmp), 0);
922 vec = XVEC (PATTERN (tmp), 1);
924 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
925 make_label_edge (edge_cache, bb,
926 XEXP (RTVEC_ELT (vec, j), 0), 0);
928 /* Some targets (eg, ARM) emit a conditional jump that also
929 contains the out-of-range target. Scan for these and
930 add an edge if necessary. */
931 if ((tmp = single_set (insn)) != NULL
932 && SET_DEST (tmp) == pc_rtx
933 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
934 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
935 make_label_edge (edge_cache, bb,
936 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
938 #ifdef CASE_DROPS_THROUGH
939 /* Silly VAXen. The ADDR_VEC is going to be in the way of
940 us naturally detecting fallthru into the next block. */
941 force_fallthru = 1;
942 #endif
945 /* If this is a computed jump, then mark it as reaching
946 everything on the label_value_list and forced_labels list. */
947 else if (computed_jump_p (insn))
949 current_function_has_computed_jump = 1;
951 for (x = label_value_list; x; x = XEXP (x, 1))
952 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
954 for (x = forced_labels; x; x = XEXP (x, 1))
955 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
958 /* A return jump creates an edge to the exit block. */
959 else if (returnjump_p (insn))
960 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
962 /* Otherwise, we have a plain conditional or unconditional jump. */
965 if (! JUMP_LABEL (insn))
966 abort ();
967 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
971 /* If this is a sibling call insn, then this is in effect a
972 combined call and return, and so we need an edge to the
973 exit block. No need to worry about EH edges, since we
974 wouldn't have created the sibling call in the first place. */
976 if (code == CALL_INSN && SIBLING_CALL_P (insn))
977 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
980 /* If this is a CALL_INSN, then mark it as reaching the active EH
981 handler for this CALL_INSN. If we're handling asynchronous
982 exceptions then any insn can reach any of the active handlers.
984 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
986 if (code == CALL_INSN || asynchronous_exceptions)
988 /* Add any appropriate EH edges. We do this unconditionally
989 since there may be a REG_EH_REGION or REG_EH_RETHROW note
990 on the call, and this needn't be within an EH region. */
991 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
993 /* If we have asynchronous exceptions, do the same for *all*
994 exception regions active in the block. */
995 if (asynchronous_exceptions
996 && bb->eh_beg != bb->eh_end)
999 make_eh_edge (edge_cache, eh_nest_info, bb,
1000 NULL_RTX, bb->eh_beg);
1002 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1003 if (GET_CODE (x) == NOTE
1004 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1005 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1007 int region = NOTE_EH_HANDLER (x);
1008 make_eh_edge (edge_cache, eh_nest_info, bb,
1013 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1015 /* ??? This could be made smarter: in some cases it's possible
1016 to tell that certain calls will not do a nonlocal goto.
1018 For example, if the nested functions that do the nonlocal
1019 gotos do not have their addresses taken, then only calls to
1020 those functions or to other nested functions that use them
1021 could possibly do nonlocal gotos. */
1022 /* We do know that a REG_EH_REGION note with a value less
1023 than 0 is guaranteed not to perform a non-local goto. */
1024 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1025 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1026 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1027 make_label_edge (edge_cache, bb, XEXP (x, 0),
1028 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1032 /* We know something about the structure of the function __throw in
1033 libgcc2.c. It is the only function that ever contains eh_stub
1034 labels. It modifies its return address so that the last block
1035 returns to one of the eh_stub labels within it. So we have to
1036 make additional edges in the flow graph. */
1037 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1038 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1040 /* Find out if we can drop through to the next block. */
1041 insn = next_nonnote_insn (insn);
1042 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1043 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1044 else if (i + 1 < n_basic_blocks)
1046 rtx tmp = BLOCK_HEAD (i + 1);
1047 if (GET_CODE (tmp) == NOTE)
1048 tmp = next_nonnote_insn (tmp);
1049 if (force_fallthru || insn == tmp)
1050 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1054 free_eh_nesting_info (eh_nest_info);
1056 sbitmap_vector_free (edge_cache);
1059 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1060 about the edge that is accumulated between calls. */
1063 make_edge (edge_cache, src, dst, flags)
1064 sbitmap *edge_cache;
1065 basic_block src, dst;
1071 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1072 many edges to them, and we didn't allocate memory for it. */
1073 use_edge_cache = (edge_cache
1074 && src != ENTRY_BLOCK_PTR
1075 && dst != EXIT_BLOCK_PTR);
1077 /* Make sure we don't add duplicate edges. */
1078 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1079 for (e = src->succ; e ; e = e->succ_next)
1086 e = (edge) xcalloc (1, sizeof (*e));
1089 e->succ_next = src->succ;
1090 e->pred_next = dst->pred;
1099 SET_BIT (edge_cache[src->index], dst->index);
1102 /* Create an edge from a basic block to a label. */
1105 make_label_edge (edge_cache, src, label, flags)
1106 sbitmap *edge_cache;
1111 if (GET_CODE (label) != CODE_LABEL)
1112 abort ();
1114 /* If the label was never emitted, this insn is junk, but avoid a
1115 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1116 as a result of a syntax error and a diagnostic has already been
1117 printed. */
1119 if (INSN_UID (label) == 0)
1120 return;
1122 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1125 /* Create the edges generated by INSN in REGION. */
1128 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1129 sbitmap *edge_cache;
1130 eh_nesting_info *eh_nest_info;
1135 handler_info **handler_list;
1138 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1139 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1142 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1143 EDGE_ABNORMAL | EDGE_EH | is_call);
1147 /* EH_REGION notes appearing between basic blocks are ambiguous, and even
1148 dangerous if we intend to move basic blocks around. Move such notes
1149 into the following block. */
1152 move_stray_eh_region_notes ()
1157 if (n_basic_blocks < 2)
1158 return;
1160 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1161 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1163 rtx insn, next, list = NULL_RTX;
1165 b1 = BASIC_BLOCK (i);
1166 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1168 next = NEXT_INSN (insn);
1169 if (GET_CODE (insn) == NOTE
1170 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1171 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1173 /* Unlink from the insn chain. */
1174 NEXT_INSN (PREV_INSN (insn)) = next;
1175 PREV_INSN (next) = PREV_INSN (insn);
1178 NEXT_INSN (insn) = list;
1183 if (list == NULL_RTX)
1186 /* Find where to insert these things. */
1188 if (GET_CODE (insn) == CODE_LABEL)
1189 insn = NEXT_INSN (insn);
1193 next = NEXT_INSN (list);
1194 add_insn_after (list, insn);
1200 /* Recompute eh_beg/eh_end for each basic block. */
1203 record_active_eh_regions (f)
1206 rtx insn, eh_list = NULL_RTX;
1208 basic_block bb = BASIC_BLOCK (0);
1210 for (insn = f; insn ; insn = NEXT_INSN (insn))
1212 if (bb->head == insn)
1213 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1215 if (GET_CODE (insn) == NOTE)
1217 int kind = NOTE_LINE_NUMBER (insn);
1218 if (kind == NOTE_INSN_EH_REGION_BEG)
1219 eh_list = alloc_INSN_LIST (insn, eh_list);
1220 else if (kind == NOTE_INSN_EH_REGION_END)
1222 rtx t = XEXP (eh_list, 1);
1223 free_INSN_LIST_node (eh_list);
1224 eh_list = t;
1228 if (bb->end == insn)
1230 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1232 if (i == n_basic_blocks)
1233 break;
1234 bb = BASIC_BLOCK (i);
1239 /* Identify critical edges and set the bits appropriately. */
1242 mark_critical_edges ()
1244 int i, n = n_basic_blocks;
1247 /* We begin with the entry block. This is not terribly important now,
1248 but could be if a front end (Fortran) implemented alternate entry points. */
1250 bb = ENTRY_BLOCK_PTR;
1257 /* (1) Critical edges must have a source with multiple successors. */
1258 if (bb->succ && bb->succ->succ_next)
1260 for (e = bb->succ; e ; e = e->succ_next)
1262 /* (2) Critical edges must have a destination with multiple
1263 predecessors. Note that we know there is at least one
1264 predecessor -- the edge we followed to get here. */
1265 if (e->dest->pred->pred_next)
1266 e->flags |= EDGE_CRITICAL;
1267 else
1268 e->flags &= ~EDGE_CRITICAL;
1273 for (e = bb->succ; e ; e = e->succ_next)
1274 e->flags &= ~EDGE_CRITICAL;
1279 bb = BASIC_BLOCK (i);
1283 /* Split a (typically critical) edge. Return the new block.
1284 Abort on abnormal edges.
1286 ??? The code generally expects to be called on critical edges.
1287 The case of a block ending in an unconditional jump to a
1288 block with multiple predecessors is not handled optimally. */
1291 split_edge (edge_in)
1294 basic_block old_pred, bb, old_succ;
1299 /* Abnormal edges cannot be split. */
1300 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1301 abort ();
1303 old_pred = edge_in->src;
1304 old_succ = edge_in->dest;
1306 /* Remove the existing edge from the destination's pred list. */
1309 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1310 continue;
1311 *pp = edge_in->pred_next;
1312 edge_in->pred_next = NULL;
1315 /* Create the new structures. */
1316 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1317 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1320 memset (bb, 0, sizeof (*bb));
1321 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1322 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1324 /* ??? This info is likely going to be out of date very soon. */
1325 if (old_succ->global_live_at_start)
1327 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1328 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1332 CLEAR_REG_SET (bb->global_live_at_start);
1333 CLEAR_REG_SET (bb->global_live_at_end);
1338 bb->succ = edge_out;
1341 edge_in->flags &= ~EDGE_CRITICAL;
1343 edge_out->pred_next = old_succ->pred;
1344 edge_out->succ_next = NULL;
1346 edge_out->dest = old_succ;
1347 edge_out->flags = EDGE_FALLTHRU;
1348 edge_out->probability = REG_BR_PROB_BASE;
1350 old_succ->pred = edge_out;
1352 /* Tricky case -- if there existed a fallthru into the successor
1353 (and we're not it) we must add a new unconditional jump around
1354 the new block we're actually interested in.
1356 Further, if that edge is critical, this means a second new basic
1357 block must be created to hold it. In order to simplify correct
1358 insn placement, do this before we touch the existing basic block
1359 ordering for the block we were really wanting. */
1360 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1363 for (e = edge_out->pred_next; e ; e = e->pred_next)
1364 if (e->flags & EDGE_FALLTHRU)
1369 basic_block jump_block;
1372 if ((e->flags & EDGE_CRITICAL) == 0
1373 && e->src != ENTRY_BLOCK_PTR)
1375 /* Non critical -- we can simply add a jump to the end
1376 of the existing predecessor. */
1377 jump_block = e->src;
1381 /* We need a new block to hold the jump. The simplest
1382 way to do the bulk of the work here is to recursively call ourselves. */
1384 jump_block = split_edge (e);
1385 e = jump_block->succ;
1388 /* Now add the jump insn ... */
1389 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1391 jump_block->end = pos;
1392 if (basic_block_for_insn)
1393 set_block_for_insn (pos, jump_block);
1394 emit_barrier_after (pos);
1396 /* ... let jump know that label is in use, ... */
1397 JUMP_LABEL (pos) = old_succ->head;
1398 ++LABEL_NUSES (old_succ->head);
1400 /* ... and clear fallthru on the outgoing edge. */
1401 e->flags &= ~EDGE_FALLTHRU;
1403 /* Continue splitting the interesting edge. */
1407 /* Place the new block just in front of the successor. */
1408 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1409 if (old_succ == EXIT_BLOCK_PTR)
1410 j = n_basic_blocks - 1;
1412 j = old_succ->index;
1413 for (i = n_basic_blocks - 1; i > j; --i)
1415 basic_block tmp = BASIC_BLOCK (i - 1);
1416 BASIC_BLOCK (i) = tmp;
1419 BASIC_BLOCK (i) = bb;
1422 /* Create the basic block note.
1424 Where we place the note can have a noticeable impact on the generated
1425 code. Consider this cfg:
1436 If we need to insert an insn on the edge from block 0 to block 1,
1437 we want to ensure the instructions we insert are outside of any
1438 loop notes that physically sit between block 0 and block 1. Otherwise
1439 we confuse the loop optimizer into thinking the loop is a phony. */
1440 if (old_succ != EXIT_BLOCK_PTR
1441 && PREV_INSN (old_succ->head)
1442 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1443 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1444 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1445 PREV_INSN (old_succ->head));
1446 else if (old_succ != EXIT_BLOCK_PTR)
1447 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1449 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1450 NOTE_BASIC_BLOCK (bb_note) = bb;
1451 bb->head = bb->end = bb_note;
1453 /* Not quite simple -- for non-fallthru edges, we must adjust the
1454 predecessor's jump instruction to target our new block. */
1455 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1457 rtx tmp, insn = old_pred->end;
1458 rtx old_label = old_succ->head;
1459 rtx new_label = gen_label_rtx ();
1461 if (GET_CODE (insn) != JUMP_INSN)
1462 abort ();
1464 /* ??? Recognize a tablejump and adjust all matching cases. */
1465 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1466 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1467 && GET_CODE (tmp) == JUMP_INSN
1468 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1469 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1474 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1475 vec = XVEC (PATTERN (tmp), 0);
1477 vec = XVEC (PATTERN (tmp), 1);
1479 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1480 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1482 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1483 --LABEL_NUSES (old_label);
1484 ++LABEL_NUSES (new_label);
1487 /* Handle casesi dispatch insns */
1488 if ((tmp = single_set (insn)) != NULL
1489 && SET_DEST (tmp) == pc_rtx
1490 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1491 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1492 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1494 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1496 --LABEL_NUSES (old_label);
1497 ++LABEL_NUSES (new_label);
1502 /* This would have indicated an abnormal edge. */
1503 if (computed_jump_p (insn))
1504 abort ();
1506 /* A return instruction can't be redirected. */
1507 if (returnjump_p (insn))
1508 abort ();
1510 /* If the insn doesn't go where we think, we're confused. */
1511 if (JUMP_LABEL (insn) != old_label)
1512 abort ();
1514 redirect_jump (insn, new_label);
1517 emit_label_before (new_label, bb_note);
1518 bb->head = new_label;
1524 /* Queue instructions for insertion on an edge between two basic blocks.
1525 The new instructions and basic blocks (if any) will not appear in the
1526 CFG until commit_edge_insertions is called. */
1529 insert_insn_on_edge (pattern, e)
1533 /* We cannot insert instructions on an abnormal critical edge.
1534 It will be easier to find the culprit if we die now. */
1535 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1536 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1537 abort ();
1539 if (e->insns == NULL_RTX)
1542 push_to_sequence (e->insns);
1544 emit_insn (pattern);
1546 e->insns = get_insns ();
1550 /* Update the CFG for the instructions queued on edge E. */
1553 commit_one_edge_insertion (e)
1556 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1559 /* Pull the insns off the edge now since the edge might go away. */
1561 e->insns = NULL_RTX;
1563 /* Figure out where to put these things. If the destination has
1564 one predecessor, insert there. Except for the exit block. */
1565 if (e->dest->pred->pred_next == NULL
1566 && e->dest != EXIT_BLOCK_PTR)
1570 /* Get the location correct wrt a code label, and "nice" wrt
1571 a basic block note, and before everything else. */
1573 if (GET_CODE (tmp) == CODE_LABEL)
1574 tmp = NEXT_INSN (tmp);
1575 if (GET_CODE (tmp) == NOTE
1576 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1577 tmp = NEXT_INSN (tmp);
1578 if (tmp == bb->head)
1579 before = tmp;
1580 else
1581 after = PREV_INSN (tmp);
1584 /* If the source has one successor and the edge is not abnormal,
1585 insert there. Except for the entry block. */
1586 else if ((e->flags & EDGE_ABNORMAL) == 0
1587 && e->src->succ->succ_next == NULL
1588 && e->src != ENTRY_BLOCK_PTR)
1591 /* It is possible to have a non-simple jump here. Consider a target
1592 where some forms of unconditional jumps clobber a register. This
1593 happens on the fr30 for example.
1595 We know this block has a single successor, so we can just emit
1596 the queued insns before the jump. */
1597 if (GET_CODE (bb->end) == JUMP_INSN)
1603 /* We'd better be fallthru, or we've lost track of what's what. */
1604 if ((e->flags & EDGE_FALLTHRU) == 0)
1611 /* Otherwise we must split the edge. */
1614 bb = split_edge (e);
1618 /* Now that we've found the spot, do the insertion. */
1620 /* Set the new block number for these insns, if structure is allocated. */
1621 if (basic_block_for_insn)
1624 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1625 set_block_for_insn (i, bb);
1630 emit_insns_before (insns, before);
1631 if (before == bb->head)
1636 rtx last = emit_insns_after (insns, after);
1637 if (after == bb->end)
1641 if (GET_CODE (last) == JUMP_INSN)
1643 if (returnjump_p (last))
1645 /* ??? Remove all outgoing edges from BB and add one
1646 for EXIT. This is not currently a problem because
1647 this only happens for the (single) epilogue, which
1648 already has a fallthru edge to EXIT. */
1651 if (e->dest != EXIT_BLOCK_PTR
1652 || e->succ_next != NULL
1653 || (e->flags & EDGE_FALLTHRU) == 0)
1655 e->flags &= ~EDGE_FALLTHRU;
1657 emit_barrier_after (last);
1666 /* Update the CFG for all queued instructions. */
1669 commit_edge_insertions ()
1674 #ifdef ENABLE_CHECKING
1675 verify_flow_info ();
1676 #endif
1679 bb = ENTRY_BLOCK_PTR;
1684 for (e = bb->succ; e ; e = next)
1686 next = e->succ_next;
1688 commit_one_edge_insertion (e);
1691 if (++i >= n_basic_blocks)
1692 break;
1693 bb = BASIC_BLOCK (i);
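/* An illustrative usage sketch of the edge-insertion interface above.
   The pass and the helper toy_insert_clobbers are hypothetical; only
   insert_insn_on_edge and commit_edge_insertions are the real entry
   points.  Insns are queued on the edges that want them, and the insn
   chain and CFG are updated in one batch, with critical edges split as
   needed, when commit_edge_insertions is called.  */

static void
toy_insert_clobbers (reg)
     rtx reg;
{
  int i;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      edge e;

      /* Skip abnormal edges; abnormal critical edges cannot hold
	 insertions.  */
      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	if ((e->flags & EDGE_ABNORMAL) == 0)
	  insert_insn_on_edge (gen_rtx_CLOBBER (VOIDmode, reg), e);
    }

  commit_edge_insertions ();
}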
1697 /* Delete all unreachable basic blocks. */
1700 delete_unreachable_blocks ()
1702 basic_block *worklist, *tos;
1703 int deleted_handler;
1708 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1710 /* Use basic_block->aux as a marker. Clear them all. */
1712 for (i = 0; i < n; ++i)
1713 BASIC_BLOCK (i)->aux = NULL;
1715 /* Add our starting points to the worklist. Almost always there will
1716 be only one. It isn't inconceivable that we might one day directly
1717 support Fortran alternate entry points. */
1719 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1723 /* Mark the block with a handy non-null value. */
1727 /* Iterate: find everything reachable from what we've already seen. */
1729 while (tos != worklist)
1731 basic_block b = *--tos;
1733 for (e = b->succ; e ; e = e->succ_next)
1741 /* Delete all unreachable basic blocks. Count down so that we don't
1742 interfere with the block renumbering that happens in flow_delete_block. */
1744 deleted_handler = 0;
1746 for (i = n - 1; i >= 0; --i)
1748 basic_block b = BASIC_BLOCK (i);
1751 /* This block was found. Tidy up the mark. */
1754 deleted_handler |= flow_delete_block (b);
1757 tidy_fallthru_edges ();
1759 /* If we deleted an exception handler, we may have EH region begin/end
1760 blocks to remove as well. */
1761 if (deleted_handler)
1762 delete_eh_regions ();
1767 /* Find EH regions for which there is no longer a handler, and delete them. */
1770 delete_eh_regions ()
1774 update_rethrow_references ();
1776 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1777 if (GET_CODE (insn) == NOTE)
1779 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1780 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1782 int num = NOTE_EH_HANDLER (insn);
1783 /* A NULL handler indicates a region is no longer needed,
1784 as long as its rethrow label isn't used. */
1785 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1787 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1788 NOTE_SOURCE_FILE (insn) = 0;
1794 /* Return true if NOTE is not one of the ones that must be kept paired,
1795 so that we may simply delete them. */
1798 can_delete_note_p (note)
1801 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1802 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1805 /* Unlink a chain of insns between START and FINISH, leaving notes
1806 that must be paired. */
1809 flow_delete_insn_chain (start, finish)
1812 /* Unchain the insns one by one. It would be quicker to delete all
1813 of these with a single unchaining, rather than one at a time, but
1814 we need to keep the NOTE's. */
1820 next = NEXT_INSN (start);
1821 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1822 ;
1823 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1824 ;
1825 else
1826 next = flow_delete_insn (start);
1828 if (start == finish)
1829 break;
1834 /* Delete the insns in a (non-live) block. We physically delete every
1835 non-deleted-note insn, and update the flow graph appropriately.
1837 Return nonzero if we deleted an exception handler. */
1839 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1840 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1843 flow_delete_block (b)
1846 int deleted_handler = 0;
1849 /* If the head of this block is a CODE_LABEL, then it might be the
1850 label for an exception handler which can't be reached.
1852 We need to remove the label from the exception_handler_label list
1853 and remove the associated NOTE_INSN_EH_REGION_BEG and
1854 NOTE_INSN_EH_REGION_END notes. */
1858 never_reached_warning (insn);
1860 if (GET_CODE (insn) == CODE_LABEL)
1862 rtx x, *prev = &exception_handler_labels;
1864 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1866 if (XEXP (x, 0) == insn)
1868 /* Found a match, splice this label out of the EH label list. */
1869 *prev = XEXP (x, 1);
1870 XEXP (x, 1) = NULL_RTX;
1871 XEXP (x, 0) = NULL_RTX;
1873 /* Remove the handler from all regions */
1874 remove_handler (insn);
1875 deleted_handler = 1;
1878 prev = &XEXP (x, 1);
1881 /* This label may be referenced by code solely for its value, or
1882 referenced by static data, or something. We have determined
1883 that it is not reachable, but cannot delete the label itself.
1884 Save code space and continue to delete the balance of the block,
1885 along with properly updating the cfg. */
1886 if (!can_delete_label_p (insn))
1888 /* If we've only got one of these, skip the whole deleting
1889 insns thing. */
1890 if (insn == b->end)
1891 goto no_delete_insns;
1892 insn = NEXT_INSN (insn);
1896 /* Include any jump table following the basic block. */
1898 if (GET_CODE (end) == JUMP_INSN
1899 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1900 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1901 && GET_CODE (tmp) == JUMP_INSN
1902 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1903 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1906 /* Include any barrier that may follow the basic block. */
1907 tmp = next_nonnote_insn (end);
1908 if (tmp && GET_CODE (tmp) == BARRIER)
1911 /* Selectively delete the entire chain. */
1912 flow_delete_insn_chain (insn, end);
1916 /* Remove the edges into and out of this block. Note that there may
1917 indeed be edges in, if we are removing an unreachable loop. */
1921 for (e = b->pred; e ; e = next)
1923 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1924 continue;
1925 *q = e->succ_next;
1926 next = e->pred_next;
1930 for (e = b->succ; e ; e = next)
1932 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1933 continue;
1934 *q = e->pred_next;
1935 next = e->succ_next;
1944 /* Remove the basic block from the array, and compact behind it. */
1947 return deleted_handler;
1950 /* Remove block B from the basic block array and compact behind it. */
1956 int i, n = n_basic_blocks;
1958 for (i = b->index; i + 1 < n; ++i)
1960 basic_block x = BASIC_BLOCK (i + 1);
1961 BASIC_BLOCK (i) = x;
1965 basic_block_info->num_elements--;
1969 /* Delete INSN by patching it out. Return the next insn. */
1972 flow_delete_insn (insn)
1975 rtx prev = PREV_INSN (insn);
1976 rtx next = NEXT_INSN (insn);
1979 PREV_INSN (insn) = NULL_RTX;
1980 NEXT_INSN (insn) = NULL_RTX;
1982 if (prev)
1983 NEXT_INSN (prev) = next;
1984 if (next)
1985 PREV_INSN (next) = prev;
1986 else
1987 set_last_insn (prev);
1989 if (GET_CODE (insn) == CODE_LABEL)
1990 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1992 /* If deleting a jump, decrement the use count of the label. Deleting
1993 the label itself should happen in the normal course of block merging. */
1994 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1995 LABEL_NUSES (JUMP_LABEL (insn))--;
1997 /* Also if deleting an insn that references a label. */
1998 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
1999 LABEL_NUSES (XEXP (note, 0))--;
2004 /* True if a given label can be deleted. */
2007 can_delete_label_p (label)
2012 if (LABEL_PRESERVE_P (label))
2013 return 0;
2015 for (x = forced_labels; x ; x = XEXP (x, 1))
2016 if (label == XEXP (x, 0))
2017 return 0;
2018 for (x = label_value_list; x ; x = XEXP (x, 1))
2019 if (label == XEXP (x, 0))
2020 return 0;
2021 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2022 if (label == XEXP (x, 0))
2023 return 0;
2025 /* User declared labels must be preserved. */
2026 if (LABEL_NAME (label) != 0)
2027 return 0;
2029 return 1;
2032 /* Blocks A and B are to be merged into a single block A. The insns
2033 are already contiguous, hence `nomove'. */
2036 merge_blocks_nomove (a, b)
2040 rtx b_head, b_end, a_end;
2041 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2044 /* If there was a CODE_LABEL beginning B, delete it. */
2047 if (GET_CODE (b_head) == CODE_LABEL)
2049 /* Detect basic blocks with nothing but a label. This can happen
2050 in particular at the end of a function. */
2051 if (b_head == b_end)
2053 del_first = del_last = b_head;
2054 b_head = NEXT_INSN (b_head);
2057 /* Delete the basic block note. */
2058 if (GET_CODE (b_head) == NOTE
2059 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2061 if (b_head == b_end)
2066 b_head = NEXT_INSN (b_head);
2069 /* If there was a jump out of A, delete it. */
2071 if (GET_CODE (a_end) == JUMP_INSN)
2075 prev = prev_nonnote_insn (a_end);
2082 /* If this was a conditional jump, we need to also delete
2083 the insn that set cc0. */
2084 if (prev && sets_cc0_p (prev))
2087 prev = prev_nonnote_insn (prev);
2097 /* Delete everything marked above as well as crap that might be
2098 hanging out between the two blocks. */
2099 flow_delete_insn_chain (del_first, del_last);
2101 /* Normally there should only be one successor of A and that is B, but
2102 partway though the merge of blocks for conditional_execution we'll
2103 be merging a TEST block with THEN and ELSE successors. Free the
2104 whole lot of them and hope the caller knows what they're doing. */
2106 remove_edge (a->succ);
2108 /* Adjust the edges out of B for the new owner. */
2109 for (e = b->succ; e ; e = e->succ_next)
2113 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2114 b->pred = b->succ = NULL;
2116 /* Reassociate the insns of B with A. */
2119 if (basic_block_for_insn)
2121 BLOCK_FOR_INSN (b_head) = a;
2122 while (b_head != b_end)
2124 b_head = NEXT_INSN (b_head);
2125 BLOCK_FOR_INSN (b_head) = a;
2135 /* Blocks A and B are to be merged into a single block. A has no incoming
2136 fallthru edge, so it can be moved before B without adding or modifying
2137 any jumps (aside from the jump from A to B). */
2140 merge_blocks_move_predecessor_nojumps (a, b)
2143 rtx start, end, barrier;
2149 /* We want to delete the BARRIER after the end of the insns we are
2150 going to move. If we don't find a BARRIER, then do nothing. This
2151 can happen in some cases if we have labels we can not delete.
2153 Similarly, do nothing if we can not delete the label at the start
2154 of the target block. */
2155 barrier = next_nonnote_insn (end);
2156 if (GET_CODE (barrier) != BARRIER
2157 || (GET_CODE (b->head) == CODE_LABEL
2158 && ! can_delete_label_p (b->head)))
2159 return 0;
2161 flow_delete_insn (barrier);
2163 /* Move block and loop notes out of the chain so that we do not
2164 disturb their order.
2166 ??? A better solution would be to squeeze out all the non-nested notes
2167 and adjust the block trees appropriately. Even better would be to have
2168 a tighter connection between block trees and rtl so that this is not necessary. */
2170 start = squeeze_notes (start, end);
2172 /* Scramble the insn chain. */
2173 if (end != PREV_INSN (b->head))
2174 reorder_insns (start, end, PREV_INSN (b->head));
2177 if (rtl_dump_file)
2178 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2179 a->index, b->index);
2182 /* Swap the records for the two blocks around. Although we are deleting B,
2183 A is now where B was and we want to compact the BB array from where A used to be. */
2185 BASIC_BLOCK(a->index) = b;
2186 BASIC_BLOCK(b->index) = a;
2188 a->index = b->index;
2191 /* Now blocks A and B are contiguous. Merge them. */
2192 merge_blocks_nomove (a, b);
2197 /* Blocks A and B are to be merged into a single block. B has no outgoing
2198 fallthru edge, so it can be moved after A without adding or modifying
2199 any jumps (aside from the jump from A to B). */
2202 merge_blocks_move_successor_nojumps (a, b)
2205 rtx start, end, barrier;
2210 /* We want to delete the BARRIER after the end of the insns we are
2211 going to move. If we don't find a BARRIER, then do nothing. This
2212 can happen in some cases if we have labels we can not delete.
2214 Similarly, do nothing if we can not delete the label at the start
2215 of the target block. */
2216 barrier = next_nonnote_insn (end);
2217 if (GET_CODE (barrier) != BARRIER
2218 || (GET_CODE (b->head) == CODE_LABEL
2219 && ! can_delete_label_p (b->head)))
2220 return 0;
2222 flow_delete_insn (barrier);
2224 /* Move block and loop notes out of the chain so that we do not
2225 disturb their order.
2227 ??? A better solution would be to squeeze out all the non-nested notes
2228 and adjust the block trees appropriately. Even better would be to have
2229 a tighter connection between block trees and rtl so that this is not necessary. */
2231 start = squeeze_notes (start, end);
2233 /* Scramble the insn chain. */
2234 reorder_insns (start, end, a->end);
2236 /* Now blocks A and B are contiguous. Merge them. */
2237 merge_blocks_nomove (a, b);
2240 if (rtl_dump_file)
2241 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2242 b->index, a->index);
2248 /* Attempt to merge basic blocks that are potentially non-adjacent.
2249 Return true iff the attempt succeeded. */
2252 merge_blocks (e, b, c)
2256 /* If B has a fallthru edge to C, no need to move anything. */
2257 if (e->flags & EDGE_FALLTHRU)
2259 /* If a label still appears somewhere and we cannot delete the label,
2260 then we cannot merge the blocks. The edge was tidied already. */
2262 rtx insn, stop = NEXT_INSN (c->head);
2263 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2264 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2267 merge_blocks_nomove (b, c);
2271 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2272 b->index, c->index);
2281 int c_has_outgoing_fallthru;
2282 int b_has_incoming_fallthru;
2284 /* We must make sure to not munge nesting of exception regions,
2285 lexical blocks, and loop notes.
2287 The first is taken care of by requiring that the active eh
2288 region at the end of one block always matches the active eh
2289 region at the beginning of the next block.
2291 The latter two are taken care of by squeezing out all the notes. */
2293 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2294 executed and we may want to treat blocks which have two out
2295 edges, one normal, one abnormal as only having one edge for
2296 block merging purposes. */
2298 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2299 if (tmp_edge->flags & EDGE_FALLTHRU)
2301 c_has_outgoing_fallthru = (tmp_edge != NULL);
2303 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2304 if (tmp_edge->flags & EDGE_FALLTHRU)
2306 b_has_incoming_fallthru = (tmp_edge != NULL);
2308 /* If B does not have an incoming fallthru, and the exception regions
2309 match, then it can be moved immediately before C without introducing or modifying jumps.
2312 C cannot be the first block, so we do not have to worry about
2313 accessing a non-existent block. */
2314 d = BASIC_BLOCK (c->index - 1);
2315 if (! b_has_incoming_fallthru
2316 && d->eh_end == b->eh_beg
2317 && b->eh_end == c->eh_beg)
2318 return merge_blocks_move_predecessor_nojumps (b, c);
2320 /* Otherwise, we're going to try to move C after B. Make sure the
2321 exception regions match.
2323 If B is the last basic block, then we must not try to access the
2324 block structure for block B + 1. Luckily in that case we do not
2325 need to worry about matching exception regions. */
2326 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2327 if (b->eh_end == c->eh_beg
2328 && (d == NULL || c->eh_end == d->eh_beg))
2330 /* If C does not have an outgoing fallthru, then it can be moved
2331 immediately after B without introducing or modifying jumps. */
2332 if (! c_has_outgoing_fallthru)
2333 return merge_blocks_move_successor_nojumps (b, c);
2335 /* Otherwise, we'll need to insert an extra jump, and possibly
2336 a new block to contain it. */
2337 /* ??? Not implemented yet. */
2344 /* Top level driver for merge_blocks. */
2351 /* Attempt to merge blocks as made possible by edge removal. If a block
2352 has only one successor, and the successor has only one predecessor,
2353 they may be combined. */
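/* For example (hypothetical block indices): if removing an edge leaves
   B -> C -> D where each arrow is both the only successor of its source
   and the only predecessor of its destination, the inner loop below first
   folds C into B and then, because the merged block's single successor is
   now D, folds D in as well before the outer loop moves on.  */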
2355 for (i = 0; i < n_basic_blocks; )
2357 basic_block c, b = BASIC_BLOCK (i);
2360 /* A loop because chains of blocks might be combinable. */
2361 while ((s = b->succ) != NULL
2362 && s->succ_next == NULL
2363 && (s->flags & EDGE_EH) == 0
2364 && (c = s->dest) != EXIT_BLOCK_PTR
2365 && c->pred->pred_next == NULL
2366 /* If the jump insn has side effects, we can't kill the edge. */
2367 && (GET_CODE (b->end) != JUMP_INSN
2368 || onlyjump_p (b->end))
2369 && merge_blocks (s, b, c))
2372 /* Don't get confused by the index shift caused by deleting blocks. */
2377 /* The given edge should potentially be a fallthru edge. If that is in
2378 fact true, delete the jump and barriers that are in the way. */
2381 tidy_fallthru_edge (e, b, c)
2387 /* ??? In a late-running flow pass, other folks may have deleted basic
2388 blocks by nopping out blocks, leaving multiple BARRIERs between here
2389 and the target label. They ought to be chastised and fixed.
2391 We can also wind up with a sequence of undeletable labels between
2392 one block and the next.
2394 So search through a sequence of barriers, labels, and notes for
2395 the head of block C and assert that we really do fall through. */
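/* A sketch of the typical case (insn layout and label number invented):
   block B ends with

	(jump_insn (set (pc) (label_ref 23)))
	(barrier)
	(code_label 23)		<- head of block C

   Deleting the jump and the barrier below leaves B falling straight into
   C, and the edge is then marked EDGE_FALLTHRU.  */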
2397 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2400 /* Remove what will soon cease being the jump insn from the source block.
2401 If block B consisted only of this single jump, turn it into a deleted note. */
2404 if (GET_CODE (q) == JUMP_INSN
2405 && (simplejump_p (q)
2406 || (b->succ == e && e->succ_next == NULL)))
2409 /* If this was a conditional jump, we need to also delete
2410 the insn that set cc0. */
2411 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2418 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2419 NOTE_SOURCE_FILE (q) = 0;
2422 b->end = q = PREV_INSN (q);
2425 /* Selectively unlink the sequence. */
2426 if (q != PREV_INSN (c->head))
2427 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2429 e->flags |= EDGE_FALLTHRU;
2432 /* Fix up edges that now fall through, or rather should now fall through
2433 but previously required a jump around now deleted blocks. Simplify
2434 the search by only examining blocks numerically adjacent, since this
2435 is how find_basic_blocks created them. */
2438 tidy_fallthru_edges ()
2442 for (i = 1; i < n_basic_blocks; ++i)
2444 basic_block b = BASIC_BLOCK (i - 1);
2445 basic_block c = BASIC_BLOCK (i);
2448 /* We care about simple conditional or unconditional jumps with a single successor.
2451 If we had a conditional branch to the next instruction when
2452 find_basic_blocks was called, then there will only be one
2453 out edge for the block which ended with the conditional
2454 branch (since we do not create duplicate edges).
2456 Furthermore, the edge will be marked as a fallthru because we
2457 merge the flags for the duplicate edges. So we do not want to
2458 check that the edge is not a FALLTHRU edge. */
2459 if ((s = b->succ) != NULL
2460 && s->succ_next == NULL
2462 /* If the jump insn has side effects, we can't tidy the edge. */
2463 && (GET_CODE (b->end) != JUMP_INSN
2464 || onlyjump_p (b->end)))
2465 tidy_fallthru_edge (s, b, c);
2469 /* Perform data flow analysis.
2470 F is the first insn of the function; FLAGS is a set of PROP_* flags
2471 to be used in accumulating flow info. */
2474 life_analysis (f, file, flags)
2479 #ifdef ELIMINABLE_REGS
2481 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2484 /* Record which registers will be eliminated. We use this in
2487 CLEAR_HARD_REG_SET (elim_reg_set);
2489 #ifdef ELIMINABLE_REGS
2490 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2491 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2493 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2497 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2499 /* The post-reload life analysis has (on a global basis) the same
2500 registers live as were computed by reload itself; otherwise
2501 elimination offsets and such may be incorrect.
2503 Reload will mark some registers as live even though they do not
2504 appear in the rtl. */
2505 if (reload_completed)
2506 flags &= ~PROP_REG_INFO;
2508 /* We want alias analysis information for local dead store elimination. */
2509 if (flags & PROP_SCAN_DEAD_CODE)
2510 init_alias_analysis ();
2512 /* Always remove no-op moves. Do this before other processing so
2513 that we don't have to keep re-scanning them. */
2514 delete_noop_moves (f);
2516 /* Some targets can emit simpler epilogues if they know that sp was
2517 not ever modified during the function. After reload, of course,
2518 we've already emitted the epilogue so there's no sense searching. */
2519 if (! reload_completed)
2520 notice_stack_pointer_modification (f);
2522 /* Allocate and zero out data structures that will record the
2523 data from lifetime analysis. */
2524 allocate_reg_life_data ();
2525 allocate_bb_life_data ();
2527 /* Find the set of registers live on function exit. */
2528 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2530 /* "Update" life info from zero. It'd be nice to begin the
2531 relaxation with just the exit and noreturn blocks, but that set
2532 is not immediately handy. */
2534 if (flags & PROP_REG_INFO)
2535 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2536 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2539 if (flags & PROP_SCAN_DEAD_CODE)
2540 end_alias_analysis ();
2543 dump_flow_info (file);
2545 free_basic_block_vars (1);
2548 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2549 Search for REGNO. If found, abort if it is not wider than word_mode. */
2552 verify_wide_reg_1 (px, pregno)
2557 unsigned int regno = *(int *) pregno;
2559 if (GET_CODE (x) == REG && REGNO (x) == regno)
2561 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2568 /* A subroutine of verify_local_live_at_start. Search through insns
2569 between HEAD and END looking for register REGNO. */
2572 verify_wide_reg (regno, head, end)
2578 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2579 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2583 head = NEXT_INSN (head);
2586 /* We didn't find the register at all. Something's way screwy. */
2590 /* A subroutine of update_life_info. Verify that there are no untoward
2591 changes in live_at_start during a local update. */
2594 verify_local_live_at_start (new_live_at_start, bb)
2595 regset new_live_at_start;
2598 if (reload_completed)
2600 /* After reload, there are no pseudos, nor subregs of multi-word
2601 registers. The regsets should exactly match. */
2602 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2609 /* Find the set of changed registers. */
2610 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2612 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2614 /* No registers should die. */
2615 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2617 /* Verify that the now-live register is wider than word_mode. */
2618 verify_wide_reg (i, bb->head, bb->end);
2623 /* Updates life information starting with the basic blocks set in BLOCKS.
2624 If BLOCKS is null, consider it to be the universal set.
2626 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
2627 we are only expecting local modifications to basic blocks. If we find
2628 extra registers live at the beginning of a block, then we either killed
2629 useful data, or we have a broken split that wants data not provided.
2630 If we find registers removed from live_at_start, that means we have
2631 a broken peephole that is killing a register it shouldn't.
2633 ??? This is not true in one situation -- when a pre-reload splitter
2634 generates subregs of a multi-word pseudo, current life analysis will
2635 lose the kill. So we _can_ have a pseudo go live. How irritating.
2637 Including PROP_REG_INFO does not properly refresh regs_ever_live
2638 unless the caller resets it to zero. */
2641 update_life_info (blocks, extent, prop_flags)
2643 enum update_life_extent extent;
2647 regset_head tmp_head;
2650 tmp = INITIALIZE_REG_SET (tmp_head);
2652 /* For a global update, we go through the relaxation process again. */
2653 if (extent != UPDATE_LIFE_LOCAL)
2655 calculate_global_regs_live (blocks, blocks,
2656 prop_flags & PROP_SCAN_DEAD_CODE);
2658 /* If asked, remove notes from the blocks we'll update. */
2659 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2660 count_or_remove_death_notes (blocks, 1);
2665 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2667 basic_block bb = BASIC_BLOCK (i);
2669 COPY_REG_SET (tmp, bb->global_live_at_end);
2670 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2672 if (extent == UPDATE_LIFE_LOCAL)
2673 verify_local_live_at_start (tmp, bb);
2678 for (i = n_basic_blocks - 1; i >= 0; --i)
2680 basic_block bb = BASIC_BLOCK (i);
2682 COPY_REG_SET (tmp, bb->global_live_at_end);
2683 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2685 if (extent == UPDATE_LIFE_LOCAL)
2686 verify_local_live_at_start (tmp, bb);
2692 if (prop_flags & PROP_REG_INFO)
2694 /* The only pseudos that are live at the beginning of the function
2695 are those that were not set anywhere in the function. local-alloc
2696 doesn't know how to handle these correctly, so mark them as not
2697 local to any one basic block. */
2698 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2699 FIRST_PSEUDO_REGISTER, i,
2700 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2702 /* We have a problem with any pseudoreg that lives across the setjmp.
2703 ANSI says that if a user variable does not change in value between
2704 the setjmp and the longjmp, then the longjmp preserves it. This
2705 includes longjmp from a place where the pseudo appears dead.
2706 (In principle, the value still exists if it is in scope.)
2707 If the pseudo goes in a hard reg, some other value may occupy
2708 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2709 Conclusion: such a pseudo must not go in a hard reg. */
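/* A contrived source-level example of the hazard (names invented):

	jmp_buf env;
	int v = f ();
	if (setjmp (env))
	  return v;		<- longjmp path; V must still be valid
	g (env);		<- may call longjmp (env, 1)
	return v;

   If V were allocated to a call-clobbered hard register across the
   setjmp, the longjmp path could see that register trashed.  The loop
   below therefore marks each such pseudo (REG_LIVE_LENGTH of -1 and no
   home basic block) so the allocators keep it out of a hard register.  */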
2710 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2711 FIRST_PSEUDO_REGISTER, i,
2713 if (regno_reg_rtx[i] != 0)
2715 REG_LIVE_LENGTH (i) = -1;
2716 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2722 /* Free the variables allocated by find_basic_blocks.
2724 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2727 free_basic_block_vars (keep_head_end_p)
2728 int keep_head_end_p;
2730 if (basic_block_for_insn)
2732 VARRAY_FREE (basic_block_for_insn);
2733 basic_block_for_insn = NULL;
2736 if (! keep_head_end_p)
2739 VARRAY_FREE (basic_block_info);
2742 ENTRY_BLOCK_PTR->aux = NULL;
2743 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2744 EXIT_BLOCK_PTR->aux = NULL;
2745 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2749 /* Return nonzero if the destination of SET equals the source. */
2754 rtx src = SET_SRC (set);
2755 rtx dst = SET_DEST (set);
2757 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2759 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2761 src = SUBREG_REG (src);
2762 dst = SUBREG_REG (dst);
2765 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2766 && REGNO (src) == REGNO (dst));
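/* Examples (register numbers invented): both

	(set (reg:SI 58) (reg:SI 58))
	(set (subreg:SI (reg:DI 60) 0) (subreg:SI (reg:DI 60) 0))

   are no-op sets, while

	(set (subreg:SI (reg:DI 60) 1) (subreg:SI (reg:DI 60) 0))

   is not, because the SUBREG_WORDs differ.  */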
2769 /* Return nonzero if an insn consists only of SETs, each of which only sets a value to itself. */
2775 rtx pat = PATTERN (insn);
2777 /* Insns carrying these notes are useful later on. */
2778 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2781 if (GET_CODE (pat) == SET && set_noop_p (pat))
2784 if (GET_CODE (pat) == PARALLEL)
2787 /* If nothing but SETs of registers to themselves,
2788 this insn can also be deleted. */
2789 for (i = 0; i < XVECLEN (pat, 0); i++)
2791 rtx tem = XVECEXP (pat, 0, i);
2793 if (GET_CODE (tem) == USE
2794 || GET_CODE (tem) == CLOBBER)
2797 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2806 /* Delete any insns that copy a register to itself. */
2809 delete_noop_moves (f)
2813 for (insn = f; insn; insn = NEXT_INSN (insn))
2815 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2817 PUT_CODE (insn, NOTE);
2818 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2819 NOTE_SOURCE_FILE (insn) = 0;
2824 /* Determine if the stack pointer is constant over the life of the function.
2825 Only useful before prologues have been emitted. */
2828 notice_stack_pointer_modification_1 (x, pat, data)
2830 rtx pat ATTRIBUTE_UNUSED;
2831 void *data ATTRIBUTE_UNUSED;
2833 if (x == stack_pointer_rtx
2834 /* The stack pointer is only modified indirectly as the result
2835 of a push until later in flow. See the comments in rtl.texi
2836 regarding Embedded Side-Effects on Addresses. */
2837 || (GET_CODE (x) == MEM
2838 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2839 || GET_CODE (XEXP (x, 0)) == PRE_INC
2840 || GET_CODE (XEXP (x, 0)) == POST_DEC
2841 || GET_CODE (XEXP (x, 0)) == POST_INC)
2842 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2843 current_function_sp_is_unchanging = 0;
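/* For instance, a push on a downward-growing stack appears as

	(set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 0))

   (modes and register numbers are illustrative, and the register syntax is
   simplified).  note_stores hands us the MEM destination, and the embedded
   PRE_DEC of the stack pointer is what the test above catches.  */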
2847 notice_stack_pointer_modification (f)
2852 /* Assume that the stack pointer is unchanging if alloca hasn't been used. */
2854 current_function_sp_is_unchanging = !current_function_calls_alloca;
2855 if (! current_function_sp_is_unchanging)
2858 for (insn = f; insn; insn = NEXT_INSN (insn))
2860 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2862 /* Check if insn modifies the stack pointer. */
2863 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2865 if (! current_function_sp_is_unchanging)
2871 /* Mark a register in SET. Hard registers in large modes get all
2872 of their component registers set as well. */
2874 mark_reg (reg, xset)
2878 regset set = (regset) xset;
2879 int regno = REGNO (reg);
2881 if (GET_MODE (reg) == BLKmode)
2884 SET_REGNO_REG_SET (set, regno);
2885 if (regno < FIRST_PSEUDO_REGISTER)
2887 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2889 SET_REGNO_REG_SET (set, regno + n);
2893 /* Mark those regs which are needed at the end of the function as live
2894 at the end of the last basic block. */
2896 mark_regs_live_at_end (set)
2901 /* If exiting needs the right stack value, consider the stack pointer
2902 live at the end of the function. */
2903 if ((HAVE_epilogue && reload_completed)
2904 || ! EXIT_IGNORE_STACK
2905 || (! FRAME_POINTER_REQUIRED
2906 && ! current_function_calls_alloca
2907 && flag_omit_frame_pointer)
2908 || current_function_sp_is_unchanging)
2910 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2913 /* Mark the frame pointer if needed at the end of the function. If
2914 we end up eliminating it, it will be removed from the live list
2915 of each basic block by reload. */
2917 if (! reload_completed || frame_pointer_needed)
2919 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2920 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2921 /* If they are different, also mark the hard frame pointer as live */
2922 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2926 #ifdef PIC_OFFSET_TABLE_REGNUM
2927 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2928 /* Many architectures have a GP register even without flag_pic.
2929 Assume the pic register is not in use, or will be handled by
2930 other means, if it is not fixed. */
2931 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2932 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2936 /* Mark all global registers, and all registers used by the epilogue
2937 as being live at the end of the function since they may be
2938 referenced by our caller. */
2939 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2941 #ifdef EPILOGUE_USES
2942 || EPILOGUE_USES (i)
2945 SET_REGNO_REG_SET (set, i);
2947 /* Mark all call-saved registers that we actually used. */
2948 if (HAVE_epilogue && reload_completed)
2950 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2951 if (! call_used_regs[i] && regs_ever_live[i])
2952 SET_REGNO_REG_SET (set, i);
2955 /* Mark function return value. */
2956 diddle_return_value (mark_reg, set);
2959 /* Callback function for for_each_successor_phi. DATA is a regset.
2960 Sets the SRC_REGNO, the regno of the phi alternative for phi node
2961 INSN, in the regset. */
2964 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2965 rtx insn ATTRIBUTE_UNUSED;
2966 int dest_regno ATTRIBUTE_UNUSED;
2970 regset live = (regset) data;
2971 SET_REGNO_REG_SET (live, src_regno);
2975 /* Propagate global life info around the graph of basic blocks. Begin
2976 considering blocks with their corresponding bit set in BLOCKS_IN.
2977 If BLOCKS_IN is null, consider it the universal set.
2979 BLOCKS_OUT is set for every block that was changed. */
2982 calculate_global_regs_live (blocks_in, blocks_out, flags)
2983 sbitmap blocks_in, blocks_out;
2986 basic_block *queue, *qhead, *qtail, *qend;
2987 regset tmp, new_live_at_end;
2988 regset_head tmp_head;
2989 regset_head new_live_at_end_head;
2992 tmp = INITIALIZE_REG_SET (tmp_head);
2993 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
2995 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one more
2996 because the `head == tail' style test for an empty queue doesn't
2997 work with a full queue. */
2998 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3000 qhead = qend = queue + n_basic_blocks + 2;
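/* A sketch of the ring-buffer idiom the comment above alludes to
   (illustrative only, not a copy of the queue code in this function):

	*qtail++ = bb;			enqueue
	if (qtail == qend)
	  qtail = queue;

	bb = *qhead++;			dequeue
	if (qhead == qend)
	  qhead = queue;

   Because the array always has a spare slot, the queue can never fill up,
   so `qhead == qtail' unambiguously means "queue empty".  */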
3002 /* Clear out the garbage that might be hanging out in bb->aux. */
3003 for (i = n_basic_blocks - 1; i >= 0; --i)
3004 BASIC_BLOCK (i)->aux = NULL;
3006 /* Queue the blocks set in the initial mask. Do this in reverse block
3007 number order so that the first round is more likely to do
3008 useful work. We use AUX non-null to flag that the block is queued. */
3011 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3013 basic_block bb = BASIC_BLOCK (i);
3020 for (i = 0; i < n_basic_blocks; ++i)
3022 basic_block bb = BASIC_BLOCK (i);
3029 sbitmap_zero (blocks_out);
3031 while (qhead != qtail)
3033 int rescan, changed;
3042 /* Begin by propagating live_at_start from the successor blocks. */
3043 CLEAR_REG_SET (new_live_at_end);
3044 for (e = bb->succ; e ; e = e->succ_next)
3046 basic_block sb = e->dest;
3047 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3050 /* Force the stack pointer to be live -- which might not already be
3051 the case for blocks within infinite loops. */
3052 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3054 /* Regs used in phi nodes are not included in
3055 global_live_at_start, since they are live only along a
3056 particular edge. Set those regs that are live because of a
3057 phi node alternative corresponding to this particular block. */
3058 for_each_successor_phi (bb, &set_phi_alternative_reg,
3061 if (bb == ENTRY_BLOCK_PTR)
3063 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3067 /* On our first pass through this block, we'll go ahead and continue.
3068 Recognize first pass by local_set NULL. On subsequent passes, we
3069 get to skip out early if live_at_end wouldn't have changed. */
3071 if (bb->local_set == NULL)
3073 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3078 /* If any bits were removed from live_at_end, we'll have to
3079 rescan the block. This wouldn't be necessary if we had
3080 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3081 local_live is really dependent on live_at_end. */
3082 CLEAR_REG_SET (tmp);
3083 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3084 new_live_at_end, BITMAP_AND_COMPL);
3088 /* Find the set of changed bits. Take this opportunity
3089 to notice that this set is empty and early out. */
3090 CLEAR_REG_SET (tmp);
3091 changed = bitmap_operation (tmp, bb->global_live_at_end,
3092 new_live_at_end, BITMAP_XOR);
3096 /* If any of the changed bits overlap with local_set,
3097 we'll have to rescan the block. Detect overlap by
3098 the AND with ~local_set turning off bits. */
3099 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3104 /* Let our caller know that BB changed enough to require its
3105 death notes updated. */
3107 SET_BIT (blocks_out, bb->index);
3111 /* Add to live_at_start the set of all registers in
3112 new_live_at_end that aren't in the old live_at_end. */
3114 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3116 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3118 changed = bitmap_operation (bb->global_live_at_start,
3119 bb->global_live_at_start,
3126 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3128 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3129 into live_at_start. */
3130 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3132 /* If live_at_start didn't change, no need to go farther. */
3133 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3136 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3139 /* Queue all predecessors of BB so that we may re-examine
3140 their live_at_end. */
3141 for (e = bb->pred; e ; e = e->pred_next)
3143 basic_block pb = e->src;
3144 if (pb->aux == NULL)
3155 FREE_REG_SET (new_live_at_end);
3159 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3161 basic_block bb = BASIC_BLOCK (i);
3162 FREE_REG_SET (bb->local_set);
3167 for (i = n_basic_blocks - 1; i >= 0; --i)
3169 basic_block bb = BASIC_BLOCK (i);
3170 FREE_REG_SET (bb->local_set);
3177 /* Subroutines of life analysis. */
3179 /* Allocate the permanent data structures that represent the results
3180 of life analysis. Not static since used also for stupid life analysis. */
3183 allocate_bb_life_data ()
3187 for (i = 0; i < n_basic_blocks; i++)
3189 basic_block bb = BASIC_BLOCK (i);
3191 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3192 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3195 ENTRY_BLOCK_PTR->global_live_at_end
3196 = OBSTACK_ALLOC_REG_SET (function_obstack);
3197 EXIT_BLOCK_PTR->global_live_at_start
3198 = OBSTACK_ALLOC_REG_SET (function_obstack);
3200 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3204 allocate_reg_life_data ()
3208 max_regno = max_reg_num ();
3210 /* Recalculate the register space, in case it has grown. Old style
3211 vector oriented regsets would set regset_{size,bytes} here also. */
3212 allocate_reg_info (max_regno, FALSE, FALSE);
3214 /* Reset all the data we'll collect in propagate_block and its subroutines. */
3216 for (i = 0; i < max_regno; i++)
3220 REG_N_DEATHS (i) = 0;
3221 REG_N_CALLS_CROSSED (i) = 0;
3222 REG_LIVE_LENGTH (i) = 0;
3223 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3227 /* Delete dead instructions for propagate_block. */
3230 propagate_block_delete_insn (bb, insn)
3234 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3236 /* If the insn referred to a label, and that label was attached to
3237 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3238 pretty much mandatory to delete it, because the ADDR_VEC may be
3239 referencing labels that no longer exist. */
3243 rtx label = XEXP (inote, 0);
3246 if (LABEL_NUSES (label) == 1
3247 && (next = next_nonnote_insn (label)) != NULL
3248 && GET_CODE (next) == JUMP_INSN
3249 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3250 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3252 rtx pat = PATTERN (next);
3253 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3254 int len = XVECLEN (pat, diff_vec_p);
3257 for (i = 0; i < len; i++)
3258 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3260 flow_delete_insn (next);
3264 if (bb->end == insn)
3265 bb->end = PREV_INSN (insn);
3266 flow_delete_insn (insn);
3269 /* Delete dead libcalls for propagate_block. Return the insn
3270 before the libcall. */
3273 propagate_block_delete_libcall (bb, insn, note)
3277 rtx first = XEXP (note, 0);
3278 rtx before = PREV_INSN (first);
3280 if (insn == bb->end)
3283 flow_delete_insn_chain (first, insn);
3287 /* Update the life-status of regs for one insn. Return the previous insn. */
3290 propagate_one_insn (pbi, insn)
3291 struct propagate_block_info *pbi;
3294 rtx prev = PREV_INSN (insn);
3295 int flags = pbi->flags;
3296 int insn_is_dead = 0;
3297 int libcall_is_dead = 0;
3301 if (! INSN_P (insn))
3304 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3305 if (flags & PROP_SCAN_DEAD_CODE)
3307 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3309 libcall_is_dead = (insn_is_dead && note != 0
3310 && libcall_dead_p (pbi, PATTERN (insn),
3314 /* We almost certainly don't want to delete prologue or epilogue
3315 instructions. Warn about probable compiler lossage. */
3318 && (((HAVE_epilogue || HAVE_prologue)
3319 && prologue_epilogue_contains (insn))
3320 || (HAVE_sibcall_epilogue
3321 && sibcall_epilogue_contains (insn))))
3323 if (flags & PROP_KILL_DEAD_CODE)
3325 warning ("ICE: would have deleted prologue/epilogue insn");
3326 if (!inhibit_warnings)
3329 libcall_is_dead = insn_is_dead = 0;
3332 /* If an instruction consists of just dead store(s) on final pass, delete it. */
3334 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3336 if (libcall_is_dead)
3338 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3339 insn = NEXT_INSN (prev);
3342 propagate_block_delete_insn (pbi->bb, insn);
3344 /* CC0 is now known to be dead. Either this insn used it,
3345 in which case it doesn't anymore, or clobbered it,
3346 so the next insn can't use it. */
3352 /* See if this is an increment or decrement that can be merged into
3353 a following memory address. */
3356 register rtx x = single_set (insn);
3358 /* Does this instruction increment or decrement a register? */
3359 if (!reload_completed
3360 && (flags & PROP_AUTOINC)
3362 && GET_CODE (SET_DEST (x)) == REG
3363 && (GET_CODE (SET_SRC (x)) == PLUS
3364 || GET_CODE (SET_SRC (x)) == MINUS)
3365 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3366 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3367 /* Ok, look for a following memory ref we can combine with.
3368 If one is found, change the memory ref to a PRE_INC
3369 or PRE_DEC, cancel this insn, and return 1.
3370 Return 0 if nothing has been done. */
3371 && try_pre_increment_1 (pbi, insn))
3374 #endif /* AUTO_INC_DEC */
3376 CLEAR_REG_SET (pbi->new_set);
3378 /* If this is not the final pass, and this insn is copying the value of
3379 a library call and it's dead, don't scan the insns that perform the
3380 library call, so that the call's arguments are not marked live. */
3381 if (libcall_is_dead)
3383 /* Record the death of the dest reg. */
3384 mark_set_regs (pbi, PATTERN (insn), insn);
3386 insn = XEXP (note, 0);
3387 return PREV_INSN (insn);
3389 else if (GET_CODE (PATTERN (insn)) == SET
3390 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3391 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3392 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3393 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3394 /* We have an insn to pop a constant amount off the stack.
3395 (Such insns use PLUS regardless of the direction of the stack,
3396 and any insn to adjust the stack by a constant is always a pop.)
3397 These insns, if not dead stores, have no effect on life. */
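/* E.g. deallocating 16 bytes on a downward-growing stack looks like

	(set (reg:SI sp) (plus:SI (reg:SI sp) (const_int 16)))

   and allocating uses a negative constant; the test above matches either
   form.  (Register syntax simplified for the example.)  */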
3401 /* Any regs live at the time of a call instruction must not go
3402 in a register clobbered by calls. Find all regs now live and
3403 record this for them. */
3405 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3406 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3407 { REG_N_CALLS_CROSSED (i)++; });
3409 /* Record sets. Do this even for dead instructions, since they
3410 would have killed the values if they hadn't been deleted. */
3411 mark_set_regs (pbi, PATTERN (insn), insn);
3413 if (GET_CODE (insn) == CALL_INSN)
3419 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3420 cond = COND_EXEC_TEST (PATTERN (insn));
3422 /* Non-constant calls clobber memory. */
3423 if (! CONST_CALL_P (insn))
3424 free_EXPR_LIST_list (&pbi->mem_set_list);
3426 /* There may be extra registers to be clobbered. */
3427 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3429 note = XEXP (note, 1))
3430 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3431 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3432 cond, insn, pbi->flags);
3434 /* Calls change all call-used and global registers. */
3435 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3436 if (call_used_regs[i] && ! global_regs[i]
3439 /* We do not want REG_UNUSED notes for these registers. */
3440 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3441 cond, insn, pbi->flags & ~PROP_DEATH_NOTES);
3445 /* If an insn doesn't use CC0, it becomes dead since we assume
3446 that every insn clobbers it. So show it dead here;
3447 mark_used_regs will set it live if it is referenced. */
3452 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3454 /* Sometimes we may have inserted something before INSN (such as a move)
3455 when we make an auto-inc. So ensure we will scan those insns. */
3457 prev = PREV_INSN (insn);
3460 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3466 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3467 cond = COND_EXEC_TEST (PATTERN (insn));
3469 /* Calls use their arguments. */
3470 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3472 note = XEXP (note, 1))
3473 if (GET_CODE (XEXP (note, 0)) == USE)
3474 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3477 /* The stack ptr is used (honorarily) by a CALL insn. */
3478 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3480 /* Calls may also reference any of the global registers,
3481 so they are made live. */
3482 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3484 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3489 /* On final pass, update counts of how many insns each reg is live in. */
3491 if (flags & PROP_REG_INFO)
3492 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3493 { REG_LIVE_LENGTH (i)++; });
3498 /* Initialize a propagate_block_info struct for public consumption.
3499 Note that the structure itself is opaque to this file, but that
3500 the user can use the regsets provided here. */
3502 struct propagate_block_info *
3503 init_propagate_block_info (bb, live, local_set, flags)
3509 struct propagate_block_info *pbi = xmalloc (sizeof(*pbi));
3512 pbi->reg_live = live;
3513 pbi->mem_set_list = NULL_RTX;
3514 pbi->local_set = local_set;
3518 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3519 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3521 pbi->reg_next_use = NULL;
3523 pbi->new_set = BITMAP_XMALLOC ();
3528 /* Release a propagate_block_info struct. */
3531 free_propagate_block_info (pbi)
3532 struct propagate_block_info *pbi;
3534 free_EXPR_LIST_list (&pbi->mem_set_list);
3536 BITMAP_XFREE (pbi->new_set);
3538 if (pbi->reg_next_use)
3539 free (pbi->reg_next_use);
3544 /* Compute the registers live at the beginning of a basic block BB from
3545 those live at the end.
3547 When called, REG_LIVE contains those live at the end. On return, it
3548 contains those live at the beginning.
3550 LOCAL_SET, if non-null, will be set with all registers killed by
3551 this basic block. */
3554 propagate_block (bb, live, local_set, flags)
3560 struct propagate_block_info *pbi;
3563 pbi = init_propagate_block_info (bb, live, local_set, flags);
3565 if (flags & PROP_REG_INFO)
3569 /* Process the regs live at the end of the block.
3570 Mark them as not local to any one basic block. */
3571 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3572 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3575 /* Scan the block an insn at a time from end to beginning. */
3577 for (insn = bb->end; ; insn = prev)
3579 /* If this is a call to `setjmp' et al, warn if any
3580 non-volatile datum is live. */
3581 if ((flags & PROP_REG_INFO)
3582 && GET_CODE (insn) == NOTE
3583 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3584 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3586 prev = propagate_one_insn (pbi, insn);
3588 if (insn == bb->head)
3592 free_propagate_block_info (pbi);
3595 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3596 (SET expressions whose destinations are registers dead after the insn).
3597 NEEDED is the regset that says which regs are alive after the insn.
3599 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3601 If X is the entire body of an insn, NOTES contains the reg notes
3602 pertaining to the insn. */
3605 insn_dead_p (pbi, x, call_ok, notes)
3606 struct propagate_block_info *pbi;
3609 rtx notes ATTRIBUTE_UNUSED;
3611 enum rtx_code code = GET_CODE (x);
3614 /* If flow is invoked after reload, we must take existing AUTO_INC
3615 expressions into account. */
3616 if (reload_completed)
3618 for ( ; notes; notes = XEXP (notes, 1))
3620 if (REG_NOTE_KIND (notes) == REG_INC)
3622 int regno = REGNO (XEXP (notes, 0));
3624 /* Don't delete insns to set global regs. */
3625 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3626 || REGNO_REG_SET_P (pbi->reg_live, regno))
3633 /* If setting something that's a reg or part of one,
3634 see if that register's altered value will be live. */
3638 rtx r = SET_DEST (x);
3641 if (GET_CODE (r) == CC0)
3642 return ! pbi->cc0_live;
3645 /* A SET that is a subroutine call cannot be dead. */
3646 if (GET_CODE (SET_SRC (x)) == CALL)
3652 /* Don't eliminate loads from volatile memory or volatile asms. */
3653 else if (volatile_refs_p (SET_SRC (x)))
3656 if (GET_CODE (r) == MEM)
3660 if (MEM_VOLATILE_P (r))
3663 /* Walk the set of memory locations we are currently tracking
3664 and see if one is an identical match to this memory location.
3665 If so, this memory write is dead (remember, we're walking
3666 backwards from the end of the block to the start). */
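/* For example (pseudo register numbers invented): when scanning

	(set (mem:SI (reg:SI 100)) (reg:SI 3))	<- the store being examined
	...
	(set (mem:SI (reg:SI 100)) (reg:SI 4))	<- already processed

   and the later store's MEM is still on mem_set_list (nothing in between
   read or clobbered that location), the earlier store is dead.  */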
3667 temp = pbi->mem_set_list;
3670 if (rtx_equal_p (XEXP (temp, 0), r))
3672 temp = XEXP (temp, 1);
3677 while (GET_CODE (r) == SUBREG
3678 || GET_CODE (r) == STRICT_LOW_PART
3679 || GET_CODE (r) == ZERO_EXTRACT)
3682 if (GET_CODE (r) == REG)
3684 int regno = REGNO (r);
3687 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3690 /* If this is a hard register, verify that subsequent
3691 words are not needed. */
3692 if (regno < FIRST_PSEUDO_REGISTER)
3694 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3697 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3701 /* Don't delete insns to set global regs. */
3702 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3705 /* Make sure insns to set the stack pointer aren't deleted. */
3706 if (regno == STACK_POINTER_REGNUM)
3709 /* Make sure insns to set the frame pointer aren't deleted. */
3710 if (regno == FRAME_POINTER_REGNUM
3711 && (! reload_completed || frame_pointer_needed))
3713 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3714 if (regno == HARD_FRAME_POINTER_REGNUM
3715 && (! reload_completed || frame_pointer_needed))
3719 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3720 /* Make sure insns to set arg pointer are never deleted
3721 (if the arg pointer isn't fixed, there will be a USE
3722 for it, so we can treat it normally). */
3723 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3727 /* Otherwise, the set is dead. */
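/* E.g. (invented pseudo numbers): if pseudo 65 is not live after

	(set (reg:SI 65) (plus:SI (reg:SI 66) (const_int 1)))

   and none of the special cases above apply, the whole insn is a dead
   computation and can be removed under PROP_KILL_DEAD_CODE.  */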
3733 /* If performing several activities, insn is dead if each activity
3734 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3735 CLOBBER or USE that's inside a PARALLEL doesn't make the insn needed. */
3737 else if (code == PARALLEL)
3739 int i = XVECLEN (x, 0);
3741 for (i--; i >= 0; i--)
3742 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3743 && GET_CODE (XVECEXP (x, 0, i)) != USE
3744 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3750 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3751 is not necessarily true for hard registers. */
3752 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3753 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3754 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3757 /* We do not check other CLOBBER or USE here. An insn consisting of just
3758 a CLOBBER or just a USE should not be deleted. */
3762 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3763 return 1 if the entire library call is dead.
3764 This is true if X copies a register (hard or pseudo)
3765 and if the hard return reg of the call insn is dead.
3766 (The caller should have tested the destination of X already for death.)
3768 If this insn doesn't just copy a register, then we don't
3769 have an ordinary libcall. In that case, cse could not have
3770 managed to substitute the source for the dest later on,
3771 so we can assume the libcall is dead.
3773 NEEDED is the bit vector of pseudoregs live before this insn.
3774 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3777 libcall_dead_p (pbi, x, note, insn)
3778 struct propagate_block_info *pbi;
3783 register RTX_CODE code = GET_CODE (x);
3787 register rtx r = SET_SRC (x);
3788 if (GET_CODE (r) == REG)
3790 rtx call = XEXP (note, 0);
3794 /* Find the call insn. */
3795 while (call != insn && GET_CODE (call) != CALL_INSN)
3796 call = NEXT_INSN (call);
3798 /* If there is none, do nothing special,
3799 since ordinary death handling can understand these insns. */
3803 /* See if the hard reg holding the value is dead.
3804 If this is a PARALLEL, find the call within it. */
3805 call_pat = PATTERN (call);
3806 if (GET_CODE (call_pat) == PARALLEL)
3808 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3809 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3810 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3813 /* This may be a library call that is returning a value
3814 via invisible pointer. Do nothing special, since
3815 ordinary death handling can understand these insns. */
3819 call_pat = XVECEXP (call_pat, 0, i);
3822 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3828 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3829 live at function entry. Don't count global register variables, variables
3830 in registers that can be used for function arg passing, or variables in
3831 fixed hard registers. */
3834 regno_uninitialized (regno)
3837 if (n_basic_blocks == 0
3838 || (regno < FIRST_PSEUDO_REGISTER
3839 && (global_regs[regno]
3840 || fixed_regs[regno]
3841 || FUNCTION_ARG_REGNO_P (regno))))
3844 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3847 /* 1 if register REGNO was alive at a place where `setjmp' was called
3848 and was set more than once or is an argument.
3849 Such regs may be clobbered by `longjmp'. */
3852 regno_clobbered_at_setjmp (regno)
3855 if (n_basic_blocks == 0)
3858 return ((REG_N_SETS (regno) > 1
3859 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3860 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3863 /* INSN references memory, possibly using autoincrement addressing modes.
3864 Find any entries on the mem_set_list that need to be invalidated due
3865 to an address change. */
3867 invalidate_mems_from_autoinc (pbi, insn)
3868 struct propagate_block_info *pbi;
3871 rtx note = REG_NOTES (insn);
3872 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3874 if (REG_NOTE_KIND (note) == REG_INC)
3876 rtx temp = pbi->mem_set_list;
3877 rtx prev = NULL_RTX;
3882 next = XEXP (temp, 1);
3883 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3885 /* Splice temp out of list. */
3887 XEXP (prev, 1) = next;
3889 pbi->mem_set_list = next;
3890 free_EXPR_LIST_node (temp);
3900 /* Process the registers that are set within X. Their bits are set to
3901 1 in the regset DEAD, because they are dead prior to this insn.
3903 If INSN is nonzero, it is the insn being processed.
3905 FLAGS is the set of operations to perform. */
3908 mark_set_regs (pbi, x, insn)
3909 struct propagate_block_info *pbi;
3912 rtx cond = NULL_RTX;
3916 switch (code = GET_CODE (x))
3920 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
3924 cond = COND_EXEC_TEST (x);
3925 x = COND_EXEC_CODE (x);
3931 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3933 rtx sub = XVECEXP (x, 0, i);
3934 switch (code = GET_CODE (sub))
3937 if (cond != NULL_RTX)
3940 cond = COND_EXEC_TEST (sub);
3941 sub = COND_EXEC_CODE (sub);
3942 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
3948 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
3963 /* Process a single SET rtx, X. */
3966 mark_set_1 (pbi, code, reg, cond, insn, flags)
3967 struct propagate_block_info *pbi;
3969 rtx reg, cond, insn;
3972 int regno_first = -1, regno_last = -1;
3976 /* Some targets place small structures in registers for
3977 return values of functions. We have to detect this
3978 case specially here to get correct flow information. */
3979 if (GET_CODE (reg) == PARALLEL
3980 && GET_MODE (reg) == BLKmode)
3982 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3983 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
3987 /* Modifying just one hardware register of a multi-reg value or just a
3988 byte field of a register does not mean the value from before this insn
3989 is now dead. Of course, if it was dead after the insn, it is unused now. */
3991 switch (GET_CODE (reg))
3995 case STRICT_LOW_PART:
3996 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
3998 reg = XEXP (reg, 0);
3999 while (GET_CODE (reg) == SUBREG
4000 || GET_CODE (reg) == ZERO_EXTRACT
4001 || GET_CODE (reg) == SIGN_EXTRACT
4002 || GET_CODE (reg) == STRICT_LOW_PART);
4003 if (GET_CODE (reg) == MEM)
4005 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4009 regno_last = regno_first = REGNO (reg);
4010 if (regno_first < FIRST_PSEUDO_REGISTER)
4011 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4015 if (GET_CODE (SUBREG_REG (reg)) == REG)
4017 enum machine_mode outer_mode = GET_MODE (reg);
4018 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4020 /* Identify the range of registers affected. This is moderately
4021 tricky for hard registers. See alter_subreg. */
4023 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4024 if (regno_first < FIRST_PSEUDO_REGISTER)
4026 #ifdef ALTER_HARD_SUBREG
4027 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4028 inner_mode, regno_first);
4030 regno_first += SUBREG_WORD (reg);
4032 regno_last = (regno_first
4033 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4035 /* Since we've just adjusted the register number ranges, make
4036 sure REG matches. Otherwise some_was_live will be clear
4037 when it shouldn't have been, and we'll create incorrect
4038 REG_UNUSED notes. */
4039 reg = gen_rtx_REG (outer_mode, regno_first);
4043 /* If the number of words in the subreg is less than the number
4044 of words in the full register, we have a well-defined partial
4045 set. Otherwise the high bits are undefined.
4047 This is only really applicable to pseudos, since we just took
4048 care of multi-word hard registers. */
4049 if (((GET_MODE_SIZE (outer_mode)
4050 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4051 < ((GET_MODE_SIZE (inner_mode)
4052 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4053 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4055 reg = SUBREG_REG (reg);
4059 reg = SUBREG_REG (reg);
4066 /* If this set is a MEM, then it kills any aliased writes.
4067 If this set is a REG, then it kills any MEMs which use the reg. */
4068 if (flags & PROP_SCAN_DEAD_CODE)
4070 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4072 rtx temp = pbi->mem_set_list;
4073 rtx prev = NULL_RTX;
4078 next = XEXP (temp, 1);
4079 if ((GET_CODE (reg) == MEM
4080 && output_dependence (XEXP (temp, 0), reg))
4081 || (GET_CODE (reg) == REG
4082 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4084 /* Splice this entry out of the list. */
4086 XEXP (prev, 1) = next;
4088 pbi->mem_set_list = next;
4089 free_EXPR_LIST_node (temp);
4097 /* If the memory reference had embedded side effects (autoincrement
4098 address modes), then we may need to kill some entries on the memory set list. */
4100 if (insn && GET_CODE (reg) == MEM)
4101 invalidate_mems_from_autoinc (pbi, insn);
4103 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4104 /* We do not know the size of a BLKmode store, so we do not track
4105 them for redundant store elimination. */
4106 && GET_MODE (reg) != BLKmode
4107 /* There are no REG_INC notes for SP, so we can't assume we'll see
4108 everything that invalidates it. To be safe, don't eliminate any
4109 stores through SP; none of them should be redundant anyway. */
4110 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4111 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4114 if (GET_CODE (reg) == REG
4115 && ! (regno_first == FRAME_POINTER_REGNUM
4116 && (! reload_completed || frame_pointer_needed))
4117 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4118 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4119 && (! reload_completed || frame_pointer_needed))
4121 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4122 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4126 int some_was_live = 0, some_was_dead = 0;
4128 for (i = regno_first; i <= regno_last; ++i)
4130 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4132 SET_REGNO_REG_SET (pbi->local_set, i);
4133 if (code != CLOBBER)
4134 SET_REGNO_REG_SET (pbi->new_set, i);
4136 some_was_live |= needed_regno;
4137 some_was_dead |= ! needed_regno;
4140 /* Additional data to record if this is the final pass. */
4141 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4142 | PROP_DEATH_NOTES | PROP_AUTOINC))
4145 register int blocknum = pbi->bb->index;
4148 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4150 y = pbi->reg_next_use[regno_first];
4152 /* The next use is no longer next, since a store intervenes. */
4153 for (i = regno_first; i <= regno_last; ++i)
4154 pbi->reg_next_use[i] = 0;
4157 if (flags & PROP_REG_INFO)
4159 for (i = regno_first; i <= regno_last; ++i)
4161 /* Count (weighted) references, stores, etc. This counts a
4162 register twice if it is modified, but that is correct. */
4163 REG_N_SETS (i) += 1;
4164 REG_N_REFS (i) += (optimize_size ? 1
4165 : pbi->bb->loop_depth + 1);
4167 /* The insns where a reg is live are normally counted
4168 elsewhere, but we want the count to include the insn
4169 where the reg is set, and the normal counting mechanism
4170 would not count it. */
4171 REG_LIVE_LENGTH (i) += 1;
4174 /* If this is a hard reg, record this function uses the reg. */
4175 if (regno_first < FIRST_PSEUDO_REGISTER)
4177 for (i = regno_first; i <= regno_last; i++)
4178 regs_ever_live[i] = 1;
4182 /* Keep track of which basic blocks each reg appears in. */
4183 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4184 REG_BASIC_BLOCK (regno_first) = blocknum;
4185 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4186 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4190 if (! some_was_dead)
4192 if (flags & PROP_LOG_LINKS)
4194 /* Make a logical link from the next following insn
4195 that uses this register, back to this insn.
4196 The following insns have already been processed.
4198 We don't build a LOG_LINK for hard registers contained
4199 in ASM_OPERANDs. If these registers get replaced,
4200 we might wind up changing the semantics of the insn,
4201 even if reload can make what appear to be valid
4202 assignments later. */
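/* E.g. (invented insn uids and regnos): if the insn being processed sets
   (reg:SI 70) and insn 42, later in this block, is the next insn that uses
   it, a link back to this insn is prepended to LOG_LINKS of insn 42 for
   combine to follow.  */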
4203 if (y && (BLOCK_NUM (y) == blocknum)
4204 && (regno_first >= FIRST_PSEUDO_REGISTER
4205 || asm_noperands (PATTERN (y)) < 0))
4206 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4211 else if (! some_was_live)
4213 if (flags & PROP_REG_INFO)
4214 REG_N_DEATHS (regno_first) += 1;
4216 if (flags & PROP_DEATH_NOTES)
4218 /* Note that dead stores have already been deleted
4219 when possible. If we get here, we have found a
4220 dead store that cannot be eliminated (because the
4221 same insn does something useful). Indicate this
4222 by marking the reg being set as dying here. */
4224 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4229 if (flags & PROP_DEATH_NOTES)
4231 /* This is a case where we have a multi-word hard register
4232 and some, but not all, of the words of the register are
4233 needed in subsequent insns. Write REG_UNUSED notes
4234 for those parts that were not needed. This case should be rare. */
4237 for (i = regno_first; i <= regno_last; ++i)
4238 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4240 = alloc_EXPR_LIST (REG_UNUSED,
4241 gen_rtx_REG (reg_raw_mode[i], i),
4247 /* Mark the register as being dead. */
4249 /* The stack pointer is never dead. Well, not strictly true,
4250 but it's very difficult to tell from here. Hopefully
4251 combine_stack_adjustments will fix up the most egregious errors. */
4253 && regno_first != STACK_POINTER_REGNUM)
4255 for (i = regno_first; i <= regno_last; ++i)
4256 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4259 else if (GET_CODE (reg) == REG)
4261 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4262 pbi->reg_next_use[regno_first] = 0;
4265 /* If this is the last pass and this is a SCRATCH, show it will be dying
4266 here and count it. */
4267 else if (GET_CODE (reg) == SCRATCH)
4269 if (flags & PROP_DEATH_NOTES)
4271 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4277 /* X is a MEM found in INSN. See if we can convert it into an auto-increment address. */
4281 find_auto_inc (pbi, x, insn)
4282 struct propagate_block_info *pbi;
4286 rtx addr = XEXP (x, 0);
4287 HOST_WIDE_INT offset = 0;
4290 /* Here we detect use of an index register which might be good for
4291 postincrement, postdecrement, preincrement, or predecrement. */
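/* A worked example (register numbers invented), for a target with
   HAVE_POST_INCREMENT: given

	(set (reg:SI 3) (mem:SI (reg:SI 100)))
	...
	(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   where 4 matches the size of the memory access, and provided the safety
   checks below succeed, the memory reference is rewritten as
   (mem:SI (post_inc:SI (reg:SI 100))), a REG_INC note for reg 100 is added
   to the insn containing the MEM, and the now-redundant increment insn is
   deleted.  */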
4293 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4294 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4296 if (GET_CODE (addr) == REG)
4299 register int size = GET_MODE_SIZE (GET_MODE (x));
4302 int regno = REGNO (addr);
4304 /* Is the next use an increment that might make auto-increment? */
4305 if ((incr = pbi->reg_next_use[regno]) != 0
4306 && (set = single_set (incr)) != 0
4307 && GET_CODE (set) == SET
4308 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4309 /* Can't add side effects to jumps; if reg is spilled and
4310 reloaded, there's no way to store back the altered value. */
4311 && GET_CODE (insn) != JUMP_INSN
4312 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4313 && XEXP (y, 0) == addr
4314 && GET_CODE (XEXP (y, 1)) == CONST_INT
4315 && ((HAVE_POST_INCREMENT
4316 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4317 || (HAVE_POST_DECREMENT
4318 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4319 || (HAVE_PRE_INCREMENT
4320 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4321 || (HAVE_PRE_DECREMENT
4322 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4323 /* Make sure this reg appears only once in this insn. */
4324 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4325 use != 0 && use != (rtx) 1))
4327 rtx q = SET_DEST (set);
4328 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4329 ? (offset ? PRE_INC : POST_INC)
4330 : (offset ? PRE_DEC : POST_DEC));
4332 if (dead_or_set_p (incr, addr)
4333 /* Mustn't autoinc an eliminable register. */
4334 && (regno >= FIRST_PSEUDO_REGISTER
4335 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4337 /* This is the simple case. Try to make the auto-inc. If
4338 we can't, we are done. Otherwise, we will do any
4339 needed updates below. */
4340 if (! validate_change (insn, &XEXP (x, 0),
4341 gen_rtx_fmt_e (inc_code, Pmode, addr),
4345 else if (GET_CODE (q) == REG
4346 /* PREV_INSN used here to check the semi-open interval [insn, incr). */
4348 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4349 /* We must also check for sets of q as q may be
4350 a call clobbered hard register and there may
4351 be a call between PREV_INSN (insn) and incr. */
4352 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4354 /* We have *p followed sometime later by q = p+size.
4355 Both p and q must be live afterward,
4356 and q is not used between INSN and its assignment.
4357 Change it to q = p, ...*q..., q = q+size.
4358 Then fall into the usual case. */
4362 emit_move_insn (q, addr);
4363 insns = get_insns ();
4366 if (basic_block_for_insn)
4367 for (temp = insns; temp; temp = NEXT_INSN (temp))
4368 set_block_for_insn (temp, pbi->bb);
4370 /* If we can't make the auto-inc, or can't make the
4371 replacement into Y, exit. There's no point in making
4372 the change below if we can't do the auto-inc and doing
4373 so is not correct in the pre-inc case. */
4375 validate_change (insn, &XEXP (x, 0),
4376 gen_rtx_fmt_e (inc_code, Pmode, q),
4378 validate_change (incr, &XEXP (y, 0), q, 1);
4379 if (! apply_change_group ())
4382 /* We now know we'll be doing this change, so emit the
4383 new insn(s) and do the updates. */
4384 emit_insns_before (insns, insn);
4386 if (pbi->bb->head == insn)
4387 pbi->bb->head = insns;
4389 /* INCR will become a NOTE and INSN won't contain a
4390 use of ADDR. If a use of ADDR was just placed in
4391 the insn before INSN, make that the next use.
4392 Otherwise, invalidate it. */
4393 if (GET_CODE (PREV_INSN (insn)) == INSN
4394 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4395 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4396 pbi->reg_next_use[regno] = PREV_INSN (insn);
4398 pbi->reg_next_use[regno] = 0;
4403 /* REGNO is now used in INCR which is below INSN, but it
4404 previously wasn't live here. If we don't mark it as
4405 live, we'll put a REG_DEAD note for it on this insn,
4406 which is incorrect. */
4407 SET_REGNO_REG_SET (pbi->reg_live, regno);
4409 /* If there are any calls between INSN and INCR, show
4410 that REGNO now crosses them. */
4411 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4412 if (GET_CODE (temp) == CALL_INSN)
4413 REG_N_CALLS_CROSSED (regno)++;
4418 /* If we haven't returned, it means we were able to make the
4419 auto-inc, so update the status. First, record that this insn
4420 has an implicit side effect. */
4423 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4425 /* Modify the old increment-insn to simply copy
4426 the already-incremented value of our register. */
4427 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4430 /* If that makes it a no-op (copying the register into itself) delete
4431 it so it won't appear to be a "use" and a "set" of this register. */
4433 if (SET_DEST (set) == addr)
4435 /* If the original source was dead, it's dead now. */
4436 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4437 if (note && XEXP (note, 0) != addr)
4438 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
4440 PUT_CODE (incr, NOTE);
4441 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4442 NOTE_SOURCE_FILE (incr) = 0;
4445 if (regno >= FIRST_PSEUDO_REGISTER)
4447 /* Count an extra reference to the reg. When a reg is
4448 incremented, spilling it is worse, so we want to make
4449 that less likely. */
4450 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4452 /* Count the increment as a setting of the register,
4453 even though it isn't a SET in rtl. */
4454 REG_N_SETS (regno)++;
4459 #endif /* AUTO_INC_DEC */
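/* For illustration: the *p ... q = p+size rewrite performed by
   find_auto_inc above operates on RTL, but the idea can be seen at the
   C level, where a memory use of an address followed later by an
   increment of that address folds into a single post-increment access.
   This is only an analogy under that reading; the function names are
   illustrative and this is not the RTL transformation itself.
   Disabled here; compile it separately to experiment.  */
#if 0
/* Before: the load and the pointer increment are separate steps, which
   is the shape find_auto_inc starts from.  */
static int
load_then_advance_separate (int **pp)
{
  int *p = *pp;
  int value = *p;       /* use of the address */
  p = p + 1;            /* later increment of the same pointer */
  *pp = p;
  return value;
}

/* After: one post-increment access does both jobs, which is the effect
   of rewriting the MEM to use an auto-increment address.  */
static int
load_then_advance_postinc (int **pp)
{
  int *p = *pp;
  int value = *p++;     /* single auto-increment addressing use */
  *pp = p;
  return value;
}
#endif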
4462 mark_used_reg (pbi, reg, cond, insn)
4463 struct propagate_block_info *pbi;
4465 rtx cond ATTRIBUTE_UNUSED;
4468 int regno = REGNO (reg);
4469 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4470 int some_was_dead = ! some_was_live;
4474 /* A hard reg in a wide mode may really be multiple registers.
4475 If so, mark all of them just like the first. */
4476 if (regno < FIRST_PSEUDO_REGISTER)
4478 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4481 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
4482 some_was_live |= needed_regno;
4483 some_was_dead |= ! needed_regno;
4487 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4489 /* Record where each reg is used, so when the reg is set we know
4490 the next insn that uses it. */
4491 pbi->reg_next_use[regno] = insn;
4494 if (pbi->flags & PROP_REG_INFO)
4496 if (regno < FIRST_PSEUDO_REGISTER)
4498 /* If this is a register we are going to try to eliminate,
4499 don't mark it live here. If we are successful in
4500 eliminating it, it need not be live unless it is used for
4501 pseudos, in which case it will have been set live when it
4502 was allocated to the pseudos. If the register will not
4503 be eliminated, reload will set it live at that point.
4505 Otherwise, record that this function uses this register. */
4506 /* ??? The PPC backend tries to "eliminate" the PIC
4507 register to itself. This should be fixed. In the
4508 meantime, hack around it. */
4510 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
4511 && (regno == FRAME_POINTER_REGNUM
4512 || regno == ARG_POINTER_REGNUM)))
4514 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4516 regs_ever_live[regno + --n] = 1;
4522 /* Keep track of which basic block each reg appears in. */
4524 register int blocknum = pbi->bb->index;
4525 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4526 REG_BASIC_BLOCK (regno) = blocknum;
4527 else if (REG_BASIC_BLOCK (regno) != blocknum)
4528 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4530 /* Count (weighted) number of uses of each reg. */
4531 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4535 /* Find out if any part of the register was set in this insn. */
4536 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
4537 if (regno < FIRST_PSEUDO_REGISTER)
4539 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4541 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
4544 /* Record and count the insns in which a reg dies. If it is used in
4545 this insn and was dead below the insn then it dies in this insn.
4546 If it was set in this insn, we do not make a REG_DEAD note;
4547 likewise if we already made such a note. */
4548 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
4552 /* Check for the case where the register dying partially
4553 overlaps the register set by this insn. */
4554 if (regno < FIRST_PSEUDO_REGISTER
4555 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4557 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4559 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
4562 /* If none of the words in X is needed, make a REG_DEAD note.
4563 Otherwise, we must make partial REG_DEAD notes. */
4564 if (! some_was_live)
4566 if ((pbi->flags & PROP_DEATH_NOTES)
4567 && ! find_regno_note (insn, REG_DEAD, regno))
4569 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4571 if (pbi->flags & PROP_REG_INFO)
4572 REG_N_DEATHS (regno)++;
4576 /* Don't make a REG_DEAD note for a part of a register
4577 that is set in the insn. */
4579 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4580 for (; n >= regno; n--)
4581 if (! REGNO_REG_SET_P (pbi->reg_live, n)
4582 && ! dead_or_set_regno_p (insn, n))
4584 = alloc_EXPR_LIST (REG_DEAD,
4585 gen_rtx_REG (reg_raw_mode[n], n),
4590 SET_REGNO_REG_SET (pbi->reg_live, regno);
4591 if (regno < FIRST_PSEUDO_REGISTER)
4593 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4595 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
4599 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
4600 This is done assuming the registers needed from X are those that
4601 have 1-bits in PBI->REG_LIVE.
4603 INSN is the containing instruction. If INSN is dead, this function is not called. */
4607 mark_used_regs (pbi, x, cond, insn)
4608 struct propagate_block_info *pbi;
4611 register RTX_CODE code;
4613 int flags = pbi->flags;
4616 code = GET_CODE (x);
4636 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
4638 if (GET_CODE (XEXP (x, 0)) == MEM)
4639 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
4643 /* Don't bother watching stores to mems if this is not the
4644 final pass. We'll not be deleting dead stores this round. */
4645 if (flags & PROP_SCAN_DEAD_CODE)
4647 /* Invalidate the data for the last MEM stored, but only if MEM is
4648 something that can be stored into. */
4649 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4650 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4651 ; /* needn't clear the memory set list */
4654 rtx temp = pbi->mem_set_list;
4655 rtx prev = NULL_RTX;
4660 next = XEXP (temp, 1);
4661 if (anti_dependence (XEXP (temp, 0), x))
4663 /* Splice temp out of the list. */
4665 XEXP (prev, 1) = next;
4667 pbi->mem_set_list = next;
4668 free_EXPR_LIST_node (temp);
4676 /* If the memory reference had embedded side effects (autoincrement
4677 address modes), then we may need to kill some entries on the memory set list. */
4680 invalidate_mems_from_autoinc (pbi, insn);
4684 if (flags & PROP_AUTOINC)
4685 find_auto_inc (pbi, x, insn);
4690 if (GET_CODE (SUBREG_REG (x)) == REG
4691 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4692 && (GET_MODE_SIZE (GET_MODE (x))
4693 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4694 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4696 /* While we're here, optimize this case. */
4698 if (GET_CODE (x) != REG)
4703 /* We see a register being used (not set) => mark it as needed. */
4704 mark_used_reg (pbi, x, cond, insn);
4709 register rtx testreg = SET_DEST (x);
4712 /* If storing into MEM, don't show it as being used. But do
4713 show the address as being used. */
4714 if (GET_CODE (testreg) == MEM)
4717 if (flags & PROP_AUTOINC)
4718 find_auto_inc (pbi, testreg, insn);
4720 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
4721 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4725 /* Storing in STRICT_LOW_PART is like storing in a reg
4726 in that this SET might be dead, so ignore it in TESTREG,
4727 but in some other ways it is like using the reg.
4729 Storing in a SUBREG or a bit field is like storing the entire
4730 register in that if the register's value is not used
4731 then this SET is not needed. */
4732 while (GET_CODE (testreg) == STRICT_LOW_PART
4733 || GET_CODE (testreg) == ZERO_EXTRACT
4734 || GET_CODE (testreg) == SIGN_EXTRACT
4735 || GET_CODE (testreg) == SUBREG)
4737 if (GET_CODE (testreg) == SUBREG
4738 && GET_CODE (SUBREG_REG (testreg)) == REG
4739 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4740 && (GET_MODE_SIZE (GET_MODE (testreg))
4741 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4742 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4744 /* Modifying a single register in an alternate mode
4745 does not use any of the old value. But these other
4746 ways of storing in a register do use the old value. */
4747 if (GET_CODE (testreg) == SUBREG
4748 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4753 testreg = XEXP (testreg, 0);
4756 /* If this is a store into a register, recursively scan the
4757 value being stored. */
4759 if ((GET_CODE (testreg) == PARALLEL
4760 && GET_MODE (testreg) == BLKmode)
4761 || (GET_CODE (testreg) == REG
4762 && (regno = REGNO (testreg),
4763 ! (regno == FRAME_POINTER_REGNUM
4764 && (! reload_completed || frame_pointer_needed)))
4765 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4766 && ! (regno == HARD_FRAME_POINTER_REGNUM
4767 && (! reload_completed || frame_pointer_needed))
4769 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4770 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4775 mark_used_regs (pbi, SET_DEST (x), cond, insn);
4776 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4783 case UNSPEC_VOLATILE:
4787 /* Traditional and volatile asm instructions must be considered to use
4788 and clobber all hard registers, all pseudo-registers and all of
4789 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4791 Consider for instance a volatile asm that changes the fpu rounding
4792 mode. An insn should not be moved across this even if it only uses
4793 pseudo-regs because it might give an incorrectly rounded result.
4795 ?!? Unfortunately, marking all hard registers as live causes massive
4796 problems for the register allocator and marking all pseudos as live
4797 creates mountains of uninitialized variable warnings.
4799 So for now, just clear the memory set list and mark any regs
4800 we can find in ASM_OPERANDS as used. */
4801 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4802 free_EXPR_LIST_list (&pbi->mem_set_list);
4804 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4805 We can not just fall through here since then we would be confused
4806 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4807 traditional asms unlike their normal usage. */
4808 if (code == ASM_OPERANDS)
4812 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4813 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
4819 if (cond != NULL_RTX)
4822 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
4824 cond = COND_EXEC_TEST (x);
4825 x = COND_EXEC_CODE (x);
4829 /* We _do_not_ want to scan operands of phi nodes. Operands of
4830 a phi function are evaluated only when control reaches this
4831 block along a particular edge. Therefore, regs that appear
4832 as arguments to phi should not be added to the global live at start. */
4840 /* Recursively scan the operands of this expression. */
4843 register const char *fmt = GET_RTX_FORMAT (code);
4846 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4850 /* Tail recursive case: save a function call level. */
4856 mark_used_regs (pbi, XEXP (x, i), cond, insn);
4858 else if (fmt[i] == 'E')
4861 for (j = 0; j < XVECLEN (x, i); j++)
4862 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
4871 try_pre_increment_1 (pbi, insn)
4872 struct propagate_block_info *pbi;
4875 /* Find the next use of this reg. If in same basic block,
4876 make it do pre-increment or pre-decrement if appropriate. */
4877 rtx x = single_set (insn);
4878 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4879 * INTVAL (XEXP (SET_SRC (x), 1)));
4880 int regno = REGNO (SET_DEST (x));
4881 rtx y = pbi->reg_next_use[regno];
4883 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4884 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4885 mode would be better. */
4886 && ! dead_or_set_p (y, SET_DEST (x))
4887 && try_pre_increment (y, SET_DEST (x), amount))
4889 /* We have found a suitable auto-increment
4890 and already changed insn Y to do it.
4891 So flush this increment-instruction. */
4892 PUT_CODE (insn, NOTE);
4893 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4894 NOTE_SOURCE_FILE (insn) = 0;
4895 /* Count a reference to this reg for the increment
4896 insn we are deleting. When a reg is incremented,
4897 spilling it is worse, so we want to make that less likely. */
4899 if (regno >= FIRST_PSEUDO_REGISTER)
4901 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4902 REG_N_SETS (regno)++;
4909 /* Try to change INSN so that it does pre-increment or pre-decrement
4910 addressing on register REG in order to add AMOUNT to REG.
4911 AMOUNT is negative for pre-decrement.
4912 Returns 1 if the change could be made.
4913 This checks everything about the validity of the result of modifying INSN. */
4916 try_pre_increment (insn, reg, amount)
4918 HOST_WIDE_INT amount;
4922 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4923 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4925 /* Nonzero if we can try to make a post-increment or post-decrement.
4926 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4927 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4928 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4931 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4934 /* From the sign of increment, see which possibilities are conceivable
4935 on this target machine. */
4936 if (HAVE_PRE_INCREMENT && amount > 0)
4938 if (HAVE_POST_INCREMENT && amount > 0)
4941 if (HAVE_PRE_DECREMENT && amount < 0)
4943 if (HAVE_POST_DECREMENT && amount < 0)
4946 if (! (pre_ok || post_ok))
4949 /* It is not safe to add a side effect to a jump insn
4950 because if the incremented register is spilled and must be reloaded
4951 there would be no way to store the incremented value back in memory. */
4953 if (GET_CODE (insn) == JUMP_INSN)
4958 use = find_use_as_address (PATTERN (insn), reg, 0);
4959 if (post_ok && (use == 0 || use == (rtx) 1))
4961 use = find_use_as_address (PATTERN (insn), reg, -amount);
4965 if (use == 0 || use == (rtx) 1)
4968 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4971 /* See if this combination of instruction and addressing mode exists. */
4972 if (! validate_change (insn, &XEXP (use, 0),
4973 gen_rtx_fmt_e (amount > 0
4974 ? (do_post ? POST_INC : PRE_INC)
4975 : (do_post ? POST_DEC : PRE_DEC),
4979 /* Record that this insn now has an implicit side effect on X. */
4980 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4984 #endif /* AUTO_INC_DEC */
4986 /* Find the place in the rtx X where REG is used as a memory address.
4987 Return the MEM rtx that so uses it.
4988 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4989 (plus REG (const_int PLUSCONST)).
4991 If such an address does not appear, return 0.
4992 If REG appears more than once, or is used other than in such an address, return (rtx) 1. */
4996 find_use_as_address (x, reg, plusconst)
4999 HOST_WIDE_INT plusconst;
5001 enum rtx_code code = GET_CODE (x);
5002 const char *fmt = GET_RTX_FORMAT (code);
5004 register rtx value = 0;
5007 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
5010 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5011 && XEXP (XEXP (x, 0), 0) == reg
5012 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5013 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5016 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5018 /* If REG occurs inside a MEM used in a bit-field reference,
5019 that is unacceptable. */
5020 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5021 return (rtx) (HOST_WIDE_INT) 1;
5025 return (rtx) (HOST_WIDE_INT) 1;
5027 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5031 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5035 return (rtx) (HOST_WIDE_INT) 1;
5037 else if (fmt[i] == 'E')
5040 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5042 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5046 return (rtx) (HOST_WIDE_INT) 1;
5054 /* Write information about registers and basic blocks into FILE.
5055 This is part of making a debugging dump. */
5058 dump_regset (r, outf)
5065 fputs (" (nil)", outf);
5069 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5071 fprintf (outf, " %d", i);
5072 if (i < FIRST_PSEUDO_REGISTER)
5073 fprintf (outf, " [%s]",
5082 dump_regset (r, stderr);
5083 putc ('\n', stderr);
5087 dump_flow_info (file)
5091 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5093 fprintf (file, "%d registers.\n", max_regno);
5094 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5097 enum reg_class class, altclass;
5098 fprintf (file, "\nRegister %d used %d times across %d insns",
5099 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5100 if (REG_BASIC_BLOCK (i) >= 0)
5101 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5103 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5104 (REG_N_SETS (i) == 1) ? "" : "s");
5105 if (REG_USERVAR_P (regno_reg_rtx[i]))
5106 fprintf (file, "; user var");
5107 if (REG_N_DEATHS (i) != 1)
5108 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5109 if (REG_N_CALLS_CROSSED (i) == 1)
5110 fprintf (file, "; crosses 1 call");
5111 else if (REG_N_CALLS_CROSSED (i))
5112 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5113 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5114 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5115 class = reg_preferred_class (i);
5116 altclass = reg_alternate_class (i);
5117 if (class != GENERAL_REGS || altclass != ALL_REGS)
5119 if (altclass == ALL_REGS || class == ALL_REGS)
5120 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5121 else if (altclass == NO_REGS)
5122 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5124 fprintf (file, "; pref %s, else %s",
5125 reg_class_names[(int) class],
5126 reg_class_names[(int) altclass]);
5128 if (REGNO_POINTER_FLAG (i))
5129 fprintf (file, "; pointer");
5130 fprintf (file, ".\n");
5133 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5134 for (i = 0; i < n_basic_blocks; i++)
5136 register basic_block bb = BASIC_BLOCK (i);
5139 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5140 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5142 fprintf (file, "Predecessors: ");
5143 for (e = bb->pred; e ; e = e->pred_next)
5144 dump_edge_info (file, e, 0);
5146 fprintf (file, "\nSuccessors: ");
5147 for (e = bb->succ; e ; e = e->succ_next)
5148 dump_edge_info (file, e, 1);
5150 fprintf (file, "\nRegisters live at start:");
5151 dump_regset (bb->global_live_at_start, file);
5153 fprintf (file, "\nRegisters live at end:");
5154 dump_regset (bb->global_live_at_end, file);
5165 dump_flow_info (stderr);
5169 dump_edge_info (file, e, do_succ)
5174 basic_block side = (do_succ ? e->dest : e->src);
5176 if (side == ENTRY_BLOCK_PTR)
5177 fputs (" ENTRY", file);
5178 else if (side == EXIT_BLOCK_PTR)
5179 fputs (" EXIT", file);
5181 fprintf (file, " %d", side->index);
5185 static const char * const bitnames[] = {
5186 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5189 int i, flags = e->flags;
5193 for (i = 0; flags; i++)
5194 if (flags & (1 << i))
5200 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5201 fputs (bitnames[i], file);
5203 fprintf (file, "%d", i);
5211 /* Print out one basic block with live information at start and end. */
5221 fprintf (outf, ";; Basic block %d, loop depth %d",
5222 bb->index, bb->loop_depth);
5223 if (bb->eh_beg != -1 || bb->eh_end != -1)
5224 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5227 fputs (";; Predecessors: ", outf);
5228 for (e = bb->pred; e ; e = e->pred_next)
5229 dump_edge_info (outf, e, 0);
5232 fputs (";; Registers live at start:", outf);
5233 dump_regset (bb->global_live_at_start, outf);
5236 for (insn = bb->head, last = NEXT_INSN (bb->end);
5238 insn = NEXT_INSN (insn))
5239 print_rtl_single (outf, insn);
5241 fputs (";; Registers live at end:", outf);
5242 dump_regset (bb->global_live_at_end, outf);
5245 fputs (";; Successors: ", outf);
5246 for (e = bb->succ; e; e = e->succ_next)
5247 dump_edge_info (outf, e, 1);
5255 dump_bb (bb, stderr);
5262 dump_bb (BASIC_BLOCK(n), stderr);
5265 /* Like print_rtl, but also print out live information for the start of each basic block. */
5269 print_rtl_with_bb (outf, rtx_first)
5273 register rtx tmp_rtx;
5276 fprintf (outf, "(nil)\n");
5280 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5281 int max_uid = get_max_uid ();
5282 basic_block *start = (basic_block *)
5283 xcalloc (max_uid, sizeof (basic_block));
5284 basic_block *end = (basic_block *)
5285 xcalloc (max_uid, sizeof (basic_block));
5286 enum bb_state *in_bb_p = (enum bb_state *)
5287 xcalloc (max_uid, sizeof (enum bb_state));
5289 for (i = n_basic_blocks - 1; i >= 0; i--)
5291 basic_block bb = BASIC_BLOCK (i);
5294 start[INSN_UID (bb->head)] = bb;
5295 end[INSN_UID (bb->end)] = bb;
5296 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5298 enum bb_state state = IN_MULTIPLE_BB;
5299 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5301 in_bb_p[INSN_UID(x)] = state;
5308 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5313 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5315 fprintf (outf, ";; Start of basic block %d, registers live:",
5317 dump_regset (bb->global_live_at_start, outf);
5321 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5322 && GET_CODE (tmp_rtx) != NOTE
5323 && GET_CODE (tmp_rtx) != BARRIER)
5324 fprintf (outf, ";; Insn is not within a basic block\n");
5325 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5326 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5328 did_output = print_rtl_single (outf, tmp_rtx);
5330 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5332 fprintf (outf, ";; End of basic block %d, registers live:\n",
5334 dump_regset (bb->global_live_at_end, outf);
5347 if (current_function_epilogue_delay_list != 0)
5349 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5350 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5351 tmp_rtx = XEXP (tmp_rtx, 1))
5352 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5356 /* Compute dominator relationships using new flow graph structures. */
5358 compute_flow_dominators (dominators, post_dominators)
5359 sbitmap *dominators;
5360 sbitmap *post_dominators;
5363 sbitmap *temp_bitmap;
5365 basic_block *worklist, *workend, *qin, *qout;
5368 /* Allocate a worklist array/queue. Entries are only added to the
5369 list if they were not already on the list. So the size is
5370 bounded by the number of basic blocks. */
5371 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5372 workend = &worklist[n_basic_blocks];
5374 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5375 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5379 /* The optimistic setting of dominators requires us to put every
5380 block on the work list initially. */
5381 qin = qout = worklist;
5382 for (bb = 0; bb < n_basic_blocks; bb++)
5384 *qin++ = BASIC_BLOCK (bb);
5385 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5387 qlen = n_basic_blocks;
5390 /* We want a maximal solution, so initially assume everything dominates everything else. */
5392 sbitmap_vector_ones (dominators, n_basic_blocks);
5394 /* Mark successors of the entry block so we can identify them below. */
5395 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5396 e->dest->aux = ENTRY_BLOCK_PTR;
5398 /* Iterate until the worklist is empty. */
5401 /* Take the first entry off the worklist. */
5402 basic_block b = *qout++;
5403 if (qout >= workend)
5409 /* Compute the intersection of the dominators of all the predecessor blocks.
5412 If one of the predecessor blocks is the ENTRY block, then the
5413 intersection of the dominators of the predecessor blocks is
5414 defined as the null set. We can identify such blocks by the
5415 special value in the AUX field in the block structure. */
5416 if (b->aux == ENTRY_BLOCK_PTR)
5418 /* Do not clear the aux field for blocks which are
5419 successors of the ENTRY block. That way we never
5420 add them to the worklist again.
5422 The intersect of dominators of the preds of this block is
5423 defined as the null set. */
5424 sbitmap_zero (temp_bitmap[bb]);
5428 /* Clear the aux field of this block so it can be added to
5429 the worklist again if necessary. */
5431 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5434 /* Make sure each block always dominates itself. */
5435 SET_BIT (temp_bitmap[bb], bb);
5437 /* If the out state of this block changed, then we need to
5438 add the successors of this block to the worklist if they
5439 are not already on the worklist. */
5440 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5442 for (e = b->succ; e; e = e->succ_next)
5444 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5458 if (post_dominators)
5460 /* The optimistic setting of dominators requires us to put every
5461 block on the work list initially. */
5462 qin = qout = worklist;
5463 for (bb = 0; bb < n_basic_blocks; bb++)
5465 *qin++ = BASIC_BLOCK (bb);
5466 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5468 qlen = n_basic_blocks;
5471 /* We want a maximal solution, so initially assume everything post
5472 dominates everything else. */
5473 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5475 /* Mark predecessors of the exit block so we can identify them below. */
5476 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5477 e->src->aux = EXIT_BLOCK_PTR;
5479 /* Iterate until the worklist is empty. */
5482 /* Take the first entry off the worklist. */
5483 basic_block b = *qout++;
5484 if (qout >= workend)
5490 /* Compute the intersection of the post dominators of all the successor blocks.
5493 If one of the successor blocks is the EXIT block, then the
5494 intersection of the dominators of the successor blocks is
5495 defined as the null set. We can identify such blocks by the
5496 special value in the AUX field in the block structure. */
5497 if (b->aux == EXIT_BLOCK_PTR)
5499 /* Do not clear the aux field for blocks which are
5500 predecessors of the EXIT block. That way we never
5501 add them to the worklist again.
5503 The intersect of dominators of the succs of this block is
5504 defined as the null set. */
5505 sbitmap_zero (temp_bitmap[bb]);
5509 /* Clear the aux field of this block so it can be added to
5510 the worklist again if necessary. */
5512 sbitmap_intersection_of_succs (temp_bitmap[bb],
5513 post_dominators, bb);
5516 /* Make sure each block always post dominates itself. */
5517 SET_BIT (temp_bitmap[bb], bb);
5519 /* If the out state of this block changed, then we need to
5520 add the predecessors of this block to the worklist if they
5521 are not already on the worklist. */
5522 if (sbitmap_a_and_b (post_dominators[bb],
5523 post_dominators[bb],
5526 for (e = b->pred; e; e = e->pred_next)
5528 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
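/* For illustration: a minimal, self-contained sketch of the fixed-point
   dominator computation that compute_flow_dominators performs above,
   written against a toy CFG so it can be compiled and run on its own.
   32-bit masks stand in for sbitmap rows, and a plain re-scan loop
   stands in for the worklist; NBLOCKS, preds and npreds are
   illustrative names, not part of the compiler.  Disabled here; build
   it separately to experiment.  */
#if 0
#include <stdio.h>

#define NBLOCKS 4

/* Toy CFG: 0 -> 1, 1 -> 2, 1 -> 3, 3 -> 1.  preds[b] lists the
   predecessors of block b; block 0 is the entry block's successor.  */
static const int preds[NBLOCKS][NBLOCKS] = { {0}, {0, 3}, {1}, {1} };
static const int npreds[NBLOCKS] = { 0, 2, 1, 1 };

int
main (void)
{
  unsigned dom[NBLOCKS];
  int changed = 1, b, i;

  /* Optimistically assume every block dominates every block; the entry
     successor is dominated only by itself.  */
  for (b = 0; b < NBLOCKS; b++)
    dom[b] = (1u << NBLOCKS) - 1;
  dom[0] = 1u << 0;

  /* Iterate to a fixed point: dom(b) = {b} union the intersection of
     dom over b's predecessors.  (The pass above drives this with a
     worklist instead of re-scanning every block; the result is the
     same.)  */
  while (changed)
    {
      changed = 0;
      for (b = 1; b < NBLOCKS; b++)
        {
          unsigned new_dom = (1u << NBLOCKS) - 1;
          for (i = 0; i < npreds[b]; i++)
            new_dom &= dom[preds[b][i]];
          new_dom |= 1u << b;
          if (new_dom != dom[b])
            dom[b] = new_dom, changed = 1;
        }
    }

  for (b = 0; b < NBLOCKS; b++)
    printf ("dom(%d) = 0x%x\n", b, dom[b]);
  return 0;
}
#endif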
5546 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5549 compute_immediate_dominators (idom, dominators)
5551 sbitmap *dominators;
5556 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5558 /* Begin with tmp(n) = dom(n) - { n }. */
5559 for (b = n_basic_blocks; --b >= 0; )
5561 sbitmap_copy (tmp[b], dominators[b]);
5562 RESET_BIT (tmp[b], b);
5565 /* Subtract out all of our dominator's dominators. */
5566 for (b = n_basic_blocks; --b >= 0; )
5568 sbitmap tmp_b = tmp[b];
5571 for (s = n_basic_blocks; --s >= 0; )
5572 if (TEST_BIT (tmp_b, s))
5573 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5576 /* Find the one bit set in the bitmap and put it in the output array. */
5577 for (b = n_basic_blocks; --b >= 0; )
5580 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5583 sbitmap_vector_free (tmp);
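/* For illustration: a minimal sketch of the subtraction scheme used by
   compute_immediate_dominators above, slightly re-arranged to work from
   unmodified "strict dominator" sets.  Bit masks stand in for sbitmap
   rows; the dom[] values correspond to the toy CFG in the previous
   sketch (0 -> 1, 1 -> 2, 1 -> 3, 3 -> 1) and, like every name here,
   are illustrative only.  Disabled here; build it separately to
   experiment.  */
#if 0
#include <stdio.h>

#define NBLOCKS 4

int
main (void)
{
  unsigned dom[NBLOCKS] = { 0x1, 0x3, 0x7, 0xb };
  unsigned strict[NBLOCKS], only;
  int b, s, idom;

  /* Begin with strict(n) = dom(n) - { n }.  */
  for (b = 0; b < NBLOCKS; b++)
    strict[b] = dom[b] & ~(1u << b);

  for (b = 0; b < NBLOCKS; b++)
    {
      /* Remove every dominator of B that is itself strictly dominated
         by another dominator of B; only the immediate dominator is
         left (none at all for the entry successor).  */
      only = strict[b];
      for (s = 0; s < NBLOCKS; s++)
        if (strict[b] & (1u << s))
          only &= ~strict[s];

      idom = -1;
      for (s = 0; s < NBLOCKS; s++)
        if (only & (1u << s))
          idom = s;
      printf ("idom(%d) = %d\n", b, idom);
    }
  return 0;
}
#endif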
5586 /* Recompute register set/reference counts immediately prior to register allocation.
5589 This avoids problems with set/reference counts changing to/from values
5590 which have special meanings to the register allocators.
5592 Additionally, the reference counts are the primary component used by the
5593 register allocators to prioritize pseudos for allocation to hard regs.
5594 More accurate reference counts generally lead to better register allocation.
5596 F is the first insn to be scanned.
5598 LOOP_STEP denotes how much loop_depth should be incremented per
5599 loop nesting level in order to increase the ref count more for
5600 references in a loop.
5602 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5603 possibly other information which is used by the register allocators. */
5606 recompute_reg_usage (f, loop_step)
5607 rtx f ATTRIBUTE_UNUSED;
5608 int loop_step ATTRIBUTE_UNUSED;
5610 allocate_reg_life_data ();
5611 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
5614 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5615 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5616 of the number of registers that died. */
5619 count_or_remove_death_notes (blocks, kill)
5625 for (i = n_basic_blocks - 1; i >= 0; --i)
5630 if (blocks && ! TEST_BIT (blocks, i))
5633 bb = BASIC_BLOCK (i);
5635 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5637 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5639 rtx *pprev = &REG_NOTES (insn);
5644 switch (REG_NOTE_KIND (link))
5647 if (GET_CODE (XEXP (link, 0)) == REG)
5649 rtx reg = XEXP (link, 0);
5652 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5655 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5663 rtx next = XEXP (link, 1);
5664 free_EXPR_LIST_node (link);
5665 *pprev = link = next;
5671 pprev = &XEXP (link, 1);
5678 if (insn == bb->end)
5686 /* Record INSN's block as BB. */
5689 set_block_for_insn (insn, bb)
5693 size_t uid = INSN_UID (insn);
5694 if (uid >= basic_block_for_insn->num_elements)
5698 /* Add one-eighth the size so we don't keep calling xrealloc. */
5699 new_size = uid + (uid + 7) / 8;
5701 VARRAY_GROW (basic_block_for_insn, new_size);
5703 VARRAY_BB (basic_block_for_insn, uid) = bb;
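/* For illustration: a minimal sketch of the growth policy noted above,
   detached from the varray machinery.  When an index is past the end of
   the array, grow to the index plus roughly one eighth of it, so a run
   of out-of-range stores does not cause a reallocation per element.
   The struct and function names are illustrative, and error handling is
   omitted for brevity.  Disabled here.  */
#if 0
#include <stdlib.h>
#include <string.h>

struct toy_grow_array
{
  void **elts;
  size_t num_elements;
};

static void
toy_grow_array_set (struct toy_grow_array *a, size_t idx, void *value)
{
  if (idx >= a->num_elements)
    {
      /* Add one-eighth the size so we don't keep calling realloc.  */
      size_t new_size = idx + (idx + 7) / 8 + 1;

      a->elts = (void **) realloc (a->elts, new_size * sizeof (void *));
      memset (a->elts + a->num_elements, 0,
              (new_size - a->num_elements) * sizeof (void *));
      a->num_elements = new_size;
    }
  a->elts[idx] = value;
}
#endif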
5706 /* Record INSN's block number as BB. */
5707 /* ??? This has got to go. */
5710 set_block_num (insn, bb)
5714 set_block_for_insn (insn, BASIC_BLOCK (bb));
5717 /* Verify the CFG consistency. This function checks some CFG invariants and
5718 aborts when something is wrong. Hopefully this function will help to
5719 convert many optimization passes so that they preserve a consistent CFG.
5721 Currently it does the following checks:
5723 - test head/end pointers
5724 - overlapping of basic blocks
5725 - edge list correctness
5726 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5727 - tails of basic blocks (ensure that the boundary is necessary)
5728 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5729 and NOTE_INSN_BASIC_BLOCK
5730 - check that all insns are in the basic blocks
5731 (except the switch handling code, barriers and notes)
5732 - check that all returns are followed by barriers
5734 In the future it can be extended to check a lot of other stuff as well
5735 (reachability of basic blocks, life information, etc. etc.). */
5740 const int max_uid = get_max_uid ();
5741 const rtx rtx_first = get_insns ();
5742 basic_block *bb_info;
5746 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5748 /* First pass check head/end pointers and set bb_info array used by
5750 for (i = n_basic_blocks - 1; i >= 0; i--)
5752 basic_block bb = BASIC_BLOCK (i);
5754 /* Check the head pointer and make sure that it is pointing into the insn list.
5756 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5761 error ("Head insn %d for block %d not found in the insn stream.",
5762 INSN_UID (bb->head), bb->index);
5766 /* Check the end pointer and make sure that it is pointing into the insn list.
5768 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5770 if (bb_info[INSN_UID (x)] != NULL)
5772 error ("Insn %d is in multiple basic blocks (%d and %d)",
5773 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5776 bb_info[INSN_UID (x)] = bb;
5783 error ("End insn %d for block %d not found in the insn stream.",
5784 INSN_UID (bb->end), bb->index);
5789 /* Now check the basic blocks (boundaries etc.) */
5790 for (i = n_basic_blocks - 1; i >= 0; i--)
5792 basic_block bb = BASIC_BLOCK (i);
5793 /* Check correctness of edge lists. */
5801 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5803 fprintf (stderr, "Predecessor: ");
5804 dump_edge_info (stderr, e, 0);
5805 fprintf (stderr, "\nSuccessor: ");
5806 dump_edge_info (stderr, e, 1);
5810 if (e->dest != EXIT_BLOCK_PTR)
5812 edge e2 = e->dest->pred;
5813 while (e2 && e2 != e)
5817 error ("Basic block %i edge lists are corrupted", bb->index);
5829 error ("Basic block %d pred edge is corrupted", bb->index);
5830 fputs ("Predecessor: ", stderr);
5831 dump_edge_info (stderr, e, 0);
5832 fputs ("\nSuccessor: ", stderr);
5833 dump_edge_info (stderr, e, 1);
5834 fputc ('\n', stderr);
5837 if (e->src != ENTRY_BLOCK_PTR)
5839 edge e2 = e->src->succ;
5840 while (e2 && e2 != e)
5844 error ("Basic block %i edge lists are corrupted", bb->index);
5851 /* OK, pointers are correct. Now check the header of the basic
5852 block. It ought to contain an optional CODE_LABEL followed
5853 by the NOTE_INSN_BASIC_BLOCK note. */
5855 if (GET_CODE (x) == CODE_LABEL)
5859 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5865 if (GET_CODE (x) != NOTE
5866 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5867 || NOTE_BASIC_BLOCK (x) != bb)
5869 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5876 /* Do checks for empty blocks here */
5883 if (GET_CODE (x) == NOTE
5884 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5886 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5887 INSN_UID (x), bb->index);
5894 if (GET_CODE (x) == JUMP_INSN
5895 || GET_CODE (x) == CODE_LABEL
5896 || GET_CODE (x) == BARRIER)
5898 error ("In basic block %d:", bb->index);
5899 fatal_insn ("Flow control insn inside a basic block", x);
5910 if (!bb_info[INSN_UID (x)])
5912 switch (GET_CODE (x))
5919 /* An addr_vec is placed outside any basic block. */
5921 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5922 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5923 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5928 /* But in any case, non-deletable labels can appear anywhere. */
5932 fatal_insn ("Insn outside basic block", x);
5936 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
5937 && GET_CODE (x) == JUMP_INSN
5938 && returnjump_p (x) && ! condjump_p (x)
5939 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
5940 fatal_insn ("Return not followed by barrier", x);
5952 /* Functions to access an edge list with a vector representation.
5953 Enough data is kept such that given an index number, the
5954 pred and succ that edge represents can be determined, or
5955 given a pred and a succ, its index number can be returned.
5956 This allows algorithms which consume a lot of memory to
5957 represent the normally full matrix of edge (pred,succ) with a
5958 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
5959 wasted space in the client code due to sparse flow graphs. */
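/* For illustration: a minimal sketch of the compressed edge list idea
   described above, against toy successor lists rather than the real
   CFG (and so without the entry/exit blocks the real code also
   registers).  Every edge gets one slot in a single index_to_edge
   vector, and a (pred, succ) query is answered by a linear scan, as
   find_edge_index does.  All toy_* names are illustrative, and malloc
   failures are ignored for brevity.  Disabled here.  */
#if 0
#include <stdlib.h>

struct toy_edge
{
  int src, dest;
  struct toy_edge *succ_next;
};

struct toy_elist
{
  int num_edges;
  struct toy_edge **index_to_edge;
};

/* Flatten the per-block successor lists SUCC[0..NBLOCKS-1] into a
   single indexed vector.  */
static struct toy_elist *
toy_create_edge_list (struct toy_edge **succ, int nblocks)
{
  struct toy_elist *elist;
  struct toy_edge *e;
  int b, num_edges = 0;

  for (b = 0; b < nblocks; b++)
    for (e = succ[b]; e; e = e->succ_next)
      num_edges++;

  elist = (struct toy_elist *) malloc (sizeof (struct toy_elist));
  elist->num_edges = num_edges;
  elist->index_to_edge
    = (struct toy_edge **) malloc (num_edges * sizeof (struct toy_edge *));

  num_edges = 0;
  for (b = 0; b < nblocks; b++)
    for (e = succ[b]; e; e = e->succ_next)
      elist->index_to_edge[num_edges++] = e;
  return elist;
}

/* Return the index of the edge from PRED to SUCC, or -1 if absent.  */
static int
toy_edge_index (const struct toy_elist *elist, int pred, int succ)
{
  int x;

  for (x = 0; x < elist->num_edges; x++)
    if (elist->index_to_edge[x]->src == pred
        && elist->index_to_edge[x]->dest == succ)
      return x;
  return -1;
}
#endif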
5961 /* This function initializes the edge list. Basically the entire
5962 flowgraph is processed, and all edges are assigned a number,
5963 and the data structure is filled in. */
5967 struct edge_list *elist;
5973 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
5977 /* Determine the number of edges in the flow graph by counting successor
5978 edges on each basic block. */
5979 for (x = 0; x < n_basic_blocks; x++)
5981 basic_block bb = BASIC_BLOCK (x);
5983 for (e = bb->succ; e; e = e->succ_next)
5986 /* Don't forget successors of the entry block. */
5987 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5990 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
5991 elist->num_blocks = block_count;
5992 elist->num_edges = num_edges;
5993 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
5997 /* Follow successors of the entry block, and register these edges. */
5998 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6000 elist->index_to_edge[num_edges] = e;
6004 for (x = 0; x < n_basic_blocks; x++)
6006 basic_block bb = BASIC_BLOCK (x);
6008 /* Follow all successors of blocks, and register these edges. */
6009 for (e = bb->succ; e; e = e->succ_next)
6011 elist->index_to_edge[num_edges] = e;
6018 /* This function frees the memory associated with an edge list. */
6020 free_edge_list (elist)
6021 struct edge_list *elist;
6025 free (elist->index_to_edge);
6030 /* This function provides debug output showing an edge list. */
6032 print_edge_list (f, elist)
6034 struct edge_list *elist;
6037 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6038 elist->num_blocks - 2, elist->num_edges);
6040 for (x = 0; x < elist->num_edges; x++)
6042 fprintf (f, " %-4d - edge(", x);
6043 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6044 fprintf (f,"entry,");
6046 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6048 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6049 fprintf (f,"exit)\n");
6051 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6055 /* This function provides an internal consistency check of an edge list,
6056 verifying that all edges are present, and that there are no extra edges.
6059 verify_edge_list (f, elist)
6061 struct edge_list *elist;
6063 int x, pred, succ, index;
6066 for (x = 0; x < n_basic_blocks; x++)
6068 basic_block bb = BASIC_BLOCK (x);
6070 for (e = bb->succ; e; e = e->succ_next)
6072 pred = e->src->index;
6073 succ = e->dest->index;
6074 index = EDGE_INDEX (elist, e->src, e->dest);
6075 if (index == EDGE_INDEX_NO_EDGE)
6077 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6080 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6081 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6082 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6083 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6084 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6085 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6088 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6090 pred = e->src->index;
6091 succ = e->dest->index;
6092 index = EDGE_INDEX (elist, e->src, e->dest);
6093 if (index == EDGE_INDEX_NO_EDGE)
6095 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6098 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6099 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6100 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6101 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6102 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6103 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6105 /* We've verified that all the edges are in the list, now let's make sure
6106 there are no spurious edges in the list. */
6108 for (pred = 0 ; pred < n_basic_blocks; pred++)
6109 for (succ = 0 ; succ < n_basic_blocks; succ++)
6111 basic_block p = BASIC_BLOCK (pred);
6112 basic_block s = BASIC_BLOCK (succ);
6116 for (e = p->succ; e; e = e->succ_next)
6122 for (e = s->pred; e; e = e->pred_next)
6128 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6129 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6130 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6132 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6133 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6134 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6135 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6136 BASIC_BLOCK (succ)));
6138 for (succ = 0 ; succ < n_basic_blocks; succ++)
6140 basic_block p = ENTRY_BLOCK_PTR;
6141 basic_block s = BASIC_BLOCK (succ);
6145 for (e = p->succ; e; e = e->succ_next)
6151 for (e = s->pred; e; e = e->pred_next)
6157 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6158 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6159 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6161 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6162 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6163 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6164 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6165 BASIC_BLOCK (succ)));
6167 for (pred = 0 ; pred < n_basic_blocks; pred++)
6169 basic_block p = BASIC_BLOCK (pred);
6170 basic_block s = EXIT_BLOCK_PTR;
6174 for (e = p->succ; e; e = e->succ_next)
6180 for (e = s->pred; e; e = e->pred_next)
6186 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6187 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6188 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6190 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6191 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6192 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6193 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6198 /* This routine will determine what, if any, edge there is between
6199 a specified predecessor and successor. */
6202 find_edge_index (edge_list, pred, succ)
6203 struct edge_list *edge_list;
6204 basic_block pred, succ;
6207 for (x = 0; x < NUM_EDGES (edge_list); x++)
6209 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6210 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6213 return (EDGE_INDEX_NO_EDGE);
6216 /* This function will remove an edge from the flow graph. */
6221 edge last_pred = NULL;
6222 edge last_succ = NULL;
6224 basic_block src, dest;
6227 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6233 last_succ->succ_next = e->succ_next;
6235 src->succ = e->succ_next;
6237 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6243 last_pred->pred_next = e->pred_next;
6245 dest->pred = e->pred_next;
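/* For illustration: a minimal sketch of the list surgery remove_edge
   performs above.  An edge lives on two singly linked lists (its
   source's successor list and its destination's predecessor list), so
   removal walks each list remembering the previous node and splices the
   edge out.  The toy node type and function name are illustrative.
   Disabled here.  */
#if 0
struct toy_edge_node
{
  struct toy_edge_node *succ_next;
};

static void
toy_unlink_from_succ_list (struct toy_edge_node **head,
                           struct toy_edge_node *e)
{
  struct toy_edge_node *tmp, *last = 0;

  /* Walk the list remembering the previous node so E can be spliced
     out; the predecessor list is handled the same way with pred_next.  */
  for (tmp = *head; tmp && tmp != e; tmp = tmp->succ_next)
    last = tmp;
  if (tmp != e)
    return;                     /* E was not on this list.  */
  if (last)
    last->succ_next = e->succ_next;
  else
    *head = e->succ_next;
}
#endif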
6251 /* This routine will remove any fake successor edges for a basic block.
6252 When the edge is removed, it is also removed from whatever predecessor list it is in. */
6255 remove_fake_successors (bb)
6259 for (e = bb->succ; e ; )
6263 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6268 /* This routine will remove all fake edges from the flow graph. If
6269 we remove all fake successors, it will automatically remove all
6270 fake predecessors. */
6272 remove_fake_edges ()
6276 for (x = 0; x < n_basic_blocks; x++)
6277 remove_fake_successors (BASIC_BLOCK (x));
6279 /* We've handled all successors except the entry block's. */
6280 remove_fake_successors (ENTRY_BLOCK_PTR);
6283 /* This function will add a fake edge between any block which has no
6284 successors, and the exit block. Some data flow equations require these edges to exist. */
6287 add_noreturn_fake_exit_edges ()
6291 for (x = 0; x < n_basic_blocks; x++)
6292 if (BASIC_BLOCK (x)->succ == NULL)
6293 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6296 /* Dump the list of basic blocks in the bitmap NODES. */
6298 flow_nodes_print (str, nodes, file)
6300 const sbitmap nodes;
6305 fprintf (file, "%s { ", str);
6306 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6307 fputs ("}\n", file);
6311 /* Dump the list of exiting edges in the array EDGES. */
6313 flow_exits_print (str, edges, num_edges, file)
6321 fprintf (file, "%s { ", str);
6322 for (i = 0; i < num_edges; i++)
6323 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6324 fputs ("}\n", file);
6328 /* Dump loop related CFG information. */
6330 flow_loops_cfg_dump (loops, file)
6331 const struct loops *loops;
6336 if (! loops->num || ! file || ! loops->cfg.dom)
6339 for (i = 0; i < n_basic_blocks; i++)
6343 fprintf (file, ";; %d succs { ", i);
6344 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6345 fprintf (file, "%d ", succ->dest->index);
6346 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6350 /* Dump the DFS node order. */
6351 if (loops->cfg.dfs_order)
6353 fputs (";; DFS order: ", file);
6354 for (i = 0; i < n_basic_blocks; i++)
6355 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6361 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6363 flow_loop_nested_p (outer, loop)
6367 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6371 /* Dump the loop information specified by LOOPS to the stream FILE. */
6373 flow_loops_dump (loops, file, verbose)
6374 const struct loops *loops;
6381 num_loops = loops->num;
6382 if (! num_loops || ! file)
6385 fprintf (file, ";; %d loops found, %d levels\n",
6386 num_loops, loops->levels);
6388 for (i = 0; i < num_loops; i++)
6390 struct loop *loop = &loops->array[i];
6392 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6393 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6394 loop->header->index, loop->latch->index,
6395 loop->pre_header ? loop->pre_header->index : -1,
6396 loop->depth, loop->level,
6397 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6398 fprintf (file, ";; %d", loop->num_nodes);
6399 flow_nodes_print (" nodes", loop->nodes, file);
6400 fprintf (file, ";; %d", loop->num_exits);
6401 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6407 for (j = 0; j < i; j++)
6409 struct loop *oloop = &loops->array[j];
6411 if (loop->header == oloop->header)
6416 smaller = loop->num_nodes < oloop->num_nodes;
6418 /* If the union of LOOP and OLOOP is different than
6419 the larger of LOOP and OLOOP then LOOP and OLOOP
6420 must be disjoint. */
6421 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6422 smaller ? oloop : loop);
6423 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6424 loop->header->index, i, j,
6425 disjoint ? "disjoint" : "nested");
6432 /* Print diagnostics to compare our concept of a loop with
6433 what the loop notes say. */
6434 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6435 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6436 != NOTE_INSN_LOOP_BEG)
6437 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6438 INSN_UID (PREV_INSN (loop->first->head)));
6439 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6440 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6441 != NOTE_INSN_LOOP_END)
6442 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6443 INSN_UID (NEXT_INSN (loop->last->end)));
6448 flow_loops_cfg_dump (loops, file);
6452 /* Free all the memory allocated for LOOPS. */
6454 flow_loops_free (loops)
6455 struct loops *loops;
6464 /* Free the loop descriptors. */
6465 for (i = 0; i < loops->num; i++)
6467 struct loop *loop = &loops->array[i];
6470 sbitmap_free (loop->nodes);
6474 free (loops->array);
6475 loops->array = NULL;
6478 sbitmap_vector_free (loops->cfg.dom);
6479 if (loops->cfg.dfs_order)
6480 free (loops->cfg.dfs_order);
6482 sbitmap_free (loops->shared_headers);
6487 /* Find the exits from the loop using the bitmap of loop nodes NODES
6488 and store in EXITS array. Return the number of exits from the loop. */
6491 flow_loop_exits_find (nodes, exits)
6492 const sbitmap nodes;
6501 /* Check all nodes within the loop to see if there are any
6502 successors not in the loop. Note that a node may have multiple exiting edges. */
6505 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6506 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6508 basic_block dest = e->dest;
6510 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6518 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6520 /* Store all exiting edges into an array. */
6522 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6523 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6525 basic_block dest = e->dest;
6527 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6528 (*exits)[num_exits++] = e;
6536 /* Find the nodes contained within the loop with header HEADER and
6537 latch LATCH and store in NODES. Return the number of nodes within the loop. */
6540 flow_loop_nodes_find (header, latch, nodes)
6549 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6552 /* Start with only the loop header in the set of loop nodes. */
6553 sbitmap_zero (nodes);
6554 SET_BIT (nodes, header->index);
6556 header->loop_depth++;
6558 /* Push the loop latch on to the stack. */
6559 if (! TEST_BIT (nodes, latch->index))
6561 SET_BIT (nodes, latch->index);
6562 latch->loop_depth++;
6564 stack[sp++] = latch;
6573 for (e = node->pred; e; e = e->pred_next)
6575 basic_block ancestor = e->src;
6577 /* If this ancestor has not been marked as part of the loop, add it
6578 to the set of loop nodes and push it onto the stack. */
6579 if (ancestor != ENTRY_BLOCK_PTR
6580 && ! TEST_BIT (nodes, ancestor->index))
6582 SET_BIT (nodes, ancestor->index);
6583 ancestor->loop_depth++;
6585 stack[sp++] = ancestor;
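/* For illustration: a minimal, self-contained sketch of the loop-body
   collection done by flow_loop_nodes_find above.  Starting from the
   latch, walk predecessor edges and stop at the already-marked header;
   everything reached belongs to the natural loop.  The toy CFG
   (0 -> 1, 1 -> 2, 2 -> 3, 3 -> 1, 1 -> 4) and all names are
   illustrative.  Disabled here; build it separately to experiment.  */
#if 0
#include <stdio.h>

#define NBLOCKS 5

static const int preds[NBLOCKS][NBLOCKS] = { {0}, {0, 3}, {1}, {2}, {1} };
static const int npreds[NBLOCKS] = { 0, 2, 1, 1, 1 };

int
main (void)
{
  int header = 1, latch = 3;
  int in_loop[NBLOCKS] = { 0 };
  int stack[NBLOCKS], sp = 0, i;

  /* The header is always in the loop; push the latch to start the walk.  */
  in_loop[header] = 1;
  if (!in_loop[latch])
    {
      in_loop[latch] = 1;
      stack[sp++] = latch;
    }

  while (sp)
    {
      int node = stack[--sp];

      /* Any unmarked predecessor of a loop node is also in the loop.  */
      for (i = 0; i < npreds[node]; i++)
        if (!in_loop[preds[node][i]])
          {
            in_loop[preds[node][i]] = 1;
            stack[sp++] = preds[node][i];
          }
    }

  printf ("loop with header %d, latch %d:", header, latch);
  for (i = 0; i < NBLOCKS; i++)
    if (in_loop[i])
      printf (" %d", i);
  printf ("\n");
  return 0;
}
#endif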
6594 /* Compute the depth first search order and store in the array
6595 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6596 number of nodes visited. */
6598 flow_depth_first_order_compute (dfs_order)
6607 /* Allocate stack for back-tracking up CFG. */
6608 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6611 /* Allocate bitmap to track nodes that have been visited. */
6612 visited = sbitmap_alloc (n_basic_blocks);
6614 /* None of the nodes in the CFG have been visited yet. */
6615 sbitmap_zero (visited);
6617 /* Start with the first successor edge from the entry block. */
6618 e = ENTRY_BLOCK_PTR->succ;
6621 basic_block src = e->src;
6622 basic_block dest = e->dest;
6624 /* Mark that we have visited this node. */
6625 if (src != ENTRY_BLOCK_PTR)
6626 SET_BIT (visited, src->index);
6628 /* If this node has not been visited before, push the current
6629 edge on to the stack and proceed with the first successor
6630 edge of this node. */
6631 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6639 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6642 /* DEST has no successors (for example, a non-returning
6643 function is called) so do not push the current edge
6644 but carry on with its next successor. */
6645 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6646 SET_BIT (visited, dest->index);
6649 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6651 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6653 /* Pop edge off stack. */
6661 sbitmap_free (visited);
6663 /* The number of nodes visited should not be greater than n_basic_blocks. */
6665 if (dfsnum > n_basic_blocks)
6668 /* There are some nodes left in the CFG that are unreachable. */
6669 if (dfsnum < n_basic_blocks)
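/* For illustration: a minimal sketch of the numbering produced by
   flow_depth_first_order_compute above.  A block is numbered only once
   everything reachable through it has been numbered, so the resulting
   dfs_order visits outer loop headers before the loops they enclose.
   The walk is written recursively here for brevity, whereas the pass
   above uses an explicit edge stack; the toy CFG (0 -> 1, 1 -> 2,
   1 -> 3, 3 -> 1) and all names are illustrative.  Disabled here.  */
#if 0
#include <stdio.h>

#define NBLOCKS 4

static const int succs[NBLOCKS][NBLOCKS] = { {1}, {2, 3}, {0}, {1} };
static const int nsuccs[NBLOCKS] = { 1, 2, 0, 1 };

static int visited[NBLOCKS], dfs_order[NBLOCKS], dfsnum;

static void
walk (int b)
{
  int i;

  visited[b] = 1;
  for (i = 0; i < nsuccs[b]; i++)
    if (!visited[succs[b][i]])
      walk (succs[b][i]);
  /* Number the block once everything below it is done.  */
  dfs_order[b] = NBLOCKS - ++dfsnum;
}

int
main (void)
{
  int b;

  walk (0);                     /* Block 0 is the entry successor.  */
  for (b = 0; b < NBLOCKS; b++)
    printf ("dfs_order[%d] = %d\n", b, dfs_order[b]);
  return 0;
}
#endif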
6675 /* Return the block for the pre-header of the loop with header
6676 HEADER where DOM specifies the dominator information. Return NULL if
6677 there is no pre-header. */
6679 flow_loop_pre_header_find (header, dom)
6683 basic_block pre_header;
6686 /* If block p is a predecessor of the header and is the only block
6687 that the header does not dominate, then it is the pre-header. */
6689 for (e = header->pred; e; e = e->pred_next)
6691 basic_block node = e->src;
6693 if (node != ENTRY_BLOCK_PTR
6694 && ! TEST_BIT (dom[node->index], header->index))
6696 if (pre_header == NULL)
6700 /* There are multiple edges into the header from outside
6701 the loop so there is no pre-header block. */
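/* For illustration: a minimal sketch of the pre-header test above.
   Scan the header's predecessors, skip those dominated by the header
   (they are inside the loop), and accept a pre-header only when exactly
   one predecessor remains outside.  Dominator sets are bit masks as in
   the earlier sketches; the function and parameter names are
   illustrative.  Disabled here.  */
#if 0
static int
toy_pre_header_find (const unsigned *dom, const int *preds, int npreds,
                     int header)
{
  int i, pre_header = -1;

  for (i = 0; i < npreds; i++)
    {
      int node = preds[i];

      /* A predecessor not dominated by the header is outside the loop.  */
      if (!(dom[node] & (1u << header)))
        {
          if (pre_header == -1)
            pre_header = node;
          else
            return -1;          /* Multiple outside edges: no pre-header.  */
        }
    }
  return pre_header;
}
#endif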
6711 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6712 previously added. The insertion algorithm assumes that the loops
6713 are added in the order found by a depth first search of the CFG. */
6715 flow_loop_tree_node_add (prevloop, loop)
6716 struct loop *prevloop;
6720 if (flow_loop_nested_p (prevloop, loop))
6722 prevloop->inner = loop;
6723 loop->outer = prevloop;
6727 while (prevloop->outer)
6729 if (flow_loop_nested_p (prevloop->outer, loop))
6731 prevloop->next = loop;
6732 loop->outer = prevloop->outer;
6735 prevloop = prevloop->outer;
6738 prevloop->next = loop;
6743 /* Build the loop hierarchy tree for LOOPS. */
6745 flow_loops_tree_build (loops)
6746 struct loops *loops;
6751 num_loops = loops->num;
6755 /* Root the loop hierarchy tree with the first loop found.
6756 Since we used a depth first search this should be the outermost loop. */
6758 loops->tree = &loops->array[0];
6759 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6761 /* Add the remaining loops to the tree. */
6762 for (i = 1; i < num_loops; i++)
6763 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6767 /* Helper function to compute loop nesting depth and enclosed loop level
6768 for the natural loop specified by LOOP at the loop depth DEPTH.
6769 Returns the loop level. */
6771 flow_loop_level_compute (loop, depth)
6781 /* Traverse loop tree assigning depth and computing level as the
6782 maximum level of all the inner loops of this loop. The loop
6783 level is equivalent to the height of the loop in the loop tree
6784 and corresponds to the number of enclosed loop levels (including this loop). */
6786 for (inner = loop->inner; inner; inner = inner->next)
6790 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6795 loop->level = level;
6796 loop->depth = depth;
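/* For illustration: a minimal, self-contained sketch of the depth/level
   bookkeeping done by flow_loop_level_compute above, on a toy loop
   tree.  Depth counts enclosing loops from the outside in; level is the
   height of the subtree, i.e. the number of enclosed loop levels
   including the loop itself.  The toy_loop type and names are
   illustrative.  Disabled here; build it separately to experiment.  */
#if 0
#include <stdio.h>

struct toy_loop
{
  struct toy_loop *inner;       /* first nested loop */
  struct toy_loop *next;        /* next loop at the same depth */
  int depth, level;
};

static int
toy_loop_level_compute (struct toy_loop *loop, int depth)
{
  struct toy_loop *inner;
  int level = 1;

  /* Level is one more than the maximum level of the inner loops.  */
  for (inner = loop->inner; inner; inner = inner->next)
    {
      int ilevel = toy_loop_level_compute (inner, depth + 1) + 1;

      if (ilevel > level)
        level = ilevel;
    }
  loop->level = level;
  loop->depth = depth;
  return level;
}

int
main (void)
{
  struct toy_loop innermost = { 0, 0, 0, 0 };
  struct toy_loop outer = { &innermost, 0, 0, 0 };

  toy_loop_level_compute (&outer, 1);
  printf ("outer: depth %d level %d; inner: depth %d level %d\n",
          outer.depth, outer.level, innermost.depth, innermost.level);
  return 0;
}
#endif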
6801 /* Compute the loop nesting depth and enclosed loop level for the loop
6802 hierarchy tree specified by LOOPS. Return the maximum enclosed loop level. */
6806 flow_loops_level_compute (loops)
6807 struct loops *loops;
6813 /* Traverse all the outer level loops. */
6814 for (loop = loops->tree; loop; loop = loop->next)
6816 level = flow_loop_level_compute (loop, 1);
6824 /* Find all the natural loops in the function and save in LOOPS structure
6825 and recalculate loop_depth information in basic block structures.
6826 Return the number of natural loops found. */
6829 flow_loops_find (loops)
6830 struct loops *loops;
6841 loops->array = NULL;
6845 /* Taking care of this degenerate case makes the rest of
6846 this code simpler. */
6847 if (n_basic_blocks == 0)
6850 /* Compute the dominators. */
6851 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6852 compute_flow_dominators (dom, NULL);
6854 /* Count the number of loop edges (back edges). This should be the
6855 same as the number of natural loops. Also clear the loop_depth;
6856 as we work from inner to outer in a loop nest we call
6857 flow_loop_nodes_find, which will increment loop_depth for nodes
6858 within the current loop, which happens to enclose inner loops.
6861 for (b = 0; b < n_basic_blocks; b++)
6863 BASIC_BLOCK (b)->loop_depth = 0;
6864 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6866 basic_block latch = e->src;
6868 /* Look for back edges where a predecessor is dominated
6869 by this block. A natural loop has a single entry
6870 node (header) that dominates all the nodes in the
6871 loop. It also has a single back edge to the header
6872 from a latch node. Note that multiple natural loops
6873 may share the same header. */
6874 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6881 /* Compute depth first search order of the CFG so that outer
6882 natural loops will be found before inner natural loops. */
6883 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6884 flow_depth_first_order_compute (dfs_order);
6886 /* Allocate loop structures. */
6888 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6890 headers = sbitmap_alloc (n_basic_blocks);
6891 sbitmap_zero (headers);
6893 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6894 sbitmap_zero (loops->shared_headers);
6896 /* Find and record information about all the natural loops in the CFG. */
6899 for (b = 0; b < n_basic_blocks; b++)
6903 /* Search the nodes of the CFG in DFS order so that we can find
6904 outer loops first. */
6905 header = BASIC_BLOCK (dfs_order[b]);
6907 /* Look for all the possible latch blocks for this header. */
6908 for (e = header->pred; e; e = e->pred_next)
6910 basic_block latch = e->src;
6912 /* Look for back edges where a predecessor is dominated
6913 by this block. A natural loop has a single entry
6914 node (header) that dominates all the nodes in the
6915 loop. It also has a single back edge to the header
6916 from a latch node. Note that multiple natural loops
6917 may share the same header. */
6918 if (latch != ENTRY_BLOCK_PTR
6919 && TEST_BIT (dom[latch->index], header->index))
6923 loop = loops->array + num_loops;
6925 loop->header = header;
6926 loop->latch = latch;
6928 /* Keep track of blocks that are loop headers so
6929 that we can tell which loops should be merged. */
6930 if (TEST_BIT (headers, header->index))
6931 SET_BIT (loops->shared_headers, header->index);
6932 SET_BIT (headers, header->index);
6934 /* Find nodes contained within the loop. */
6935 loop->nodes = sbitmap_alloc (n_basic_blocks);
6937 = flow_loop_nodes_find (header, latch, loop->nodes);
6939 /* Compute first and last blocks within the loop.
6940 These are often the same as the loop header and
6941 loop latch respectively, but this is not always the case.
6944 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
6946 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
6948 /* Find edges which exit the loop. Note that a node
6949 may have several exit edges. */
6951 = flow_loop_exits_find (loop->nodes, &loop->exits);
6953 /* Look to see if the loop has a pre-header node. */
6955 = flow_loop_pre_header_find (header, dom);
6962 /* Natural loops with shared headers may either be disjoint or
6963 nested. Disjoint loops with shared headers cannot be inner
6964 loops and should be merged. For now just mark loops that share headers. */
6966 for (i = 0; i < num_loops; i++)
6967 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
6968 loops->array[i].shared = 1;
6970 sbitmap_free (headers);
6973 loops->num = num_loops;
6975 /* Save CFG derived information to avoid recomputing it. */
6976 loops->cfg.dom = dom;
6977 loops->cfg.dfs_order = dfs_order;
6979 /* Build the loop hierarchy tree. */
6980 flow_loops_tree_build (loops);
6982 /* Assign the loop nesting depth and enclosed loop level for each loop. */
6984 loops->levels = flow_loops_level_compute (loops);
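/* For illustration: a minimal sketch of the back edge test used twice
   in flow_loops_find above.  An edge latch -> header closes a natural
   loop exactly when the header dominates the latch.  Dominator sets are
   bit masks as in the earlier sketches; the function name is
   illustrative.  Disabled here.  */
#if 0
static int
toy_back_edge_p (const unsigned *dom, int latch, int header)
{
  /* The edge is a back edge iff HEADER is in dom(LATCH).  */
  return (dom[latch] & (1u << header)) != 0;
}
#endif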
6990 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
6992 flow_loop_outside_edge_p (loop, e)
6993 const struct loop *loop;
6996 if (e->dest != loop->header)
6998 return (e->src == ENTRY_BLOCK_PTR)
6999 || ! TEST_BIT (loop->nodes, e->src->index);
7003 /* Clear LOG_LINKS fields of insns in a chain. */
7005 clear_log_links (insns)
7009 for (i = insns; i; i = NEXT_INSN (i))
7010 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')