1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
** life_analysis **

46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
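
   (For example, a later pass that wants to know whether register number
   REGNO, hard or pseudo, is live on entry to block I can test
   REGNO_REG_SET_P (BASIC_BLOCK (I)->global_live_at_start, REGNO).
   This is only an illustration of reading the bit-vector; REGNO and I
   are placeholder names.)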
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial REG_DEAD notes.
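
   As a purely illustrative example (the insn and register numbers are
   made up), an addition whose second operand is not needed afterwards
   would carry a note like this once flow has run:

       (set (reg:SI 58) (plus:SI (reg:SI 56) (reg:SI 57)))
           with REG_NOTES = (expr_list:REG_DEAD (reg:SI 57) (nil))

   recording that the value in register 57 is dead after the insn.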
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
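
   Continuing the illustration above: if the value stored in register 58
   were itself never needed afterwards, but the insn could not simply be
   deleted (say it also references memory), the note would instead be
   (expr_list:REG_UNUSED (reg:SI 58) (nil)).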
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
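
   In outline, and only as a sketch of the bookkeeping rather than the
   real regset interface, that scan is:

       live = copy of bb->global_live_at_start
       for each INSN in the block, in order:
         clear the bit of each register in a REG_DEAD note of INSN
         set the bit of each register set by INSN
         clear the bit of each register in a REG_UNUSED note of INSN
         (LIVE now describes the registers live just after INSN)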
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value that is never used.
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
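
   For instance (register numbers made up, assuming a 4-byte SImode access
   on a target with post-increment addressing), the pair

       (set (mem:SI (reg:SI 100)) (reg:SI 101))
       (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   is rewritten so that the add is deleted and the store becomes

       (set (mem:SI (post_inc:SI (reg:SI 100))) (reg:SI 101))

   with a REG_INC note for (reg:SI 100) added to the surviving insn.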
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation
127 #include "basic-block.h"
128 #include "insn-config.h"
130 #include "hard-reg-set.h"
133 #include "function.h"
137 #include "insn-flags.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
164 /* The contents of the current function definition are allocated
165 in this obstack, and all are freed at the end of the function.
166 For top-level functions, this is temporary_obstack.
167 Separate obstacks are made for nested functions. */
169 extern struct obstack *function_obstack;
171 /* Number of basic blocks in the current function. */
175 /* Number of edges in the current function. */
179 /* The basic block array. */
181 varray_type basic_block_info;
183 /* The special entry and exit blocks. */
185 struct basic_block_def entry_exit_blocks[2]
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
194 ENTRY_BLOCK, /* index */
196 -1, -1 /* eh_beg, eh_end */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
207 EXIT_BLOCK, /* index */
209 -1, -1 /* eh_beg, eh_end */
213 /* Nonzero if the second flow pass has completed. */
216 /* Maximum register number used in this function, plus one. */
220 /* Indexed by n, giving various register information */
222 varray_type reg_n_info;
224 /* Size of a regset for the current function,
225 in (1) bytes and (2) elements. */
230 /* Regset of regs live when calls to `setjmp'-like functions happen. */
231 /* ??? Does this exist only for the setjmp-clobbered warning message? */
233 regset regs_live_at_setjmp;
235 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
236 that have to go in the same hard reg.
237 The first two regs in the list are a pair, and the next two
238 are another pair, etc. */
241 /* Set of registers that may be eliminable. These are handled specially
242 in updating regs_ever_live. */
244 static HARD_REG_SET elim_reg_set;
246 /* The basic block structure for every insn, indexed by uid. */
248 varray_type basic_block_for_insn;
250 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
251 /* ??? Should probably be using LABEL_NUSES instead. It would take a
252 bit of surgery to be able to use or co-opt the routines in jump. */
254 static rtx label_value_list;
256 /* For use in communicating between propagate_block and its subroutines.
257 Holds all information needed to compute life and def-use information. */
259 struct propagate_block_info
261 /* The basic block we're considering. */
264 /* Bit N is set if register N is conditionally or unconditionally live. */
267 /* Bit N is set if register N is unconditionally dead this insn. */
270 /* Bit N is set if register N is live this insn. */
273 /* Element N is the next insn that uses (hard or pseudo) register N
274 within the current basic block; or zero, if there is no such insn. */
277 /* Contains a list of all the MEMs we are tracking for dead store
281 /* If non-null, record the set of registers set in the basic block. */
284 /* Non-zero if the value of CC0 is live. */
287 /* Flags controlling the set of information propagate_block collects. */
291 /* Forward declarations */
292 static int count_basic_blocks PARAMS ((rtx));
293 static rtx find_basic_blocks_1 PARAMS ((rtx));
294 static void clear_edges PARAMS ((void));
295 static void make_edges PARAMS ((rtx));
296 static void make_label_edge PARAMS ((sbitmap *, basic_block,
298 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
299 basic_block, rtx, int));
300 static void mark_critical_edges PARAMS ((void));
301 static void move_stray_eh_region_notes PARAMS ((void));
302 static void record_active_eh_regions PARAMS ((rtx));
304 static void commit_one_edge_insertion PARAMS ((edge));
306 static void delete_unreachable_blocks PARAMS ((void));
307 static void delete_eh_regions PARAMS ((void));
308 static int can_delete_note_p PARAMS ((rtx));
309 static void expunge_block PARAMS ((basic_block));
310 static int can_delete_label_p PARAMS ((rtx));
311 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
313 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
315 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
316 static void try_merge_blocks PARAMS ((void));
317 static void tidy_fallthru_edges PARAMS ((void));
318 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
319 static void verify_wide_reg PARAMS ((int, rtx, rtx));
320 static void verify_local_live_at_start PARAMS ((regset, basic_block));
321 static int set_noop_p PARAMS ((rtx));
322 static int noop_move_p PARAMS ((rtx));
323 static void delete_noop_moves PARAMS ((rtx));
324 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
325 static void notice_stack_pointer_modification PARAMS ((rtx));
326 static void mark_reg PARAMS ((rtx, void *));
327 static void mark_regs_live_at_end PARAMS ((regset));
328 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
329 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
330 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
331 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
332 static int insn_dead_p PARAMS ((struct propagate_block_info *,
334 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
336 static void mark_set_regs PARAMS ((struct propagate_block_info *,
338 static void mark_set_1 PARAMS ((struct propagate_block_info *,
339 enum rtx_code, rtx, rtx,
342 static void find_auto_inc PARAMS ((struct propagate_block_info *,
344 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
346 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
348 static void mark_used_reg PARAMS ((struct propagate_block_info *,
350 static void mark_used_regs PARAMS ((struct propagate_block_info *,
352 void dump_flow_info PARAMS ((FILE *));
353 void debug_flow_info PARAMS ((void));
354 static void dump_edge_info PARAMS ((FILE *, edge, int));
356 static void count_reg_sets_1 PARAMS ((rtx, int));
357 static void count_reg_sets PARAMS ((rtx, int));
358 static void count_reg_references PARAMS ((rtx, int));
359 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
361 static void remove_fake_successors PARAMS ((basic_block));
362 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
363 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
364 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
365 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
366 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
367 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
368 static int flow_depth_first_order_compute PARAMS ((int *));
369 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
370 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
371 static void flow_loops_tree_build PARAMS ((struct loops *));
372 static int flow_loop_level_compute PARAMS ((struct loop *, int));
373 static int flow_loops_level_compute PARAMS ((struct loops *));
375 /* Find basic blocks of the current function.
376 F is the first insn of the function and NREGS the number of register numbers in use. */
380 find_basic_blocks (f, nregs, file)
382 int nregs ATTRIBUTE_UNUSED;
383 FILE *file ATTRIBUTE_UNUSED;
387 /* Flush out existing data. */
388 if (basic_block_info != NULL)
394 /* Clear bb->aux on all extant basic blocks. We'll use this as a
395 tag for reuse during create_basic_block, just in case some pass
396 copies around basic block notes improperly. */
397 for (i = 0; i < n_basic_blocks; ++i)
398 BASIC_BLOCK (i)->aux = NULL;
400 VARRAY_FREE (basic_block_info);
403 n_basic_blocks = count_basic_blocks (f);
405 /* Size the basic block table. The actual structures will be allocated
406 by find_basic_blocks_1, since we want to keep the structure pointers
407 stable across calls to find_basic_blocks. */
408 /* ??? This whole issue would be much simpler if we called find_basic_blocks
409 exactly once, and thereafter we don't have a single long chain of
410 instructions at all until close to the end of compilation when we
411 actually lay them out. */
413 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
415 label_value_list = find_basic_blocks_1 (f);
417 /* Record the block to which an insn belongs. */
418 /* ??? This should be done another way, by which (perhaps) a label is
419 tagged directly with the basic block that it starts. It is used for
420 more than that currently, but IMO that is the only valid use. */
422 max_uid = get_max_uid ();
424 /* Leave space for insns life_analysis makes in some cases for auto-inc.
425 These cases are rare, so we don't need too much space. */
426 max_uid += max_uid / 10;
429 compute_bb_for_insn (max_uid);
431 /* Discover the edges of our cfg. */
432 record_active_eh_regions (f);
433 make_edges (label_value_list);
435 /* Do very simple cleanup now, for the benefit of code that runs between
436 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
437 tidy_fallthru_edges ();
439 mark_critical_edges ();
441 #ifdef ENABLE_CHECKING
446 /* Count the basic blocks of the function. */
449 count_basic_blocks (f)
453 register RTX_CODE prev_code;
454 register int count = 0;
456 int call_had_abnormal_edge = 0;
458 prev_code = JUMP_INSN;
459 for (insn = f; insn; insn = NEXT_INSN (insn))
461 register RTX_CODE code = GET_CODE (insn);
463 if (code == CODE_LABEL
464 || (GET_RTX_CLASS (code) == 'i'
465 && (prev_code == JUMP_INSN
466 || prev_code == BARRIER
467 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
470 /* Record whether this call created an edge. */
471 if (code == CALL_INSN)
473 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
474 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
476 call_had_abnormal_edge = 0;
478 /* If there is an EH region or rethrow, we have an edge. */
479 if ((eh_region && region > 0)
480 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
481 call_had_abnormal_edge = 1;
482 else if (nonlocal_goto_handler_labels && region >= 0)
483 /* If there is a nonlocal goto label and the specified
484 region number isn't -1, we have an edge. (0 means
485 no throw, but might have a nonlocal goto). */
486 call_had_abnormal_edge = 1;
491 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
493 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
497 /* The rest of the compiler works a bit more smoothly when we don't have to
498 check for the edge case of do-nothing functions with no basic blocks. */
501 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
508 /* Find all basic blocks of the function whose first insn is F.
510 Collect and return a list of labels whose addresses are taken. This
511 will be used in make_edges for use with computed gotos. */
514 find_basic_blocks_1 (f)
517 register rtx insn, next;
519 rtx bb_note = NULL_RTX;
520 rtx eh_list = NULL_RTX;
521 rtx label_value_list = NULL_RTX;
525 /* We process the instructions in a slightly different way than we did
526 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
527 closed out the previous block, so that it gets attached at the proper
528 place. Since this form should be equivalent to the previous,
529 count_basic_blocks continues to use the old form as a check. */
531 for (insn = f; insn; insn = next)
533 enum rtx_code code = GET_CODE (insn);
535 next = NEXT_INSN (insn);
541 int kind = NOTE_LINE_NUMBER (insn);
543 /* Keep a LIFO list of the currently active exception notes. */
544 if (kind == NOTE_INSN_EH_REGION_BEG)
545 eh_list = alloc_INSN_LIST (insn, eh_list);
546 else if (kind == NOTE_INSN_EH_REGION_END)
550 eh_list = XEXP (eh_list, 1);
551 free_INSN_LIST_node (t);
554 /* Look for basic block notes with which to keep the
555 basic_block_info pointers stable. Unthread the note now;
556 we'll put it back at the right place in create_basic_block.
557 Or not at all if we've already found a note in this block. */
558 else if (kind == NOTE_INSN_BASIC_BLOCK)
560 if (bb_note == NULL_RTX)
563 next = flow_delete_insn (insn);
569 /* A basic block starts at a label. If we've closed one off due
570 to a barrier or some such, no need to do it again. */
571 if (head != NULL_RTX)
573 /* While we now have edge lists with which other portions of
574 the compiler might determine a call ending a basic block
575 does not imply an abnormal edge, it will be a bit before
576 everything can be updated. So continue to emit a noop at
577 the end of such a block. */
578 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
580 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
581 end = emit_insn_after (nop, end);
584 create_basic_block (i++, head, end, bb_note);
592 /* A basic block ends at a jump. */
593 if (head == NULL_RTX)
597 /* ??? Make a special check for table jumps. The way this
598 happens is truly and amazingly gross. We are about to
599 create a basic block that contains just a code label and
600 an addr*vec jump insn. Worse, an addr_diff_vec creates
601 its own natural loop.
603 Prevent this bit of brain damage, pasting things together
604 correctly in make_edges.
606 The correct solution involves emitting the table directly
607 on the tablejump instruction as a note, or JUMP_LABEL. */
609 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
610 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
618 goto new_bb_inclusive;
621 /* A basic block ends at a barrier. It may be that an unconditional
622 jump already closed the basic block -- no need to do it again. */
623 if (head == NULL_RTX)
626 /* While we now have edge lists with which other portions of the
627 compiler might determine a call ending a basic block does not
628 imply an abnormal edge, it will be a bit before everything can
629 be updated. So continue to emit a noop at the end of such a block. */
631 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
633 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
634 end = emit_insn_after (nop, end);
636 goto new_bb_exclusive;
640 /* Record whether this call created an edge. */
641 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
642 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
643 int call_has_abnormal_edge = 0;
645 /* If there is an EH region or rethrow, we have an edge. */
646 if ((eh_list && region > 0)
647 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
648 call_has_abnormal_edge = 1;
649 else if (nonlocal_goto_handler_labels && region >= 0)
650 /* If there is a nonlocal goto label and the specified
651 region number isn't -1, we have an edge. (0 means
652 no throw, but might have a nonlocal goto). */
653 call_has_abnormal_edge = 1;
655 /* A basic block ends at a call that can either throw or
656 do a non-local goto. */
657 if (call_has_abnormal_edge)
660 if (head == NULL_RTX)
665 create_basic_block (i++, head, end, bb_note);
666 head = end = NULL_RTX;
674 if (GET_RTX_CLASS (code) == 'i')
676 if (head == NULL_RTX)
683 if (GET_RTX_CLASS (code) == 'i')
687 /* Make a list of all labels referred to other than by jumps
688 (which just don't have the REG_LABEL notes).
690 Make a special exception for labels followed by an ADDR*VEC,
691 as this would be a part of the tablejump setup code.
693 Make a special exception for the eh_return_stub_label, which
694 we know isn't part of any otherwise visible control flow. */
696 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
697 if (REG_NOTE_KIND (note) == REG_LABEL)
699 rtx lab = XEXP (note, 0), next;
701 if (lab == eh_return_stub_label)
703 else if ((next = next_nonnote_insn (lab)) != NULL
704 && GET_CODE (next) == JUMP_INSN
705 && (GET_CODE (PATTERN (next)) == ADDR_VEC
706 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
710 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
715 if (head != NULL_RTX)
716 create_basic_block (i++, head, end, bb_note);
718 if (i != n_basic_blocks)
721 return label_value_list;
724 /* Tidy the CFG by deleting unreachable code and whatnot. */
730 delete_unreachable_blocks ();
731 move_stray_eh_region_notes ();
732 record_active_eh_regions (f);
734 mark_critical_edges ();
736 /* Kill the data we won't maintain. */
737 label_value_list = NULL_RTX;
740 /* Create a new basic block consisting of the instructions between
741 HEAD and END inclusive. Reuses the note and basic block struct
742 in BB_NOTE, if any. */
745 create_basic_block (index, head, end, bb_note)
747 rtx head, end, bb_note;
752 && ! RTX_INTEGRATED_P (bb_note)
753 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
756 /* If we found an existing note, thread it back onto the chain. */
758 if (GET_CODE (head) == CODE_LABEL)
759 add_insn_after (bb_note, head);
762 add_insn_before (bb_note, head);
768 /* Otherwise we must create a note and a basic block structure.
769 Since we allow basic block structs in rtl, give the struct
770 the same lifetime by allocating it off the function obstack
771 rather than using malloc. */
773 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
774 memset (bb, 0, sizeof (*bb));
776 if (GET_CODE (head) == CODE_LABEL)
777 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
780 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
783 NOTE_BASIC_BLOCK (bb_note) = bb;
786 /* Always include the bb note in the block. */
787 if (NEXT_INSN (end) == bb_note)
793 BASIC_BLOCK (index) = bb;
795 /* Tag the block so that we know it has been used when considering
796 other basic block notes. */
800 /* Records the basic block struct in BB_FOR_INSN, for every instruction
801 indexed by INSN_UID. MAX is the size of the array. */
804 compute_bb_for_insn (max)
809 if (basic_block_for_insn)
810 VARRAY_FREE (basic_block_for_insn);
811 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
813 for (i = 0; i < n_basic_blocks; ++i)
815 basic_block bb = BASIC_BLOCK (i);
822 int uid = INSN_UID (insn);
824 VARRAY_BB (basic_block_for_insn, uid) = bb;
827 insn = NEXT_INSN (insn);
832 /* Free the memory associated with the edge structures. */
840 for (i = 0; i < n_basic_blocks; ++i)
842 basic_block bb = BASIC_BLOCK (i);
844 for (e = bb->succ; e ; e = n)
854 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
860 ENTRY_BLOCK_PTR->succ = 0;
861 EXIT_BLOCK_PTR->pred = 0;
866 /* Identify the edges between basic blocks.
868 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
869 that are otherwise unreachable may be reachable with a non-local goto.
871 BB_EH_END is an array indexed by basic block number in which we record
872 the list of exception regions active at the end of the basic block. */
875 make_edges (label_value_list)
876 rtx label_value_list;
879 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
880 sbitmap *edge_cache = NULL;
882 /* Assume no computed jump; revise as we create edges. */
883 current_function_has_computed_jump = 0;
885 /* Heavy use of computed goto in machine-generated code can lead to
886 nearly fully-connected CFGs. In that case we spend a significant
887 amount of time searching the edge lists for duplicates. */
888 if (forced_labels || label_value_list)
890 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
891 sbitmap_vector_zero (edge_cache, n_basic_blocks);
894 /* By nature of the way these get numbered, block 0 is always the entry. */
895 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
897 for (i = 0; i < n_basic_blocks; ++i)
899 basic_block bb = BASIC_BLOCK (i);
902 int force_fallthru = 0;
904 /* Examine the last instruction of the block, and discover the
905 ways we can leave the block. */
908 code = GET_CODE (insn);
911 if (code == JUMP_INSN)
915 /* ??? Recognize a tablejump and do the right thing. */
916 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
917 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
918 && GET_CODE (tmp) == JUMP_INSN
919 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
920 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
925 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
926 vec = XVEC (PATTERN (tmp), 0);
928 vec = XVEC (PATTERN (tmp), 1);
930 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
931 make_label_edge (edge_cache, bb,
932 XEXP (RTVEC_ELT (vec, j), 0), 0);
934 /* Some targets (eg, ARM) emit a conditional jump that also
935 contains the out-of-range target. Scan for these and
936 add an edge if necessary. */
937 if ((tmp = single_set (insn)) != NULL
938 && SET_DEST (tmp) == pc_rtx
939 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
940 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
941 make_label_edge (edge_cache, bb,
942 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
944 #ifdef CASE_DROPS_THROUGH
945 /* Silly VAXen. The ADDR_VEC is going to be in the way of
946 us naturally detecting fallthru into the next block. */
951 /* If this is a computed jump, then mark it as reaching
952 everything on the label_value_list and forced_labels list. */
953 else if (computed_jump_p (insn))
955 current_function_has_computed_jump = 1;
957 for (x = label_value_list; x; x = XEXP (x, 1))
958 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
960 for (x = forced_labels; x; x = XEXP (x, 1))
961 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
964 /* A return insn creates an edge to the exit block. */
965 else if (returnjump_p (insn))
966 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
968 /* Otherwise, we have a plain conditional or unconditional jump. */
971 if (! JUMP_LABEL (insn))
973 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
977 /* If this is a sibling call insn, then this is in effect a
978 combined call and return, and so we need an edge to the
979 exit block. No need to worry about EH edges, since we
980 wouldn't have created the sibling call in the first place. */
982 if (code == CALL_INSN && SIBLING_CALL_P (insn))
983 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
986 /* If this is a CALL_INSN, then mark it as reaching the active EH
987 handler for this CALL_INSN. If we're handling asynchronous
988 exceptions then any insn can reach any of the active handlers.
990 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
992 if (code == CALL_INSN || asynchronous_exceptions)
994 /* Add any appropriate EH edges. We do this unconditionally
995 since there may be a REG_EH_REGION or REG_EH_RETHROW note
996 on the call, and this needn't be within an EH region. */
997 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
999 /* If we have asynchronous exceptions, do the same for *all*
1000 exception regions active in the block. */
1001 if (asynchronous_exceptions
1002 && bb->eh_beg != bb->eh_end)
1004 if (bb->eh_beg >= 0)
1005 make_eh_edge (edge_cache, eh_nest_info, bb,
1006 NULL_RTX, bb->eh_beg);
1008 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1009 if (GET_CODE (x) == NOTE
1010 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1011 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1013 int region = NOTE_EH_HANDLER (x);
1014 make_eh_edge (edge_cache, eh_nest_info, bb,
1019 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1021 /* ??? This could be made smarter: in some cases it's possible
1022 to tell that certain calls will not do a nonlocal goto.
1024 For example, if the nested functions that do the nonlocal
1025 gotos do not have their addresses taken, then only calls to
1026 those functions or to other nested functions that use them
1027 could possibly do nonlocal gotos. */
1028 /* We do know that a REG_EH_REGION note with a value less
1029 than 0 is guaranteed not to perform a non-local goto. */
1030 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1031 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1032 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1033 make_label_edge (edge_cache, bb, XEXP (x, 0),
1034 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1038 /* We know something about the structure of the function __throw in
1039 libgcc2.c. It is the only function that ever contains eh_stub
1040 labels. It modifies its return address so that the last block
1041 returns to one of the eh_stub labels within it. So we have to
1042 make additional edges in the flow graph. */
1043 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1044 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1046 /* Find out if we can drop through to the next block. */
1047 insn = next_nonnote_insn (insn);
1048 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1049 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1050 else if (i + 1 < n_basic_blocks)
1052 rtx tmp = BLOCK_HEAD (i + 1);
1053 if (GET_CODE (tmp) == NOTE)
1054 tmp = next_nonnote_insn (tmp);
1055 if (force_fallthru || insn == tmp)
1056 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1060 free_eh_nesting_info (eh_nest_info);
1062 sbitmap_vector_free (edge_cache);
1065 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1066 about the edge that is accumulated between calls. */
1069 make_edge (edge_cache, src, dst, flags)
1070 sbitmap *edge_cache;
1071 basic_block src, dst;
1077 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1078 many edges to them, and we didn't allocate memory for it. */
1079 use_edge_cache = (edge_cache
1080 && src != ENTRY_BLOCK_PTR
1081 && dst != EXIT_BLOCK_PTR);
1083 /* Make sure we don't add duplicate edges. */
1084 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1085 for (e = src->succ; e ; e = e->succ_next)
1092 e = (edge) xcalloc (1, sizeof (*e));
1095 e->succ_next = src->succ;
1096 e->pred_next = dst->pred;
1105 SET_BIT (edge_cache[src->index], dst->index);
1108 /* Create an edge from a basic block to a label. */
1111 make_label_edge (edge_cache, src, label, flags)
1112 sbitmap *edge_cache;
1117 if (GET_CODE (label) != CODE_LABEL)
1120 /* If the label was never emitted, this insn is junk, but avoid a
1121 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1122 as a result of a syntax error and a diagnostic has already been printed. */
1125 if (INSN_UID (label) == 0)
1128 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1131 /* Create the edges generated by INSN in REGION. */
1134 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1135 sbitmap *edge_cache;
1136 eh_nesting_info *eh_nest_info;
1141 handler_info **handler_list;
1144 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1145 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1148 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1149 EDGE_ABNORMAL | EDGE_EH | is_call);
1153 /* EH_REGION notes appearing between basic blocks are ambiguous, and even
1154 dangerous if we intend to move basic blocks around. Move such notes
1155 into the following block. */
1158 move_stray_eh_region_notes ()
1163 if (n_basic_blocks < 2)
1166 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1167 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1169 rtx insn, next, list = NULL_RTX;
1171 b1 = BASIC_BLOCK (i);
1172 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1174 next = NEXT_INSN (insn);
1175 if (GET_CODE (insn) == NOTE
1176 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1177 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1179 /* Unlink from the insn chain. */
1180 NEXT_INSN (PREV_INSN (insn)) = next;
1181 PREV_INSN (next) = PREV_INSN (insn);
1184 NEXT_INSN (insn) = list;
1189 if (list == NULL_RTX)
1192 /* Find where to insert these things. */
1194 if (GET_CODE (insn) == CODE_LABEL)
1195 insn = NEXT_INSN (insn);
1199 next = NEXT_INSN (list);
1200 add_insn_after (list, insn);
1206 /* Recompute eh_beg/eh_end for each basic block. */
1209 record_active_eh_regions (f)
1212 rtx insn, eh_list = NULL_RTX;
1214 basic_block bb = BASIC_BLOCK (0);
1216 for (insn = f; insn ; insn = NEXT_INSN (insn))
1218 if (bb->head == insn)
1219 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1221 if (GET_CODE (insn) == NOTE)
1223 int kind = NOTE_LINE_NUMBER (insn);
1224 if (kind == NOTE_INSN_EH_REGION_BEG)
1225 eh_list = alloc_INSN_LIST (insn, eh_list);
1226 else if (kind == NOTE_INSN_EH_REGION_END)
1228 rtx t = XEXP (eh_list, 1);
1229 free_INSN_LIST_node (eh_list);
1234 if (bb->end == insn)
1236 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1238 if (i == n_basic_blocks)
1240 bb = BASIC_BLOCK (i);
1245 /* Identify critical edges and set the bits appropriately. */
1248 mark_critical_edges ()
1250 int i, n = n_basic_blocks;
1253 /* We begin with the entry block. This is not terribly important now,
1254 but could be if a front end (Fortran) implemented alternate entry points. */
1256 bb = ENTRY_BLOCK_PTR;
1263 /* (1) Critical edges must have a source with multiple successors. */
1264 if (bb->succ && bb->succ->succ_next)
1266 for (e = bb->succ; e ; e = e->succ_next)
1268 /* (2) Critical edges must have a destination with multiple
1269 predecessors. Note that we know there is at least one
1270 predecessor -- the edge we followed to get here. */
1271 if (e->dest->pred->pred_next)
1272 e->flags |= EDGE_CRITICAL;
1274 e->flags &= ~EDGE_CRITICAL;
1279 for (e = bb->succ; e ; e = e->succ_next)
1280 e->flags &= ~EDGE_CRITICAL;
1285 bb = BASIC_BLOCK (i);
1289 /* Split a (typically critical) edge. Return the new block.
1290 Abort on abnormal edges.
1292 ??? The code generally expects to be called on critical edges.
1293 The case of a block ending in an unconditional jump to a
1294 block with multiple predecessors is not handled optimally. */
1297 split_edge (edge_in)
1300 basic_block old_pred, bb, old_succ;
1305 /* Abnormal edges cannot be split. */
1306 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1309 old_pred = edge_in->src;
1310 old_succ = edge_in->dest;
1312 /* Remove the existing edge from the destination's pred list. */
1315 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1317 *pp = edge_in->pred_next;
1318 edge_in->pred_next = NULL;
1321 /* Create the new structures. */
1322 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1323 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1326 memset (bb, 0, sizeof (*bb));
1327 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1328 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1330 /* ??? This info is likely going to be out of date very soon. */
1331 if (old_succ->global_live_at_start)
1333 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1334 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1338 CLEAR_REG_SET (bb->global_live_at_start);
1339 CLEAR_REG_SET (bb->global_live_at_end);
1344 bb->succ = edge_out;
1347 edge_in->flags &= ~EDGE_CRITICAL;
1349 edge_out->pred_next = old_succ->pred;
1350 edge_out->succ_next = NULL;
1352 edge_out->dest = old_succ;
1353 edge_out->flags = EDGE_FALLTHRU;
1354 edge_out->probability = REG_BR_PROB_BASE;
1356 old_succ->pred = edge_out;
1358 /* Tricky case -- if there existed a fallthru into the successor
1359 (and we're not it) we must add a new unconditional jump around
1360 the new block we're actually interested in.
1362 Further, if that edge is critical, this means a second new basic
1363 block must be created to hold it. In order to simplify correct
1364 insn placement, do this before we touch the existing basic block
1365 ordering for the block we were really wanting. */
1366 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1369 for (e = edge_out->pred_next; e ; e = e->pred_next)
1370 if (e->flags & EDGE_FALLTHRU)
1375 basic_block jump_block;
1378 if ((e->flags & EDGE_CRITICAL) == 0
1379 && e->src != ENTRY_BLOCK_PTR)
1381 /* Non critical -- we can simply add a jump to the end
1382 of the existing predecessor. */
1383 jump_block = e->src;
1387 /* We need a new block to hold the jump. The simplest
1388 way to do the bulk of the work here is to recursively call ourselves. */
1390 jump_block = split_edge (e);
1391 e = jump_block->succ;
1394 /* Now add the jump insn ... */
1395 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1397 jump_block->end = pos;
1398 if (basic_block_for_insn)
1399 set_block_for_insn (pos, jump_block);
1400 emit_barrier_after (pos);
1402 /* ... let jump know that label is in use, ... */
1403 JUMP_LABEL (pos) = old_succ->head;
1404 ++LABEL_NUSES (old_succ->head);
1406 /* ... and clear fallthru on the outgoing edge. */
1407 e->flags &= ~EDGE_FALLTHRU;
1409 /* Continue splitting the interesting edge. */
1413 /* Place the new block just in front of the successor. */
1414 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1415 if (old_succ == EXIT_BLOCK_PTR)
1416 j = n_basic_blocks - 1;
1418 j = old_succ->index;
1419 for (i = n_basic_blocks - 1; i > j; --i)
1421 basic_block tmp = BASIC_BLOCK (i - 1);
1422 BASIC_BLOCK (i) = tmp;
1425 BASIC_BLOCK (i) = bb;
1428 /* Create the basic block note.
1430 Where we place the note can have a noticeable impact on the generated
1431 code. Consider this cfg:
1442 If we need to insert an insn on the edge from block 0 to block 1,
1443 we want to ensure the instructions we insert are outside of any
1444 loop notes that physically sit between block 0 and block 1. Otherwise
1445 we confuse the loop optimizer into thinking the loop is a phony. */
1446 if (old_succ != EXIT_BLOCK_PTR
1447 && PREV_INSN (old_succ->head)
1448 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1449 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1450 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1451 PREV_INSN (old_succ->head));
1452 else if (old_succ != EXIT_BLOCK_PTR)
1453 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1455 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1456 NOTE_BASIC_BLOCK (bb_note) = bb;
1457 bb->head = bb->end = bb_note;
1459 /* Not quite simple -- for non-fallthru edges, we must adjust the
1460 predecessor's jump instruction to target our new block. */
1461 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1463 rtx tmp, insn = old_pred->end;
1464 rtx old_label = old_succ->head;
1465 rtx new_label = gen_label_rtx ();
1467 if (GET_CODE (insn) != JUMP_INSN)
1470 /* ??? Recognize a tablejump and adjust all matching cases. */
1471 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1472 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1473 && GET_CODE (tmp) == JUMP_INSN
1474 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1475 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1480 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1481 vec = XVEC (PATTERN (tmp), 0);
1483 vec = XVEC (PATTERN (tmp), 1);
1485 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1486 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1488 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1489 --LABEL_NUSES (old_label);
1490 ++LABEL_NUSES (new_label);
1493 /* Handle casesi dispatch insns */
1494 if ((tmp = single_set (insn)) != NULL
1495 && SET_DEST (tmp) == pc_rtx
1496 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1497 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1498 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1500 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1502 --LABEL_NUSES (old_label);
1503 ++LABEL_NUSES (new_label);
1508 /* This would have indicated an abnormal edge. */
1509 if (computed_jump_p (insn))
1512 /* A return instruction can't be redirected. */
1513 if (returnjump_p (insn))
1516 /* If the insn doesn't go where we think, we're confused. */
1517 if (JUMP_LABEL (insn) != old_label)
1520 redirect_jump (insn, new_label);
1523 emit_label_before (new_label, bb_note);
1524 bb->head = new_label;
1530 /* Queue instructions for insertion on an edge between two basic blocks.
1531 The new instructions and basic blocks (if any) will not appear in the
1532 CFG until commit_edge_insertions is called. */
1535 insert_insn_on_edge (pattern, e)
1539 /* We cannot insert instructions on an abnormal critical edge.
1540 It will be easier to find the culprit if we die now. */
1541 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1542 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1545 if (e->insns == NULL_RTX)
1548 push_to_sequence (e->insns);
1550 emit_insn (pattern);
1552 e->insns = get_insns ();
1556 /* Update the CFG for the instructions queued on edge E. */
1559 commit_one_edge_insertion (e)
1562 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1565 /* Pull the insns off the edge now since the edge might go away. */
1567 e->insns = NULL_RTX;
1569 /* Figure out where to put these things. If the destination has
1570 one predecessor, insert there. Except for the exit block. */
1571 if (e->dest->pred->pred_next == NULL
1572 && e->dest != EXIT_BLOCK_PTR)
1576 /* Get the location correct wrt a code label, and "nice" wrt
1577 a basic block note, and before everything else. */
1579 if (GET_CODE (tmp) == CODE_LABEL)
1580 tmp = NEXT_INSN (tmp);
1581 if (GET_CODE (tmp) == NOTE
1582 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1583 tmp = NEXT_INSN (tmp);
1584 if (tmp == bb->head)
1587 after = PREV_INSN (tmp);
1590 /* If the source has one successor and the edge is not abnormal,
1591 insert there. Except for the entry block. */
1592 else if ((e->flags & EDGE_ABNORMAL) == 0
1593 && e->src->succ->succ_next == NULL
1594 && e->src != ENTRY_BLOCK_PTR)
1597 /* It is possible to have a non-simple jump here. Consider a target
1598 where some forms of unconditional jumps clobber a register. This
1599 happens on the fr30 for example.
1601 We know this block has a single successor, so we can just emit
1602 the queued insns before the jump. */
1603 if (GET_CODE (bb->end) == JUMP_INSN)
1609 /* We'd better be fallthru, or we've lost track of what's what. */
1610 if ((e->flags & EDGE_FALLTHRU) == 0)
1617 /* Otherwise we must split the edge. */
1620 bb = split_edge (e);
1624 /* Now that we've found the spot, do the insertion. */
1626 /* Set the new block number for these insns, if structure is allocated. */
1627 if (basic_block_for_insn)
1630 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1631 set_block_for_insn (i, bb);
1636 emit_insns_before (insns, before);
1637 if (before == bb->head)
1642 rtx last = emit_insns_after (insns, after);
1643 if (after == bb->end)
1647 if (GET_CODE (last) == JUMP_INSN)
1649 if (returnjump_p (last))
1651 /* ??? Remove all outgoing edges from BB and add one
1652 for EXIT. This is not currently a problem because
1653 this only happens for the (single) epilogue, which
1654 already has a fallthru edge to EXIT. */
1657 if (e->dest != EXIT_BLOCK_PTR
1658 || e->succ_next != NULL
1659 || (e->flags & EDGE_FALLTHRU) == 0)
1661 e->flags &= ~EDGE_FALLTHRU;
1663 emit_barrier_after (last);
1672 /* Update the CFG for all queued instructions. */
1675 commit_edge_insertions ()
1680 #ifdef ENABLE_CHECKING
1681 verify_flow_info ();
1685 bb = ENTRY_BLOCK_PTR;
1690 for (e = bb->succ; e ; e = next)
1692 next = e->succ_next;
1694 commit_one_edge_insertion (e);
1697 if (++i >= n_basic_blocks)
1699 bb = BASIC_BLOCK (i);
1703 /* Delete all unreachable basic blocks. */
1706 delete_unreachable_blocks ()
1708 basic_block *worklist, *tos;
1709 int deleted_handler;
1714 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1716 /* Use basic_block->aux as a marker. Clear them all. */
1718 for (i = 0; i < n; ++i)
1719 BASIC_BLOCK (i)->aux = NULL;
1721 /* Add our starting points to the worklist. Almost always there will
1722 be only one. It isn't inconceivable that we might one day directly
1723 support Fortran alternate entry points. */
1725 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1729 /* Mark the block with a handy non-null value. */
1733 /* Iterate: find everything reachable from what we've already seen. */
1735 while (tos != worklist)
1737 basic_block b = *--tos;
1739 for (e = b->succ; e ; e = e->succ_next)
1747 /* Delete all unreachable basic blocks. Count down so that we don't
1748 interfere with the block renumbering that happens in flow_delete_block. */
1750 deleted_handler = 0;
1752 for (i = n - 1; i >= 0; --i)
1754 basic_block b = BASIC_BLOCK (i);
1757 /* This block was found. Tidy up the mark. */
1760 deleted_handler |= flow_delete_block (b);
1763 tidy_fallthru_edges ();
1765 /* If we deleted an exception handler, we may have EH region begin/end
1766 blocks to remove as well. */
1767 if (deleted_handler)
1768 delete_eh_regions ();
1773 /* Find EH regions for which there is no longer a handler, and delete them. */
1776 delete_eh_regions ()
1780 update_rethrow_references ();
1782 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1783 if (GET_CODE (insn) == NOTE)
1785 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1786 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1788 int num = NOTE_EH_HANDLER (insn);
1789 /* A NULL handler indicates a region is no longer needed,
1790 as long as its rethrow label isn't used. */
1791 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1793 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1794 NOTE_SOURCE_FILE (insn) = 0;
1800 /* Return true if NOTE is not one of the ones that must be kept paired,
1801 so that we may simply delete them. */
1804 can_delete_note_p (note)
1807 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1808 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1811 /* Unlink a chain of insns between START and FINISH, leaving notes
1812 that must be paired. */
1815 flow_delete_insn_chain (start, finish)
1818 /* Unchain the insns one by one. It would be quicker to delete all
1819 of these with a single unchaining, rather than one at a time, but
1820 we need to keep the NOTE's. */
1826 next = NEXT_INSN (start);
1827 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1829 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1832 next = flow_delete_insn (start);
1834 if (start == finish)
1840 /* Delete the insns in a (non-live) block. We physically delete every
1841 non-deleted-note insn, and update the flow graph appropriately.
1843 Return nonzero if we deleted an exception handler. */
1845 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1846 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1849 flow_delete_block (b)
1852 int deleted_handler = 0;
1855 /* If the head of this block is a CODE_LABEL, then it might be the
1856 label for an exception handler which can't be reached.
1858 We need to remove the label from the exception_handler_label list
1859 and remove the associated NOTE_INSN_EH_REGION_BEG and
1860 NOTE_INSN_EH_REGION_END notes. */
1864 never_reached_warning (insn);
1866 if (GET_CODE (insn) == CODE_LABEL)
1868 rtx x, *prev = &exception_handler_labels;
1870 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1872 if (XEXP (x, 0) == insn)
1874 /* Found a match, splice this label out of the EH label list. */
1875 *prev = XEXP (x, 1);
1876 XEXP (x, 1) = NULL_RTX;
1877 XEXP (x, 0) = NULL_RTX;
1879 /* Remove the handler from all regions */
1880 remove_handler (insn);
1881 deleted_handler = 1;
1884 prev = &XEXP (x, 1);
1887 /* This label may be referenced by code solely for its value, or
1888 referenced by static data, or something. We have determined
1889 that it is not reachable, but cannot delete the label itself.
1890 Save code space and continue to delete the balance of the block,
1891 along with properly updating the cfg. */
1892 if (!can_delete_label_p (insn))
1894 /* If we've only got one of these, skip the whole deleting business. */
1897 goto no_delete_insns;
1898 insn = NEXT_INSN (insn);
1902 /* Include any jump table following the basic block. */
1904 if (GET_CODE (end) == JUMP_INSN
1905 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1906 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1907 && GET_CODE (tmp) == JUMP_INSN
1908 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1909 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1912 /* Include any barrier that may follow the basic block. */
1913 tmp = next_nonnote_insn (end);
1914 if (tmp && GET_CODE (tmp) == BARRIER)
1917 /* Selectively delete the entire chain. */
1918 flow_delete_insn_chain (insn, end);
1922 /* Remove the edges into and out of this block. Note that there may
1923 indeed be edges in, if we are removing an unreachable loop. */
1927 for (e = b->pred; e ; e = next)
1929 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1932 next = e->pred_next;
1936 for (e = b->succ; e ; e = next)
1938 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1941 next = e->succ_next;
1950 /* Remove the basic block from the array, and compact behind it. */
1953 return deleted_handler;
1956 /* Remove block B from the basic block array and compact behind it. */
1962 int i, n = n_basic_blocks;
1964 for (i = b->index; i + 1 < n; ++i)
1966 basic_block x = BASIC_BLOCK (i + 1);
1967 BASIC_BLOCK (i) = x;
1971 basic_block_info->num_elements--;
1975 /* Delete INSN by patching it out. Return the next insn. */
1978 flow_delete_insn (insn)
1981 rtx prev = PREV_INSN (insn);
1982 rtx next = NEXT_INSN (insn);
1985 PREV_INSN (insn) = NULL_RTX;
1986 NEXT_INSN (insn) = NULL_RTX;
1989 NEXT_INSN (prev) = next;
1991 PREV_INSN (next) = prev;
1993 set_last_insn (prev);
1995 if (GET_CODE (insn) == CODE_LABEL)
1996 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1998 /* If deleting a jump, decrement the use count of the label. Deleting
1999 the label itself should happen in the normal course of block merging. */
2000 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2001 LABEL_NUSES (JUMP_LABEL (insn))--;
2003 /* Also if deleting an insn that references a label. */
2004 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2005 LABEL_NUSES (XEXP (note, 0))--;
2010 /* True if a given label can be deleted. */
2013 can_delete_label_p (label)
2018 if (LABEL_PRESERVE_P (label))
2021 for (x = forced_labels; x ; x = XEXP (x, 1))
2022 if (label == XEXP (x, 0))
2024 for (x = label_value_list; x ; x = XEXP (x, 1))
2025 if (label == XEXP (x, 0))
2027 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2028 if (label == XEXP (x, 0))
2031 /* User declared labels must be preserved. */
2032 if (LABEL_NAME (label) != 0)
2038 /* Blocks A and B are to be merged into a single block A. The insns
2039 are already contiguous, hence `nomove'. */
2042 merge_blocks_nomove (a, b)
2046 rtx b_head, b_end, a_end;
2047 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2050 /* If there was a CODE_LABEL beginning B, delete it. */
2053 if (GET_CODE (b_head) == CODE_LABEL)
2055 /* Detect basic blocks with nothing but a label. This can happen
2056 in particular at the end of a function. */
2057 if (b_head == b_end)
2059 del_first = del_last = b_head;
2060 b_head = NEXT_INSN (b_head);
2063 /* Delete the basic block note. */
2064 if (GET_CODE (b_head) == NOTE
2065 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2067 if (b_head == b_end)
2072 b_head = NEXT_INSN (b_head);
2075 /* If there was a jump out of A, delete it. */
2077 if (GET_CODE (a_end) == JUMP_INSN)
2081 prev = prev_nonnote_insn (a_end);
2088 /* If this was a conditional jump, we need to also delete
2089 the insn that set cc0. */
2090 if (prev && sets_cc0_p (prev))
2093 prev = prev_nonnote_insn (prev);
2103 /* Delete everything marked above as well as crap that might be
2104 hanging out between the two blocks. */
2105 flow_delete_insn_chain (del_first, del_last);
2107 /* Normally there should only be one successor of A and that is B, but
2108 partway through the merge of blocks for conditional_execution we'll
2109 be merging a TEST block with THEN and ELSE successors. Free the
2110 whole lot of them and hope the caller knows what they're doing. */
2112 remove_edge (a->succ);
2114 /* Adjust the edges out of B for the new owner. */
2115 for (e = b->succ; e ; e = e->succ_next)
2119 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2120 b->pred = b->succ = NULL;
2122 /* Reassociate the insns of B with A. */
2125 if (basic_block_for_insn)
2127 BLOCK_FOR_INSN (b_head) = a;
2128 while (b_head != b_end)
2130 b_head = NEXT_INSN (b_head);
2131 BLOCK_FOR_INSN (b_head) = a;
2141 /* Blocks A and B are to be merged into a single block. A has no incoming
2142 fallthru edge, so it can be moved before B without adding or modifying
2143 any jumps (aside from the jump from A to B). */
2146 merge_blocks_move_predecessor_nojumps (a, b)
2149 rtx start, end, barrier;
2155 /* We want to delete the BARRIER after the end of the insns we are
2156 going to move. If we don't find a BARRIER, then do nothing. This
2157 can happen in some cases if we have labels we can not delete.
2159 Similarly, do nothing if we can not delete the label at the start
2160 of the target block. */
2161 barrier = next_nonnote_insn (end);
2162 if (GET_CODE (barrier) != BARRIER
2163 || (GET_CODE (b->head) == CODE_LABEL
2164 && ! can_delete_label_p (b->head)))
2167 flow_delete_insn (barrier);
2169 /* Move block and loop notes out of the chain so that we do not
2170 disturb their order.
2172 ??? A better solution would be to squeeze out all the non-nested notes
2173 and adjust the block trees appropriately. Even better would be to have
2174 a tighter connection between block trees and rtl so that this is not necessary. */
2176 start = squeeze_notes (start, end);
2178 /* Scramble the insn chain. */
2179 if (end != PREV_INSN (b->head))
2180 reorder_insns (start, end, PREV_INSN (b->head));
2184 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2185 a->index, b->index);
2188 /* Swap the records for the two blocks around. Although we are deleting B,
2190 A is now where B was and we want to compact the BB array from where A used to be. */
2191 BASIC_BLOCK(a->index) = b;
2192 BASIC_BLOCK(b->index) = a;
2194 a->index = b->index;
2197 /* Now blocks A and B are contiguous. Merge them. */
2198 merge_blocks_nomove (a, b);
2203 /* Blocks A and B are to be merged into a single block. B has no outgoing
2204 fallthru edge, so it can be moved after A without adding or modifying
2205 any jumps (aside from the jump from A to B). */
2208 merge_blocks_move_successor_nojumps (a, b)
2211 rtx start, end, barrier;
2216 /* We want to delete the BARRIER after the end of the insns we are
2217 going to move. If we don't find a BARRIER, then do nothing. This
2218 can happen in some cases if we have labels we can not delete.
2220 Similarly, do nothing if we can not delete the label at the start
2221 of the target block. */
2222 barrier = next_nonnote_insn (end);
2223 if (GET_CODE (barrier) != BARRIER
2224 || (GET_CODE (b->head) == CODE_LABEL
2225 && ! can_delete_label_p (b->head)))
2228 flow_delete_insn (barrier);
2230 /* Move block and loop notes out of the chain so that we do not
2231 disturb their order.
2233 ??? A better solution would be to squeeze out all the non-nested notes
2234 and adjust the block trees appropriately. Even better would be to have
2235 a tighter connection between block trees and rtl so that this is not necessary. */
2237 start = squeeze_notes (start, end);
2239 /* Scramble the insn chain. */
2240 reorder_insns (start, end, a->end);
2242 /* Now blocks A and B are contiguous. Merge them. */
2243 merge_blocks_nomove (a, b);
2247 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2248 b->index, a->index);
2254 /* Attempt to merge basic blocks that are potentially non-adjacent.
2255 Return true iff the attempt succeeded. */
2258 merge_blocks (e, b, c)
2262 /* If B has a fallthru edge to C, no need to move anything. */
2263 if (e->flags & EDGE_FALLTHRU)
2265 /* If a label still appears somewhere and we cannot delete the label,
2266 then we cannot merge the blocks. The edge was tidied already. */
2268 rtx insn, stop = NEXT_INSN (c->head);
2269 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2270 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2273 merge_blocks_nomove (b, c);
2277 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2278 b->index, c->index);
2287 int c_has_outgoing_fallthru;
2288 int b_has_incoming_fallthru;
2290 /* We must make sure to not munge nesting of exception regions,
2291 lexical blocks, and loop notes.
2293 The first is taken care of by requiring that the active eh
2294 region at the end of one block always matches the active eh
2295 region at the beginning of the next block.
2297 The latter two are taken care of by squeezing out all the notes. */
2299 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2300 executed and we may want to treat blocks which have two out
2301 edges, one normal, one abnormal as only having one edge for
2302 block merging purposes. */
2304 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2305 if (tmp_edge->flags & EDGE_FALLTHRU)
2307 c_has_outgoing_fallthru = (tmp_edge != NULL);
2309 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2310 if (tmp_edge->flags & EDGE_FALLTHRU)
2312 b_has_incoming_fallthru = (tmp_edge != NULL);
2314 /* If B does not have an incoming fallthru, and the exception regions
2315 match, then it can be moved immediately before C without introducing or modifying jumps.
2318 C can not be the first block, so we do not have to worry about
2319 accessing a non-existent block. */
2320 d = BASIC_BLOCK (c->index - 1);
2321 if (! b_has_incoming_fallthru
2322 && d->eh_end == b->eh_beg
2323 && b->eh_end == c->eh_beg)
2324 return merge_blocks_move_predecessor_nojumps (b, c);
2326 /* Otherwise, we're going to try to move C after B. Make sure the
2327 exception regions match.
2329 If B is the last basic block, then we must not try to access the
2330 block structure for block B + 1. Luckily in that case we do not
2331 need to worry about matching exception regions. */
2332 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2333 if (b->eh_end == c->eh_beg
2334 && (d == NULL || c->eh_end == d->eh_beg))
2336 /* If C does not have an outgoing fallthru, then it can be moved
2337 immediately after B without introducing or modifying jumps. */
2338 if (! c_has_outgoing_fallthru)
2339 return merge_blocks_move_successor_nojumps (b, c);
2341 /* Otherwise, we'll need to insert an extra jump, and possibly
2342 a new block to contain it. */
2343 /* ??? Not implemented yet. */
2350 /* Top level driver for merge_blocks. */
2357 /* Attempt to merge blocks as made possible by edge removal. If a block
2358 has only one successor, and the successor has only one predecessor,
2359 they may be combined. */
2361 for (i = 0; i < n_basic_blocks; )
2363 basic_block c, b = BASIC_BLOCK (i);
2366 /* A loop because chains of blocks might be combinable. */
2367 while ((s = b->succ) != NULL
2368 && s->succ_next == NULL
2369 && (s->flags & EDGE_EH) == 0
2370 && (c = s->dest) != EXIT_BLOCK_PTR
2371 && c->pred->pred_next == NULL
2372 /* If the jump insn has side effects, we can't kill the edge. */
2373 && (GET_CODE (b->end) != JUMP_INSN
2374 || onlyjump_p (b->end))
2375 && merge_blocks (s, b, c))
2378 /* Don't get confused by the index shift caused by deleting blocks. */
2383 /* The given edge should potentially be a fallthru edge. If that is in
2384 fact true, delete the jump and barriers that are in the way. */
2387 tidy_fallthru_edge (e, b, c)
2393 /* ??? In a late-running flow pass, other folks may have deleted basic
2394 blocks by nopping out blocks, leaving multiple BARRIERs between here
2395 and the target label. They ought to be chastised and fixed.
2397 We can also wind up with a sequence of undeletable labels between
2398 one block and the next.
2400 So search through a sequence of barriers, labels, and notes for
2401 the head of block C and assert that we really do fall through. */
2403 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2406 /* Remove what will soon cease being the jump insn from the source block.
2407 If block B consisted only of this single jump, turn it into a deleted note. */
2410 if (GET_CODE (q) == JUMP_INSN
2411 && (simplejump_p (q)
2412 || (b->succ == e && e->succ_next == NULL)))
2415 /* If this was a conditional jump, we need to also delete
2416 the insn that set cc0. */
2417 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2424 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2425 NOTE_SOURCE_FILE (q) = 0;
2428 b->end = q = PREV_INSN (q);
2431 /* Selectively unlink the sequence. */
2432 if (q != PREV_INSN (c->head))
2433 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2435 e->flags |= EDGE_FALLTHRU;
2438 /* Fix up edges that now fall through, or rather should now fall through
2439 but previously required a jump around now deleted blocks. Simplify
2440 the search by only examining blocks numerically adjacent, since this
2441 is how find_basic_blocks created them. */
2444 tidy_fallthru_edges ()
2448 for (i = 1; i < n_basic_blocks; ++i)
2450 basic_block b = BASIC_BLOCK (i - 1);
2451 basic_block c = BASIC_BLOCK (i);
2454 /* We care about simple conditional or unconditional jumps with a single successor.
2457 If we had a conditional branch to the next instruction when
2458 find_basic_blocks was called, then there will only be one
2459 out edge for the block which ended with the conditional
2460 branch (since we do not create duplicate edges).
2462 Furthermore, the edge will be marked as a fallthru because we
2463 merge the flags for the duplicate edges. So we do not want to
2464 check that the edge is not a FALLTHRU edge. */
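/* For illustration (a sketch, not taken from these sources): a block
   ending in

       (jump_insn (set (pc) (if_then_else (eq ...) (label_ref L) (pc))))
       (code_label L)

   reaches the next block whether or not the branch is taken, so only one
   out edge exists and it carries the merged FALLTHRU flag, which is why
   the test below does not exclude FALLTHRU edges.  */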
2465 if ((s = b->succ) != NULL
2466 && s->succ_next == NULL
2468 /* If the jump insn has side effects, we can't tidy the edge. */
2469 && (GET_CODE (b->end) != JUMP_INSN
2470 || onlyjump_p (b->end)))
2471 tidy_fallthru_edge (s, b, c);
2475 /* Perform data flow analysis.
2476 F is the first insn of the function; FLAGS is a set of PROP_* flags
2477 to be used in accumulating flow info. */
2480 life_analysis (f, file, flags)
2485 #ifdef ELIMINABLE_REGS
2487 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2490 /* Record which registers will be eliminated. We use this in mark_used_regs. */
2493 CLEAR_HARD_REG_SET (elim_reg_set);
2495 #ifdef ELIMINABLE_REGS
2496 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2497 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2499 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2503 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2505 /* The post-reload life analysis must have (on a global basis) the same
2506 registers live as were computed by reload itself; otherwise elimination
2507 offsets and such may be incorrect.
2509 Reload may mark some registers as live even though they do not
2510 appear in the rtl. */
2511 if (reload_completed)
2512 flags &= ~PROP_REG_INFO;
2514 /* We want alias analysis information for local dead store elimination. */
2515 if (flags & PROP_SCAN_DEAD_CODE)
2516 init_alias_analysis ();
2518 max_regno = max_reg_num ();
2520 /* Always remove no-op moves. Do this before other processing so
2521 that we don't have to keep re-scanning them. */
2522 delete_noop_moves (f);
2524 /* Some targets can emit simpler epilogues if they know that sp was
2525 not ever modified during the function. After reload, of course,
2526 we've already emitted the epilogue so there's no sense searching. */
2527 if (! reload_completed)
2528 notice_stack_pointer_modification (f);
2530 /* Allocate and zero out data structures that will record the
2531 data from lifetime analysis. */
2532 allocate_reg_life_data ();
2533 allocate_bb_life_data ();
2535 /* Find the set of registers live on function exit. */
2536 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2538 /* "Update" life info from zero. It'd be nice to begin the
2539 relaxation with just the exit and noreturn blocks, but that set
2540 is not immediately handy. */
2542 if (flags & PROP_REG_INFO)
2543 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2544 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2547 if (flags & PROP_SCAN_DEAD_CODE)
2548 end_alias_analysis ();
2551 dump_flow_info (file);
2553 free_basic_block_vars (1);
2556 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2557 Search for REGNO. If found, abort if it is not wider than word_mode. */
2560 verify_wide_reg_1 (px, pregno)
2565 unsigned int regno = *(int *) pregno;
2567 if (GET_CODE (x) == REG && REGNO (x) == regno)
2569 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2576 /* A subroutine of verify_local_live_at_start. Search through insns
2577 between HEAD and END looking for register REGNO. */
2580 verify_wide_reg (regno, head, end)
2586 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2587 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2591 head = NEXT_INSN (head);
2594 /* We didn't find the register at all. Something's way screwy. */
2598 /* A subroutine of update_life_info. Verify that there are no untoward
2599 changes in live_at_start during a local update. */
2602 verify_local_live_at_start (new_live_at_start, bb)
2603 regset new_live_at_start;
2606 if (reload_completed)
2608 /* After reload, there are no pseudos, nor subregs of multi-word
2609 registers. The regsets should exactly match. */
2610 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2617 /* Find the set of changed registers. */
2618 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2620 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2622 /* No registers should die. */
2623 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2625 /* Verify that the now-live register is wider than word_mode. */
2626 verify_wide_reg (i, bb->head, bb->end);
2631 /* Updates life information starting with the basic blocks set in BLOCKS.
2632 If BLOCKS is null, consider it to be the universal set.
2634 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2635 we are only expecting local modifications to basic blocks. If we find
2636 extra registers live at the beginning of a block, then we either killed
2637 useful data, or we have a broken split that wants data not provided.
2638 If we find registers removed from live_at_start, that means we have
2639 a broken peephole that is killing a register it shouldn't.
2641 ??? This is not true in one situation -- when a pre-reload splitter
2642 generates subregs of a multi-word pseudo, current life analysis will
2643 lose the kill. So we _can_ have a pseudo go live. How irritating.
2645 Including PROP_REG_INFO does not properly refresh regs_ever_live
2646 unless the caller resets it to zero. */
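/* A typical caller after a local transformation might look like the
   following sketch (illustrative only; the caller is assumed to have
   collected the affected blocks itself):

       sbitmap blocks = sbitmap_alloc (n_basic_blocks);
       sbitmap_zero (blocks);
       SET_BIT (blocks, bb->index);
       update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
       sbitmap_free (blocks);  */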
2649 update_life_info (blocks, extent, prop_flags)
2651 enum update_life_extent extent;
2655 regset_head tmp_head;
2658 tmp = INITIALIZE_REG_SET (tmp_head);
2660 /* For a global update, we go through the relaxation process again. */
2661 if (extent != UPDATE_LIFE_LOCAL)
2663 calculate_global_regs_live (blocks, blocks,
2664 prop_flags & PROP_SCAN_DEAD_CODE);
2666 /* If asked, remove notes from the blocks we'll update. */
2667 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2668 count_or_remove_death_notes (blocks, 1);
2673 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2675 basic_block bb = BASIC_BLOCK (i);
2677 COPY_REG_SET (tmp, bb->global_live_at_end);
2678 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2680 if (extent == UPDATE_LIFE_LOCAL)
2681 verify_local_live_at_start (tmp, bb);
2686 for (i = n_basic_blocks - 1; i >= 0; --i)
2688 basic_block bb = BASIC_BLOCK (i);
2690 COPY_REG_SET (tmp, bb->global_live_at_end);
2691 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2693 if (extent == UPDATE_LIFE_LOCAL)
2694 verify_local_live_at_start (tmp, bb);
2700 if (prop_flags & PROP_REG_INFO)
2702 /* The only pseudos that are live at the beginning of the function
2703 are those that were not set anywhere in the function. local-alloc
2704 doesn't know how to handle these correctly, so mark them as not
2705 local to any one basic block. */
2706 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2707 FIRST_PSEUDO_REGISTER, i,
2708 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2710 /* We have a problem with any pseudoreg that lives across the setjmp.
2711 ANSI says that if a user variable does not change in value between
2712 the setjmp and the longjmp, then the longjmp preserves it. This
2713 includes longjmp from a place where the pseudo appears dead.
2714 (In principle, the value still exists if it is in scope.)
2715 If the pseudo goes in a hard reg, some other value may occupy
2716 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2717 Conclusion: such a pseudo must not go in a hard reg. */
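/* For illustration (a sketch, not taken from these sources):

       int v = compute ();
       if (setjmp (buf) == 0)
         may_longjmp ();        -- v appears dead across this call
       use (v);

   If V were kept in a call-clobbered hard register, the longjmp path
   could return with that register holding some other value, so the
   code below keeps such pseudos out of hard registers.  */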
2718 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2719 FIRST_PSEUDO_REGISTER, i,
2721 if (regno_reg_rtx[i] != 0)
2723 REG_LIVE_LENGTH (i) = -1;
2724 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2730 /* Free the variables allocated by find_basic_blocks.
2732 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2735 free_basic_block_vars (keep_head_end_p)
2736 int keep_head_end_p;
2738 if (basic_block_for_insn)
2740 VARRAY_FREE (basic_block_for_insn);
2741 basic_block_for_insn = NULL;
2744 if (! keep_head_end_p)
2747 VARRAY_FREE (basic_block_info);
2750 ENTRY_BLOCK_PTR->aux = NULL;
2751 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2752 EXIT_BLOCK_PTR->aux = NULL;
2753 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2757 /* Return nonzero if the destination of SET equals the source. */
2762 rtx src = SET_SRC (set);
2763 rtx dst = SET_DEST (set);
2765 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2767 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2769 src = SUBREG_REG (src);
2770 dst = SUBREG_REG (dst);
2773 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2774 && REGNO (src) == REGNO (dst));
2777 /* Return nonzero if an insn consists only of SETs, each of which only sets a value to itself. */
2783 rtx pat = PATTERN (insn);
2785 /* Insns carrying these notes are useful later on. */
2786 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2789 if (GET_CODE (pat) == SET && set_noop_p (pat))
2792 if (GET_CODE (pat) == PARALLEL)
2795 /* If nothing but SETs of registers to themselves,
2796 this insn can also be deleted. */
2797 for (i = 0; i < XVECLEN (pat, 0); i++)
2799 rtx tem = XVECEXP (pat, 0, i);
2801 if (GET_CODE (tem) == USE
2802 || GET_CODE (tem) == CLOBBER)
2805 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2814 /* Delete any insns that copy a register to itself. */
2817 delete_noop_moves (f)
2821 for (insn = f; insn; insn = NEXT_INSN (insn))
2823 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2825 PUT_CODE (insn, NOTE);
2826 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2827 NOTE_SOURCE_FILE (insn) = 0;
2832 /* Determine if the stack pointer is constant over the life of the function.
2833 Only useful before prologues have been emitted. */
2836 notice_stack_pointer_modification_1 (x, pat, data)
2838 rtx pat ATTRIBUTE_UNUSED;
2839 void *data ATTRIBUTE_UNUSED;
2841 if (x == stack_pointer_rtx
2842 /* The stack pointer is only modified indirectly as the result
2843 of a push until later in flow. See the comments in rtl.texi
2844 regarding Embedded Side-Effects on Addresses. */
2845 || (GET_CODE (x) == MEM
2846 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2847 || GET_CODE (XEXP (x, 0)) == PRE_INC
2848 || GET_CODE (XEXP (x, 0)) == POST_DEC
2849 || GET_CODE (XEXP (x, 0)) == POST_INC)
2850 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2851 current_function_sp_is_unchanging = 0;
2855 notice_stack_pointer_modification (f)
2860 /* Assume that the stack pointer is unchanging if alloca hasn't been used. */
2862 current_function_sp_is_unchanging = !current_function_calls_alloca;
2863 if (! current_function_sp_is_unchanging)
2866 for (insn = f; insn; insn = NEXT_INSN (insn))
2868 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2870 /* Check if insn modifies the stack pointer. */
2871 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2873 if (! current_function_sp_is_unchanging)
2879 /* Mark a register in SET. Hard registers in large modes get all
2880 of their component registers set as well. */
2882 mark_reg (reg, xset)
2886 regset set = (regset) xset;
2887 int regno = REGNO (reg);
2889 if (GET_MODE (reg) == BLKmode)
2892 SET_REGNO_REG_SET (set, regno);
2893 if (regno < FIRST_PSEUDO_REGISTER)
2895 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2897 SET_REGNO_REG_SET (set, regno + n);
2901 /* Mark those regs which are needed at the end of the function as live
2902 at the end of the last basic block. */
2904 mark_regs_live_at_end (set)
2909 /* If exiting needs the right stack value, consider the stack pointer
2910 live at the end of the function. */
2911 if ((HAVE_epilogue && reload_completed)
2912 || ! EXIT_IGNORE_STACK
2913 || (! FRAME_POINTER_REQUIRED
2914 && ! current_function_calls_alloca
2915 && flag_omit_frame_pointer)
2916 || current_function_sp_is_unchanging)
2918 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2921 /* Mark the frame pointer if needed at the end of the function. If
2922 we end up eliminating it, it will be removed from the live list
2923 of each basic block by reload. */
2925 if (! reload_completed || frame_pointer_needed)
2927 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2928 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2929 /* If they are different, also mark the hard frame pointer as live */
2930 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2934 #ifdef PIC_OFFSET_TABLE_REGNUM
2935 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2936 /* Many architectures have a GP register even without flag_pic.
2937 Assume the pic register is not in use, or will be handled by
2938 other means, if it is not fixed. */
2939 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2940 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2944 /* Mark all global registers, and all registers used by the epilogue
2945 as being live at the end of the function since they may be
2946 referenced by our caller. */
2947 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2949 #ifdef EPILOGUE_USES
2950 || EPILOGUE_USES (i)
2953 SET_REGNO_REG_SET (set, i);
2955 /* Mark all call-saved registers that we actually used. */
2956 if (HAVE_epilogue && reload_completed)
2958 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2959 if (! call_used_regs[i] && regs_ever_live[i])
2960 SET_REGNO_REG_SET (set, i);
2963 /* Mark function return value. */
2964 diddle_return_value (mark_reg, set);
2967 /* Callback function for for_each_successor_phi. DATA is a regset.
2968 Sets the SRC_REGNO, the regno of the phi alternative for phi node
2969 INSN, in the regset. */
2972 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2973 rtx insn ATTRIBUTE_UNUSED;
2974 int dest_regno ATTRIBUTE_UNUSED;
2978 regset live = (regset) data;
2979 SET_REGNO_REG_SET (live, src_regno);
2983 /* Propagate global life info around the graph of basic blocks. Begin
2984 considering blocks with their corresponding bit set in BLOCKS_IN.
2985 If BLOCKS_IN is null, consider it the universal set.
2987 BLOCKS_OUT is set for every block that was changed. */
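/* In equation form, the relaxation computed below is the standard
   backward liveness problem (illustrative restatement):

       live_at_start (B) = (live_at_end (B) - def (B)) U use (B)
       live_at_end (B)   = U live_at_start (S) over all successors S of B

   iterated over a worklist of blocks until no live_at_start changes.  */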
2990 calculate_global_regs_live (blocks_in, blocks_out, flags)
2991 sbitmap blocks_in, blocks_out;
2994 basic_block *queue, *qhead, *qtail, *qend;
2995 regset tmp, new_live_at_end;
2996 regset_head tmp_head;
2997 regset_head new_live_at_end_head;
3000 tmp = INITIALIZE_REG_SET (tmp_head);
3001 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3003 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3004 because the `head == tail' style test for an empty queue doesn't
3005 work with a full queue. */
3006 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3008 qhead = qend = queue + n_basic_blocks + 2;
3010 /* Clear out the garbage that might be hanging out in bb->aux. */
3011 for (i = n_basic_blocks - 1; i >= 0; --i)
3012 BASIC_BLOCK (i)->aux = NULL;
3014 /* Queue the blocks set in the initial mask. Do this in reverse block
3015 number order so that we are more likely for the first round to do
3016 useful work. We use AUX non-null to flag that the block is queued. */
3019 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3021 basic_block bb = BASIC_BLOCK (i);
3028 for (i = 0; i < n_basic_blocks; ++i)
3030 basic_block bb = BASIC_BLOCK (i);
3037 sbitmap_zero (blocks_out);
3039 while (qhead != qtail)
3041 int rescan, changed;
3050 /* Begin by propagating live_at_start from the successor blocks. */
3051 CLEAR_REG_SET (new_live_at_end);
3052 for (e = bb->succ; e ; e = e->succ_next)
3054 basic_block sb = e->dest;
3055 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3058 /* Regs used in phi nodes are not included in
3059 global_live_at_start, since they are live only along a
3060 particular edge. Set those regs that are live because of a
3061 phi node alternative corresponding to this particular block. */
3062 for_each_successor_phi (bb, &set_phi_alternative_reg,
3065 if (bb == ENTRY_BLOCK_PTR)
3067 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3071 /* On our first pass through this block, we'll go ahead and continue.
3072 Recognize first pass by local_set NULL. On subsequent passes, we
3073 get to skip out early if live_at_end wouldn't have changed. */
3075 if (bb->local_set == NULL)
3077 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3082 /* If any bits were removed from live_at_end, we'll have to
3083 rescan the block. This wouldn't be necessary if we had
3084 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3085 local_live is really dependent on live_at_end. */
3086 CLEAR_REG_SET (tmp);
3087 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3088 new_live_at_end, BITMAP_AND_COMPL);
3092 /* Find the set of changed bits. Take this opportunity
3093 to notice that this set is empty and early out. */
3094 CLEAR_REG_SET (tmp);
3095 changed = bitmap_operation (tmp, bb->global_live_at_end,
3096 new_live_at_end, BITMAP_XOR);
3100 /* If any of the changed bits overlap with local_set,
3101 we'll have to rescan the block. Detect overlap by
3102 the AND with ~local_set turning off bits. */
3103 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3108 /* Let our caller know that BB changed enough to require its
3109 death notes updated. */
3111 SET_BIT (blocks_out, bb->index);
3115 /* Add to live_at_start the set of all registers in
3116 new_live_at_end that aren't in the old live_at_end. */
3118 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3120 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3122 changed = bitmap_operation (bb->global_live_at_start,
3123 bb->global_live_at_start,
3130 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3132 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3133 into live_at_start. */
3134 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3136 /* If live_at_start didn't change, no need to go farther. */
3137 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3140 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3143 /* Queue all predecessors of BB so that we may re-examine
3144 their live_at_end. */
3145 for (e = bb->pred; e ; e = e->pred_next)
3147 basic_block pb = e->src;
3148 if (pb->aux == NULL)
3159 FREE_REG_SET (new_live_at_end);
3163 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3165 basic_block bb = BASIC_BLOCK (i);
3166 FREE_REG_SET (bb->local_set);
3171 for (i = n_basic_blocks - 1; i >= 0; --i)
3173 basic_block bb = BASIC_BLOCK (i);
3174 FREE_REG_SET (bb->local_set);
3181 /* Subroutines of life analysis. */
3183 /* Allocate the permanent data structures that represent the results
3184 of life analysis. Not static since used also for stupid life analysis. */
3187 allocate_bb_life_data ()
3191 for (i = 0; i < n_basic_blocks; i++)
3193 basic_block bb = BASIC_BLOCK (i);
3195 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3196 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3199 ENTRY_BLOCK_PTR->global_live_at_end
3200 = OBSTACK_ALLOC_REG_SET (function_obstack);
3201 EXIT_BLOCK_PTR->global_live_at_start
3202 = OBSTACK_ALLOC_REG_SET (function_obstack);
3204 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3208 allocate_reg_life_data ()
3212 /* Recalculate the register space, in case it has grown. Old style
3213 vector oriented regsets would set regset_{size,bytes} here also. */
3214 allocate_reg_info (max_regno, FALSE, FALSE);
3216 /* Reset all the data we'll collect in propagate_block and its subroutines. */
3218 for (i = 0; i < max_regno; i++)
3222 REG_N_DEATHS (i) = 0;
3223 REG_N_CALLS_CROSSED (i) = 0;
3224 REG_LIVE_LENGTH (i) = 0;
3225 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3229 /* Delete dead instructions for propagate_block. */
3232 propagate_block_delete_insn (bb, insn)
3236 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3238 /* If the insn referred to a label, and that label was attached to
3239 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3240 pretty much mandatory to delete it, because the ADDR_VEC may be
3241 referencing labels that no longer exist. */
3245 rtx label = XEXP (inote, 0);
3248 if (LABEL_NUSES (label) == 1
3249 && (next = next_nonnote_insn (label)) != NULL
3250 && GET_CODE (next) == JUMP_INSN
3251 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3252 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3254 rtx pat = PATTERN (next);
3255 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3256 int len = XVECLEN (pat, diff_vec_p);
3259 for (i = 0; i < len; i++)
3260 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3262 flow_delete_insn (next);
3266 if (bb->end == insn)
3267 bb->end = PREV_INSN (insn);
3268 flow_delete_insn (insn);
3271 /* Delete dead libcalls for propagate_block. Return the insn
3272 before the libcall. */
3275 propagate_block_delete_libcall (bb, insn, note)
3279 rtx first = XEXP (note, 0);
3280 rtx before = PREV_INSN (first);
3282 if (insn == bb->end)
3285 flow_delete_insn_chain (first, insn);
3289 /* Update the life-status of regs for one insn. Return the previous insn. */
3292 propagate_one_insn (pbi, insn)
3293 struct propagate_block_info *pbi;
3296 rtx prev = PREV_INSN (insn);
3297 int flags = pbi->flags;
3298 int insn_is_dead = 0;
3299 int libcall_is_dead = 0;
3303 if (! INSN_P (insn))
3306 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3307 if (flags & PROP_SCAN_DEAD_CODE)
3309 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3311 libcall_is_dead = (insn_is_dead && note != 0
3312 && libcall_dead_p (pbi, PATTERN (insn),
3316 /* We almost certainly don't want to delete prologue or epilogue
3317 instructions. Warn about probable compiler lossage. */
3320 && (((HAVE_epilogue || HAVE_prologue)
3321 && prologue_epilogue_contains (insn))
3322 || (HAVE_sibcall_epilogue
3323 && sibcall_epilogue_contains (insn))))
3325 if (flags & PROP_KILL_DEAD_CODE)
3327 warning ("ICE: would have deleted prologue/epilogue insn");
3328 if (!inhibit_warnings)
3331 libcall_is_dead = insn_is_dead = 0;
3334 /* If an instruction consists of just dead store(s) on final pass, delete it. */
3336 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3338 if (libcall_is_dead)
3340 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3341 insn = NEXT_INSN (prev);
3344 propagate_block_delete_insn (pbi->bb, insn);
3346 /* CC0 is now known to be dead. Either this insn used it,
3347 in which case it doesn't anymore, or clobbered it,
3348 so the next insn can't use it. */
3354 /* See if this is an increment or decrement that can be merged into
3355 a following memory address. */
3358 register rtx x = single_set (insn);
3360 /* Does this instruction increment or decrement a register? */
3361 if (!reload_completed
3362 && (flags & PROP_AUTOINC)
3364 && GET_CODE (SET_DEST (x)) == REG
3365 && (GET_CODE (SET_SRC (x)) == PLUS
3366 || GET_CODE (SET_SRC (x)) == MINUS)
3367 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3368 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3369 /* Ok, look for a following memory ref we can combine with.
3370 If one is found, change the memory ref to a PRE_INC
3371 or PRE_DEC, cancel this insn, and return 1.
3372 Return 0 if nothing has been done. */
3373 && try_pre_increment_1 (pbi, insn))
3376 #endif /* AUTO_INC_DEC */
3378 CLEAR_REG_SET (pbi->new_live);
3379 CLEAR_REG_SET (pbi->new_dead);
3381 /* If this is not the final pass, and this insn is copying the value of
3382 a library call and it's dead, don't scan the insns that perform the
3383 library call, so that the call's arguments are not marked live. */
3384 if (libcall_is_dead)
3386 /* Record the death of the dest reg. */
3387 mark_set_regs (pbi, PATTERN (insn), insn);
3389 insn = XEXP (note, 0);
3390 return PREV_INSN (insn);
3392 else if (GET_CODE (PATTERN (insn)) == SET
3393 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3394 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3395 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3396 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3397 /* We have an insn to pop a constant amount off the stack.
3398 (Such insns use PLUS regardless of the direction of the stack,
3399 and any insn to adjust the stack by a constant is always a pop.)
3400 These insns, if not dead stores, have no effect on life. */
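/* Such a stack-adjusting insn has the shape, e.g. (illustrative):

       (set (reg sp) (plus (reg sp) (const_int 16)))

   It references no register other than the stack pointer itself.  */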
3404 /* Any regs live at the time of a call instruction must not go
3405 in a register clobbered by calls. Find all regs now live and
3406 record this for them. */
3408 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3409 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3410 { REG_N_CALLS_CROSSED (i)++; });
3412 /* Record sets. Do this even for dead instructions, since they
3413 would have killed the values if they hadn't been deleted. */
3414 mark_set_regs (pbi, PATTERN (insn), insn);
3416 if (GET_CODE (insn) == CALL_INSN)
3422 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3423 cond = COND_EXEC_TEST (PATTERN (insn));
3425 /* Non-constant calls clobber memory. */
3426 if (! CONST_CALL_P (insn))
3427 free_EXPR_LIST_list (&pbi->mem_set_list);
3429 /* There may be extra registers to be clobbered. */
3430 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3432 note = XEXP (note, 1))
3433 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3434 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3435 cond, insn, pbi->flags);
3437 /* Calls change all call-used and global registers. */
3438 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3439 if (call_used_regs[i] && ! global_regs[i]
3442 /* We do not want REG_UNUSED notes for these registers. */
3443 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3444 cond, insn, pbi->flags & ~PROP_DEATH_NOTES);
3448 /* If an insn doesn't use CC0, it becomes dead since we assume
3449 that every insn clobbers it. So show it dead here;
3450 mark_used_regs will set it live if it is referenced. */
3455 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3457 /* Sometimes we may have inserted something before INSN (such as a move)
3458 when we make an auto-inc. So ensure we will scan those insns. */
3460 prev = PREV_INSN (insn);
3463 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3469 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3470 cond = COND_EXEC_TEST (PATTERN (insn));
3472 /* Calls use their arguments. */
3473 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3475 note = XEXP (note, 1))
3476 if (GET_CODE (XEXP (note, 0)) == USE)
3477 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3480 /* The stack ptr is used (honorarily) by a CALL insn. */
3481 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3483 /* Calls may also reference any of the global registers,
3484 so they are made live. */
3485 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3487 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3492 /* Update reg_live for the registers killed and used. */
3493 AND_COMPL_REG_SET (pbi->reg_live, pbi->new_dead);
3494 IOR_REG_SET (pbi->reg_live, pbi->new_live);
3496 /* On final pass, update counts of how many insns in which each reg is live. */
3498 if (flags & PROP_REG_INFO)
3499 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3500 { REG_LIVE_LENGTH (i)++; });
3505 /* Initialize a propagate_block_info struct for public consumption.
3506 Note that the structure itself is opaque to this file, but that
3507 the user can use the regsets provided here. */
3509 struct propagate_block_info *
3510 init_propagate_block_info (bb, live, local_set, flags)
3516 struct propagate_block_info *pbi = xmalloc (sizeof(*pbi));
3519 pbi->reg_live = live;
3520 pbi->mem_set_list = NULL_RTX;
3521 pbi->local_set = local_set;
3525 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3526 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3528 pbi->reg_next_use = NULL;
3530 pbi->new_live = BITMAP_XMALLOC ();
3531 pbi->new_dead = BITMAP_XMALLOC ();
3536 /* Release a propagate_block_info struct. */
3539 free_propagate_block_info (pbi)
3540 struct propagate_block_info *pbi;
3542 free_EXPR_LIST_list (&pbi->mem_set_list);
3544 BITMAP_XFREE (pbi->new_live);
3545 BITMAP_XFREE (pbi->new_dead);
3547 if (pbi->reg_next_use)
3548 free (pbi->reg_next_use);
3553 /* Compute the registers live at the beginning of a basic block BB from
3554 those live at the end.
3556 When called, REG_LIVE contains those live at the end. On return, it
3557 contains those live at the beginning.
3559 LOCAL_SET, if non-null, will be set with all registers killed by
3560 this basic block. */
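/* Typical use (a sketch mirroring update_life_info above): start from a
   copy of the registers live at the end of the block and let this
   function turn it into the set live at the head:

       COPY_REG_SET (tmp, bb->global_live_at_end);
       propagate_block (bb, tmp, (regset) NULL, prop_flags);  */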
3563 propagate_block (bb, live, local_set, flags)
3569 struct propagate_block_info *pbi;
3572 pbi = init_propagate_block_info (bb, live, local_set, flags);
3574 if (flags & PROP_REG_INFO)
3578 /* Process the regs live at the end of the block.
3579 Mark them as not local to any one basic block. */
3580 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3581 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3584 /* Scan the block an insn at a time from end to beginning. */
3586 for (insn = bb->end; ; insn = prev)
3588 /* If this is a call to `setjmp' et al, warn if any
3589 non-volatile datum is live. */
3590 if ((flags & PROP_REG_INFO)
3591 && GET_CODE (insn) == NOTE
3592 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3593 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3595 prev = propagate_one_insn (pbi, insn);
3597 if (insn == bb->head)
3601 free_propagate_block_info (pbi);
3604 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3605 (SET expressions whose destinations are registers dead after the insn).
3606 NEEDED is the regset that says which regs are alive after the insn.
3608 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3610 If X is the entire body of an insn, NOTES contains the reg notes
3611 pertaining to the insn. */
3614 insn_dead_p (pbi, x, call_ok, notes)
3615 struct propagate_block_info *pbi;
3618 rtx notes ATTRIBUTE_UNUSED;
3620 enum rtx_code code = GET_CODE (x);
3623 /* If flow is invoked after reload, we must take existing AUTO_INC
3624 expressions into account. */
3625 if (reload_completed)
3627 for ( ; notes; notes = XEXP (notes, 1))
3629 if (REG_NOTE_KIND (notes) == REG_INC)
3631 int regno = REGNO (XEXP (notes, 0));
3633 /* Don't delete insns to set global regs. */
3634 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3635 || REGNO_REG_SET_P (pbi->reg_live, regno))
3642 /* If setting something that's a reg or part of one,
3643 see if that register's altered value will be live. */
3647 rtx r = SET_DEST (x);
3650 if (GET_CODE (r) == CC0)
3651 return ! pbi->cc0_live;
3654 /* A SET that is a subroutine call cannot be dead. */
3655 if (GET_CODE (SET_SRC (x)) == CALL)
3661 /* Don't eliminate loads from volatile memory or volatile asms. */
3662 else if (volatile_refs_p (SET_SRC (x)))
3665 if (GET_CODE (r) == MEM)
3669 if (MEM_VOLATILE_P (r))
3672 /* Walk the set of memory locations we are currently tracking
3673 and see if one is an identical match to this memory location.
3674 If so, this memory write is dead (remember, we're walking
3675 backwards from the end of the block to the start). */
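/* For example (illustrative), scanning the block backwards through

       (set (mem:SI (reg fp)) (reg a))    earlier store, now found dead
       ... no intervening read or aliasing store ...
       (set (mem:SI (reg fp)) (reg b))    later store, already recorded

   the later store placed its address on mem_set_list, so the earlier
   store to the same location is recognized here as dead.  */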
3676 temp = pbi->mem_set_list;
3679 if (rtx_equal_p (XEXP (temp, 0), r))
3681 temp = XEXP (temp, 1);
3686 while (GET_CODE (r) == SUBREG
3687 || GET_CODE (r) == STRICT_LOW_PART
3688 || GET_CODE (r) == ZERO_EXTRACT)
3691 if (GET_CODE (r) == REG)
3693 int regno = REGNO (r);
3696 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3699 /* If this is a hard register, verify that subsequent
3700 words are not needed. */
3701 if (regno < FIRST_PSEUDO_REGISTER)
3703 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3706 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3710 /* Don't delete insns to set global regs. */
3711 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3714 /* Make sure insns to set the stack pointer aren't deleted. */
3715 if (regno == STACK_POINTER_REGNUM)
3718 /* Make sure insns to set the frame pointer aren't deleted. */
3719 if (regno == FRAME_POINTER_REGNUM
3720 && (! reload_completed || frame_pointer_needed))
3722 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3723 if (regno == HARD_FRAME_POINTER_REGNUM
3724 && (! reload_completed || frame_pointer_needed))
3728 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3729 /* Make sure insns to set arg pointer are never deleted
3730 (if the arg pointer isn't fixed, there will be a USE
3731 for it, so we can treat it normally). */
3732 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3736 /* Otherwise, the set is dead. */
3742 /* If performing several activities, insn is dead if each activity
3743 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3744 CLOBBER or USE that's inside a PARALLEL doesn't make the insn worth keeping. */
3746 else if (code == PARALLEL)
3748 int i = XVECLEN (x, 0);
3750 for (i--; i >= 0; i--)
3751 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3752 && GET_CODE (XVECEXP (x, 0, i)) != USE
3753 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3759 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3760 is not necessarily true for hard registers. */
3761 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3762 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3763 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3766 /* We do not check other CLOBBER or USE here. An insn consisting of just
3767 a CLOBBER or just a USE should not be deleted. */
3771 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3772 return 1 if the entire library call is dead.
3773 This is true if X copies a register (hard or pseudo)
3774 and if the hard return reg of the call insn is dead.
3775 (The caller should have tested the destination of X already for death.)
3777 If this insn doesn't just copy a register, then we don't
3778 have an ordinary libcall. In that case, cse could not have
3779 managed to substitute the source for the dest later on,
3780 so we can assume the libcall is dead.
3782 NEEDED is the bit vector of pseudoregs live before this insn.
3783 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
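/* Roughly, a libcall block has this shape (illustrative sketch only):

       first: ... set up arguments ...         REG_LIBCALL note -> last
              (call_insn ...)                  sets the hard return reg
       last:  (set (reg pseudo) (reg ret))     REG_RETVAL note -> first

   X is the body of LAST, the copy out of the hard return register; if
   that hard register is dead after LAST, the whole sequence is dead.  */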
3786 libcall_dead_p (pbi, x, note, insn)
3787 struct propagate_block_info *pbi;
3792 register RTX_CODE code = GET_CODE (x);
3796 register rtx r = SET_SRC (x);
3797 if (GET_CODE (r) == REG)
3799 rtx call = XEXP (note, 0);
3803 /* Find the call insn. */
3804 while (call != insn && GET_CODE (call) != CALL_INSN)
3805 call = NEXT_INSN (call);
3807 /* If there is none, do nothing special,
3808 since ordinary death handling can understand these insns. */
3812 /* See if the hard reg holding the value is dead.
3813 If this is a PARALLEL, find the call within it. */
3814 call_pat = PATTERN (call);
3815 if (GET_CODE (call_pat) == PARALLEL)
3817 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3818 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3819 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3822 /* This may be a library call that is returning a value
3823 via invisible pointer. Do nothing special, since
3824 ordinary death handling can understand these insns. */
3828 call_pat = XVECEXP (call_pat, 0, i);
3831 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3837 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3838 live at function entry. Don't count global register variables, variables
3839 in registers that can be used for function arg passing, or variables in
3840 fixed hard registers. */
3843 regno_uninitialized (regno)
3846 if (n_basic_blocks == 0
3847 || (regno < FIRST_PSEUDO_REGISTER
3848 && (global_regs[regno]
3849 || fixed_regs[regno]
3850 || FUNCTION_ARG_REGNO_P (regno))))
3853 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3856 /* 1 if register REGNO was alive at a place where `setjmp' was called
3857 and was set more than once or is an argument.
3858 Such regs may be clobbered by `longjmp'. */
3861 regno_clobbered_at_setjmp (regno)
3864 if (n_basic_blocks == 0)
3867 return ((REG_N_SETS (regno) > 1
3868 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3869 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3872 /* INSN references memory, possibly using autoincrement addressing modes.
3873 Find any entries on the mem_set_list that need to be invalidated due
3874 to an address change. */
3876 invalidate_mems_from_autoinc (pbi, insn)
3877 struct propagate_block_info *pbi;
3880 rtx note = REG_NOTES (insn);
3881 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3883 if (REG_NOTE_KIND (note) == REG_INC)
3885 rtx temp = pbi->mem_set_list;
3886 rtx prev = NULL_RTX;
3891 next = XEXP (temp, 1);
3892 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3894 /* Splice temp out of list. */
3896 XEXP (prev, 1) = next;
3898 pbi->mem_set_list = next;
3899 free_EXPR_LIST_node (temp);
3909 /* Process the registers that are set within X. Their bits are set to
3910 1 in the regset DEAD, because they are dead prior to this insn.
3912 If INSN is nonzero, it is the insn being processed.
3914 FLAGS is the set of operations to perform. */
3917 mark_set_regs (pbi, x, insn)
3918 struct propagate_block_info *pbi;
3921 rtx cond = NULL_RTX;
3925 switch (code = GET_CODE (x))
3929 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
3933 cond = COND_EXEC_TEST (x);
3934 x = COND_EXEC_CODE (x);
3940 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3942 rtx sub = XVECEXP (x, 0, i);
3943 switch (code = GET_CODE (sub))
3946 if (cond != NULL_RTX)
3949 cond = COND_EXEC_TEST (sub);
3950 sub = COND_EXEC_CODE (sub);
3951 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
3957 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
3972 /* Process a single SET rtx, X. */
3975 mark_set_1 (pbi, code, reg, cond, insn, flags)
3976 struct propagate_block_info *pbi;
3978 rtx reg, cond, insn;
3981 int regno_first = -1, regno_last = -1;
3984 /* Some targets place small structures in registers for
3985 return values of functions. We have to detect this
3986 case specially here to get correct flow information. */
3987 if (GET_CODE (reg) == PARALLEL
3988 && GET_MODE (reg) == BLKmode)
3990 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3991 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
3995 /* Modifying just one hardware register of a multi-reg value or just a
3996 byte field of a register does not mean the value from before this insn
3997 is now dead. But it does mean liveness of that register at the end of
3998 the block is significant.
4000 Within mark_set_1, however, we treat it as if the register is indeed
4001 modified. mark_used_regs will, however, also treat this register as
4002 being used. Thus, we treat these insns as setting a new value for the
4003 register as a function of its old value. This causes LOG_LINKS to be
4004 made appropriately and this will help combine.
4006 ??? This is all done incorrectly. We should not be setting bits in
4007 new_dead for these registers, since, as we just explained, they are
4008 not dead. We should be setting bits in local_set, and updating
4009 LOG_LINKS, but that is different. */
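/* Examples of such partial or narrowing sets (illustrative):

       (set (strict_low_part (subreg:QI (reg:SI N) 0)) ...)
       (set (zero_extract:SI (reg:SI N) (const_int 8) (const_int 0)) ...)

   Only part of (reg:SI N) is overwritten, so its old value may still be
   needed above this insn.  */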
4011 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4012 || GET_CODE (reg) == SIGN_EXTRACT
4013 || GET_CODE (reg) == STRICT_LOW_PART)
4014 reg = XEXP (reg, 0);
4016 /* If this set is a MEM, then it kills any aliased writes.
4017 If this set is a REG, then it kills any MEMs which use the reg. */
4018 if (flags & PROP_SCAN_DEAD_CODE)
4020 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4022 rtx temp = pbi->mem_set_list;
4023 rtx prev = NULL_RTX;
4028 next = XEXP (temp, 1);
4029 if ((GET_CODE (reg) == MEM
4030 && output_dependence (XEXP (temp, 0), reg))
4031 || (GET_CODE (reg) == REG
4032 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4034 /* Splice this entry out of the list. */
4036 XEXP (prev, 1) = next;
4038 pbi->mem_set_list = next;
4039 free_EXPR_LIST_node (temp);
4047 /* If the memory reference had embedded side effects (autoincrement
4048 address modes), then we may need to kill some entries on the memory set list. */
4050 if (insn && GET_CODE (reg) == MEM)
4051 invalidate_mems_from_autoinc (pbi, insn);
4053 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4054 /* We do not know the size of a BLKmode store, so we do not track
4055 them for redundant store elimination. */
4056 && GET_MODE (reg) != BLKmode
4057 /* There are no REG_INC notes for SP, so we can't assume we'll see
4058 everything that invalidates it. To be safe, don't eliminate any
4059 stores through SP; none of them should be redundant anyway. */
4060 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4061 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4064 if (GET_CODE (reg) == REG
4065 && (regno_first = REGNO (reg),
4066 ! (regno_first == FRAME_POINTER_REGNUM
4067 && (! reload_completed || frame_pointer_needed)))
4068 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4069 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4070 && (! reload_completed || frame_pointer_needed))
4072 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4073 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4077 int some_was_live = 0, some_was_dead = 0;
4079 if (regno_first < FIRST_PSEUDO_REGISTER)
4080 regno_last = (regno_first
4081 + HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1);
4083 regno_last = regno_first;
4085 for (i = regno_first; i <= regno_last; ++i)
4087 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4089 SET_REGNO_REG_SET (pbi->local_set, i);
4091 some_was_live |= needed_regno;
4092 some_was_dead |= ! needed_regno;
4095 /* Additional data to record if this is the final pass. */
4096 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4097 | PROP_DEATH_NOTES | PROP_AUTOINC))
4100 register int blocknum = pbi->bb->index;
4103 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4105 y = pbi->reg_next_use[regno_first];
4107 /* The next use is no longer next, since a store intervenes. */
4108 for (i = regno_first; i <= regno_last; ++i)
4109 pbi->reg_next_use[i] = 0;
4112 if (flags & PROP_REG_INFO)
4114 for (i = regno_first; i <= regno_last; ++i)
4116 /* Count (weighted) references, stores, etc. This counts a
4117 register twice if it is modified, but that is correct. */
4118 REG_N_SETS (i) += 1;
4119 REG_N_REFS (i) += (optimize_size ? 1
4120 : pbi->bb->loop_depth + 1);
4122 /* The insns where a reg is live are normally counted
4123 elsewhere, but we want the count to include the insn
4124 where the reg is set, and the normal counting mechanism
4125 would not count it. */
4126 REG_LIVE_LENGTH (i) += 1;
4129 /* If this is a hard reg, record this function uses the reg. */
4130 if (regno_first < FIRST_PSEUDO_REGISTER)
4132 for (i = regno_first; i <= regno_last; i++)
4133 regs_ever_live[i] = 1;
4137 /* Keep track of which basic blocks each reg appears in. */
4138 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4139 REG_BASIC_BLOCK (regno_first) = blocknum;
4140 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4141 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4145 if (! some_was_dead)
4147 if (flags & PROP_LOG_LINKS)
4149 /* Make a logical link from the next following insn
4150 that uses this register, back to this insn.
4151 The following insns have already been processed.
4153 We don't build a LOG_LINK for hard registers contained
4154 in ASM_OPERANDs. If these registers get replaced,
4155 we might wind up changing the semantics of the insn,
4156 even if reload can make what appear to be valid
4157 assignments later. */
4158 if (y && (BLOCK_NUM (y) == blocknum)
4159 && (regno_first >= FIRST_PSEUDO_REGISTER
4160 || asm_noperands (PATTERN (y)) < 0))
4161 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4164 else if (! some_was_live)
4166 if (flags & PROP_REG_INFO)
4167 REG_N_DEATHS (regno_first) += 1;
4169 if (flags & PROP_DEATH_NOTES)
4171 /* Note that dead stores have already been deleted
4172 when possible. If we get here, we have found a
4173 dead store that cannot be eliminated (because the
4174 same insn does something useful). Indicate this
4175 by marking the reg being set as dying here. */
4177 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4182 if (flags & PROP_DEATH_NOTES)
4184 /* This is a case where we have a multi-word hard register
4185 and some, but not all, of the words of the register are
4186 needed in subsequent insns. Write REG_UNUSED notes
4187 for those parts that were not needed. This case should be rare. */
4190 for (i = regno_first; i <= regno_last; ++i)
4191 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4193 = alloc_EXPR_LIST (REG_UNUSED,
4194 gen_rtx_REG (reg_raw_mode[i], i),
4200 /* Mark the register as being dead. */
4202 /* The stack pointer is never dead. Well, not strictly true,
4203 but it's very difficult to tell from here. Hopefully
4204 combine_stack_adjustments will fix up the most egregious errors. */
4206 && regno_first != STACK_POINTER_REGNUM)
4208 for (i = regno_first; i <= regno_last; ++i)
4209 SET_REGNO_REG_SET (pbi->new_dead, i);
4212 else if (GET_CODE (reg) == REG)
4214 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4215 pbi->reg_next_use[regno_first] = 0;
4218 /* If this is the last pass and this is a SCRATCH, show it will be dying
4219 here and count it. */
4220 else if (GET_CODE (reg) == SCRATCH)
4222 if (flags & PROP_DEATH_NOTES)
4224 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4230 /* X is a MEM found in INSN. See if we can convert it into an auto-increment address. */
4234 find_auto_inc (pbi, x, insn)
4235 struct propagate_block_info *pbi;
4239 rtx addr = XEXP (x, 0);
4240 HOST_WIDE_INT offset = 0;
4243 /* Here we detect use of an index register which might be good for
4244 postincrement, postdecrement, preincrement, or predecrement. */
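/* For example (illustrative), with HAVE_POST_INCREMENT, the pair

       ... (mem:SI (reg P)) ...
       (set (reg P) (plus:SI (reg P) (const_int 4)))

   can become

       ... (mem:SI (post_inc:SI (reg P))) ...

   with the separate add deleted.  */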
4246 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4247 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4249 if (GET_CODE (addr) == REG)
4252 register int size = GET_MODE_SIZE (GET_MODE (x));
4255 int regno = REGNO (addr);
4257 /* Is the next use an increment that might make auto-increment? */
4258 if ((incr = pbi->reg_next_use[regno]) != 0
4259 && (set = single_set (incr)) != 0
4260 && GET_CODE (set) == SET
4261 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4262 /* Can't add side effects to jumps; if reg is spilled and
4263 reloaded, there's no way to store back the altered value. */
4264 && GET_CODE (insn) != JUMP_INSN
4265 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4266 && XEXP (y, 0) == addr
4267 && GET_CODE (XEXP (y, 1)) == CONST_INT
4268 && ((HAVE_POST_INCREMENT
4269 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4270 || (HAVE_POST_DECREMENT
4271 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4272 || (HAVE_PRE_INCREMENT
4273 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4274 || (HAVE_PRE_DECREMENT
4275 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4276 /* Make sure this reg appears only once in this insn. */
4277 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4278 use != 0 && use != (rtx) 1))
4280 rtx q = SET_DEST (set);
4281 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4282 ? (offset ? PRE_INC : POST_INC)
4283 : (offset ? PRE_DEC : POST_DEC));
4285 if (dead_or_set_p (incr, addr)
4286 /* Mustn't autoinc an eliminable register. */
4287 && (regno >= FIRST_PSEUDO_REGISTER
4288 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4290 /* This is the simple case. Try to make the auto-inc. If
4291 we can't, we are done. Otherwise, we will do any
4292 needed updates below. */
4293 if (! validate_change (insn, &XEXP (x, 0),
4294 gen_rtx_fmt_e (inc_code, Pmode, addr),
4298 else if (GET_CODE (q) == REG
4299 /* PREV_INSN used here to check the semi-open interval [INSN, INCR). */
4301 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4302 /* We must also check for sets of q as q may be
4303 a call clobbered hard register and there may
4304 be a call between PREV_INSN (insn) and incr. */
4305 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4307 /* We have *p followed sometime later by q = p+size.
4308 Both p and q must be live afterward,
4309 and q is not used between INSN and its assignment.
4310 Change it to q = p, ...*q..., q = q+size.
4311 Then fall into the usual case. */
4315 emit_move_insn (q, addr);
4316 insns = get_insns ();
4319 if (basic_block_for_insn)
4320 for (temp = insns; temp; temp = NEXT_INSN (temp))
4321 set_block_for_insn (temp, pbi->bb);
4323 /* If we can't make the auto-inc, or can't make the
4324 replacement into Y, exit. There's no point in making
4325 the change below if we can't do the auto-inc and doing
4326 so is not correct in the pre-inc case. */
4328 validate_change (insn, &XEXP (x, 0),
4329 gen_rtx_fmt_e (inc_code, Pmode, q),
4331 validate_change (incr, &XEXP (y, 0), q, 1);
4332 if (! apply_change_group ())
4335 /* We now know we'll be doing this change, so emit the
4336 new insn(s) and do the updates. */
4337 emit_insns_before (insns, insn);
4339 if (pbi->bb->head == insn)
4340 pbi->bb->head = insns;
4342 /* INCR will become a NOTE and INSN won't contain a
4343 use of ADDR. If a use of ADDR was just placed in
4344 the insn before INSN, make that the next use.
4345 Otherwise, invalidate it. */
4346 if (GET_CODE (PREV_INSN (insn)) == INSN
4347 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4348 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4349 pbi->reg_next_use[regno] = PREV_INSN (insn);
4351 pbi->reg_next_use[regno] = 0;
4356 /* REGNO is now used in INCR which is below INSN, but it
4357 previously wasn't live here. If we don't mark it as
4358 live, we'll put a REG_DEAD note for it on this insn,
4359 which is incorrect. */
4360 SET_REGNO_REG_SET (pbi->reg_live, regno);
4362 /* If there are any calls between INSN and INCR, show
4363 that REGNO now crosses them. */
4364 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4365 if (GET_CODE (temp) == CALL_INSN)
4366 REG_N_CALLS_CROSSED (regno)++;
4371 /* If we haven't returned, it means we were able to make the
4372 auto-inc, so update the status. First, record that this insn
4373 has an implicit side effect. */
4376 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4378 /* Modify the old increment-insn to simply copy
4379 the already-incremented value of our register. */
4380 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4383 /* If that makes it a no-op (copying the register into itself) delete
4384 it so it won't appear to be a "use" and a "set" of this register. */
4386 if (SET_DEST (set) == addr)
4388 /* If the original source was dead, it's dead now. */
4389 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4390 if (note && XEXP (note, 0) != addr)
4391 SET_REGNO_REG_SET (pbi->new_dead, REGNO (XEXP (note, 0)));
4393 PUT_CODE (incr, NOTE);
4394 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4395 NOTE_SOURCE_FILE (incr) = 0;
4398 if (regno >= FIRST_PSEUDO_REGISTER)
4400 /* Count an extra reference to the reg. When a reg is
4401 incremented, spilling it is worse, so we want to make
4402 that less likely. */
4403 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4405 /* Count the increment as a setting of the register,
4406 even though it isn't a SET in rtl. */
4407 REG_N_SETS (regno)++;
4412 #endif /* AUTO_INC_DEC */
4415 mark_used_reg (pbi, reg, cond, insn)
4416 struct propagate_block_info *pbi;
4418 rtx cond ATTRIBUTE_UNUSED;
4421 int regno = REGNO (reg);
4422 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4423 int some_was_dead = ! some_was_live;
4425 SET_REGNO_REG_SET (pbi->new_live, regno);
4427 /* A hard reg in a wide mode may really be multiple registers.
4428 If so, mark all of them just like the first. */
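/* For example (illustrative), a DImode value in hard register N on a
   target with 32-bit words occupies HARD_REGNO_NREGS == 2 consecutive
   hard registers, so both N and N+1 are marked live here.  */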
4429 if (regno < FIRST_PSEUDO_REGISTER)
4431 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4434 int regno_n = regno + n;
4435 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4437 SET_REGNO_REG_SET (pbi->new_live, regno_n);
4438 some_was_live |= needed_regno;
4439 some_was_dead |= ! needed_regno;
4443 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4445 /* Record where each reg is used, so when the reg is set we know
4446 the next insn that uses it. */
4447 pbi->reg_next_use[regno] = insn;
4450 if (pbi->flags & PROP_REG_INFO)
4452 if (regno < FIRST_PSEUDO_REGISTER)
4454 /* If this is a register we are going to try to eliminate,
4455 don't mark it live here. If we are successful in
4456 eliminating it, it need not be live unless it is used for
4457 pseudos, in which case it will have been set live when it
4458 was allocated to the pseudos. If the register will not
4459 be eliminated, reload will set it live at that point.
4461 Otherwise, record that this function uses this register. */
4462 /* ??? The PPC backend tries to "eliminate" on the pic
4463 register to itself. This should be fixed. In the mean
4464 time, hack around it. */
4466 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
4467 && (regno == FRAME_POINTER_REGNUM
4468 || regno == ARG_POINTER_REGNUM)))
4470 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4472 regs_ever_live[regno + --n] = 1;
4478 /* Keep track of which basic block each reg appears in. */
4480 register int blocknum = pbi->bb->index;
4481 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4482 REG_BASIC_BLOCK (regno) = blocknum;
4483 else if (REG_BASIC_BLOCK (regno) != blocknum)
4484 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4486 /* Count (weighted) number of uses of each reg. */
4487 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4491 /* Record and count the insns in which a reg dies. If it is used in
4492 this insn and was dead below the insn then it dies in this insn.
4493 If it was set in this insn, we do not make a REG_DEAD note;
4494 likewise if we already made such a note.
4496 ??? This could be done better. In new_dead we have a record of
4497 which registers are set or clobbered this insn (which in itself is
4498 slightly incorrect, see the commentary near strict_low_part in
4499 mark_set_1), which should be the set of registers that we do not
4500 wish to create death notes for under the above rule. Note that
4501 we have not yet processed the call-clobbered/call-used registers,
4502 and they do not quite follow the above rule, since we do want death
4503 notes for call-clobbered register arguments. Which begs the whole
4504 question of whether we should in fact have death notes for registers
4505 used and clobbered (but not set) in the same insn. The only useful
4506 thing we ought to be getting from dead_or_set_p is detection of
4507 duplicate death notes. */
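/* Illustrative example of the partial-death case handled below (the
   register numbers and modes are hypothetical): if (reg:DI 2) occupies
   hard regs 2 and 3, is used by this insn, and only hard reg 3 is live
   below it, the loop further down adds a single partial note for hard
   reg 2, built with gen_rtx_REG (reg_raw_mode[2], 2), instead of a
   REG_DEAD note for the whole double-word register.  */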
4509 if ((pbi->flags & PROP_DEATH_NOTES)
4511 && ! dead_or_set_p (insn, reg))
4515 /* Check for the case where the register dying partially
4516 overlaps the register set by this insn. */
4517 if (regno < FIRST_PSEUDO_REGISTER
4518 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4520 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4522 some_was_live |= dead_or_set_regno_p (insn, regno + n);
4525 /* If none of the words in X is needed, make a REG_DEAD note.
4526 Otherwise, we must make partial REG_DEAD notes. */
4527 if (! some_was_live)
4530 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4531 REG_N_DEATHS (regno)++;
4535 /* Don't make a REG_DEAD note for a part of a register
4536 that is set in the insn. */
4538 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4539 for (; n >= regno; n--)
4540 if (!REGNO_REG_SET_P (pbi->reg_live, n)
4541 && ! dead_or_set_regno_p (insn, n))
4543 = alloc_EXPR_LIST (REG_DEAD,
4544 gen_rtx_REG (reg_raw_mode[n], n),
4550 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
4551 This is done assuming the registers needed from X are those that
4552 have 1-bits in PBI->REG_LIVE.
4554 INSN is the containing instruction. If INSN is dead, this function
4558 mark_used_regs (pbi, x, cond, insn)
4559 struct propagate_block_info *pbi;
4562 register RTX_CODE code;
4564 int flags = pbi->flags;
4567 code = GET_CODE (x);
4587 /* If we are clobbering a MEM, mark any registers inside the address
4589 if (GET_CODE (XEXP (x, 0)) == MEM)
4590 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
4594 /* Don't bother watching stores to mems if this is not the
4595 final pass. We'll not be deleting dead stores this round. */
4596 if (flags & PROP_SCAN_DEAD_CODE)
4598 /* Invalidate the data for the last MEM stored, but only if MEM is
4599 something that can be stored into. */
4600 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4601 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4602 ; /* needn't clear the memory set list */
4605 rtx temp = pbi->mem_set_list;
4606 rtx prev = NULL_RTX;
4611 next = XEXP (temp, 1);
4612 if (anti_dependence (XEXP (temp, 0), x))
4614 /* Splice temp out of the list. */
4616 XEXP (prev, 1) = next;
4618 pbi->mem_set_list = next;
4619 free_EXPR_LIST_node (temp);
4627 /* If the memory reference had embedded side effects (autoincrement
4628 address modes), then we may need to kill some entries on the
4631 invalidate_mems_from_autoinc (pbi, insn);
4635 if (flags & PROP_AUTOINC)
4636 find_auto_inc (pbi, x, insn);
4641 if (GET_CODE (SUBREG_REG (x)) == REG
4642 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4643 && (GET_MODE_SIZE (GET_MODE (x))
4644 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4645 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4647 /* While we're here, optimize this case. */
4649 if (GET_CODE (x) != REG)
4654 /* See a register other than being set => mark it as needed. */
4655 mark_used_reg (pbi, x, cond, insn);
4660 register rtx testreg = SET_DEST (x);
4663 /* If storing into MEM, don't show it as being used. But do
4664 show the address as being used. */
4665 if (GET_CODE (testreg) == MEM)
4668 if (flags & PROP_AUTOINC)
4669 find_auto_inc (pbi, testreg, insn);
4671 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
4672 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4676 /* Storing in STRICT_LOW_PART is like storing in a reg
4677 in that this SET might be dead, so ignore it in TESTREG,
4678 but in some other ways it is like using the reg.
4680 Storing in a SUBREG or a bit field is like storing the entire
4681 register in that if the register's value is not used
4682 then this SET is not needed. */
4683 while (GET_CODE (testreg) == STRICT_LOW_PART
4684 || GET_CODE (testreg) == ZERO_EXTRACT
4685 || GET_CODE (testreg) == SIGN_EXTRACT
4686 || GET_CODE (testreg) == SUBREG)
4688 if (GET_CODE (testreg) == SUBREG
4689 && GET_CODE (SUBREG_REG (testreg)) == REG
4690 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4691 && (GET_MODE_SIZE (GET_MODE (testreg))
4692 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4693 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4695 /* Modifying a single register in an alternate mode
4696 does not use any of the old value. But these other
4697 ways of storing in a register do use the old value. */
4698 if (GET_CODE (testreg) == SUBREG
4699 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4704 testreg = XEXP (testreg, 0);
4707 /* If this is a store into a register, recursively scan the
4708 value being stored. */
4710 if ((GET_CODE (testreg) == PARALLEL
4711 && GET_MODE (testreg) == BLKmode)
4712 || (GET_CODE (testreg) == REG
4713 && (regno = REGNO (testreg),
4714 ! (regno == FRAME_POINTER_REGNUM
4715 && (! reload_completed || frame_pointer_needed)))
4716 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4717 && ! (regno == HARD_FRAME_POINTER_REGNUM
4718 && (! reload_completed || frame_pointer_needed))
4720 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4721 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4726 mark_used_regs (pbi, SET_DEST (x), cond, insn);
4727 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4734 case UNSPEC_VOLATILE:
4738 /* Traditional and volatile asm instructions must be considered to use
4739 and clobber all hard registers, all pseudo-registers and all of
4740 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4742 Consider for instance a volatile asm that changes the fpu rounding
4743 mode. An insn should not be moved across this even if it only uses
4744 pseudo-regs because it might give an incorrectly rounded result.
4746 ?!? Unfortunately, marking all hard registers as live causes massive
4747 problems for the register allocator and marking all pseudos as live
4748 creates mountains of uninitialized variable warnings.
4750 So for now, just clear the memory set list and mark any regs
4751 we can find in ASM_OPERANDS as used. */
4752 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4753 free_EXPR_LIST_list (&pbi->mem_set_list);
4755 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4756 We can not just fall through here since then we would be confused
4757 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4758 traditional asms unlike their normal usage. */
4759 if (code == ASM_OPERANDS)
4763 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4764 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
4770 if (cond != NULL_RTX)
4773 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
4775 cond = COND_EXEC_TEST (x);
4776 x = COND_EXEC_CODE (x);
4780 /* We _do_not_ want to scan operands of phi nodes. Operands of
4781 a phi function are evaluated only when control reaches this
4782 block along a particular edge. Therefore, regs that appear
4783 as arguments to phi should not be added to the global live at
4791 /* Recursively scan the operands of this expression. */
4794 register const char *fmt = GET_RTX_FORMAT (code);
4797 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4801 /* Tail recursive case: save a function call level. */
4807 mark_used_regs (pbi, XEXP (x, i), cond, insn);
4809 else if (fmt[i] == 'E')
4812 for (j = 0; j < XVECLEN (x, i); j++)
4813 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
4822 try_pre_increment_1 (pbi, insn)
4823 struct propagate_block_info *pbi;
4826 /* Find the next use of this reg. If in same basic block,
4827 make it do pre-increment or pre-decrement if appropriate. */
4828 rtx x = single_set (insn);
4829 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4830 * INTVAL (XEXP (SET_SRC (x), 1)));
4831 int regno = REGNO (SET_DEST (x));
4832 rtx y = pbi->reg_next_use[regno];
4834 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4835 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4836 mode would be better. */
4837 && ! dead_or_set_p (y, SET_DEST (x))
4838 && try_pre_increment (y, SET_DEST (x), amount))
4840 /* We have found a suitable auto-increment
4841 and already changed insn Y to do it.
4842 So flush this increment-instruction. */
4843 PUT_CODE (insn, NOTE);
4844 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4845 NOTE_SOURCE_FILE (insn) = 0;
4846 /* Count a reference to this reg for the increment
4847 insn we are deleting. When a reg is incremented,
4848 spilling it is worse, so we want to make that
4850 if (regno >= FIRST_PSEUDO_REGISTER)
4852 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4853 REG_N_SETS (regno)++;
4860 /* Try to change INSN so that it does pre-increment or pre-decrement
4861 addressing on register REG in order to add AMOUNT to REG.
4862 AMOUNT is negative for pre-decrement.
4863 Returns 1 if the change could be made.
4864 This checks all about the validity of the result of modifying INSN. */
4867 try_pre_increment (insn, reg, amount)
4869 HOST_WIDE_INT amount;
4873 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4874 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4876 /* Nonzero if we can try to make a post-increment or post-decrement.
4877 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4878 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4879 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4882 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4885 /* From the sign of increment, see which possibilities are conceivable
4886 on this target machine. */
4887 if (HAVE_PRE_INCREMENT && amount > 0)
4889 if (HAVE_POST_INCREMENT && amount > 0)
4892 if (HAVE_PRE_DECREMENT && amount < 0)
4894 if (HAVE_POST_DECREMENT && amount < 0)
4897 if (! (pre_ok || post_ok))
4900 /* It is not safe to add a side effect to a jump insn
4901 because if the incremented register is spilled and must be reloaded
4902 there would be no way to store the incremented value back in memory. */
4904 if (GET_CODE (insn) == JUMP_INSN)
4909 use = find_use_as_address (PATTERN (insn), reg, 0);
4910 if (post_ok && (use == 0 || use == (rtx) 1))
4912 use = find_use_as_address (PATTERN (insn), reg, -amount);
4916 if (use == 0 || use == (rtx) 1)
4919 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4922 /* See if this combination of instruction and addressing mode exists. */
4923 if (! validate_change (insn, &XEXP (use, 0),
4924 gen_rtx_fmt_e (amount > 0
4925 ? (do_post ? POST_INC : PRE_INC)
4926 : (do_post ? POST_DEC : PRE_DEC),
4930 /* Record that this insn now has an implicit side effect on X. */
4931 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4935 #endif /* AUTO_INC_DEC */
4937 /* Find the place in the rtx X where REG is used as a memory address.
4938 Return the MEM rtx that so uses it.
4939 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4940 (plus REG (const_int PLUSCONST)).
4942 If such an address does not appear, return 0.
4943 If REG appears more than once, or is used other than in such an address,
4947 find_use_as_address (x, reg, plusconst)
4950 HOST_WIDE_INT plusconst;
4952 enum rtx_code code = GET_CODE (x);
4953 const char *fmt = GET_RTX_FORMAT (code);
4955 register rtx value = 0;
4958 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4961 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4962 && XEXP (XEXP (x, 0), 0) == reg
4963 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4964 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4967 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4969 /* If REG occurs inside a MEM used in a bit-field reference,
4970 that is unacceptable. */
4971 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4972 return (rtx) (HOST_WIDE_INT) 1;
4976 return (rtx) (HOST_WIDE_INT) 1;
4978 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4982 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4986 return (rtx) (HOST_WIDE_INT) 1;
4988 else if (fmt[i] == 'E')
4991 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4993 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4997 return (rtx) (HOST_WIDE_INT) 1;
5005 /* Write information about registers and basic blocks into FILE.
5006 This is part of making a debugging dump. */
5009 dump_regset (r, outf)
5016 fputs (" (nil)", outf);
5020 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5022 fprintf (outf, " %d", i);
5023 if (i < FIRST_PSEUDO_REGISTER)
5024 fprintf (outf, " [%s]",
5033 dump_regset (r, stderr);
5034 putc ('\n', stderr);
5038 dump_flow_info (file)
5042 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5044 fprintf (file, "%d registers.\n", max_regno);
5045 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5048 enum reg_class class, altclass;
5049 fprintf (file, "\nRegister %d used %d times across %d insns",
5050 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5051 if (REG_BASIC_BLOCK (i) >= 0)
5052 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5054 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5055 (REG_N_SETS (i) == 1) ? "" : "s");
5056 if (REG_USERVAR_P (regno_reg_rtx[i]))
5057 fprintf (file, "; user var");
5058 if (REG_N_DEATHS (i) != 1)
5059 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5060 if (REG_N_CALLS_CROSSED (i) == 1)
5061 fprintf (file, "; crosses 1 call");
5062 else if (REG_N_CALLS_CROSSED (i))
5063 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5064 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5065 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5066 class = reg_preferred_class (i);
5067 altclass = reg_alternate_class (i);
5068 if (class != GENERAL_REGS || altclass != ALL_REGS)
5070 if (altclass == ALL_REGS || class == ALL_REGS)
5071 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5072 else if (altclass == NO_REGS)
5073 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5075 fprintf (file, "; pref %s, else %s",
5076 reg_class_names[(int) class],
5077 reg_class_names[(int) altclass]);
5079 if (REGNO_POINTER_FLAG (i))
5080 fprintf (file, "; pointer");
5081 fprintf (file, ".\n");
5084 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5085 for (i = 0; i < n_basic_blocks; i++)
5087 register basic_block bb = BASIC_BLOCK (i);
5090 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5091 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5093 fprintf (file, "Predecessors: ");
5094 for (e = bb->pred; e ; e = e->pred_next)
5095 dump_edge_info (file, e, 0);
5097 fprintf (file, "\nSuccessors: ");
5098 for (e = bb->succ; e ; e = e->succ_next)
5099 dump_edge_info (file, e, 1);
5101 fprintf (file, "\nRegisters live at start:");
5102 dump_regset (bb->global_live_at_start, file);
5104 fprintf (file, "\nRegisters live at end:");
5105 dump_regset (bb->global_live_at_end, file);
5116 dump_flow_info (stderr);
5120 dump_edge_info (file, e, do_succ)
5125 basic_block side = (do_succ ? e->dest : e->src);
5127 if (side == ENTRY_BLOCK_PTR)
5128 fputs (" ENTRY", file);
5129 else if (side == EXIT_BLOCK_PTR)
5130 fputs (" EXIT", file);
5132 fprintf (file, " %d", side->index);
5136 static const char * const bitnames[] = {
5137 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5140 int i, flags = e->flags;
5144 for (i = 0; flags; i++)
5145 if (flags & (1 << i))
5151 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5152 fputs (bitnames[i], file);
5154 fprintf (file, "%d", i);
5162 /* Print out one basic block with live information at start and end. */
5172 fprintf (outf, ";; Basic block %d, loop depth %d",
5173 bb->index, bb->loop_depth);
5174 if (bb->eh_beg != -1 || bb->eh_end != -1)
5175 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5178 fputs (";; Predecessors: ", outf);
5179 for (e = bb->pred; e ; e = e->pred_next)
5180 dump_edge_info (outf, e, 0);
5183 fputs (";; Registers live at start:", outf);
5184 dump_regset (bb->global_live_at_start, outf);
5187 for (insn = bb->head, last = NEXT_INSN (bb->end);
5189 insn = NEXT_INSN (insn))
5190 print_rtl_single (outf, insn);
5192 fputs (";; Registers live at end:", outf);
5193 dump_regset (bb->global_live_at_end, outf);
5196 fputs (";; Successors: ", outf);
5197 for (e = bb->succ; e; e = e->succ_next)
5198 dump_edge_info (outf, e, 1);
5206 dump_bb (bb, stderr);
5213 dump_bb (BASIC_BLOCK(n), stderr);
5216 /* Like print_rtl, but also print out live information for the start of each
5220 print_rtl_with_bb (outf, rtx_first)
5224 register rtx tmp_rtx;
5227 fprintf (outf, "(nil)\n");
5231 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5232 int max_uid = get_max_uid ();
5233 basic_block *start = (basic_block *)
5234 xcalloc (max_uid, sizeof (basic_block));
5235 basic_block *end = (basic_block *)
5236 xcalloc (max_uid, sizeof (basic_block));
5237 enum bb_state *in_bb_p = (enum bb_state *)
5238 xcalloc (max_uid, sizeof (enum bb_state));
5240 for (i = n_basic_blocks - 1; i >= 0; i--)
5242 basic_block bb = BASIC_BLOCK (i);
5245 start[INSN_UID (bb->head)] = bb;
5246 end[INSN_UID (bb->end)] = bb;
5247 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5249 enum bb_state state = IN_MULTIPLE_BB;
5250 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5252 in_bb_p[INSN_UID(x)] = state;
5259 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5264 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5266 fprintf (outf, ";; Start of basic block %d, registers live:",
5268 dump_regset (bb->global_live_at_start, outf);
5272 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5273 && GET_CODE (tmp_rtx) != NOTE
5274 && GET_CODE (tmp_rtx) != BARRIER)
5275 fprintf (outf, ";; Insn is not within a basic block\n");
5276 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5277 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5279 did_output = print_rtl_single (outf, tmp_rtx);
5281 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5283 fprintf (outf, ";; End of basic block %d, registers live:\n",
5285 dump_regset (bb->global_live_at_end, outf);
5298 if (current_function_epilogue_delay_list != 0)
5300 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5301 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5302 tmp_rtx = XEXP (tmp_rtx, 1))
5303 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5307 /* Compute dominator relationships using new flow graph structures. */
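/* Informally (a summary of the code below, not a separate specification),
   this computes the maximal fixed point of the standard dataflow equation

       dom (b) = { b } union ( intersection over predecessors p of dom (p) )

   starting from the optimistic assumption that every block dominates every
   other block; successors of the entry block start with dom (b) = { b }.
   The post-dominator computation is the mirror image of this, using
   successors and the exit block.  */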
5309 compute_flow_dominators (dominators, post_dominators)
5310 sbitmap *dominators;
5311 sbitmap *post_dominators;
5314 sbitmap *temp_bitmap;
5316 basic_block *worklist, *workend, *qin, *qout;
5319 /* Allocate a worklist array/queue. Entries are only added to the
5320 list if they were not already on the list. So the size is
5321 bounded by the number of basic blocks. */
5322 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5323 workend = &worklist[n_basic_blocks];
5325 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5326 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5330 /* The optimistic setting of dominators requires us to put every
5331 block on the work list initially. */
5332 qin = qout = worklist;
5333 for (bb = 0; bb < n_basic_blocks; bb++)
5335 *qin++ = BASIC_BLOCK (bb);
5336 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5338 qlen = n_basic_blocks;
5341 /* We want a maximal solution, so initially assume everything dominates
5343 sbitmap_vector_ones (dominators, n_basic_blocks);
5345 /* Mark successors of the entry block so we can identify them below. */
5346 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5347 e->dest->aux = ENTRY_BLOCK_PTR;
5349 /* Iterate until the worklist is empty. */
5352 /* Take the first entry off the worklist. */
5353 basic_block b = *qout++;
5354 if (qout >= workend)
5360 /* Compute the intersection of the dominators of all the
5363 If one of the predecessor blocks is the ENTRY block, then the
5364 intersection of the dominators of the predecessor blocks is
5365 defined as the null set. We can identify such blocks by the
5366 special value in the AUX field in the block structure. */
5367 if (b->aux == ENTRY_BLOCK_PTR)
5369 /* Do not clear the aux field for blocks which are
5370 successors of the ENTRY block. That way we never
5371 add them to the worklist again.
5373 The intersection of the dominators of the preds of this block is
5374 defined as the null set. */
5375 sbitmap_zero (temp_bitmap[bb]);
5379 /* Clear the aux field of this block so it can be added to
5380 the worklist again if necessary. */
5382 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5385 /* Make sure each block always dominates itself. */
5386 SET_BIT (temp_bitmap[bb], bb);
5388 /* If the out state of this block changed, then we need to
5389 add the successors of this block to the worklist if they
5390 are not already on the worklist. */
5391 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5393 for (e = b->succ; e; e = e->succ_next)
5395 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5409 if (post_dominators)
5411 /* The optimistic setting of dominators requires us to put every
5412 block on the work list initially. */
5413 qin = qout = worklist;
5414 for (bb = 0; bb < n_basic_blocks; bb++)
5416 *qin++ = BASIC_BLOCK (bb);
5417 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5419 qlen = n_basic_blocks;
5422 /* We want a maximal solution, so initially assume everything post
5423 dominates everything else. */
5424 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5426 /* Mark predecessors of the exit block so we can identify them below. */
5427 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5428 e->src->aux = EXIT_BLOCK_PTR;
5430 /* Iterate until the worklist is empty. */
5433 /* Take the first entry off the worklist. */
5434 basic_block b = *qout++;
5435 if (qout >= workend)
5441 /* Compute the intersection of the post dominators of all the
5444 If one of the successor blocks is the EXIT block, then the
5445 intersection of the post dominators of the successor blocks is
5446 defined as the null set. We can identify such blocks by the
5447 special value in the AUX field in the block structure. */
5448 if (b->aux == EXIT_BLOCK_PTR)
5450 /* Do not clear the aux field for blocks which are
5451 predecessors of the EXIT block. That way we never
5452 add them to the worklist again.
5454 The intersection of the post dominators of the succs of this block is
5455 defined as the null set. */
5456 sbitmap_zero (temp_bitmap[bb]);
5460 /* Clear the aux field of this block so it can be added to
5461 the worklist again if necessary. */
5463 sbitmap_intersection_of_succs (temp_bitmap[bb],
5464 post_dominators, bb);
5467 /* Make sure each block always post dominates itself. */
5468 SET_BIT (temp_bitmap[bb], bb);
5470 /* If the out state of this block changed, then we need to
5471 add the predecessors of this block to the worklist if they
5472 are not already on the worklist. */
5473 if (sbitmap_a_and_b (post_dominators[bb],
5474 post_dominators[bb],
5477 for (e = b->pred; e; e = e->pred_next)
5479 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5497 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
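/* A small worked example of the subtraction scheme used here (the block
   numbers are hypothetical): for a straight-line chain 0 -> 1 -> 2,
   dom (0) = {0}, dom (1) = {0,1}, dom (2) = {0,1,2}.  Then
   tmp (2) = {0,1}; subtracting tmp (1) = {0} leaves {1}, so the single
   remaining bit gives idom (2) = 1.  */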
5500 compute_immediate_dominators (idom, dominators)
5502 sbitmap *dominators;
5507 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5509 /* Begin with tmp(n) = dom(n) - { n }. */
5510 for (b = n_basic_blocks; --b >= 0; )
5512 sbitmap_copy (tmp[b], dominators[b]);
5513 RESET_BIT (tmp[b], b);
5516 /* Subtract out all of our dominators' dominators. */
5517 for (b = n_basic_blocks; --b >= 0; )
5519 sbitmap tmp_b = tmp[b];
5522 for (s = n_basic_blocks; --s >= 0; )
5523 if (TEST_BIT (tmp_b, s))
5524 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5527 /* Find the one bit set in the bitmap and put it in the output array. */
5528 for (b = n_basic_blocks; --b >= 0; )
5531 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5534 sbitmap_vector_free (tmp);
5537 /* Count for a single SET rtx, X. */
5540 count_reg_sets_1 (x, loop_depth)
5545 register rtx reg = SET_DEST (x);
5547 /* Find the register that's set/clobbered. */
5548 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5549 || GET_CODE (reg) == SIGN_EXTRACT
5550 || GET_CODE (reg) == STRICT_LOW_PART)
5551 reg = XEXP (reg, 0);
5553 if (GET_CODE (reg) == PARALLEL
5554 && GET_MODE (reg) == BLKmode)
5557 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5558 count_reg_sets_1 (XVECEXP (reg, 0, i), loop_depth);
5562 if (GET_CODE (reg) == REG)
5564 regno = REGNO (reg);
5565 if (regno >= FIRST_PSEUDO_REGISTER)
5567 /* Count (weighted) references, stores, etc. This counts a
5568 register twice if it is modified, but that is correct. */
5569 REG_N_SETS (regno)++;
5570 REG_N_REFS (regno) += loop_depth + 1;
5575 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5576 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5579 count_reg_sets (x, loop_depth)
5583 register RTX_CODE code = GET_CODE (x);
5585 if (code == SET || code == CLOBBER)
5586 count_reg_sets_1 (x, loop_depth);
5587 else if (code == PARALLEL)
5590 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5592 code = GET_CODE (XVECEXP (x, 0, i));
5593 if (code == SET || code == CLOBBER)
5594 count_reg_sets_1 (XVECEXP (x, 0, i), loop_depth);
5599 /* Increment REG_N_REFS by the current loop depth for each register reference
5603 count_reg_references (x, loop_depth)
5607 register RTX_CODE code;
5610 code = GET_CODE (x);
5630 /* If we are clobbering a MEM, mark any registers inside the address
5632 if (GET_CODE (XEXP (x, 0)) == MEM)
5633 count_reg_references (XEXP (XEXP (x, 0), 0), loop_depth);
5637 /* While we're here, optimize this case. */
5640 /* In case the SUBREG is not of a register, don't optimize */
5641 if (GET_CODE (x) != REG)
5643 count_reg_references (x, loop_depth);
5647 /* ... fall through ... */
5650 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5651 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5656 register rtx testreg = SET_DEST (x);
5659 /* If storing into MEM, don't show it as being used. But do
5660 show the address as being used. */
5661 if (GET_CODE (testreg) == MEM)
5663 count_reg_references (XEXP (testreg, 0), loop_depth);
5664 count_reg_references (SET_SRC (x), loop_depth);
5668 /* Storing in STRICT_LOW_PART is like storing in a reg
5669 in that this SET might be dead, so ignore it in TESTREG,
5670 but in some other ways it is like using the reg.
5672 Storing in a SUBREG or a bit field is like storing the entire
5673 register in that if the register's value is not used
5674 then this SET is not needed. */
5675 while (GET_CODE (testreg) == STRICT_LOW_PART
5676 || GET_CODE (testreg) == ZERO_EXTRACT
5677 || GET_CODE (testreg) == SIGN_EXTRACT
5678 || GET_CODE (testreg) == SUBREG)
5680 /* Modifying a single register in an alternate mode
5681 does not use any of the old value. But these other
5682 ways of storing in a register do use the old value. */
5683 if (GET_CODE (testreg) == SUBREG
5684 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5689 testreg = XEXP (testreg, 0);
5692 /* If this is a store into a register,
5693 recursively scan the value being stored. */
5695 if ((GET_CODE (testreg) == PARALLEL
5696 && GET_MODE (testreg) == BLKmode)
5697 || GET_CODE (testreg) == REG)
5699 count_reg_references (SET_SRC (x), loop_depth);
5701 count_reg_references (SET_DEST (x), loop_depth);
5711 /* Recursively scan the operands of this expression. */
5714 register const char *fmt = GET_RTX_FORMAT (code);
5717 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5721 /* Tail recursive case: save a function call level. */
5727 count_reg_references (XEXP (x, i), loop_depth);
5729 else if (fmt[i] == 'E')
5732 for (j = 0; j < XVECLEN (x, i); j++)
5733 count_reg_references (XVECEXP (x, i, j), loop_depth);
5739 /* Recompute register set/reference counts immediately prior to register
5742 This avoids problems with set/reference counts changing to/from values
5743 which have special meanings to the register allocators.
5745 Additionally, the reference counts are the primary component used by the
5746 register allocators to prioritize pseudos for allocation to hard regs.
5747 More accurate reference counts generally lead to better register allocation.
5749 F is the first insn to be scanned.
5751 LOOP_STEP denotes how much loop_depth should be incremented per
5752 loop nesting level in order to increase the ref count more for
5753 references in a loop.
5755 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5756 possibly other information which is used by the register allocators. */
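/* For example (the numbers are purely illustrative): with the weighting
   used by the counting helpers above, where each reference adds
   loop_depth + 1, a pseudo referenced once outside any loop and once
   inside a doubly nested loop ends up with REG_N_REFS = 1 + 3 = 4, and
   will therefore tend to be preferred by the allocators over a pseudo
   with only the single reference at depth zero.  */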
5759 recompute_reg_usage (f, loop_step)
5760 rtx f ATTRIBUTE_UNUSED;
5761 int loop_step ATTRIBUTE_UNUSED;
5768 /* Clear out the old data. */
5769 max_reg = max_reg_num ();
5770 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5776 /* Scan each insn in the chain and count how many times each register is
5778 for (index = 0; index < n_basic_blocks; index++)
5780 basic_block bb = BASIC_BLOCK (index);
5781 loop_depth = bb->loop_depth;
5782 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5784 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5788 /* This call will increment REG_N_SETS for each SET or CLOBBER
5789 of a register in INSN. It will also increment REG_N_REFS
5790 by the loop depth for each set of a register in INSN. */
5791 count_reg_sets (PATTERN (insn), loop_depth);
5793 /* count_reg_sets does not detect autoincrement address modes, so
5794 detect them here by looking at the notes attached to INSN. */
5795 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5797 if (REG_NOTE_KIND (links) == REG_INC)
5798 /* Count (weighted) references, stores, etc. This
5799 counts a register twice if it is modified, but
5801 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5804 /* This call will increment REG_N_REFS by the current loop depth
5805 for each reference to a register in INSN. */
5806 count_reg_references (PATTERN (insn), loop_depth);
5808 /* count_reg_references will not include counts for arguments to
5809 function calls, so detect them here by examining the
5810 CALL_INSN_FUNCTION_USAGE data. */
5811 if (GET_CODE (insn) == CALL_INSN)
5815 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5817 note = XEXP (note, 1))
5818 if (GET_CODE (XEXP (note, 0)) == USE)
5819 count_reg_references (XEXP (XEXP (note, 0), 0),
5823 if (insn == bb->end)
5829 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5830 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5831 of the number of registers that died. */
5834 count_or_remove_death_notes (blocks, kill)
5840 for (i = n_basic_blocks - 1; i >= 0; --i)
5845 if (blocks && ! TEST_BIT (blocks, i))
5848 bb = BASIC_BLOCK (i);
5850 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5852 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5854 rtx *pprev = &REG_NOTES (insn);
5859 switch (REG_NOTE_KIND (link))
5862 if (GET_CODE (XEXP (link, 0)) == REG)
5864 rtx reg = XEXP (link, 0);
5867 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5870 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5878 rtx next = XEXP (link, 1);
5879 free_EXPR_LIST_node (link);
5880 *pprev = link = next;
5886 pprev = &XEXP (link, 1);
5893 if (insn == bb->end)
5901 /* Record INSN's block as BB. */
5904 set_block_for_insn (insn, bb)
5908 size_t uid = INSN_UID (insn);
5909 if (uid >= basic_block_for_insn->num_elements)
5913 /* Add one-eighth the size so we don't keep calling xrealloc. */
5914 new_size = uid + (uid + 7) / 8;
5916 VARRAY_GROW (basic_block_for_insn, new_size);
5918 VARRAY_BB (basic_block_for_insn, uid) = bb;
5921 /* Record INSN's block number as BB. */
5922 /* ??? This has got to go. */
5925 set_block_num (insn, bb)
5929 set_block_for_insn (insn, BASIC_BLOCK (bb));
5932 /* Verify CFG consistency. This function checks some CFG invariants and
5933 aborts when something is wrong. We hope that this function will help to
5934 convert many optimization passes to preserve CFG consistency.
5936 Currently it does the following checks:
5938 - test head/end pointers
5939 - overlapping of basic blocks
5940 - edge list correctness
5941 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5942 - tails of basic blocks (ensure that the boundary is necessary)
5943 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5944 and NOTE_INSN_BASIC_BLOCK
5945 - check that all insns are in the basic blocks
5946 (except the switch handling code, barriers and notes)
5947 - check that all returns are followed by barriers
5949 In the future it can be extended to check a lot of other stuff as well
5950 (reachability of basic blocks, life information, etc. etc.). */
5955 const int max_uid = get_max_uid ();
5956 const rtx rtx_first = get_insns ();
5957 basic_block *bb_info;
5961 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5963 /* First pass check head/end pointers and set bb_info array used by
5965 for (i = n_basic_blocks - 1; i >= 0; i--)
5967 basic_block bb = BASIC_BLOCK (i);
5969 /* Check the head pointer and make sure that it is pointing into
5971 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5976 error ("Head insn %d for block %d not found in the insn stream.",
5977 INSN_UID (bb->head), bb->index);
5981 /* Check the end pointer and make sure that it is pointing into
5983 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5985 if (bb_info[INSN_UID (x)] != NULL)
5987 error ("Insn %d is in multiple basic blocks (%d and %d)",
5988 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5991 bb_info[INSN_UID (x)] = bb;
5998 error ("End insn %d for block %d not found in the insn stream.",
5999 INSN_UID (bb->end), bb->index);
6004 /* Now check the basic blocks (boundaries etc.) */
6005 for (i = n_basic_blocks - 1; i >= 0; i--)
6007 basic_block bb = BASIC_BLOCK (i);
6008 /* Check correctness of edge lists */
6016 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6018 fprintf (stderr, "Predecessor: ");
6019 dump_edge_info (stderr, e, 0);
6020 fprintf (stderr, "\nSuccessor: ");
6021 dump_edge_info (stderr, e, 1);
6025 if (e->dest != EXIT_BLOCK_PTR)
6027 edge e2 = e->dest->pred;
6028 while (e2 && e2 != e)
6032 error ("Basic block %i edge lists are corrupted", bb->index);
6044 error ("Basic block %d pred edge is corrupted", bb->index);
6045 fputs ("Predecessor: ", stderr);
6046 dump_edge_info (stderr, e, 0);
6047 fputs ("\nSuccessor: ", stderr);
6048 dump_edge_info (stderr, e, 1);
6049 fputc ('\n', stderr);
6052 if (e->src != ENTRY_BLOCK_PTR)
6054 edge e2 = e->src->succ;
6055 while (e2 && e2 != e)
6059 error ("Basic block %i edge lists are corrupted", bb->index);
6066 /* OK, pointers are correct. Now check the header of the basic
6067 block. It ought to contain an optional CODE_LABEL followed
6068 by NOTE_BASIC_BLOCK.
6070 if (GET_CODE (x) == CODE_LABEL)
6074 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6080 if (GET_CODE (x) != NOTE
6081 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6082 || NOTE_BASIC_BLOCK (x) != bb)
6084 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6091 /* Do checks for empty blocks here */
6098 if (GET_CODE (x) == NOTE
6099 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6101 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6102 INSN_UID (x), bb->index);
6109 if (GET_CODE (x) == JUMP_INSN
6110 || GET_CODE (x) == CODE_LABEL
6111 || GET_CODE (x) == BARRIER)
6113 error ("In basic block %d:", bb->index);
6114 fatal_insn ("Flow control insn inside a basic block", x);
6125 if (!bb_info[INSN_UID (x)])
6127 switch (GET_CODE (x))
6134 /* An addr_vec is placed outside any basic block. */
6136 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6137 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6138 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6143 /* But in any case, non-deletable labels can appear anywhere. */
6147 fatal_insn ("Insn outside basic block", x);
6151 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6152 && GET_CODE (x) == JUMP_INSN
6153 && returnjump_p (x) && ! condjump_p (x)
6154 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6155 fatal_insn ("Return not followed by barrier", x);
6167 /* Functions to access an edge list with a vector representation.
6168 Enough data is kept such that given an index number, the
6169 pred and succ that edge represents can be determined, or
6170 given a pred and a succ, its index number can be returned.
6171 This allows algorithms which consume a lot of memory to
6172 represent the normally full matrix of edge (pred,succ) with a
6173 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6174 wasted space in the client code due to sparse flow graphs. */
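/* A minimal usage sketch (hypothetical client code; some_pred, some_succ
   and per_edge_map are placeholders supplied by the client pass, the
   latter being an sbitmap with NUM_EDGES bits, one per edge):

       struct edge_list *elist = create_edge_list ();
       int i = EDGE_INDEX (elist, some_pred, some_succ);
       if (i != EDGE_INDEX_NO_EDGE)
         SET_BIT (per_edge_map, i);
       free_edge_list (elist);  */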
6176 /* This function initializes the edge list. Basically the entire
6177 flowgraph is processed, and all edges are assigned a number,
6178 and the data structure is filled in. */
6182 struct edge_list *elist;
6188 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6192 /* Determine the number of edges in the flow graph by counting successor
6193 edges on each basic block. */
6194 for (x = 0; x < n_basic_blocks; x++)
6196 basic_block bb = BASIC_BLOCK (x);
6198 for (e = bb->succ; e; e = e->succ_next)
6201 /* Don't forget successors of the entry block. */
6202 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6205 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6206 elist->num_blocks = block_count;
6207 elist->num_edges = num_edges;
6208 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6212 /* Follow successors of the entry block, and register these edges. */
6213 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6215 elist->index_to_edge[num_edges] = e;
6219 for (x = 0; x < n_basic_blocks; x++)
6221 basic_block bb = BASIC_BLOCK (x);
6223 /* Follow all successors of blocks, and register these edges. */
6224 for (e = bb->succ; e; e = e->succ_next)
6226 elist->index_to_edge[num_edges] = e;
6233 /* This function frees the memory associated with an edge list. */
6235 free_edge_list (elist)
6236 struct edge_list *elist;
6240 free (elist->index_to_edge);
6245 /* This function provides debug output showing an edge list. */
6247 print_edge_list (f, elist)
6249 struct edge_list *elist;
6252 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6253 elist->num_blocks - 2, elist->num_edges);
6255 for (x = 0; x < elist->num_edges; x++)
6257 fprintf (f, " %-4d - edge(", x);
6258 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6259 fprintf (f,"entry,");
6261 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6263 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6264 fprintf (f,"exit)\n");
6266 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6270 /* This function provides an internal consistency check of an edge list,
6271 verifying that all edges are present, and that there are no
6274 verify_edge_list (f, elist)
6276 struct edge_list *elist;
6278 int x, pred, succ, index;
6281 for (x = 0; x < n_basic_blocks; x++)
6283 basic_block bb = BASIC_BLOCK (x);
6285 for (e = bb->succ; e; e = e->succ_next)
6287 pred = e->src->index;
6288 succ = e->dest->index;
6289 index = EDGE_INDEX (elist, e->src, e->dest);
6290 if (index == EDGE_INDEX_NO_EDGE)
6292 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6295 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6296 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6297 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6298 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6299 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6300 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6303 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6305 pred = e->src->index;
6306 succ = e->dest->index;
6307 index = EDGE_INDEX (elist, e->src, e->dest);
6308 if (index == EDGE_INDEX_NO_EDGE)
6310 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6313 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6314 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6315 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6316 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6317 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6318 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6320 /* We've verified that all the edges are in the list, now let's make sure
6321 there are no spurious edges in the list. */
6323 for (pred = 0 ; pred < n_basic_blocks; pred++)
6324 for (succ = 0 ; succ < n_basic_blocks; succ++)
6326 basic_block p = BASIC_BLOCK (pred);
6327 basic_block s = BASIC_BLOCK (succ);
6331 for (e = p->succ; e; e = e->succ_next)
6337 for (e = s->pred; e; e = e->pred_next)
6343 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6344 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6345 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6347 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6348 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6349 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6350 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6351 BASIC_BLOCK (succ)));
6353 for (succ = 0 ; succ < n_basic_blocks; succ++)
6355 basic_block p = ENTRY_BLOCK_PTR;
6356 basic_block s = BASIC_BLOCK (succ);
6360 for (e = p->succ; e; e = e->succ_next)
6366 for (e = s->pred; e; e = e->pred_next)
6372 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6373 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6374 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6376 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6377 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6378 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6379 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6380 BASIC_BLOCK (succ)));
6382 for (pred = 0 ; pred < n_basic_blocks; pred++)
6384 basic_block p = BASIC_BLOCK (pred);
6385 basic_block s = EXIT_BLOCK_PTR;
6389 for (e = p->succ; e; e = e->succ_next)
6395 for (e = s->pred; e; e = e->pred_next)
6401 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6402 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6403 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6405 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6406 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6407 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6408 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6413 /* This routine will determine what, if any, edge there is between
6414 a specified predecessor and successor. */
6417 find_edge_index (edge_list, pred, succ)
6418 struct edge_list *edge_list;
6419 basic_block pred, succ;
6422 for (x = 0; x < NUM_EDGES (edge_list); x++)
6424 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6425 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6428 return (EDGE_INDEX_NO_EDGE);
6431 /* This function will remove an edge from the flow graph. */
6436 edge last_pred = NULL;
6437 edge last_succ = NULL;
6439 basic_block src, dest;
6442 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6448 last_succ->succ_next = e->succ_next;
6450 src->succ = e->succ_next;
6452 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6458 last_pred->pred_next = e->pred_next;
6460 dest->pred = e->pred_next;
6466 /* This routine will remove any fake successor edges for a basic block.
6467 When the edge is removed, it is also removed from whatever predecessor
6470 remove_fake_successors (bb)
6474 for (e = bb->succ; e ; )
6478 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6483 /* This routine will remove all fake edges from the flow graph. If
6484 we remove all fake successors, it will automatically remove all
6485 fake predecessors. */
6487 remove_fake_edges ()
6491 for (x = 0; x < n_basic_blocks; x++)
6492 remove_fake_successors (BASIC_BLOCK (x));
6494 /* We've handled all successors except the entry block's. */
6495 remove_fake_successors (ENTRY_BLOCK_PTR);
6498 /* This function will add a fake edge between any block which has no
6499 successors, and the exit block. Some data flow equations require these
6502 add_noreturn_fake_exit_edges ()
6506 for (x = 0; x < n_basic_blocks; x++)
6507 if (BASIC_BLOCK (x)->succ == NULL)
6508 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6511 /* Dump the list of basic blocks in the bitmap NODES. */
6513 flow_nodes_print (str, nodes, file)
6515 const sbitmap nodes;
6520 fprintf (file, "%s { ", str);
6521 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6522 fputs ("}\n", file);
6526 /* Dump the list of exiting edges in the array EDGES. */
6528 flow_exits_print (str, edges, num_edges, file)
6536 fprintf (file, "%s { ", str);
6537 for (i = 0; i < num_edges; i++)
6538 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6539 fputs ("}\n", file);
6543 /* Dump loop related CFG information. */
6545 flow_loops_cfg_dump (loops, file)
6546 const struct loops *loops;
6551 if (! loops->num || ! file || ! loops->cfg.dom)
6554 for (i = 0; i < n_basic_blocks; i++)
6558 fprintf (file, ";; %d succs { ", i);
6559 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6560 fprintf (file, "%d ", succ->dest->index);
6561 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6565 /* Dump the DFS node order. */
6566 if (loops->cfg.dfs_order)
6568 fputs (";; DFS order: ", file);
6569 for (i = 0; i < n_basic_blocks; i++)
6570 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6576 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6578 flow_loop_nested_p (outer, loop)
6582 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6586 /* Dump the loop information specified by LOOPS to the stream FILE. */
6588 flow_loops_dump (loops, file, verbose)
6589 const struct loops *loops;
6596 num_loops = loops->num;
6597 if (! num_loops || ! file)
6600 fprintf (file, ";; %d loops found, %d levels\n",
6601 num_loops, loops->levels);
6603 for (i = 0; i < num_loops; i++)
6605 struct loop *loop = &loops->array[i];
6607 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6608 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6609 loop->header->index, loop->latch->index,
6610 loop->pre_header ? loop->pre_header->index : -1,
6611 loop->depth, loop->level,
6612 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6613 fprintf (file, ";; %d", loop->num_nodes);
6614 flow_nodes_print (" nodes", loop->nodes, file);
6615 fprintf (file, ";; %d", loop->num_exits);
6616 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6622 for (j = 0; j < i; j++)
6624 struct loop *oloop = &loops->array[j];
6626 if (loop->header == oloop->header)
6631 smaller = loop->num_nodes < oloop->num_nodes;
6633 /* If the union of LOOP and OLOOP is different than
6634 the larger of LOOP and OLOOP then LOOP and OLOOP
6635 must be disjoint. */
6636 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6637 smaller ? oloop : loop);
6638 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6639 loop->header->index, i, j,
6640 disjoint ? "disjoint" : "nested");
6647 /* Print diagnostics to compare our concept of a loop with
6648 what the loop notes say. */
6649 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6650 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6651 != NOTE_INSN_LOOP_BEG)
6652 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6653 INSN_UID (PREV_INSN (loop->first->head)));
6654 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6655 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6656 != NOTE_INSN_LOOP_END)
6657 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6658 INSN_UID (NEXT_INSN (loop->last->end)));
6663 flow_loops_cfg_dump (loops, file);
6667 /* Free all the memory allocated for LOOPS. */
6669 flow_loops_free (loops)
6670 struct loops *loops;
6679 /* Free the loop descriptors. */
6680 for (i = 0; i < loops->num; i++)
6682 struct loop *loop = &loops->array[i];
6685 sbitmap_free (loop->nodes);
6689 free (loops->array);
6690 loops->array = NULL;
6693 sbitmap_vector_free (loops->cfg.dom);
6694 if (loops->cfg.dfs_order)
6695 free (loops->cfg.dfs_order);
6697 sbitmap_free (loops->shared_headers);
6702 /* Find the exits from the loop using the bitmap of loop nodes NODES
6703 and store in EXITS array. Return the number of exits from the
6706 flow_loop_exits_find (nodes, exits)
6707 const sbitmap nodes;
6716 /* Check all nodes within the loop to see if there are any
6717 successors not in the loop. Note that a node may have multiple
6720 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6721 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6723 basic_block dest = e->dest;
6725 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6733 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6735 /* Store all exiting edges into an array. */
6737 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6738 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6740 basic_block dest = e->dest;
6742 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6743 (*exits)[num_exits++] = e;
6751 /* Find the nodes contained within the loop with header HEADER and
6752 latch LATCH and store in NODES. Return the number of nodes within
6755 flow_loop_nodes_find (header, latch, nodes)
6764 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6767 /* Start with only the loop header in the set of loop nodes. */
6768 sbitmap_zero (nodes);
6769 SET_BIT (nodes, header->index);
6771 header->loop_depth++;
6773 /* Push the loop latch on to the stack. */
6774 if (! TEST_BIT (nodes, latch->index))
6776 SET_BIT (nodes, latch->index);
6777 latch->loop_depth++;
6779 stack[sp++] = latch;
6788 for (e = node->pred; e; e = e->pred_next)
6790 basic_block ancestor = e->src;
6792 /* If this ancestor is not yet marked as part of the loop, add it to
6793 the set of loop nodes and push it on to the stack. */
6794 if (ancestor != ENTRY_BLOCK_PTR
6795 && ! TEST_BIT (nodes, ancestor->index))
6797 SET_BIT (nodes, ancestor->index);
6798 ancestor->loop_depth++;
6800 stack[sp++] = ancestor;
6809 /* Compute the depth first search order and store in the array
6810 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6811 number of nodes visited. */
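/* Note on the numbering produced below (a summary of the code, not a
   separate specification): each block receives the value
   n_basic_blocks - completion_number, so blocks completed last in the
   depth first walk (outer loop headers, for instance) get the smallest
   values.  This is what lets flow_loops_find visit outer natural loops
   before the loops nested inside them.  */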
6813 flow_depth_first_order_compute (dfs_order)
6822 /* Allocate stack for back-tracking up CFG. */
6823 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6826 /* Allocate bitmap to track nodes that have been visited. */
6827 visited = sbitmap_alloc (n_basic_blocks);
6829 /* None of the nodes in the CFG have been visited yet. */
6830 sbitmap_zero (visited);
6832 /* Start with the first successor edge from the entry block. */
6833 e = ENTRY_BLOCK_PTR->succ;
6836 basic_block src = e->src;
6837 basic_block dest = e->dest;
6839 /* Mark that we have visited this node. */
6840 if (src != ENTRY_BLOCK_PTR)
6841 SET_BIT (visited, src->index);
6843 /* If this node has not been visited before, push the current
6844 edge on to the stack and proceed with the first successor
6845 edge of this node. */
6846 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6854 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6857 /* DEST has no successors (for example, a non-returning
6858 function is called) so do not push the current edge
6859 but carry on with its next successor. */
6860 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6861 SET_BIT (visited, dest->index);
6864 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6866 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6868 /* Pop edge off stack. */
6876 sbitmap_free (visited);
6878 /* The number of nodes visited should not be greater than
6880 if (dfsnum > n_basic_blocks)
6883 /* There are some nodes left in the CFG that are unreachable. */
6884 if (dfsnum < n_basic_blocks)
6890 /* Return the block for the pre-header of the loop with header
6891 HEADER where DOM specifies the dominator information. Return NULL if
6892 there is no pre-header. */
6894 flow_loop_pre_header_find (header, dom)
6898 basic_block pre_header;
6901 /* If block p is a predecessor of the header and is the only block
6902 that the header does not dominate, then it is the pre-header. */
6904 for (e = header->pred; e; e = e->pred_next)
6906 basic_block node = e->src;
6908 if (node != ENTRY_BLOCK_PTR
6909 && ! TEST_BIT (dom[node->index], header->index))
6911 if (pre_header == NULL)
6915 /* There are multiple edges into the header from outside
6916 the loop so there is no pre-header block. */
6926 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6927 previously added. The insertion algorithm assumes that the loops
6928 are added in the order found by a depth first search of the CFG. */
6930 flow_loop_tree_node_add (prevloop, loop)
6931 struct loop *prevloop;
6935 if (flow_loop_nested_p (prevloop, loop))
6937 prevloop->inner = loop;
6938 loop->outer = prevloop;
6942 while (prevloop->outer)
6944 if (flow_loop_nested_p (prevloop->outer, loop))
6946 prevloop->next = loop;
6947 loop->outer = prevloop->outer;
6950 prevloop = prevloop->outer;
6953 prevloop->next = loop;
6958 /* Build the loop hierarchy tree for LOOPS. */
6960 flow_loops_tree_build (loops)
6961 struct loops *loops;
6966 num_loops = loops->num;
6970 /* Root the loop hierarchy tree with the first loop found.
6971 Since we used a depth first search this should be the
6973 loops->tree = &loops->array[0];
6974 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6976 /* Add the remaining loops to the tree. */
6977 for (i = 1; i < num_loops; i++)
6978 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
/* Helper function to compute loop nesting depth and enclosed loop level
   for the natural loop specified by LOOP at the loop depth DEPTH.
   Returns the loop level.  */
static int
flow_loop_level_compute (loop, depth)
     struct loop *loop;
     int depth;
{
  struct loop *inner;
  int level = 1;

  /* Traverse loop tree assigning depth and computing level as the
     maximum level of all the inner loops of this loop.  The loop
     level is equivalent to the height of the loop in the loop tree
     and corresponds to the number of enclosed loop levels (including
     itself).  */
  for (inner = loop->inner; inner; inner = inner->next)
    {
      int ilevel = flow_loop_level_compute (inner, depth + 1) + 1;

      if (ilevel > level)
        level = ilevel;
    }
  loop->level = level;
  loop->depth = depth;
  return level;
}

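/* Worked example (illustrative only): for a nest L1 { L2 { L3 } },
   flow_loop_level_compute (L1, 1) assigns

     L3: depth 3, level 1
     L2: depth 2, level 2
     L1: depth 1, level 3

   i.e. depth counts enclosing loops from the outside in, while level is
   the height of the loop in the loop hierarchy tree.  */
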
/* Compute the loop nesting depth and enclosed loop level for the loop
   hierarchy tree specified by LOOPS.  Return the maximum enclosed loop
   level.  */
static int
flow_loops_level_compute (loops)
     struct loops *loops;
{
  struct loop *loop;
  int level;
  int levels = 0;

  /* Traverse all the outer level loops.  */
  for (loop = loops->tree; loop; loop = loop->next)
    {
      level = flow_loop_level_compute (loop, 1);
      if (level > levels)
        levels = level;
    }
  return levels;
}

/* Find all the natural loops in the function and save in LOOPS structure
   and recalculate loop_depth information in basic block structures.
   Return the number of natural loops found.  */

int
flow_loops_find (loops)
     struct loops *loops;
{
  int i;
  int b;
  int num_loops;
  edge e;
  sbitmap headers;
  sbitmap *dom;
  int *dfs_order;

  loops->num = 0;
  loops->array = NULL;
  dfs_order = NULL;

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks == 0)
    return 0;

  /* Compute the dominators.  */
  dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  compute_flow_dominators (dom, NULL);

  /* Count the number of loop edges (back edges).  This should be the
     same as the number of natural loops.  Also clear the loop_depth;
     as we work from inner->outer in a loop nest we call
     flow_loop_nodes_find, which will increment loop_depth for nodes
     within the current loop, which happens to enclose inner loops.  */
  num_loops = 0;
  for (b = 0; b < n_basic_blocks; b++)
    {
      BASIC_BLOCK (b)->loop_depth = 0;
      for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
        {
          basic_block latch = e->src;

          /* Look for back edges where a predecessor is dominated
             by this block.  A natural loop has a single entry
             node (header) that dominates all the nodes in the
             loop.  It also has a single back edge to the header
             from a latch node.  Note that multiple natural loops
             may share the same header.  */
          if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
            num_loops++;
        }
    }

  if (num_loops)
    {
      /* Compute depth first search order of the CFG so that outer
         natural loops will be found before inner natural loops.  */
      dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      flow_depth_first_order_compute (dfs_order);

      /* Allocate loop structures.  */
      loops->array
        = (struct loop *) xcalloc (num_loops, sizeof (struct loop));

      headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (headers);

      loops->shared_headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (loops->shared_headers);

      /* Find and record information about all the natural loops
         in the CFG.  */
      num_loops = 0;
      for (b = 0; b < n_basic_blocks; b++)
        {
          basic_block header;

          /* Search the nodes of the CFG in DFS order so that we can
             find outer loops first.  */
          header = BASIC_BLOCK (dfs_order[b]);

          /* Look for all the possible latch blocks for this header.  */
          for (e = header->pred; e; e = e->pred_next)
            {
              basic_block latch = e->src;

              /* Look for back edges where a predecessor is dominated
                 by this block.  A natural loop has a single entry
                 node (header) that dominates all the nodes in the
                 loop.  It also has a single back edge to the header
                 from a latch node.  Note that multiple natural loops
                 may share the same header.  */
              if (latch != ENTRY_BLOCK_PTR
                  && TEST_BIT (dom[latch->index], header->index))
                {
                  struct loop *loop;

                  loop = loops->array + num_loops;

                  loop->header = header;
                  loop->latch = latch;

                  /* Keep track of blocks that are loop headers so
                     that we can tell which loops should be merged.  */
                  if (TEST_BIT (headers, header->index))
                    SET_BIT (loops->shared_headers, header->index);
                  SET_BIT (headers, header->index);

                  /* Find nodes contained within the loop.  */
                  loop->nodes = sbitmap_alloc (n_basic_blocks);
                  loop->num_nodes
                    = flow_loop_nodes_find (header, latch, loop->nodes);

                  /* Compute first and last blocks within the loop.
                     These are often the same as the loop header and
                     loop latch respectively, but this is not always
                     the case.  */
                  loop->first
                    = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
                  loop->last
                    = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));

                  /* Find edges which exit the loop.  Note that a node
                     may have several exit edges.  */
                  loop->num_exits
                    = flow_loop_exits_find (loop->nodes, &loop->exits);

                  /* Look to see if the loop has a pre-header node.  */
                  loop->pre_header
                    = flow_loop_pre_header_find (header, dom);

                  num_loops++;
                }
            }
        }

      /* Natural loops with shared headers may either be disjoint or
         nested.  Disjoint loops with shared headers cannot be inner
         loops and should be merged.  For now just mark loops that share
         headers.  */
      for (i = 0; i < num_loops; i++)
        if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
          loops->array[i].shared = 1;

      sbitmap_free (headers);
    }

  loops->num = num_loops;

  /* Save CFG derived information to avoid recomputing it.  */
  loops->cfg.dom = dom;
  loops->cfg.dfs_order = dfs_order;

  /* Build the loop hierarchy tree.  */
  flow_loops_tree_build (loops);

  /* Assign the loop nesting depth and enclosed loop level for each
     loop.  */
  loops->levels = flow_loops_level_compute (loops);

  return num_loops;
}

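/* Usage sketch (illustrative only; the function name below is hypothetical
   and the body only reads fields recorded by flow_loops_find above): a
   client fills a struct loops and walks the array of natural loops.  */
#if 0
static void
example_walk_natural_loops ()
{
  struct loops loops;
  int num = flow_loops_find (&loops);
  int i;

  for (i = 0; i < num; i++)
    {
      struct loop *loop = &loops.array[i];

      /* loop->header, loop->latch, loop->pre_header, loop->num_nodes,
         loop->num_exits, loop->depth and loop->level were all recorded
         by flow_loops_find and flow_loops_level_compute.  */
    }
}
#endif
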
/* Return non-zero if edge E enters header of LOOP from outside of LOOP.  */
int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     const edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}

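/* Illustrative sketch (not part of the pass; the name is hypothetical):
   counting the edges that enter LOOP from outside, e.g. to decide whether
   a pre-header could be inserted.  LOOP is assumed to come from the loops
   array built by flow_loops_find above.  */
#if 0
static int
example_count_loop_entries (loop)
     const struct loop *loop;
{
  edge e;
  int n_entries = 0;

  for (e = loop->header->pred; e; e = e->pred_next)
    if (flow_loop_outside_edge_p (loop, e))
      n_entries++;
  return n_entries;
}
#endif
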
/* Clear LOG_LINKS fields of insns in a chain.  */
static void
clear_log_links (insns)
     rtx insns;
{
  rtx i;

  for (i = insns; i; i = NEXT_INSN (i))
    if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
      LOG_LINKS (i) = 0;
}