1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option) any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
** life_analysis **

46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
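   For example (an illustrative sketch, not code from this file), a later
   pass that wants to know whether register REGNO is live on entry to
   basic block number BLOCK can test the bit directly, assuming the usual
   regset accessor from basic-block.h:

	if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, regno))
	  ... REGNO carries a live value into the block ...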
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial REG_DEAD notes.
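   As an illustration (again not code from this file), a pass can ask
   whether register REGNO dies in INSN by searching the insn's notes,
   assuming the usual helper from rtlanal.c:

	rtx note = find_regno_note (insn, REG_DEAD, regno);
	if (note != NULL_RTX)
	  ... the value in REGNO is not needed after INSN ...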
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
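   A minimal sketch of that scan (for illustration only -- propagate_block
   below is the real and far more careful implementation; LIVE is a scratch
   regset, BB the block being scanned, and record_one_set stands for a
   hypothetical note_stores callback that would set the bit for a REG
   destination):

	COPY_REG_SET (live, bb->global_live_at_start);
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	      {
		note_stores (PATTERN (insn), record_one_set, live);
		for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
		  if ((REG_NOTE_KIND (note) == REG_DEAD
		       || REG_NOTE_KIND (note) == REG_UNUSED)
		      && GET_CODE (XEXP (note, 0)) == REG)
		    CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));
	      }
	    if (insn == bb->end)
	      break;
	  }

   After each iteration LIVE holds the registers live just after INSN,
   modulo the multi-word register subtleties described above.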
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value that is never used.
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
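   For example (illustrative only; these accessors are the regs.h macros
   that read the vectors named above), a later pass might test

	if (REG_N_SETS (regno) == 1 && REG_N_CALLS_CROSSED (regno) == 0)
	  ... REGNO is set exactly once and is never live across a call ...

   when deciding how aggressively a pseudo register can be treated.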
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation
127 #include "basic-block.h"
128 #include "insn-config.h"
130 #include "hard-reg-set.h"
133 #include "function.h"
137 #include "insn-flags.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
164 /* The contents of the current function definition are allocated
165 in this obstack, and all are freed at the end of the function.
166 For top-level functions, this is temporary_obstack.
167 Separate obstacks are made for nested functions. */
169 extern struct obstack *function_obstack;
171 /* Number of basic blocks in the current function. */
175 /* Number of edges in the current function. */
179 /* The basic block array. */
181 varray_type basic_block_info;
183 /* The special entry and exit blocks. */
185 struct basic_block_def entry_exit_blocks[2]
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
194 ENTRY_BLOCK, /* index */
196 -1, -1 /* eh_beg, eh_end */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
207 EXIT_BLOCK, /* index */
209 -1, -1 /* eh_beg, eh_end */
213 /* Nonzero if the second flow pass has completed. */
216 /* Maximum register number used in this function, plus one. */
220 /* Indexed by n, giving various register information */
222 varray_type reg_n_info;
224 /* Size of a regset for the current function,
225 in (1) bytes and (2) elements. */
230 /* Regset of regs live when calls to `setjmp'-like functions happen. */
231 /* ??? Does this exist only for the setjmp-clobbered warning message? */
233 regset regs_live_at_setjmp;
235 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
236 that have to go in the same hard reg.
237 The first two regs in the list are a pair, and the next two
238 are another pair, etc. */
241 /* Set of registers that may be eliminable. These are handled specially
242 in updating regs_ever_live. */
244 static HARD_REG_SET elim_reg_set;
246 /* The basic block structure for every insn, indexed by uid. */
248 varray_type basic_block_for_insn;
250 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
251 /* ??? Should probably be using LABEL_NUSES instead. It would take a
252 bit of surgery to be able to use or co-opt the routines in jump. */
254 static rtx label_value_list;
256 /* For use in communicating between propagate_block and its subroutines.
257 Holds all information needed to compute life and def-use information. */
259 struct propagate_block_info
261 /* The basic block we're considering. */
264 /* Bit N is set if register N is conditionally or unconditionally live. */
267 /* Bit N is set if register N is unconditionally dead this insn. */
270 /* Bit N is set if register N is live this insn. */
273 /* Element N is the next insn that uses (hard or pseudo) register N
274 within the current basic block; or zero, if there is no such insn. */
277 /* Contains a list of all the MEMs we are tracking for dead store elimination. */
281 /* If non-null, record the set of registers set in the basic block. */
284 /* Non-zero if the value of CC0 is live. */
287 /* Flags controlling the set of information propagate_block collects. */
291 /* Forward declarations */
292 static int count_basic_blocks PARAMS ((rtx));
293 static rtx find_basic_blocks_1 PARAMS ((rtx));
294 static void clear_edges PARAMS ((void));
295 static void make_edges PARAMS ((rtx));
296 static void make_label_edge PARAMS ((sbitmap *, basic_block,
298 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
299 basic_block, rtx, int));
300 static void mark_critical_edges PARAMS ((void));
301 static void move_stray_eh_region_notes PARAMS ((void));
302 static void record_active_eh_regions PARAMS ((rtx));
304 static void commit_one_edge_insertion PARAMS ((edge));
306 static void delete_unreachable_blocks PARAMS ((void));
307 static void delete_eh_regions PARAMS ((void));
308 static int can_delete_note_p PARAMS ((rtx));
309 static void expunge_block PARAMS ((basic_block));
310 static int can_delete_label_p PARAMS ((rtx));
311 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
313 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
315 static int merge_blocks PARAMS ((edge, basic_block, basic_block));
316 static void try_merge_blocks PARAMS ((void));
317 static void tidy_fallthru_edges PARAMS ((void));
318 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
319 static void verify_wide_reg PARAMS ((int, rtx, rtx));
320 static void verify_local_live_at_start PARAMS ((regset, basic_block));
321 static int set_noop_p PARAMS ((rtx));
322 static int noop_move_p PARAMS ((rtx));
323 static void delete_noop_moves PARAMS ((rtx));
324 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
325 static void notice_stack_pointer_modification PARAMS ((rtx));
326 static void mark_reg PARAMS ((rtx, void *));
327 static void mark_regs_live_at_end PARAMS ((regset));
328 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
329 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
330 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
331 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
332 static int insn_dead_p PARAMS ((struct propagate_block_info *,
334 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
336 static void mark_set_regs PARAMS ((struct propagate_block_info *,
338 static void mark_set_1 PARAMS ((struct propagate_block_info *,
339 enum rtx_code, rtx, rtx,
342 static void find_auto_inc PARAMS ((struct propagate_block_info *,
344 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
346 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
348 static void mark_used_reg PARAMS ((struct propagate_block_info *,
350 static void mark_used_regs PARAMS ((struct propagate_block_info *,
352 void dump_flow_info PARAMS ((FILE *));
353 void debug_flow_info PARAMS ((void));
354 static void dump_edge_info PARAMS ((FILE *, edge, int));
356 static void count_reg_sets_1 PARAMS ((rtx, int));
357 static void count_reg_sets PARAMS ((rtx, int));
358 static void count_reg_references PARAMS ((rtx, int));
359 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
361 static void remove_fake_successors PARAMS ((basic_block));
362 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
363 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
364 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
365 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
366 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
367 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
368 static int flow_depth_first_order_compute PARAMS ((int *));
369 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
370 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
371 static void flow_loops_tree_build PARAMS ((struct loops *));
372 static int flow_loop_level_compute PARAMS ((struct loop *, int));
373 static int flow_loops_level_compute PARAMS ((struct loops *));
375 /* Find basic blocks of the current function.
376 F is the first insn of the function and NREGS the number of register numbers in use.
380 find_basic_blocks (f, nregs, file)
382 int nregs ATTRIBUTE_UNUSED;
383 FILE *file ATTRIBUTE_UNUSED;
387 /* Flush out existing data. */
388 if (basic_block_info != NULL)
394 /* Clear bb->aux on all extant basic blocks. We'll use this as a
395 tag for reuse during create_basic_block, just in case some pass
396 copies around basic block notes improperly. */
397 for (i = 0; i < n_basic_blocks; ++i)
398 BASIC_BLOCK (i)->aux = NULL;
400 VARRAY_FREE (basic_block_info);
403 n_basic_blocks = count_basic_blocks (f);
405 /* Size the basic block table. The actual structures will be allocated
406 by find_basic_blocks_1, since we want to keep the structure pointers
407 stable across calls to find_basic_blocks. */
408 /* ??? This whole issue would be much simpler if we called find_basic_blocks
409 exactly once, and thereafter we don't have a single long chain of
410 instructions at all until close to the end of compilation when we
411 actually lay them out. */
413 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
415 label_value_list = find_basic_blocks_1 (f);
417 /* Record the block to which an insn belongs. */
418 /* ??? This should be done another way, by which (perhaps) a label is
419 tagged directly with the basic block that it starts. It is used for
420 more than that currently, but IMO that is the only valid use. */
422 max_uid = get_max_uid ();
424 /* Leave space for insns life_analysis makes in some cases for auto-inc.
425 These cases are rare, so we don't need too much space. */
426 max_uid += max_uid / 10;
429 compute_bb_for_insn (max_uid);
431 /* Discover the edges of our cfg. */
432 record_active_eh_regions (f);
433 make_edges (label_value_list);
435 /* Do very simple cleanup now, for the benefit of code that runs between
436 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
437 tidy_fallthru_edges ();
439 mark_critical_edges ();
441 #ifdef ENABLE_CHECKING
446 /* Count the basic blocks of the function. */
449 count_basic_blocks (f)
453 register RTX_CODE prev_code;
454 register int count = 0;
456 int call_had_abnormal_edge = 0;
458 prev_code = JUMP_INSN;
459 for (insn = f; insn; insn = NEXT_INSN (insn))
461 register RTX_CODE code = GET_CODE (insn);
463 if (code == CODE_LABEL
464 || (GET_RTX_CLASS (code) == 'i'
465 && (prev_code == JUMP_INSN
466 || prev_code == BARRIER
467 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
470 /* Record whether this call created an edge. */
471 if (code == CALL_INSN)
473 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
474 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
476 call_had_abnormal_edge = 0;
478 /* If there is an EH region or rethrow, we have an edge. */
479 if ((eh_region && region > 0)
480 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
481 call_had_abnormal_edge = 1;
482 else if (nonlocal_goto_handler_labels && region >= 0)
483 /* If there is a nonlocal goto label and the specified
484 region number isn't -1, we have an edge. (0 means
485 no throw, but might have a nonlocal goto). */
486 call_had_abnormal_edge = 1;
491 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
493 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
497 /* The rest of the compiler works a bit more smoothly when we don't have to
498 check for the edge case of do-nothing functions with no basic blocks. */
501 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
508 /* Find all basic blocks of the function whose first insn is F.
510 Collect and return a list of labels whose addresses are taken. This
511 will be used in make_edges for use with computed gotos. */
514 find_basic_blocks_1 (f)
517 register rtx insn, next;
519 rtx bb_note = NULL_RTX;
520 rtx eh_list = NULL_RTX;
521 rtx label_value_list = NULL_RTX;
525 /* We process the instructions in a slightly different way than we did
526 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
527 closed out the previous block, so that it gets attached at the proper
528 place. Since this form should be equivalent to the previous,
529 count_basic_blocks continues to use the old form as a check. */
531 for (insn = f; insn; insn = next)
533 enum rtx_code code = GET_CODE (insn);
535 next = NEXT_INSN (insn);
541 int kind = NOTE_LINE_NUMBER (insn);
543 /* Keep a LIFO list of the currently active exception notes. */
544 if (kind == NOTE_INSN_EH_REGION_BEG)
545 eh_list = alloc_INSN_LIST (insn, eh_list);
546 else if (kind == NOTE_INSN_EH_REGION_END)
550 eh_list = XEXP (eh_list, 1);
551 free_INSN_LIST_node (t);
554 /* Look for basic block notes with which to keep the
555 basic_block_info pointers stable. Unthread the note now;
556 we'll put it back at the right place in create_basic_block.
557 Or not at all if we've already found a note in this block. */
558 else if (kind == NOTE_INSN_BASIC_BLOCK)
560 if (bb_note == NULL_RTX)
563 next = flow_delete_insn (insn);
569 /* A basic block starts at a label. If we've closed one off due
570 to a barrier or some such, no need to do it again. */
571 if (head != NULL_RTX)
573 /* While we now have edge lists with which other portions of
574 the compiler might determine a call ending a basic block
575 does not imply an abnormal edge, it will be a bit before
576 everything can be updated. So continue to emit a noop at
577 the end of such a block. */
578 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
580 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
581 end = emit_insn_after (nop, end);
584 create_basic_block (i++, head, end, bb_note);
592 /* A basic block ends at a jump. */
593 if (head == NULL_RTX)
597 /* ??? Make a special check for table jumps. The way this
598 happens is truly and amazingly gross. We are about to
599 create a basic block that contains just a code label and
600 an addr*vec jump insn. Worse, an addr_diff_vec creates
601 its own natural loop.
603 Prevent this bit of brain damage, pasting things together
604 correctly in make_edges.
606 The correct solution involves emitting the table directly
607 on the tablejump instruction as a note, or JUMP_LABEL. */
609 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
610 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
618 goto new_bb_inclusive;
621 /* A basic block ends at a barrier. It may be that an unconditional
622 jump already closed the basic block -- no need to do it again. */
623 if (head == NULL_RTX)
626 /* While we now have edge lists with which other portions of the
627 compiler might determine a call ending a basic block does not
628 imply an abnormal edge, it will be a bit before everything can
629 be updated. So continue to emit a noop at the end of such a block. */
631 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
633 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
634 end = emit_insn_after (nop, end);
636 goto new_bb_exclusive;
640 /* Record whether this call created an edge. */
641 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
642 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
643 int call_has_abnormal_edge = 0;
645 /* If there is an EH region or rethrow, we have an edge. */
646 if ((eh_list && region > 0)
647 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
648 call_has_abnormal_edge = 1;
649 else if (nonlocal_goto_handler_labels && region >= 0)
650 /* If there is a nonlocal goto label and the specified
651 region number isn't -1, we have an edge. (0 means
652 no throw, but might have a nonlocal goto). */
653 call_has_abnormal_edge = 1;
655 /* A basic block ends at a call that can either throw or
656 do a non-local goto. */
657 if (call_has_abnormal_edge)
660 if (head == NULL_RTX)
665 create_basic_block (i++, head, end, bb_note);
666 head = end = NULL_RTX;
674 if (GET_RTX_CLASS (code) == 'i')
676 if (head == NULL_RTX)
683 if (GET_RTX_CLASS (code) == 'i')
687 /* Make a list of all labels referred to other than by jumps
688 (which just don't have the REG_LABEL notes).
690 Make a special exception for labels followed by an ADDR*VEC,
691 as this would be a part of the tablejump setup code.
693 Make a special exception for the eh_return_stub_label, which
694 we know isn't part of any otherwise visible control flow. */
696 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
697 if (REG_NOTE_KIND (note) == REG_LABEL)
699 rtx lab = XEXP (note, 0), next;
701 if (lab == eh_return_stub_label)
703 else if ((next = next_nonnote_insn (lab)) != NULL
704 && GET_CODE (next) == JUMP_INSN
705 && (GET_CODE (PATTERN (next)) == ADDR_VEC
706 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
710 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
715 if (head != NULL_RTX)
716 create_basic_block (i++, head, end, bb_note);
718 if (i != n_basic_blocks)
721 return label_value_list;
724 /* Tidy the CFG by deleting unreachable code and whatnot. */
730 delete_unreachable_blocks ();
731 move_stray_eh_region_notes ();
732 record_active_eh_regions (f);
734 mark_critical_edges ();
736 /* Kill the data we won't maintain. */
737 label_value_list = NULL_RTX;
740 /* Create a new basic block consisting of the instructions between
741 HEAD and END inclusive. Reuses the note and basic block struct
742 in BB_NOTE, if any. */
745 create_basic_block (index, head, end, bb_note)
747 rtx head, end, bb_note;
752 && ! RTX_INTEGRATED_P (bb_note)
753 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
756 /* If we found an existing note, thread it back onto the chain. */
758 if (GET_CODE (head) == CODE_LABEL)
759 add_insn_after (bb_note, head);
762 add_insn_before (bb_note, head);
768 /* Otherwise we must create a note and a basic block structure.
769 Since we allow basic block structs in rtl, give the struct
770 the same lifetime by allocating it off the function obstack
771 rather than using malloc. */
773 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
774 memset (bb, 0, sizeof (*bb));
776 if (GET_CODE (head) == CODE_LABEL)
777 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
780 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
783 NOTE_BASIC_BLOCK (bb_note) = bb;
786 /* Always include the bb note in the block. */
787 if (NEXT_INSN (end) == bb_note)
793 BASIC_BLOCK (index) = bb;
795 /* Tag the block so that we know it has been used when considering
796 other basic block notes. */
800 /* Records the basic block struct in BB_FOR_INSN, for every instruction
801 indexed by INSN_UID. MAX is the size of the array. */
804 compute_bb_for_insn (max)
809 if (basic_block_for_insn)
810 VARRAY_FREE (basic_block_for_insn);
811 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
813 for (i = 0; i < n_basic_blocks; ++i)
815 basic_block bb = BASIC_BLOCK (i);
822 int uid = INSN_UID (insn);
824 VARRAY_BB (basic_block_for_insn, uid) = bb;
827 insn = NEXT_INSN (insn);
832 /* Free the memory associated with the edge structures. */
840 for (i = 0; i < n_basic_blocks; ++i)
842 basic_block bb = BASIC_BLOCK (i);
844 for (e = bb->succ; e ; e = n)
854 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
860 ENTRY_BLOCK_PTR->succ = 0;
861 EXIT_BLOCK_PTR->pred = 0;
866 /* Identify the edges between basic blocks.
868 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
869 that are otherwise unreachable may be reachable with a non-local goto.
871 BB_EH_END is an array indexed by basic block number in which we record
872 the list of exception regions active at the end of the basic block. */
875 make_edges (label_value_list)
876 rtx label_value_list;
879 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
880 sbitmap *edge_cache = NULL;
882 /* Assume no computed jump; revise as we create edges. */
883 current_function_has_computed_jump = 0;
885 /* Heavy use of computed goto in machine-generated code can lead to
886 nearly fully-connected CFGs. In that case we spend a significant
887 amount of time searching the edge lists for duplicates. */
888 if (forced_labels || label_value_list)
890 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
891 sbitmap_vector_zero (edge_cache, n_basic_blocks);
894 /* By nature of the way these get numbered, block 0 is always the entry. */
895 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
897 for (i = 0; i < n_basic_blocks; ++i)
899 basic_block bb = BASIC_BLOCK (i);
902 int force_fallthru = 0;
904 /* Examine the last instruction of the block, and discover the
905 ways we can leave the block. */
908 code = GET_CODE (insn);
911 if (code == JUMP_INSN)
915 /* ??? Recognize a tablejump and do the right thing. */
916 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
917 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
918 && GET_CODE (tmp) == JUMP_INSN
919 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
920 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
925 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
926 vec = XVEC (PATTERN (tmp), 0);
928 vec = XVEC (PATTERN (tmp), 1);
930 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
931 make_label_edge (edge_cache, bb,
932 XEXP (RTVEC_ELT (vec, j), 0), 0);
934 /* Some targets (e.g., ARM) emit a conditional jump that also
935 contains the out-of-range target. Scan for these and
936 add an edge if necessary. */
937 if ((tmp = single_set (insn)) != NULL
938 && SET_DEST (tmp) == pc_rtx
939 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
940 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
941 make_label_edge (edge_cache, bb,
942 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
944 #ifdef CASE_DROPS_THROUGH
945 /* Silly VAXen. The ADDR_VEC is going to be in the way of
946 us naturally detecting fallthru into the next block. */
951 /* If this is a computed jump, then mark it as reaching
952 everything on the label_value_list and forced_labels list. */
953 else if (computed_jump_p (insn))
955 current_function_has_computed_jump = 1;
957 for (x = label_value_list; x; x = XEXP (x, 1))
958 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
960 for (x = forced_labels; x; x = XEXP (x, 1))
961 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
964 /* A return jump creates an edge out to the exit block. */
965 else if (returnjump_p (insn))
966 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
968 /* Otherwise, we have a plain conditional or unconditional jump. */
971 if (! JUMP_LABEL (insn))
973 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
977 /* If this is a sibling call insn, then this is in effect a
978 combined call and return, and so we need an edge to the
979 exit block. No need to worry about EH edges, since we
980 wouldn't have created the sibling call in the first place. */
982 if (code == CALL_INSN && SIBLING_CALL_P (insn))
983 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
986 /* If this is a CALL_INSN, then mark it as reaching the active EH
987 handler for this CALL_INSN. If we're handling asynchronous
988 exceptions then any insn can reach any of the active handlers.
990 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
992 if (code == CALL_INSN || asynchronous_exceptions)
994 /* Add any appropriate EH edges. We do this unconditionally
995 since there may be a REG_EH_REGION or REG_EH_RETHROW note
996 on the call, and this needn't be within an EH region. */
997 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
999 /* If we have asynchronous exceptions, do the same for *all*
1000 exception regions active in the block. */
1001 if (asynchronous_exceptions
1002 && bb->eh_beg != bb->eh_end)
1004 if (bb->eh_beg >= 0)
1005 make_eh_edge (edge_cache, eh_nest_info, bb,
1006 NULL_RTX, bb->eh_beg);
1008 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1009 if (GET_CODE (x) == NOTE
1010 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1011 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1013 int region = NOTE_EH_HANDLER (x);
1014 make_eh_edge (edge_cache, eh_nest_info, bb,
1019 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1021 /* ??? This could be made smarter: in some cases it's possible
1022 to tell that certain calls will not do a nonlocal goto.
1024 For example, if the nested functions that do the nonlocal
1025 gotos do not have their addresses taken, then only calls to
1026 those functions or to other nested functions that use them
1027 could possibly do nonlocal gotos. */
1028 /* We do know that a REG_EH_REGION note with a value less
1029 than 0 is guaranteed not to perform a non-local goto. */
1030 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1031 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1032 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1033 make_label_edge (edge_cache, bb, XEXP (x, 0),
1034 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1038 /* We know something about the structure of the function __throw in
1039 libgcc2.c. It is the only function that ever contains eh_stub
1040 labels. It modifies its return address so that the last block
1041 returns to one of the eh_stub labels within it. So we have to
1042 make additional edges in the flow graph. */
1043 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1044 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1046 /* Find out if we can drop through to the next block. */
1047 insn = next_nonnote_insn (insn);
1048 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1049 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1050 else if (i + 1 < n_basic_blocks)
1052 rtx tmp = BLOCK_HEAD (i + 1);
1053 if (GET_CODE (tmp) == NOTE)
1054 tmp = next_nonnote_insn (tmp);
1055 if (force_fallthru || insn == tmp)
1056 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1060 free_eh_nesting_info (eh_nest_info);
1062 sbitmap_vector_free (edge_cache);
1065 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1066 about the edge that is accumulated between calls. */
1069 make_edge (edge_cache, src, dst, flags)
1070 sbitmap *edge_cache;
1071 basic_block src, dst;
1077 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1078 many edges to them, and we didn't allocate memory for it. */
1079 use_edge_cache = (edge_cache
1080 && src != ENTRY_BLOCK_PTR
1081 && dst != EXIT_BLOCK_PTR);
1083 /* Make sure we don't add duplicate edges. */
1084 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1085 for (e = src->succ; e ; e = e->succ_next)
1092 e = (edge) xcalloc (1, sizeof (*e));
1095 e->succ_next = src->succ;
1096 e->pred_next = dst->pred;
1105 SET_BIT (edge_cache[src->index], dst->index);
1108 /* Create an edge from a basic block to a label. */
1111 make_label_edge (edge_cache, src, label, flags)
1112 sbitmap *edge_cache;
1117 if (GET_CODE (label) != CODE_LABEL)
1120 /* If the label was never emitted, this insn is junk, but avoid a
1121 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1122 as a result of a syntax error and a diagnostic has already been printed. */
1125 if (INSN_UID (label) == 0)
1128 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1131 /* Create the edges generated by INSN in REGION. */
1134 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1135 sbitmap *edge_cache;
1136 eh_nesting_info *eh_nest_info;
1141 handler_info **handler_list;
1144 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1145 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1148 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1149 EDGE_ABNORMAL | EDGE_EH | is_call);
1153 /* EH_REGION notes appearing between basic blocks are ambiguous, and even
1154 dangerous if we intend to move basic blocks around. Move such notes
1155 into the following block. */
1158 move_stray_eh_region_notes ()
1163 if (n_basic_blocks < 2)
1166 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1167 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1169 rtx insn, next, list = NULL_RTX;
1171 b1 = BASIC_BLOCK (i);
1172 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1174 next = NEXT_INSN (insn);
1175 if (GET_CODE (insn) == NOTE
1176 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1177 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1179 /* Unlink from the insn chain. */
1180 NEXT_INSN (PREV_INSN (insn)) = next;
1181 PREV_INSN (next) = PREV_INSN (insn);
1184 NEXT_INSN (insn) = list;
1189 if (list == NULL_RTX)
1192 /* Find where to insert these things. */
1194 if (GET_CODE (insn) == CODE_LABEL)
1195 insn = NEXT_INSN (insn);
1199 next = NEXT_INSN (list);
1200 add_insn_after (list, insn);
1206 /* Recompute eh_beg/eh_end for each basic block. */
1209 record_active_eh_regions (f)
1212 rtx insn, eh_list = NULL_RTX;
1214 basic_block bb = BASIC_BLOCK (0);
1216 for (insn = f; insn ; insn = NEXT_INSN (insn))
1218 if (bb->head == insn)
1219 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1221 if (GET_CODE (insn) == NOTE)
1223 int kind = NOTE_LINE_NUMBER (insn);
1224 if (kind == NOTE_INSN_EH_REGION_BEG)
1225 eh_list = alloc_INSN_LIST (insn, eh_list);
1226 else if (kind == NOTE_INSN_EH_REGION_END)
1228 rtx t = XEXP (eh_list, 1);
1229 free_INSN_LIST_node (eh_list);
1234 if (bb->end == insn)
1236 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1238 if (i == n_basic_blocks)
1240 bb = BASIC_BLOCK (i);
1245 /* Identify critical edges and set the bits appropriately. */
1248 mark_critical_edges ()
1250 int i, n = n_basic_blocks;
1253 /* We begin with the entry block. This is not terribly important now,
1254 but could be if a front end (Fortran) implemented alternate entry points. */
1256 bb = ENTRY_BLOCK_PTR;
1263 /* (1) Critical edges must have a source with multiple successors. */
1264 if (bb->succ && bb->succ->succ_next)
1266 for (e = bb->succ; e ; e = e->succ_next)
1268 /* (2) Critical edges must have a destination with multiple
1269 predecessors. Note that we know there is at least one
1270 predecessor -- the edge we followed to get here. */
1271 if (e->dest->pred->pred_next)
1272 e->flags |= EDGE_CRITICAL;
1274 e->flags &= ~EDGE_CRITICAL;
1279 for (e = bb->succ; e ; e = e->succ_next)
1280 e->flags &= ~EDGE_CRITICAL;
1285 bb = BASIC_BLOCK (i);
1289 /* Split a (typically critical) edge. Return the new block.
1290 Abort on abnormal edges.
1292 ??? The code generally expects to be called on critical edges.
1293 The case of a block ending in an unconditional jump to a
1294 block with multiple predecessors is not handled optimally. */
1297 split_edge (edge_in)
1300 basic_block old_pred, bb, old_succ;
1305 /* Abnormal edges cannot be split. */
1306 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1309 old_pred = edge_in->src;
1310 old_succ = edge_in->dest;
1312 /* Remove the existing edge from the destination's pred list. */
1315 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1317 *pp = edge_in->pred_next;
1318 edge_in->pred_next = NULL;
1321 /* Create the new structures. */
1322 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1323 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1326 memset (bb, 0, sizeof (*bb));
1327 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1328 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1330 /* ??? This info is likely going to be out of date very soon. */
1331 if (old_succ->global_live_at_start)
1333 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1334 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1338 CLEAR_REG_SET (bb->global_live_at_start);
1339 CLEAR_REG_SET (bb->global_live_at_end);
1344 bb->succ = edge_out;
1347 edge_in->flags &= ~EDGE_CRITICAL;
1349 edge_out->pred_next = old_succ->pred;
1350 edge_out->succ_next = NULL;
1352 edge_out->dest = old_succ;
1353 edge_out->flags = EDGE_FALLTHRU;
1354 edge_out->probability = REG_BR_PROB_BASE;
1356 old_succ->pred = edge_out;
1358 /* Tricky case -- if there existed a fallthru into the successor
1359 (and we're not it) we must add a new unconditional jump around
1360 the new block we're actually interested in.
1362 Further, if that edge is critical, this means a second new basic
1363 block must be created to hold it. In order to simplify correct
1364 insn placement, do this before we touch the existing basic block
1365 ordering for the block we were really wanting. */
1366 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1369 for (e = edge_out->pred_next; e ; e = e->pred_next)
1370 if (e->flags & EDGE_FALLTHRU)
1375 basic_block jump_block;
1378 if ((e->flags & EDGE_CRITICAL) == 0
1379 && e->src != ENTRY_BLOCK_PTR)
1381 /* Non critical -- we can simply add a jump to the end
1382 of the existing predecessor. */
1383 jump_block = e->src;
1387 /* We need a new block to hold the jump. The simplest
1388 way to do the bulk of the work here is to recursively call ourselves. */
1390 jump_block = split_edge (e);
1391 e = jump_block->succ;
1394 /* Now add the jump insn ... */
1395 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1397 jump_block->end = pos;
1398 if (basic_block_for_insn)
1399 set_block_for_insn (pos, jump_block);
1400 emit_barrier_after (pos);
1402 /* ... let jump know that label is in use, ... */
1403 JUMP_LABEL (pos) = old_succ->head;
1404 ++LABEL_NUSES (old_succ->head);
1406 /* ... and clear fallthru on the outgoing edge. */
1407 e->flags &= ~EDGE_FALLTHRU;
1409 /* Continue splitting the interesting edge. */
1413 /* Place the new block just in front of the successor. */
1414 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1415 if (old_succ == EXIT_BLOCK_PTR)
1416 j = n_basic_blocks - 1;
1418 j = old_succ->index;
1419 for (i = n_basic_blocks - 1; i > j; --i)
1421 basic_block tmp = BASIC_BLOCK (i - 1);
1422 BASIC_BLOCK (i) = tmp;
1425 BASIC_BLOCK (i) = bb;
1428 /* Create the basic block note.
1430 Where we place the note can have a noticeable impact on the generated
1431 code. Consider this cfg:
1442 If we need to insert an insn on the edge from block 0 to block 1,
1443 we want to ensure the instructions we insert are outside of any
1444 loop notes that physically sit between block 0 and block 1. Otherwise
1445 we confuse the loop optimizer into thinking the loop is a phony. */
1446 if (old_succ != EXIT_BLOCK_PTR
1447 && PREV_INSN (old_succ->head)
1448 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1449 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1450 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1451 PREV_INSN (old_succ->head));
1452 else if (old_succ != EXIT_BLOCK_PTR)
1453 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1455 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1456 NOTE_BASIC_BLOCK (bb_note) = bb;
1457 bb->head = bb->end = bb_note;
1459 /* Not quite simple -- for non-fallthru edges, we must adjust the
1460 predecessor's jump instruction to target our new block. */
1461 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1463 rtx tmp, insn = old_pred->end;
1464 rtx old_label = old_succ->head;
1465 rtx new_label = gen_label_rtx ();
1467 if (GET_CODE (insn) != JUMP_INSN)
1470 /* ??? Recognize a tablejump and adjust all matching cases. */
1471 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1472 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1473 && GET_CODE (tmp) == JUMP_INSN
1474 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1475 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1480 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1481 vec = XVEC (PATTERN (tmp), 0);
1483 vec = XVEC (PATTERN (tmp), 1);
1485 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1486 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1488 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1489 --LABEL_NUSES (old_label);
1490 ++LABEL_NUSES (new_label);
1493 /* Handle casesi dispatch insns */
1494 if ((tmp = single_set (insn)) != NULL
1495 && SET_DEST (tmp) == pc_rtx
1496 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1497 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1498 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1500 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1502 --LABEL_NUSES (old_label);
1503 ++LABEL_NUSES (new_label);
1508 /* This would have indicated an abnormal edge. */
1509 if (computed_jump_p (insn))
1512 /* A return instruction can't be redirected. */
1513 if (returnjump_p (insn))
1516 /* If the insn doesn't go where we think, we're confused. */
1517 if (JUMP_LABEL (insn) != old_label)
1520 redirect_jump (insn, new_label);
1523 emit_label_before (new_label, bb_note);
1524 bb->head = new_label;
1530 /* Queue instructions for insertion on an edge between two basic blocks.
1531 The new instructions and basic blocks (if any) will not appear in the
1532 CFG until commit_edge_insertions is called. */
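/* For example (an illustrative sketch, not code from this file), a pass
   that must materialize a register copy on a particular edge E could do

	insert_insn_on_edge (gen_move_insn (dest_reg, src_reg), e);
	...
	commit_edge_insertions ();

   where DEST_REG and SRC_REG are hypothetical registers; the copy then
   appears on E, possibly in a newly created block if E had to be split. */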
1535 insert_insn_on_edge (pattern, e)
1539 /* We cannot insert instructions on an abnormal critical edge.
1540 It will be easier to find the culprit if we die now. */
1541 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1542 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1545 if (e->insns == NULL_RTX)
1548 push_to_sequence (e->insns);
1550 emit_insn (pattern);
1552 e->insns = get_insns ();
1556 /* Update the CFG for the instructions queued on edge E. */
1559 commit_one_edge_insertion (e)
1562 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1565 /* Pull the insns off the edge now since the edge might go away. */
1567 e->insns = NULL_RTX;
1569 /* Figure out where to put these things. If the destination has
1570 one predecessor, insert there. Except for the exit block. */
1571 if (e->dest->pred->pred_next == NULL
1572 && e->dest != EXIT_BLOCK_PTR)
1576 /* Get the location correct wrt a code label, and "nice" wrt
1577 a basic block note, and before everything else. */
1579 if (GET_CODE (tmp) == CODE_LABEL)
1580 tmp = NEXT_INSN (tmp);
1581 if (GET_CODE (tmp) == NOTE
1582 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1583 tmp = NEXT_INSN (tmp);
1584 if (tmp == bb->head)
1587 after = PREV_INSN (tmp);
1590 /* If the source has one successor and the edge is not abnormal,
1591 insert there. Except for the entry block. */
1592 else if ((e->flags & EDGE_ABNORMAL) == 0
1593 && e->src->succ->succ_next == NULL
1594 && e->src != ENTRY_BLOCK_PTR)
1597 /* It is possible to have a non-simple jump here. Consider a target
1598 where some forms of unconditional jumps clobber a register. This
1599 happens on the fr30 for example.
1601 We know this block has a single successor, so we can just emit
1602 the queued insns before the jump. */
1603 if (GET_CODE (bb->end) == JUMP_INSN)
1609 /* We'd better be fallthru, or we've lost track of what's what. */
1610 if ((e->flags & EDGE_FALLTHRU) == 0)
1617 /* Otherwise we must split the edge. */
1620 bb = split_edge (e);
1624 /* Now that we've found the spot, do the insertion. */
1626 /* Set the new block number for these insns, if structure is allocated. */
1627 if (basic_block_for_insn)
1630 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1631 set_block_for_insn (i, bb);
1636 emit_insns_before (insns, before);
1637 if (before == bb->head)
1642 rtx last = emit_insns_after (insns, after);
1643 if (after == bb->end)
1647 if (GET_CODE (last) == JUMP_INSN)
1649 if (returnjump_p (last))
1651 /* ??? Remove all outgoing edges from BB and add one
1652 for EXIT. This is not currently a problem because
1653 this only happens for the (single) epilogue, which
1654 already has a fallthru edge to EXIT. */
1657 if (e->dest != EXIT_BLOCK_PTR
1658 || e->succ_next != NULL
1659 || (e->flags & EDGE_FALLTHRU) == 0)
1661 e->flags &= ~EDGE_FALLTHRU;
1663 emit_barrier_after (last);
1672 /* Update the CFG for all queued instructions. */
1675 commit_edge_insertions ()
1680 #ifdef ENABLE_CHECKING
1681 verify_flow_info ();
1685 bb = ENTRY_BLOCK_PTR;
1690 for (e = bb->succ; e ; e = next)
1692 next = e->succ_next;
1694 commit_one_edge_insertion (e);
1697 if (++i >= n_basic_blocks)
1699 bb = BASIC_BLOCK (i);
1703 /* Delete all unreachable basic blocks. */
1706 delete_unreachable_blocks ()
1708 basic_block *worklist, *tos;
1709 int deleted_handler;
1714 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1716 /* Use basic_block->aux as a marker. Clear them all. */
1718 for (i = 0; i < n; ++i)
1719 BASIC_BLOCK (i)->aux = NULL;
1721 /* Add our starting points to the worklist. Almost always there will
1722 be only one. It isn't inconceivable that we might one day directly
1723 support Fortran alternate entry points. */
1725 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1729 /* Mark the block with a handy non-null value. */
1733 /* Iterate: find everything reachable from what we've already seen. */
1735 while (tos != worklist)
1737 basic_block b = *--tos;
1739 for (e = b->succ; e ; e = e->succ_next)
1747 /* Delete all unreachable basic blocks. Count down so that we don't
1748 interfere with the block renumbering that happens in flow_delete_block. */
1750 deleted_handler = 0;
1752 for (i = n - 1; i >= 0; --i)
1754 basic_block b = BASIC_BLOCK (i);
1757 /* This block was found. Tidy up the mark. */
1760 deleted_handler |= flow_delete_block (b);
1763 tidy_fallthru_edges ();
1765 /* If we deleted an exception handler, we may have EH region begin/end
1766 blocks to remove as well. */
1767 if (deleted_handler)
1768 delete_eh_regions ();
1773 /* Find EH regions for which there is no longer a handler, and delete them. */
1776 delete_eh_regions ()
1780 update_rethrow_references ();
1782 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1783 if (GET_CODE (insn) == NOTE)
1785 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1786 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1788 int num = NOTE_EH_HANDLER (insn);
1789 /* A NULL handler indicates a region is no longer needed,
1790 as long as its rethrow label isn't used. */
1791 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1793 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1794 NOTE_SOURCE_FILE (insn) = 0;
1800 /* Return true if NOTE is not one of the ones that must be kept paired,
1801 so that we may simply delete them. */
1804 can_delete_note_p (note)
1807 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1808 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1811 /* Unlink a chain of insns between START and FINISH, leaving notes
1812 that must be paired. */
1815 flow_delete_insn_chain (start, finish)
1818 /* Unchain the insns one by one. It would be quicker to delete all
1819 of these with a single unchaining, rather than one at a time, but
1820 we need to keep the NOTEs. */
1826 next = NEXT_INSN (start);
1827 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1829 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1832 next = flow_delete_insn (start);
1834 if (start == finish)
1840 /* Delete the insns in a (non-live) block. We physically delete every
1841 non-deleted-note insn, and update the flow graph appropriately.
1843 Return nonzero if we deleted an exception handler. */
1845 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1846 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1849 flow_delete_block (b)
1852 int deleted_handler = 0;
1855 /* If the head of this block is a CODE_LABEL, then it might be the
1856 label for an exception handler which can't be reached.
1858 We need to remove the label from the exception_handler_label list
1859 and remove the associated NOTE_INSN_EH_REGION_BEG and
1860 NOTE_INSN_EH_REGION_END notes. */
1864 never_reached_warning (insn);
1866 if (GET_CODE (insn) == CODE_LABEL)
1868 rtx x, *prev = &exception_handler_labels;
1870 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1872 if (XEXP (x, 0) == insn)
1874 /* Found a match, splice this label out of the EH label list. */
1875 *prev = XEXP (x, 1);
1876 XEXP (x, 1) = NULL_RTX;
1877 XEXP (x, 0) = NULL_RTX;
1879 /* Remove the handler from all regions */
1880 remove_handler (insn);
1881 deleted_handler = 1;
1884 prev = &XEXP (x, 1);
1887 /* This label may be referenced by code solely for its value, or
1888 referenced by static data, or something. We have determined
1889 that it is not reachable, but cannot delete the label itself.
1890 Save code space and continue to delete the balance of the block,
1891 along with properly updating the cfg. */
1892 if (!can_delete_label_p (insn))
1894 /* If we've only got one of these, skip the whole deleting insns thing. */
1897 goto no_delete_insns;
1898 insn = NEXT_INSN (insn);
1902 /* Include any jump table following the basic block. */
1904 if (GET_CODE (end) == JUMP_INSN
1905 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1906 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1907 && GET_CODE (tmp) == JUMP_INSN
1908 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1909 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1912 /* Include any barrier that may follow the basic block. */
1913 tmp = next_nonnote_insn (end);
1914 if (tmp && GET_CODE (tmp) == BARRIER)
1917 /* Selectively delete the entire chain. */
1918 flow_delete_insn_chain (insn, end);
1922 /* Remove the edges into and out of this block. Note that there may
1923 indeed be edges in, if we are removing an unreachable loop. */
1927 for (e = b->pred; e ; e = next)
1929 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1932 next = e->pred_next;
1936 for (e = b->succ; e ; e = next)
1938 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1941 next = e->succ_next;
1950 /* Remove the basic block from the array, and compact behind it. */
1953 return deleted_handler;
1956 /* Remove block B from the basic block array and compact behind it. */
1962 int i, n = n_basic_blocks;
1964 for (i = b->index; i + 1 < n; ++i)
1966 basic_block x = BASIC_BLOCK (i + 1);
1967 BASIC_BLOCK (i) = x;
1971 basic_block_info->num_elements--;
1975 /* Delete INSN by patching it out. Return the next insn. */
1978 flow_delete_insn (insn)
1981 rtx prev = PREV_INSN (insn);
1982 rtx next = NEXT_INSN (insn);
1985 PREV_INSN (insn) = NULL_RTX;
1986 NEXT_INSN (insn) = NULL_RTX;
1989 NEXT_INSN (prev) = next;
1991 PREV_INSN (next) = prev;
1993 set_last_insn (prev);
1995 if (GET_CODE (insn) == CODE_LABEL)
1996 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1998 /* If deleting a jump, decrement the use count of the label. Deleting
1999 the label itself should happen in the normal course of block merging. */
2000 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2001 LABEL_NUSES (JUMP_LABEL (insn))--;
2003 /* Also if deleting an insn that references a label. */
2004 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2005 LABEL_NUSES (XEXP (note, 0))--;
2010 /* True if a given label can be deleted. */
2013 can_delete_label_p (label)
2018 if (LABEL_PRESERVE_P (label))
2021 for (x = forced_labels; x ; x = XEXP (x, 1))
2022 if (label == XEXP (x, 0))
2024 for (x = label_value_list; x ; x = XEXP (x, 1))
2025 if (label == XEXP (x, 0))
2027 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2028 if (label == XEXP (x, 0))
2031 /* User declared labels must be preserved. */
2032 if (LABEL_NAME (label) != 0)
2038 /* Blocks A and B are to be merged into a single block A. The insns
2039 are already contiguous, hence `nomove'. */
2042 merge_blocks_nomove (a, b)
2046 rtx b_head, b_end, a_end;
2047 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2050 /* If there was a CODE_LABEL beginning B, delete it. */
2053 if (GET_CODE (b_head) == CODE_LABEL)
2055 /* Detect basic blocks with nothing but a label. This can happen
2056 in particular at the end of a function. */
2057 if (b_head == b_end)
2059 del_first = del_last = b_head;
2060 b_head = NEXT_INSN (b_head);
2063 /* Delete the basic block note. */
2064 if (GET_CODE (b_head) == NOTE
2065 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2067 if (b_head == b_end)
2072 b_head = NEXT_INSN (b_head);
2075 /* If there was a jump out of A, delete it. */
2077 if (GET_CODE (a_end) == JUMP_INSN)
2081 prev = prev_nonnote_insn (a_end);
2088 /* If this was a conditional jump, we need to also delete
2089 the insn that set cc0. */
2090 if (prev && sets_cc0_p (prev))
2093 prev = prev_nonnote_insn (prev);
2103 /* Delete everything marked above as well as crap that might be
2104 hanging out between the two blocks. */
2105 flow_delete_insn_chain (del_first, del_last);
2107 /* Normally there should only be one successor of A and that is B, but
2108 partway through the merge of blocks for conditional_execution we'll
2109 be merging a TEST block with THEN and ELSE successors. Free the
2110 whole lot of them and hope the caller knows what they're doing. */
2112 remove_edge (a->succ);
2114 /* Adjust the edges out of B for the new owner. */
2115 for (e = b->succ; e ; e = e->succ_next)
2119 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2120 b->pred = b->succ = NULL;
2122 /* Reassociate the insns of B with A. */
2125 if (basic_block_for_insn)
2127 BLOCK_FOR_INSN (b_head) = a;
2128 while (b_head != b_end)
2130 b_head = NEXT_INSN (b_head);
2131 BLOCK_FOR_INSN (b_head) = a;
2141 /* Blocks A and B are to be merged into a single block. A has no incoming
2142 fallthru edge, so it can be moved before B without adding or modifying
2143 any jumps (aside from the jump from A to B). */
2146 merge_blocks_move_predecessor_nojumps (a, b)
2149 rtx start, end, barrier;
2155 /* We want to delete the BARRIER after the end of the insns we are
2156 going to move. If we don't find a BARRIER, then do nothing. This
2157 can happen in some cases if we have labels we cannot delete.
2159 Similarly, do nothing if we cannot delete the label at the start
2160 of the target block. */
2161 barrier = next_nonnote_insn (end);
2162 if (GET_CODE (barrier) != BARRIER
2163 || (GET_CODE (b->head) == CODE_LABEL
2164 && ! can_delete_label_p (b->head)))
2167 flow_delete_insn (barrier);
2169 /* Move block and loop notes out of the chain so that we do not
2170 disturb their order.
2172 ??? A better solution would be to squeeze out all the non-nested notes
2173 and adjust the block trees appropriately. Even better would be to have
2174 a tighter connection between block trees and rtl so that this is not necessary. */
2176 start = squeeze_notes (start, end);
2178 /* Scramble the insn chain. */
2179 if (end != PREV_INSN (b->head))
2180 reorder_insns (start, end, PREV_INSN (b->head));
2184 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2185 a->index, b->index);
2188 /* Swap the records for the two blocks around. Although we are deleting B,
2189 A is now where B was and we want to compact the BB array from where
     A used to be.  */
2191 BASIC_BLOCK (a->index) = b;
2192 BASIC_BLOCK (b->index) = a;
2194 a->index = b->index;
2197 /* Now blocks A and B are contiguous. Merge them. */
2198 merge_blocks_nomove (a, b);
2203 /* Blocks A and B are to be merged into a single block. B has no outgoing
2204 fallthru edge, so it can be moved after A without adding or modifying
2205 any jumps (aside from the jump from A to B). */
2208 merge_blocks_move_successor_nojumps (a, b)
2211 rtx start, end, barrier;
2216 /* We want to delete the BARRIER after the end of the insns we are
2217 going to move. If we don't find a BARRIER, then do nothing. This
2218 can happen in some cases if we have labels we cannot delete.
2220 Similarly, do nothing if we cannot delete the label at the start
2221 of the target block. */
2222 barrier = next_nonnote_insn (end);
2223 if (GET_CODE (barrier) != BARRIER
2224 || (GET_CODE (b->head) == CODE_LABEL
2225 && ! can_delete_label_p (b->head)))
2228 flow_delete_insn (barrier);
2230 /* Move block and loop notes out of the chain so that we do not
2231 disturb their order.
2233 ??? A better solution would be to squeeze out all the non-nested notes
2234 and adjust the block trees appropriately. Even better would be to have
2235 a tighter connection between block trees and rtl so that this is not necessary. */
2237 start = squeeze_notes (start, end);
2239 /* Scramble the insn chain. */
2240 reorder_insns (start, end, a->end);
2242 /* Now blocks A and B are contiguous. Merge them. */
2243 merge_blocks_nomove (a, b);
2247 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2248 b->index, a->index);
2254 /* Attempt to merge basic blocks that are potentially non-adjacent.
2255 Return true iff the attempt succeeded. */
2258 merge_blocks (e, b, c)
2262 /* If B has a fallthru edge to C, no need to move anything. */
2263 if (e->flags & EDGE_FALLTHRU)
2265 /* If a label still appears somewhere and we cannot delete the label,
2266 then we cannot merge the blocks. The edge was tidied already. */
2268 rtx insn, stop = NEXT_INSN (c->head);
2269 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2270 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2273 merge_blocks_nomove (b, c);
2277 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2278 b->index, c->index);
2287 int c_has_outgoing_fallthru;
2288 int b_has_incoming_fallthru;
2290 /* We must make sure to not munge nesting of exception regions,
2291 lexical blocks, and loop notes.
2293 The first is taken care of by requiring that the active eh
2294 region at the end of one block always matches the active eh
2295 region at the beginning of the next block.
2297 The latter two are taken care of by squeezing out all the notes. */
2299 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2300 executed and we may want to treat blocks which have two out
2301 edges, one normal, one abnormal as only having one edge for
2302 block merging purposes. */
2304 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2305 if (tmp_edge->flags & EDGE_FALLTHRU)
2307 c_has_outgoing_fallthru = (tmp_edge != NULL);
2309 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2310 if (tmp_edge->flags & EDGE_FALLTHRU)
2312 b_has_incoming_fallthru = (tmp_edge != NULL);
2314 /* If B does not have an incoming fallthru, and the exception regions
2315 match, then it can be moved immediately before C without introducing or modifying jumps.
2318 C can not be the first block, so we do not have to worry about
2319 accessing a non-existent block. */
2320 d = BASIC_BLOCK (c->index - 1);
2321 if (! b_has_incoming_fallthru
2322 && d->eh_end == b->eh_beg
2323 && b->eh_end == c->eh_beg)
2324 return merge_blocks_move_predecessor_nojumps (b, c);
2326 /* Otherwise, we're going to try to move C after B. Make sure the
2327 exception regions match.
2329 If B is the last basic block, then we must not try to access the
2330 block structure for block B + 1. Luckily in that case we do not
2331 need to worry about matching exception regions. */
2332 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2333 if (b->eh_end == c->eh_beg
2334 && (d == NULL || c->eh_end == d->eh_beg))
2336 /* If C does not have an outgoing fallthru, then it can be moved
2337 immediately after B without introducing or modifying jumps. */
2338 if (! c_has_outgoing_fallthru)
2339 return merge_blocks_move_successor_nojumps (b, c);
2341 /* Otherwise, we'll need to insert an extra jump, and possibly
2342 a new block to contain it. */
2343 /* ??? Not implemented yet. */
2350 /* Top level driver for merge_blocks. */
2357 /* Attempt to merge blocks as made possible by edge removal. If a block
2358 has only one successor, and the successor has only one predecessor,
2359 they may be combined. */
2361 for (i = 0; i < n_basic_blocks; )
2363 basic_block c, b = BASIC_BLOCK (i);
2366 /* A loop because chains of blocks might be combinable. */
2367 while ((s = b->succ) != NULL
2368 && s->succ_next == NULL
2369 && (s->flags & EDGE_EH) == 0
2370 && (c = s->dest) != EXIT_BLOCK_PTR
2371 && c->pred->pred_next == NULL
2372 /* If the jump insn has side effects, we can't kill the edge. */
2373 && (GET_CODE (b->end) != JUMP_INSN
2374 || onlyjump_p (b->end))
2375 && merge_blocks (s, b, c))
2378 /* Don't get confused by the index shift caused by deleting blocks. */
2383 /* The given edge should potentially be a fallthru edge. If that is in
2384 fact true, delete the jump and barriers that are in the way. */
2387 tidy_fallthru_edge (e, b, c)
2393 /* ??? In a late-running flow pass, other folks may have deleted basic
2394 blocks by nopping out blocks, leaving multiple BARRIERs between here
2395 and the target label. They ought to be chastised and fixed.
2397 We can also wind up with a sequence of undeletable labels between
2398 one block and the next.
2400 So search through a sequence of barriers, labels, and notes for
2401 the head of block C and assert that we really do fall through. */
2403 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2406 /* Remove what will soon cease being the jump insn from the source block.
2407 If block B consisted only of this single jump, turn it into a deleted note. */
2410 if (GET_CODE (q) == JUMP_INSN)
2413 /* If this was a conditional jump, we need to also delete
2414 the insn that set cc0. */
2415 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2422 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2423 NOTE_SOURCE_FILE (q) = 0;
2426 b->end = q = PREV_INSN (q);
2429 /* Selectively unlink the sequence. */
2430 if (q != PREV_INSN (c->head))
2431 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2433 e->flags |= EDGE_FALLTHRU;
2436 /* Fix up edges that now fall through, or rather should now fall through
2437 but previously required a jump around now deleted blocks. Simplify
2438 the search by only examining blocks numerically adjacent, since this
2439 is how find_basic_blocks created them. */
2442 tidy_fallthru_edges ()
2446 for (i = 1; i < n_basic_blocks; ++i)
2448 basic_block b = BASIC_BLOCK (i - 1);
2449 basic_block c = BASIC_BLOCK (i);
2452 /* We care about simple conditional or unconditional jumps with a single successor.
2455 If we had a conditional branch to the next instruction when
2456 find_basic_blocks was called, then there will only be one
2457 out edge for the block which ended with the conditional
2458 branch (since we do not create duplicate edges).
2460 Furthermore, the edge will be marked as a fallthru because we
2461 merge the flags for the duplicate edges. So we do not want to
2462 check that the edge is not a FALLTHRU edge. */
2463 if ((s = b->succ) != NULL
2464 && s->succ_next == NULL
2466 /* If the jump insn has side effects, we can't tidy the edge. */
2467 && (GET_CODE (b->end) != JUMP_INSN
2468 || onlyjump_p (b->end)))
2469 tidy_fallthru_edge (s, b, c);
2473 /* Perform data flow analysis.
2474 F is the first insn of the function; FLAGS is a set of PROP_* flags
2475 to be used in accumulating flow info. */
2478 life_analysis (f, file, flags)
2483 #ifdef ELIMINABLE_REGS
2485 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2489 /* Record which registers will be eliminated. We use this in mark_used_regs. */
2492 CLEAR_HARD_REG_SET (elim_reg_set);
2494 #ifdef ELIMINABLE_REGS
2495 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2496 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2498 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2502 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2504 /* The post-reload life analysis has (on a global basis) the same
2505 registers live as were computed by reload itself; otherwise the
2506 elimination offsets and such may be incorrect.
2508 Reload will mark some registers as live even though they do not
2509 appear in the rtl. */
2510 if (reload_completed)
2511 flags &= ~PROP_REG_INFO;
2513 /* We want alias analysis information for local dead store elimination. */
2514 if (flags & PROP_SCAN_DEAD_CODE)
2515 init_alias_analysis ();
2517 max_regno = max_reg_num ();
2519 /* Always remove no-op moves. Do this before other processing so
2520 that we don't have to keep re-scanning them. */
2521 delete_noop_moves (f);
2523 /* Some targets can emit simpler epilogues if they know that sp was
2524 not ever modified during the function. After reload, of course,
2525 we've already emitted the epilogue so there's no sense searching. */
2526 if (! reload_completed)
2527 notice_stack_pointer_modification (f);
2529 /* Allocate and zero out data structures that will record the
2530 data from lifetime analysis. */
2531 allocate_reg_life_data ();
2532 allocate_bb_life_data ();
2533 all_blocks = sbitmap_alloc (n_basic_blocks);
2534 sbitmap_ones (all_blocks);
2536 /* Find the set of registers live on function exit. */
2537 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2539 /* "Update" life info from zero. It'd be nice to begin the
2540 relaxation with just the exit and noreturn blocks, but that set
2541 is not immediately handy. */
2543 if (flags & PROP_REG_INFO)
2544 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2545 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2548 sbitmap_free (all_blocks);
2550 if (flags & PROP_SCAN_DEAD_CODE)
2551 end_alias_analysis ();
2554 dump_flow_info (file);
2556 free_basic_block_vars (1);
2559 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2560 Search for REGNO. If found, abort if it is not wider than word_mode. */
2563 verify_wide_reg_1 (px, pregno)
2568 unsigned int regno = *(int *) pregno;
2570 if (GET_CODE (x) == REG && REGNO (x) == regno)
2572 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2579 /* A subroutine of verify_local_live_at_start. Search through insns
2580 between HEAD and END looking for register REGNO. */
2583 verify_wide_reg (regno, head, end)
2589 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2590 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2594 head = NEXT_INSN (head);
2597 /* We didn't find the register at all. Something's way screwy. */
2601 /* A subroutine of update_life_info. Verify that there are no untoward
2602 changes in live_at_start during a local update. */
2605 verify_local_live_at_start (new_live_at_start, bb)
2606 regset new_live_at_start;
2609 if (reload_completed)
2611 /* After reload, there are no pseudos, nor subregs of multi-word
2612 registers. The regsets should exactly match. */
2613 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2620 /* Find the set of changed registers. */
2621 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2623 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2625 /* No registers should die. */
2626 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2628 /* Verify that the now-live register is wider than word_mode. */
2629 verify_wide_reg (i, bb->head, bb->end);
2634 /* Updates life information starting with the basic blocks set in BLOCKS.
2636 If LOCAL_ONLY, such as after splitting or peepholing, we are only
2637 expecting local modifications to basic blocks. If we find extra
2638 registers live at the beginning of a block, then we either killed
2639 useful data, or we have a broken split that wants data not provided.
2640 If we find registers removed from live_at_start, that means we have
2641 a broken peephole that is killing a register it shouldn't.
2643 ??? This is not true in one situation -- when a pre-reload splitter
2644 generates subregs of a multi-word pseudo, current life analysis will
2645 lose the kill. So we _can_ have a pseudo go live. How irritating.
2647 Including PROP_REG_INFO does not properly refresh regs_ever_live
2648 unless the caller resets it to zero. */
2651 update_life_info (blocks, extent, prop_flags)
2653 enum update_life_extent extent;
2657 regset_head tmp_head;
2660 tmp = INITIALIZE_REG_SET (tmp_head);
2662 /* For a global update, we go through the relaxation process again. */
2663 if (extent != UPDATE_LIFE_LOCAL)
2665 calculate_global_regs_live (blocks, blocks,
2666 prop_flags & PROP_SCAN_DEAD_CODE);
2668 /* If asked, remove notes from the blocks we'll update. */
2669 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2670 count_or_remove_death_notes (blocks, 1);
2673 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2675 basic_block bb = BASIC_BLOCK (i);
2677 COPY_REG_SET (tmp, bb->global_live_at_end);
2678 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2680 if (extent == UPDATE_LIFE_LOCAL)
2681 verify_local_live_at_start (tmp, bb);
2686 if (prop_flags & PROP_REG_INFO)
2688 /* The only pseudos that are live at the beginning of the function
2689 are those that were not set anywhere in the function. local-alloc
2690 doesn't know how to handle these correctly, so mark them as not
2691 local to any one basic block. */
2692 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2693 FIRST_PSEUDO_REGISTER, i,
2694 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2696 /* We have a problem with any pseudoreg that lives across the setjmp.
2697 ANSI says that if a user variable does not change in value between
2698 the setjmp and the longjmp, then the longjmp preserves it. This
2699 includes longjmp from a place where the pseudo appears dead.
2700 (In principle, the value still exists if it is in scope.)
2701 If the pseudo goes in a hard reg, some other value may occupy
2702 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2703 Conclusion: such a pseudo must not go in a hard reg. */
2704 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2705 FIRST_PSEUDO_REGISTER, i,
2707 if (regno_reg_rtx[i] != 0)
2709 REG_LIVE_LENGTH (i) = -1;
2710 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
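/* A standalone illustration of the setjmp hazard described above (this
   example is not part of this file; the function names are invented).
   X is not modified between the setjmp and the longjmp, so ANSI C
   guarantees it still reads 42 after the non-local return.  If the
   compiler had kept X only in a call-clobbered hard register, the call
   below could destroy that guarantee, hence the rule that a pseudo
   live across a setjmp must not be allocated to a hard register.  */

#include <setjmp.h>
#include <stdio.h>

static jmp_buf example_setjmp_env;

static void
example_take_longjmp (void)
{
  longjmp (example_setjmp_env, 1);
}

static int
example_setjmp_hazard (void)
{
  int x = 42;

  if (setjmp (example_setjmp_env))
    {
      /* Reached via longjmp; X must still be 42.  */
      printf ("%d\n", x);
      return 0;
    }

  example_take_longjmp ();
  return 1;
}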
2716 /* Free the variables allocated by find_basic_blocks.
2718 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2721 free_basic_block_vars (keep_head_end_p)
2722 int keep_head_end_p;
2724 if (basic_block_for_insn)
2726 VARRAY_FREE (basic_block_for_insn);
2727 basic_block_for_insn = NULL;
2730 if (! keep_head_end_p)
2733 VARRAY_FREE (basic_block_info);
2736 ENTRY_BLOCK_PTR->aux = NULL;
2737 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2738 EXIT_BLOCK_PTR->aux = NULL;
2739 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2743 /* Return nonzero if the destination of SET equals the source. */
2748 rtx src = SET_SRC (set);
2749 rtx dst = SET_DEST (set);
2751 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2753 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2755 src = SUBREG_REG (src);
2756 dst = SUBREG_REG (dst);
2759 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2760 && REGNO (src) == REGNO (dst));
2763 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2769 rtx pat = PATTERN (insn);
2771 /* Insns carrying these notes are useful later on. */
2772 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2775 if (GET_CODE (pat) == SET && set_noop_p (pat))
2778 if (GET_CODE (pat) == PARALLEL)
2781 /* If nothing but SETs of registers to themselves,
2782 this insn can also be deleted. */
2783 for (i = 0; i < XVECLEN (pat, 0); i++)
2785 rtx tem = XVECEXP (pat, 0, i);
2787 if (GET_CODE (tem) == USE
2788 || GET_CODE (tem) == CLOBBER)
2791 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2800 /* Delete any insns that copy a register to itself. */
2803 delete_noop_moves (f)
2807 for (insn = f; insn; insn = NEXT_INSN (insn))
2809 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2811 PUT_CODE (insn, NOTE);
2812 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2813 NOTE_SOURCE_FILE (insn) = 0;
2818 /* Determine if the stack pointer is constant over the life of the function.
2819 Only useful before prologues have been emitted. */
2822 notice_stack_pointer_modification_1 (x, pat, data)
2824 rtx pat ATTRIBUTE_UNUSED;
2825 void *data ATTRIBUTE_UNUSED;
2827 if (x == stack_pointer_rtx
2828 /* The stack pointer is only modified indirectly as the result
2829 of a push until later in flow. See the comments in rtl.texi
2830 regarding Embedded Side-Effects on Addresses. */
2831 || (GET_CODE (x) == MEM
2832 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2833 || GET_CODE (XEXP (x, 0)) == PRE_INC
2834 || GET_CODE (XEXP (x, 0)) == POST_DEC
2835 || GET_CODE (XEXP (x, 0)) == POST_INC)
2836 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2837 current_function_sp_is_unchanging = 0;
2841 notice_stack_pointer_modification (f)
2846 /* Assume that the stack pointer is unchanging if alloca hasn't been used. */
2848 current_function_sp_is_unchanging = !current_function_calls_alloca;
2849 if (! current_function_sp_is_unchanging)
2852 for (insn = f; insn; insn = NEXT_INSN (insn))
2854 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2856 /* Check if insn modifies the stack pointer. */
2857 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2859 if (! current_function_sp_is_unchanging)
2865 /* Mark a register in SET. Hard registers in large modes get all
2866 of their component registers set as well. */
2868 mark_reg (reg, xset)
2872 regset set = (regset) xset;
2873 int regno = REGNO (reg);
2875 if (GET_MODE (reg) == BLKmode)
2878 SET_REGNO_REG_SET (set, regno);
2879 if (regno < FIRST_PSEUDO_REGISTER)
2881 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2883 SET_REGNO_REG_SET (set, regno + n);
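/* A minimal standalone sketch (invented names, not part of this file) of
   what mark_reg does above for a hard register in a wide mode: a value
   occupying NREGS consecutive hard registers starting at REGNO marks
   every component register, here represented as bits of a single word.  */

static void
example_mark_hard_reg (unsigned long *set, int regno, int nregs)
{
  while (nregs-- > 0)
    *set |= 1UL << (regno + nregs);
}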
2887 /* Mark those regs which are needed at the end of the function as live
2888 at the end of the last basic block. */
2890 mark_regs_live_at_end (set)
2895 /* If exiting needs the right stack value, consider the stack pointer
2896 live at the end of the function. */
2897 if ((HAVE_epilogue && reload_completed)
2898 || ! EXIT_IGNORE_STACK
2899 || (! FRAME_POINTER_REQUIRED
2900 && ! current_function_calls_alloca
2901 && flag_omit_frame_pointer)
2902 || current_function_sp_is_unchanging)
2904 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2907 /* Mark the frame pointer if needed at the end of the function. If
2908 we end up eliminating it, it will be removed from the live list
2909 of each basic block by reload. */
2911 if (! reload_completed || frame_pointer_needed)
2913 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2914 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2915 /* If they are different, also mark the hard frame pointer as live */
2916 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2920 #ifdef PIC_OFFSET_TABLE_REGNUM
2921 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2922 /* Many architectures have a GP register even without flag_pic.
2923 Assume the pic register is not in use, or will be handled by
2924 other means, if it is not fixed. */
2925 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2926 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2930 /* Mark all global registers, and all registers used by the epilogue
2931 as being live at the end of the function since they may be
2932 referenced by our caller. */
2933 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2935 #ifdef EPILOGUE_USES
2936 || EPILOGUE_USES (i)
2939 SET_REGNO_REG_SET (set, i);
2941 /* Mark all call-saved registers that we actually used. */
2942 if (HAVE_epilogue && reload_completed)
2944 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2945 if (! call_used_regs[i] && regs_ever_live[i])
2946 SET_REGNO_REG_SET (set, i);
2949 /* Mark function return value. */
2950 diddle_return_value (mark_reg, set);
2953 /* Callback function for for_each_successor_phi. DATA is a regset.
2954 Sets the SRC_REGNO, the regno of the phi alternative for phi node
2955 INSN, in the regset. */
2958 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2959 rtx insn ATTRIBUTE_UNUSED;
2960 int dest_regno ATTRIBUTE_UNUSED;
2964 regset live = (regset) data;
2965 SET_REGNO_REG_SET (live, src_regno);
2969 /* Propagate global life info around the graph of basic blocks. Begin
2970 considering blocks with their corresponding bit set in BLOCKS_IN.
2971 BLOCKS_OUT is set for every block that was changed. */
2974 calculate_global_regs_live (blocks_in, blocks_out, flags)
2975 sbitmap blocks_in, blocks_out;
2978 basic_block *queue, *qhead, *qtail, *qend;
2979 regset tmp, new_live_at_end;
2980 regset_head tmp_head;
2981 regset_head new_live_at_end_head;
2984 tmp = INITIALIZE_REG_SET (tmp_head);
2985 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
2987 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
2988 because the `head == tail' style test for an empty queue doesn't
2989 work with a full queue. */
2990 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
2992 qhead = qend = queue + n_basic_blocks + 2;
2994 /* Clear out the garbage that might be hanging out in bb->aux. */
2995 for (i = n_basic_blocks - 1; i >= 0; --i)
2996 BASIC_BLOCK (i)->aux = NULL;
2998 /* Queue the blocks set in the initial mask. Do this in reverse block
2999 number order so that the first round is more likely to do useful
3000 work. We use AUX non-null to flag that the block is queued. */
3001 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3003 basic_block bb = BASIC_BLOCK (i);
3008 sbitmap_zero (blocks_out);
3010 while (qhead != qtail)
3012 int rescan, changed;
3021 /* Begin by propagating live_at_start from the successor blocks. */
3022 CLEAR_REG_SET (new_live_at_end);
3023 for (e = bb->succ; e ; e = e->succ_next)
3025 basic_block sb = e->dest;
3026 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3029 /* Regs used in phi nodes are not included in
3030 global_live_at_start, since they are live only along a
3031 particular edge. Set those regs that are live because of a
3032 phi node alternative corresponding to this particular block. */
3033 for_each_successor_phi (bb, &set_phi_alternative_reg,
3036 if (bb == ENTRY_BLOCK_PTR)
3038 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3042 /* On our first pass through this block, we'll go ahead and continue.
3043 Recognize first pass by local_set NULL. On subsequent passes, we
3044 get to skip out early if live_at_end wouldn't have changed. */
3046 if (bb->local_set == NULL)
3048 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3053 /* If any bits were removed from live_at_end, we'll have to
3054 rescan the block. This wouldn't be necessary if we had
3055 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3056 local_live is really dependent on live_at_end. */
3057 CLEAR_REG_SET (tmp);
3058 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3059 new_live_at_end, BITMAP_AND_COMPL);
3063 /* Find the set of changed bits. Take this opportunity
3064 to notice when this set is empty and exit early. */
3065 CLEAR_REG_SET (tmp);
3066 changed = bitmap_operation (tmp, bb->global_live_at_end,
3067 new_live_at_end, BITMAP_XOR);
3071 /* If any of the changed bits overlap with local_set,
3072 we'll have to rescan the block. Detect overlap by
3073 the AND with ~local_set turning off bits. */
3074 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3079 /* Let our caller know that BB changed enough to require its
3080 death notes updated. */
3081 SET_BIT (blocks_out, bb->index);
3085 /* Add to live_at_start the set of all registers in
3086 new_live_at_end that aren't in the old live_at_end. */
3088 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3090 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3092 changed = bitmap_operation (bb->global_live_at_start,
3093 bb->global_live_at_start,
3100 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3102 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3103 into live_at_start. */
3104 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3106 /* If live_at_start didn't change, no need to go farther. */
3107 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3110 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3113 /* Queue all predecessors of BB so that we may re-examine
3114 their live_at_end. */
3115 for (e = bb->pred; e ; e = e->pred_next)
3117 basic_block pb = e->src;
3118 if (pb->aux == NULL)
3129 FREE_REG_SET (new_live_at_end);
3131 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3133 basic_block bb = BASIC_BLOCK (i);
3134 FREE_REG_SET (bb->local_set);
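/* A compile-ready standalone sketch (none of these names appear in this
   file) of the worklist fixpoint driven above.  The CFG is a toy of
   EX_NBLOCKS blocks and registers are bits of an unsigned int; each
   block carries precomputed local USE and DEF sets, whereas the real
   pass rescans the insns of each block (propagate_block) because with
   dead-code scanning the local information depends on live_at_end.
   The recurrences are

     live_out (B) = union over successors S of live_in (S)
     live_in  (B) = use (B) | (live_out (B) & ~def (B))

   and a block whose live_in grows re-queues its predecessors, just as
   the loop above re-queues bb->pred.  */

#define EX_NBLOCKS 4

struct ex_block
{
  unsigned int use, def;	/* used-before-set / set within the block */
  int nsucc, succ[2];		/* successor block indices */
  int npred, pred[2];		/* predecessor block indices */
  unsigned int live_in, live_out;
};

static void
example_global_liveness (struct ex_block *bb)
{
  /* One extra queue slot so that head == tail always means "empty".  */
  int queue[EX_NBLOCKS + 1], on_queue[EX_NBLOCKS];
  int qhead = 0, qtail = 0;
  int i, j;

  for (i = 0; i < EX_NBLOCKS; i++)
    {
      bb[i].live_in = bb[i].live_out = 0;
      queue[qtail] = i;
      qtail = (qtail + 1) % (EX_NBLOCKS + 1);
      on_queue[i] = 1;
    }

  while (qhead != qtail)
    {
      int b = queue[qhead];
      unsigned int out = 0, in;

      qhead = (qhead + 1) % (EX_NBLOCKS + 1);
      on_queue[b] = 0;

      /* live_out is the union of the successors' live_in.  */
      for (j = 0; j < bb[b].nsucc; j++)
	out |= bb[bb[b].succ[j]].live_in;
      bb[b].live_out = out;

      in = bb[b].use | (out & ~bb[b].def);
      if (in != bb[b].live_in)
	{
	  bb[b].live_in = in;
	  /* Re-examine the predecessors, whose live_out just grew.  */
	  for (j = 0; j < bb[b].npred; j++)
	    if (! on_queue[bb[b].pred[j]])
	      {
		queue[qtail] = bb[b].pred[j];
		qtail = (qtail + 1) % (EX_NBLOCKS + 1);
		on_queue[bb[b].pred[j]] = 1;
	      }
	}
    }
}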
3140 /* Subroutines of life analysis. */
3142 /* Allocate the permanent data structures that represent the results
3143 of life analysis. Not static since used also for stupid life analysis. */
3146 allocate_bb_life_data ()
3150 for (i = 0; i < n_basic_blocks; i++)
3152 basic_block bb = BASIC_BLOCK (i);
3154 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3155 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3158 ENTRY_BLOCK_PTR->global_live_at_end
3159 = OBSTACK_ALLOC_REG_SET (function_obstack);
3160 EXIT_BLOCK_PTR->global_live_at_start
3161 = OBSTACK_ALLOC_REG_SET (function_obstack);
3163 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3167 allocate_reg_life_data ()
3171 /* Recalculate the register space, in case it has grown. Old style
3172 vector oriented regsets would set regset_{size,bytes} here also. */
3173 allocate_reg_info (max_regno, FALSE, FALSE);
3175 /* Reset all the data we'll collect in propagate_block and its subroutines. */
3177 for (i = 0; i < max_regno; i++)
3181 REG_N_DEATHS (i) = 0;
3182 REG_N_CALLS_CROSSED (i) = 0;
3183 REG_LIVE_LENGTH (i) = 0;
3184 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3188 /* Delete dead instructions for propagate_block. */
3191 propagate_block_delete_insn (bb, insn)
3195 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3197 /* If the insn referred to a label, and that label was attached to
3198 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3199 pretty much mandatory to delete it, because the ADDR_VEC may be
3200 referencing labels that no longer exist. */
3204 rtx label = XEXP (inote, 0);
3207 if (LABEL_NUSES (label) == 1
3208 && (next = next_nonnote_insn (label)) != NULL
3209 && GET_CODE (next) == JUMP_INSN
3210 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3211 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3213 rtx pat = PATTERN (next);
3214 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3215 int len = XVECLEN (pat, diff_vec_p);
3218 for (i = 0; i < len; i++)
3219 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3221 flow_delete_insn (next);
3225 if (bb->end == insn)
3226 bb->end = PREV_INSN (insn);
3227 flow_delete_insn (insn);
3230 /* Delete dead libcalls for propagate_block. Return the insn
3231 before the libcall. */
3234 propagate_block_delete_libcall (bb, insn, note)
3238 rtx first = XEXP (note, 0);
3239 rtx before = PREV_INSN (first);
3241 if (insn == bb->end)
3244 flow_delete_insn_chain (first, insn);
3248 /* Update the life-status of regs for one insn. Return the previous insn. */
3251 propagate_one_insn (pbi, insn)
3252 struct propagate_block_info *pbi;
3255 rtx prev = PREV_INSN (insn);
3256 int flags = pbi->flags;
3257 int insn_is_dead = 0;
3258 int libcall_is_dead = 0;
3262 if (! INSN_P (insn))
3265 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3266 if (flags & PROP_SCAN_DEAD_CODE)
3268 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3270 libcall_is_dead = (insn_is_dead && note != 0
3271 && libcall_dead_p (pbi, PATTERN (insn),
3275 /* We almost certainly don't want to delete prologue or epilogue
3276 instructions. Warn about probable compiler lossage. */
3279 && (((HAVE_epilogue || HAVE_prologue)
3280 && prologue_epilogue_contains (insn))
3281 || (HAVE_sibcall_epilogue
3282 && sibcall_epilogue_contains (insn))))
3284 if (flags & PROP_KILL_DEAD_CODE)
3286 warning ("ICE: would have deleted prologue/epilogue insn");
3287 if (!inhibit_warnings)
3290 libcall_is_dead = insn_is_dead = 0;
3293 /* If an instruction consists of just dead store(s) on final pass, delete it. */
3295 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3297 if (libcall_is_dead)
3299 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3300 insn = NEXT_INSN (prev);
3303 propagate_block_delete_insn (pbi->bb, insn);
3305 /* CC0 is now known to be dead. Either this insn used it,
3306 in which case it doesn't anymore, or clobbered it,
3307 so the next insn can't use it. */
3313 /* See if this is an increment or decrement that can be merged into
3314 a following memory address. */
3317 register rtx x = single_set (insn);
3319 /* Does this instruction increment or decrement a register? */
3320 if (!reload_completed
3321 && (flags & PROP_AUTOINC)
3323 && GET_CODE (SET_DEST (x)) == REG
3324 && (GET_CODE (SET_SRC (x)) == PLUS
3325 || GET_CODE (SET_SRC (x)) == MINUS)
3326 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3327 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3328 /* Ok, look for a following memory ref we can combine with.
3329 If one is found, change the memory ref to a PRE_INC
3330 or PRE_DEC, cancel this insn, and return 1.
3331 Return 0 if nothing has been done. */
3332 && try_pre_increment_1 (pbi, insn))
3335 #endif /* AUTO_INC_DEC */
3337 CLEAR_REG_SET (pbi->new_live);
3338 CLEAR_REG_SET (pbi->new_dead);
3340 /* If this is not the final pass, and this insn is copying the value of
3341 a library call and it's dead, don't scan the insns that perform the
3342 library call, so that the call's arguments are not marked live. */
3343 if (libcall_is_dead)
3345 /* Record the death of the dest reg. */
3346 mark_set_regs (pbi, PATTERN (insn), insn);
3348 insn = XEXP (note, 0);
3349 return PREV_INSN (insn);
3351 else if (GET_CODE (PATTERN (insn)) == SET
3352 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3353 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3354 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3355 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3356 /* We have an insn to pop a constant amount off the stack.
3357 (Such insns use PLUS regardless of the direction of the stack,
3358 and any insn to adjust the stack by a constant is always a pop.)
3359 These insns, if not dead stores, have no effect on life. */
3363 /* Any regs live at the time of a call instruction must not go
3364 in a register clobbered by calls. Find all regs now live and
3365 record this for them. */
3367 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3368 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3369 { REG_N_CALLS_CROSSED (i)++; });
3371 /* Record sets. Do this even for dead instructions, since they
3372 would have killed the values if they hadn't been deleted. */
3373 mark_set_regs (pbi, PATTERN (insn), insn);
3375 if (GET_CODE (insn) == CALL_INSN)
3381 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3382 cond = COND_EXEC_TEST (PATTERN (insn));
3384 /* Non-constant calls clobber memory. */
3385 if (! CONST_CALL_P (insn))
3386 free_EXPR_LIST_list (&pbi->mem_set_list);
3388 /* There may be extra registers to be clobbered. */
3389 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3391 note = XEXP (note, 1))
3392 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3393 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3394 cond, insn, pbi->flags);
3396 /* Calls change all call-used and global registers. */
3397 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3398 if (call_used_regs[i] && ! global_regs[i]
3401 /* We do not want REG_UNUSED notes for these registers. */
3402 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3403 cond, insn, pbi->flags & ~PROP_DEATH_NOTES);
3407 /* If an insn doesn't use CC0, it becomes dead since we assume
3408 that every insn clobbers it. So show it dead here;
3409 mark_used_regs will set it live if it is referenced. */
3414 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3416 /* Sometimes we may have inserted something before INSN (such as a move)
3417 when we make an auto-inc. So ensure we will scan those insns. */
3419 prev = PREV_INSN (insn);
3422 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3428 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3429 cond = COND_EXEC_TEST (PATTERN (insn));
3431 /* Calls use their arguments. */
3432 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3434 note = XEXP (note, 1))
3435 if (GET_CODE (XEXP (note, 0)) == USE)
3436 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3439 /* The stack ptr is used (honorarily) by a CALL insn. */
3440 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3442 /* Calls may also reference any of the global registers,
3443 so they are made live. */
3444 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3446 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3451 /* Update reg_live for the registers killed and used. */
3452 AND_COMPL_REG_SET (pbi->reg_live, pbi->new_dead);
3453 IOR_REG_SET (pbi->reg_live, pbi->new_live);
3455 /* On final pass, update counts of how many insns in which each reg is live. */
3457 if (flags & PROP_REG_INFO)
3458 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3459 { REG_LIVE_LENGTH (i)++; });
3464 /* Initialize a propagate_block_info struct for public consumption.
3465 Note that the structure itself is opaque to this file, but that
3466 the user can use the regsets provided here. */
3468 struct propagate_block_info *
3469 init_propagate_block_info (bb, live, local_set, flags)
3475 struct propagate_block_info *pbi = xmalloc (sizeof(*pbi));
3478 pbi->reg_live = live;
3479 pbi->mem_set_list = NULL_RTX;
3480 pbi->local_set = local_set;
3484 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3485 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3487 pbi->reg_next_use = NULL;
3489 pbi->new_live = BITMAP_XMALLOC ();
3490 pbi->new_dead = BITMAP_XMALLOC ();
3495 /* Release a propagate_block_info struct. */
3498 free_propagate_block_info (pbi)
3499 struct propagate_block_info *pbi;
3501 free_EXPR_LIST_list (&pbi->mem_set_list);
3503 BITMAP_XFREE (pbi->new_live);
3504 BITMAP_XFREE (pbi->new_dead);
3506 if (pbi->reg_next_use)
3507 free (pbi->reg_next_use);
3512 /* Compute the registers live at the beginning of a basic block BB from
3513 those live at the end.
3515 When called, REG_LIVE contains those live at the end. On return, it
3516 contains those live at the beginning.
3518 LOCAL_SET, if non-null, will be set with all registers killed by
3519 this basic block. */
3522 propagate_block (bb, live, local_set, flags)
3528 struct propagate_block_info *pbi;
3531 pbi = init_propagate_block_info (bb, live, local_set, flags);
3533 if (flags & PROP_REG_INFO)
3537 /* Process the regs live at the end of the block.
3538 Mark them as not local to any one basic block. */
3539 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3540 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3543 /* Scan the block an insn at a time from end to beginning. */
3545 for (insn = bb->end; ; insn = prev)
3547 /* If this is a call to `setjmp' et al, warn if any
3548 non-volatile datum is live. */
3549 if ((flags & PROP_REG_INFO)
3550 && GET_CODE (insn) == NOTE
3551 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3552 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3554 prev = propagate_one_insn (pbi, insn);
3556 if (insn == bb->head)
3560 free_propagate_block_info (pbi);
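/* A standalone sketch (invented names, not part of this file) of the
   backward per-insn scan that propagate_one_insn performs above: start
   from the registers live at the end of the block, and at each insn
   first kill its definitions, then make its uses live.  */

struct ex_insn
{
  unsigned int def, use;	/* registers set / used, as bit masks */
};

static unsigned int
example_propagate_block (const struct ex_insn *insn, int n_insns,
			 unsigned int live_at_end)
{
  unsigned int live = live_at_end;
  int i;

  /* Scan the block an insn at a time from end to beginning.  */
  for (i = n_insns - 1; i >= 0; i--)
    {
      /* A register in insn[i].def but not in LIVE at this point is a
	 dead store (cf. REG_UNUSED above); a register in insn[i].use
	 but not in LIVE dies at this insn (cf. REG_DEAD).  */
      live &= ~insn[i].def;	/* the set screens off older values */
      live |= insn[i].use;	/* inputs must be live above this insn */
    }

  return live;			/* registers live at the block's start */
}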
3563 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3564 (SET expressions whose destinations are registers dead after the insn).
3565 NEEDED is the regset that says which regs are alive after the insn.
3567 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3569 If X is the entire body of an insn, NOTES contains the reg notes
3570 pertaining to the insn. */
3573 insn_dead_p (pbi, x, call_ok, notes)
3574 struct propagate_block_info *pbi;
3577 rtx notes ATTRIBUTE_UNUSED;
3579 enum rtx_code code = GET_CODE (x);
3582 /* If flow is invoked after reload, we must take existing AUTO_INC
3583 expressions into account. */
3584 if (reload_completed)
3586 for ( ; notes; notes = XEXP (notes, 1))
3588 if (REG_NOTE_KIND (notes) == REG_INC)
3590 int regno = REGNO (XEXP (notes, 0));
3592 /* Don't delete insns to set global regs. */
3593 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3594 || REGNO_REG_SET_P (pbi->reg_live, regno))
3601 /* If setting something that's a reg or part of one,
3602 see if that register's altered value will be live. */
3606 rtx r = SET_DEST (x);
3609 if (GET_CODE (r) == CC0)
3610 return ! pbi->cc0_live;
3613 /* A SET that is a subroutine call cannot be dead. */
3614 if (GET_CODE (SET_SRC (x)) == CALL)
3620 /* Don't eliminate loads from volatile memory or volatile asms. */
3621 else if (volatile_refs_p (SET_SRC (x)))
3624 if (GET_CODE (r) == MEM)
3628 if (MEM_VOLATILE_P (r))
3631 /* Walk the set of memory locations we are currently tracking
3632 and see if one is an identical match to this memory location.
3633 If so, this memory write is dead (remember, we're walking
3634 backwards from the end of the block to the start). */
3635 temp = pbi->mem_set_list;
3638 if (rtx_equal_p (XEXP (temp, 0), r))
3640 temp = XEXP (temp, 1);
3645 while (GET_CODE (r) == SUBREG
3646 || GET_CODE (r) == STRICT_LOW_PART
3647 || GET_CODE (r) == ZERO_EXTRACT)
3650 if (GET_CODE (r) == REG)
3652 int regno = REGNO (r);
3655 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3658 /* If this is a hard register, verify that subsequent
3659 words are not needed. */
3660 if (regno < FIRST_PSEUDO_REGISTER)
3662 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3665 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3669 /* Don't delete insns to set global regs. */
3670 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3673 /* Make sure insns to set the stack pointer aren't deleted. */
3674 if (regno == STACK_POINTER_REGNUM)
3677 /* Make sure insns to set the frame pointer aren't deleted. */
3678 if (regno == FRAME_POINTER_REGNUM
3679 && (! reload_completed || frame_pointer_needed))
3681 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3682 if (regno == HARD_FRAME_POINTER_REGNUM
3683 && (! reload_completed || frame_pointer_needed))
3687 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3688 /* Make sure insns to set arg pointer are never deleted
3689 (if the arg pointer isn't fixed, there will be a USE
3690 for it, so we can treat it normally). */
3691 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3695 /* Otherwise, the set is dead. */
3701 /* If performing several activities, insn is dead if each activity
3702 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3703 CLOBBER or USE that's inside a PARALLEL doesn't make the insn needed. */
3705 else if (code == PARALLEL)
3707 int i = XVECLEN (x, 0);
3709 for (i--; i >= 0; i--)
3710 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3711 && GET_CODE (XVECEXP (x, 0, i)) != USE
3712 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3718 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3719 is not necessarily true for hard registers. */
3720 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3721 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3722 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3725 /* We do not check other CLOBBER or USE here. An insn consisting of just
3726 a CLOBBER or just a USE should not be deleted. */
3730 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3731 return 1 if the entire library call is dead.
3732 This is true if X copies a register (hard or pseudo)
3733 and if the hard return reg of the call insn is dead.
3734 (The caller should have tested the destination of X already for death.)
3736 If this insn doesn't just copy a register, then we don't
3737 have an ordinary libcall. In that case, cse could not have
3738 managed to substitute the source for the dest later on,
3739 so we can assume the libcall is dead.
3741 NEEDED is the bit vector of pseudoregs live before this insn.
3742 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3745 libcall_dead_p (pbi, x, note, insn)
3746 struct propagate_block_info *pbi;
3751 register RTX_CODE code = GET_CODE (x);
3755 register rtx r = SET_SRC (x);
3756 if (GET_CODE (r) == REG)
3758 rtx call = XEXP (note, 0);
3762 /* Find the call insn. */
3763 while (call != insn && GET_CODE (call) != CALL_INSN)
3764 call = NEXT_INSN (call);
3766 /* If there is none, do nothing special,
3767 since ordinary death handling can understand these insns. */
3771 /* See if the hard reg holding the value is dead.
3772 If this is a PARALLEL, find the call within it. */
3773 call_pat = PATTERN (call);
3774 if (GET_CODE (call_pat) == PARALLEL)
3776 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3777 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3778 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3781 /* This may be a library call that is returning a value
3782 via invisible pointer. Do nothing special, since
3783 ordinary death handling can understand these insns. */
3787 call_pat = XVECEXP (call_pat, 0, i);
3790 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3796 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3797 live at function entry. Don't count global register variables, variables
3798 in registers that can be used for function arg passing, or variables in
3799 fixed hard registers. */
3802 regno_uninitialized (regno)
3805 if (n_basic_blocks == 0
3806 || (regno < FIRST_PSEUDO_REGISTER
3807 && (global_regs[regno]
3808 || fixed_regs[regno]
3809 || FUNCTION_ARG_REGNO_P (regno))))
3812 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3815 /* 1 if register REGNO was alive at a place where `setjmp' was called
3816 and was set more than once or is an argument.
3817 Such regs may be clobbered by `longjmp'. */
3820 regno_clobbered_at_setjmp (regno)
3823 if (n_basic_blocks == 0)
3826 return ((REG_N_SETS (regno) > 1
3827 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3828 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3831 /* INSN references memory, possibly using autoincrement addressing modes.
3832 Find any entries on the mem_set_list that need to be invalidated due
3833 to an address change. */
3835 invalidate_mems_from_autoinc (pbi, insn)
3836 struct propagate_block_info *pbi;
3839 rtx note = REG_NOTES (insn);
3840 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3842 if (REG_NOTE_KIND (note) == REG_INC)
3844 rtx temp = pbi->mem_set_list;
3845 rtx prev = NULL_RTX;
3850 next = XEXP (temp, 1);
3851 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3853 /* Splice temp out of list. */
3855 XEXP (prev, 1) = next;
3857 pbi->mem_set_list = next;
3858 free_EXPR_LIST_node (temp);
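/* A standalone sketch (invented node type, not part of this file) of the
   "splice TEMP out of the list" idiom used above and again in mark_set_1:
   walk a singly linked list keeping a PREV pointer so that any node
   matching a condition can be unlinked in place while the walk
   continues.  */

struct ex_node
{
  int value;
  struct ex_node *next;
};

static struct ex_node *
example_remove_matching (struct ex_node *list, int doomed)
{
  struct ex_node *temp = list, *prev = 0, *next;

  while (temp)
    {
      next = temp->next;
      if (temp->value == doomed)
	{
	  /* Splice TEMP out of the list; the real code also frees
	     the EXPR_LIST node here.  */
	  if (prev)
	    prev->next = next;
	  else
	    list = next;
	}
      else
	prev = temp;
      temp = next;
    }

  return list;
}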
3868 /* Process the registers that are set within X. Their bits are set to
3869 1 in the regset DEAD, because they are dead prior to this insn.
3871 If INSN is nonzero, it is the insn being processed.
3873 FLAGS is the set of operations to perform. */
3876 mark_set_regs (pbi, x, insn)
3877 struct propagate_block_info *pbi;
3880 rtx cond = NULL_RTX;
3884 switch (code = GET_CODE (x))
3888 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
3892 cond = COND_EXEC_TEST (x);
3893 x = COND_EXEC_CODE (x);
3899 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3901 rtx sub = XVECEXP (x, 0, i);
3902 switch (code = GET_CODE (sub))
3905 if (cond != NULL_RTX)
3908 cond = COND_EXEC_TEST (sub);
3909 sub = COND_EXEC_CODE (sub);
3910 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
3916 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
3931 /* Process a single SET rtx, X. */
3934 mark_set_1 (pbi, code, reg, cond, insn, flags)
3935 struct propagate_block_info *pbi;
3937 rtx reg, cond, insn;
3940 int regno_first = -1, regno_last = -1;
3943 /* Some targets place small structures in registers for
3944 return values of functions. We have to detect this
3945 case specially here to get correct flow information. */
3946 if (GET_CODE (reg) == PARALLEL
3947 && GET_MODE (reg) == BLKmode)
3949 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3950 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
3954 /* Modifying just one hardware register of a multi-reg value or just a
3955 byte field of a register does not mean the value from before this insn
3956 is now dead. But it does mean liveness of that register at the end of
3957 the block is significant.
3959 Within mark_set_1, however, we treat it as if the register is indeed
3960 modified. mark_used_regs will, however, also treat this register as
3961 being used. Thus, we treat these insns as setting a new value for the
3962 register as a function of its old value. This causes LOG_LINKS to be
3963 made appropriately and this will help combine.
3965 ??? This is all done incorrectly. We should not be setting bits in
3966 new_dead for these registers, since, as we just explained, they are
3967 not dead. We should be setting bits in local_set, and updating
3968 LOG_LINKS, but that is different. */
3970 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3971 || GET_CODE (reg) == SIGN_EXTRACT
3972 || GET_CODE (reg) == STRICT_LOW_PART)
3973 reg = XEXP (reg, 0);
3975 /* If this set is a MEM, then it kills any aliased writes.
3976 If this set is a REG, then it kills any MEMs which use the reg. */
3977 if (flags & PROP_SCAN_DEAD_CODE)
3979 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
3981 rtx temp = pbi->mem_set_list;
3982 rtx prev = NULL_RTX;
3987 next = XEXP (temp, 1);
3988 if ((GET_CODE (reg) == MEM
3989 && output_dependence (XEXP (temp, 0), reg))
3990 || (GET_CODE (reg) == REG
3991 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3993 /* Splice this entry out of the list. */
3995 XEXP (prev, 1) = next;
3997 pbi->mem_set_list = next;
3998 free_EXPR_LIST_node (temp);
4006 /* If the memory reference had embedded side effects (autoincrement
4007 address modes), then we may need to kill some entries on the memory set list. */
4009 if (insn && GET_CODE (reg) == MEM)
4010 invalidate_mems_from_autoinc (pbi, insn);
4012 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4013 /* We do not know the size of a BLKmode store, so we do not track
4014 them for redundant store elimination. */
4015 && GET_MODE (reg) != BLKmode
4016 /* There are no REG_INC notes for SP, so we can't assume we'll see
4017 everything that invalidates it. To be safe, don't eliminate any
4018 stores through SP; none of them should be redundant anyway. */
4019 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4020 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4023 if (GET_CODE (reg) == REG
4024 && (regno_first = REGNO (reg),
4025 ! (regno_first == FRAME_POINTER_REGNUM
4026 && (! reload_completed || frame_pointer_needed)))
4027 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4028 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4029 && (! reload_completed || frame_pointer_needed))
4031 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4032 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4036 int some_was_live = 0, some_was_dead = 0;
4038 if (regno_first < FIRST_PSEUDO_REGISTER)
4039 regno_last = (regno_first
4040 + HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1);
4042 regno_last = regno_first;
4044 for (i = regno_first; i <= regno_last; ++i)
4046 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4048 SET_REGNO_REG_SET (pbi->local_set, i);
4050 some_was_live |= needed_regno;
4051 some_was_dead |= ! needed_regno;
4054 /* Additional data to record if this is the final pass. */
4055 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4056 | PROP_DEATH_NOTES | PROP_AUTOINC))
4059 register int blocknum = pbi->bb->index;
4062 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4064 y = pbi->reg_next_use[regno_first];
4066 /* The next use is no longer next, since a store intervenes. */
4067 for (i = regno_first; i <= regno_last; ++i)
4068 pbi->reg_next_use[i] = 0;
4071 if (flags & PROP_REG_INFO)
4073 for (i = regno_first; i <= regno_last; ++i)
4075 /* Count (weighted) references, stores, etc. This counts a
4076 register twice if it is modified, but that is correct. */
4077 REG_N_SETS (i) += 1;
4078 REG_N_REFS (i) += (optimize_size ? 1
4079 : pbi->bb->loop_depth + 1);
4081 /* The insns where a reg is live are normally counted
4082 elsewhere, but we want the count to include the insn
4083 where the reg is set, and the normal counting mechanism
4084 would not count it. */
4085 REG_LIVE_LENGTH (i) += 1;
4088 /* If this is a hard reg, record this function uses the reg. */
4089 if (regno_first < FIRST_PSEUDO_REGISTER)
4091 for (i = regno_first; i <= regno_last; i++)
4092 regs_ever_live[i] = 1;
4096 /* Keep track of which basic blocks each reg appears in. */
4097 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4098 REG_BASIC_BLOCK (regno_first) = blocknum;
4099 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4100 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4104 if (! some_was_dead)
4106 if (flags & PROP_LOG_LINKS)
4108 /* Make a logical link from the next following insn
4109 that uses this register, back to this insn.
4110 The following insns have already been processed.
4112 We don't build a LOG_LINK for hard registers contained
4113 in ASM_OPERANDs. If these registers get replaced,
4114 we might wind up changing the semantics of the insn,
4115 even if reload can make what appear to be valid
4116 assignments later. */
4117 if (y && (BLOCK_NUM (y) == blocknum)
4118 && (regno_first >= FIRST_PSEUDO_REGISTER
4119 || asm_noperands (PATTERN (y)) < 0))
4120 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4123 else if (! some_was_live)
4125 if (flags & PROP_REG_INFO)
4126 REG_N_DEATHS (regno_first) += 1;
4128 if (flags & PROP_DEATH_NOTES)
4130 /* Note that dead stores have already been deleted
4131 when possible. If we get here, we have found a
4132 dead store that cannot be eliminated (because the
4133 same insn does something useful). Indicate this
4134 by marking the reg being set as dying here. */
4136 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4141 if (flags & PROP_DEATH_NOTES)
4143 /* This is a case where we have a multi-word hard register
4144 and some, but not all, of the words of the register are
4145 needed in subsequent insns. Write REG_UNUSED notes
4146 for those parts that were not needed. This case should be rare. */
4149 for (i = regno_first; i <= regno_last; ++i)
4150 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4152 = alloc_EXPR_LIST (REG_UNUSED,
4153 gen_rtx_REG (reg_raw_mode[i], i),
4159 /* Mark the register as being dead. */
4161 /* The stack pointer is never dead. Well, not strictly true,
4162 but it's very difficult to tell from here. Hopefully
4163 combine_stack_adjustments will fix up the most egregious errors. */
4165 && regno_first != STACK_POINTER_REGNUM)
4167 for (i = regno_first; i <= regno_last; ++i)
4168 SET_REGNO_REG_SET (pbi->new_dead, i);
4171 else if (GET_CODE (reg) == REG)
4173 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4174 pbi->reg_next_use[regno_first] = 0;
4177 /* If this is the last pass and this is a SCRATCH, show it will be dying
4178 here and count it. */
4179 else if (GET_CODE (reg) == SCRATCH)
4181 if (flags & PROP_DEATH_NOTES)
4183 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4189 /* X is a MEM found in INSN. See if we can convert it into an auto-increment reference. */
4193 find_auto_inc (pbi, x, insn)
4194 struct propagate_block_info *pbi;
4198 rtx addr = XEXP (x, 0);
4199 HOST_WIDE_INT offset = 0;
4202 /* Here we detect use of an index register which might be good for
4203 postincrement, postdecrement, preincrement, or predecrement. */
4205 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4206 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4208 if (GET_CODE (addr) == REG)
4211 register int size = GET_MODE_SIZE (GET_MODE (x));
4214 int regno = REGNO (addr);
4216 /* Is the next use an increment that might make auto-increment? */
4217 if ((incr = pbi->reg_next_use[regno]) != 0
4218 && (set = single_set (incr)) != 0
4219 && GET_CODE (set) == SET
4220 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4221 /* Can't add side effects to jumps; if reg is spilled and
4222 reloaded, there's no way to store back the altered value. */
4223 && GET_CODE (insn) != JUMP_INSN
4224 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4225 && XEXP (y, 0) == addr
4226 && GET_CODE (XEXP (y, 1)) == CONST_INT
4227 && ((HAVE_POST_INCREMENT
4228 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4229 || (HAVE_POST_DECREMENT
4230 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4231 || (HAVE_PRE_INCREMENT
4232 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4233 || (HAVE_PRE_DECREMENT
4234 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4235 /* Make sure this reg appears only once in this insn. */
4236 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4237 use != 0 && use != (rtx) 1))
4239 rtx q = SET_DEST (set);
4240 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4241 ? (offset ? PRE_INC : POST_INC)
4242 : (offset ? PRE_DEC : POST_DEC));
4244 if (dead_or_set_p (incr, addr)
4245 /* Mustn't autoinc an eliminable register. */
4246 && (regno >= FIRST_PSEUDO_REGISTER
4247 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4249 /* This is the simple case. Try to make the auto-inc. If
4250 we can't, we are done. Otherwise, we will do any
4251 needed updates below. */
4252 if (! validate_change (insn, &XEXP (x, 0),
4253 gen_rtx_fmt_e (inc_code, Pmode, addr),
4257 else if (GET_CODE (q) == REG
4258 /* PREV_INSN used here to check the semi-open interval [insn, incr). */
4260 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4261 /* We must also check for sets of q as q may be
4262 a call clobbered hard register and there may
4263 be a call between PREV_INSN (insn) and incr. */
4264 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4266 /* We have *p followed sometime later by q = p+size.
4267 Both p and q must be live afterward,
4268 and q is not used between INSN and its assignment.
4269 Change it to q = p, ...*q..., q = q+size.
4270 Then fall into the usual case. */
4274 emit_move_insn (q, addr);
4275 insns = get_insns ();
4278 if (basic_block_for_insn)
4279 for (temp = insns; temp; temp = NEXT_INSN (temp))
4280 set_block_for_insn (temp, pbi->bb);
4282 /* If we can't make the auto-inc, or can't make the
4283 replacement into Y, exit. There's no point in making
4284 the change below if we can't do the auto-inc and doing
4285 so is not correct in the pre-inc case. */
4287 validate_change (insn, &XEXP (x, 0),
4288 gen_rtx_fmt_e (inc_code, Pmode, q),
4290 validate_change (incr, &XEXP (y, 0), q, 1);
4291 if (! apply_change_group ())
4294 /* We now know we'll be doing this change, so emit the
4295 new insn(s) and do the updates. */
4296 emit_insns_before (insns, insn);
4298 if (pbi->bb->head == insn)
4299 pbi->bb->head = insns;
4301 /* INCR will become a NOTE and INSN won't contain a
4302 use of ADDR. If a use of ADDR was just placed in
4303 the insn before INSN, make that the next use.
4304 Otherwise, invalidate it. */
4305 if (GET_CODE (PREV_INSN (insn)) == INSN
4306 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4307 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4308 pbi->reg_next_use[regno] = PREV_INSN (insn);
4310 pbi->reg_next_use[regno] = 0;
4315 /* REGNO is now used in INCR which is below INSN, but it
4316 previously wasn't live here. If we don't mark it as
4317 live, we'll put a REG_DEAD note for it on this insn,
4318 which is incorrect. */
4319 SET_REGNO_REG_SET (pbi->reg_live, regno);
4321 /* If there are any calls between INSN and INCR, show
4322 that REGNO now crosses them. */
4323 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4324 if (GET_CODE (temp) == CALL_INSN)
4325 REG_N_CALLS_CROSSED (regno)++;
4330 /* If we haven't returned, it means we were able to make the
4331 auto-inc, so update the status. First, record that this insn
4332 has an implicit side effect. */
4335 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4337 /* Modify the old increment-insn to simply copy
4338 the already-incremented value of our register. */
4339 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4342 /* If that makes it a no-op (copying the register into itself) delete
4343 it so it won't appear to be a "use" and a "set" of this register. */
4345 if (SET_DEST (set) == addr)
4347 /* If the original source was dead, it's dead now. */
4348 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4349 if (note && XEXP (note, 0) != addr)
4350 SET_REGNO_REG_SET (pbi->new_dead, REGNO (XEXP (note, 0)));
4352 PUT_CODE (incr, NOTE);
4353 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4354 NOTE_SOURCE_FILE (incr) = 0;
4357 if (regno >= FIRST_PSEUDO_REGISTER)
4359 /* Count an extra reference to the reg. When a reg is
4360 incremented, spilling it is worse, so we want to make
4361 that less likely. */
4362 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4364 /* Count the increment as a setting of the register,
4365 even though it isn't a SET in rtl. */
4366 REG_N_SETS (regno)++;
4371 #endif /* AUTO_INC_DEC */
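/* A standalone sketch (the enum is illustrative, not the rtl codes) of
   the addressing-mode classification find_auto_inc performs above.  The
   memory reference is *(reg + OFFSET), a later insn adds DELTA to the
   register, and SIZE is the width of the access; the four cases mirror
   the HAVE_POST_INCREMENT .. HAVE_PRE_DECREMENT tests.  */

enum ex_autoinc
{
  EX_NONE, EX_POST_INC, EX_POST_DEC, EX_PRE_INC, EX_PRE_DEC
};

static enum ex_autoinc
example_classify_autoinc (long offset, long delta, long size)
{
  if (delta == size && offset == 0)
    return EX_POST_INC;		/* *p ... p += size   =>   *p++  */
  if (delta == -size && offset == 0)
    return EX_POST_DEC;		/* *p ... p -= size   =>   *p--  */
  if (delta == size && offset == size)
    return EX_PRE_INC;		/* *(p+size) ... p += size   =>   *++p  */
  if (delta == -size && offset == -size)
    return EX_PRE_DEC;		/* *(p-size) ... p -= size   =>   *--p  */
  return EX_NONE;
}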
4374 mark_used_reg (pbi, reg, cond, insn)
4375 struct propagate_block_info *pbi;
4377 rtx cond ATTRIBUTE_UNUSED;
4380 int regno = REGNO (reg);
4381 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4382 int some_was_dead = ! some_was_live;
4384 SET_REGNO_REG_SET (pbi->new_live, regno);
4386 /* A hard reg in a wide mode may really be multiple registers.
4387 If so, mark all of them just like the first. */
4388 if (regno < FIRST_PSEUDO_REGISTER)
4390 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4393 int regno_n = regno + n;
4394 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4396 SET_REGNO_REG_SET (pbi->new_live, regno_n);
4397 some_was_live |= needed_regno;
4398 some_was_dead |= ! needed_regno;
4402 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4404 /* Record where each reg is used, so when the reg is set we know
4405 the next insn that uses it. */
4406 pbi->reg_next_use[regno] = insn;
4409 if (pbi->flags & PROP_REG_INFO)
4411 if (regno < FIRST_PSEUDO_REGISTER)
4413 /* If this is a register we are going to try to eliminate,
4414 don't mark it live here. If we are successful in
4415 eliminating it, it need not be live unless it is used for
4416 pseudos, in which case it will have been set live when it
4417 was allocated to the pseudos. If the register will not
4418 be eliminated, reload will set it live at that point.
4420 Otherwise, record that this function uses this register. */
4421 /* ??? The PPC backend tries to "eliminate" on the pic
4422 register to itself. This should be fixed. In the mean
4423 time, hack around it. */
4425 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
4426 && (regno == FRAME_POINTER_REGNUM
4427 || regno == ARG_POINTER_REGNUM)))
4429 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4431 regs_ever_live[regno + --n] = 1;
4437 /* Keep track of which basic block each reg appears in. */
4439 register int blocknum = pbi->bb->index;
4440 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4441 REG_BASIC_BLOCK (regno) = blocknum;
4442 else if (REG_BASIC_BLOCK (regno) != blocknum)
4443 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4445 /* Count (weighted) number of uses of each reg. */
4446 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4450 /* Record and count the insns in which a reg dies. If it is used in
4451 this insn and was dead below the insn then it dies in this insn.
4452 If it was set in this insn, we do not make a REG_DEAD note;
4453 likewise if we already made such a note.
4455 ??? This could be done better. In new_dead we have a record of
4456 which registers are set or clobbered this insn (which in itself is
4457 slightly incorrect, see the commentary near strict_low_part in
4458 mark_set_1), which should be the set of registers that we do not
4459 wish to create death notes for under the above rule. Note that
4460 we have not yet processed the call-clobbered/call-used registers,
4461 and they do not quite follow the above rule, since we do want death
4462 notes for call-clobbered register arguments. Which begs the whole
4463 question of whether we should in fact have death notes for registers
4464 used and clobbered (but not set) in the same insn. The only useful
4465 thing we ought to be getting from dead_or_set_p is detection of
4466 duplicate death notes. */
4468 if ((pbi->flags & PROP_DEATH_NOTES)
4470 && ! dead_or_set_p (insn, reg))
4474 /* Check for the case where the register dying partially
4475 overlaps the register set by this insn. */
4476 if (regno < FIRST_PSEUDO_REGISTER
4477 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4479 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4481 some_was_live |= dead_or_set_regno_p (insn, regno + n);
4484 /* If none of the words in X is needed, make a REG_DEAD note.
4485 Otherwise, we must make partial REG_DEAD notes. */
4486 if (! some_was_live)
4489 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4490 REG_N_DEATHS (regno)++;
4494 /* Don't make a REG_DEAD note for a part of a register
4495 that is set in the insn. */
4497 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4498 for (; n >= regno; n--)
4499 if (!REGNO_REG_SET_P (pbi->reg_live, n)
4500 && ! dead_or_set_regno_p (insn, n))
4502 = alloc_EXPR_LIST (REG_DEAD,
4503 gen_rtx_REG (reg_raw_mode[n], n),
4509 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
4510 This is done assuming the registers needed from X are those that
4511 have 1-bits in PBI->REG_LIVE.
4513 INSN is the containing instruction. If INSN is dead, this function
4517 mark_used_regs (pbi, x, cond, insn)
4518 struct propagate_block_info *pbi;
4521 register RTX_CODE code;
4523 int flags = pbi->flags;
4526 code = GET_CODE (x);
4546 /* If we are clobbering a MEM, mark any registers inside the address
4548 if (GET_CODE (XEXP (x, 0)) == MEM)
4549 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
4553 /* Don't bother watching stores to mems if this is not the
4554 final pass. We'll not be deleting dead stores this round. */
4555 if (flags & PROP_SCAN_DEAD_CODE)
4557 /* Invalidate the data for the last MEM stored, but only if MEM is
4558 something that can be stored into. */
4559 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4560 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4561 ; /* needn't clear the memory set list */
4564 rtx temp = pbi->mem_set_list;
4565 rtx prev = NULL_RTX;
4570 next = XEXP (temp, 1);
4571 if (anti_dependence (XEXP (temp, 0), x))
4573 /* Splice temp out of the list. */
4575 XEXP (prev, 1) = next;
4577 pbi->mem_set_list = next;
4578 free_EXPR_LIST_node (temp);
4586 /* If the memory reference had embedded side effects (autoincrement
4587 address modes), then we may need to kill some entries on the
4590 invalidate_mems_from_autoinc (pbi, insn);
4594 if (flags & PROP_AUTOINC)
4595 find_auto_inc (pbi, x, insn);
4600 if (GET_CODE (SUBREG_REG (x)) == REG
4601 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4602 && (GET_MODE_SIZE (GET_MODE (x))
4603 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4604 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4606 /* While we're here, optimize this case. */
4608 if (GET_CODE (x) != REG)
4613 /* See a register other than being set => mark it as needed. */
4614 mark_used_reg (pbi, x, cond, insn);
4619 register rtx testreg = SET_DEST (x);
4622 /* If storing into MEM, don't show it as being used. But do
4623 show the address as being used. */
4624 if (GET_CODE (testreg) == MEM)
4627 if (flags & PROP_AUTOINC)
4628 find_auto_inc (pbi, testreg, insn);
4630 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
4631 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4635 /* Storing in STRICT_LOW_PART is like storing in a reg
4636 in that this SET might be dead, so ignore it in TESTREG,
4637 but in some other ways it is like using the reg.
4639 Storing in a SUBREG or a bit field is like storing the entire
4640 register in that if the register's value is not used
4641 then this SET is not needed. */
4642 while (GET_CODE (testreg) == STRICT_LOW_PART
4643 || GET_CODE (testreg) == ZERO_EXTRACT
4644 || GET_CODE (testreg) == SIGN_EXTRACT
4645 || GET_CODE (testreg) == SUBREG)
4647 if (GET_CODE (testreg) == SUBREG
4648 && GET_CODE (SUBREG_REG (testreg)) == REG
4649 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4650 && (GET_MODE_SIZE (GET_MODE (testreg))
4651 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4652 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4654 /* Modifying a single register in an alternate mode
4655 does not use any of the old value. But these other
4656 ways of storing in a register do use the old value. */
4657 if (GET_CODE (testreg) == SUBREG
4658 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4663 testreg = XEXP (testreg, 0);
4666 /* If this is a store into a register, recursively scan the
4667 value being stored. */
4669 if ((GET_CODE (testreg) == PARALLEL
4670 && GET_MODE (testreg) == BLKmode)
4671 || (GET_CODE (testreg) == REG
4672 && (regno = REGNO (testreg),
4673 ! (regno == FRAME_POINTER_REGNUM
4674 && (! reload_completed || frame_pointer_needed)))
4675 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4676 && ! (regno == HARD_FRAME_POINTER_REGNUM
4677 && (! reload_completed || frame_pointer_needed))
4679 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4680 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4685 mark_used_regs (pbi, SET_DEST (x), cond, insn);
4686 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4693 case UNSPEC_VOLATILE:
4697 /* Traditional and volatile asm instructions must be considered to use
4698 and clobber all hard registers, all pseudo-registers and all of
4699 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4701 Consider for instance a volatile asm that changes the fpu rounding
4702 mode. An insn should not be moved across this even if it only uses
4703 pseudo-regs because it might give an incorrectly rounded result.
4705 ?!? Unfortunately, marking all hard registers as live causes massive
4706 problems for the register allocator and marking all pseudos as live
4707 creates mountains of uninitialized variable warnings.
4709 So for now, just clear the memory set list and mark any regs
4710 we can find in ASM_OPERANDS as used. */
4711 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4712 free_EXPR_LIST_list (&pbi->mem_set_list);
4714 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4715 We can not just fall through here since then we would be confused
4716 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4717 traditional asms unlike their normal usage. */
4718 if (code == ASM_OPERANDS)
4722 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4723 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
4729 if (cond != NULL_RTX)
4732 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
4734 cond = COND_EXEC_TEST (x);
4735 x = COND_EXEC_CODE (x);
4739 /* We _do_not_ want to scan operands of phi nodes. Operands of
4740 a phi function are evaluated only when control reaches this
4741 block along a particular edge. Therefore, regs that appear
4742 as arguments to phi should not be added to the global live at
4750 /* Recursively scan the operands of this expression. */
4753 register const char *fmt = GET_RTX_FORMAT (code);
4756 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4760 /* Tail recursive case: save a function call level. */
4766 mark_used_regs (pbi, XEXP (x, i), cond, insn);
4768 else if (fmt[i] == 'E')
4771 for (j = 0; j < XVECLEN (x, i); j++)
4772 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
4781 try_pre_increment_1 (pbi, insn)
4782 struct propagate_block_info *pbi;
4785 /* Find the next use of this reg. If in same basic block,
4786 make it do pre-increment or pre-decrement if appropriate. */
4787 rtx x = single_set (insn);
4788 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4789 * INTVAL (XEXP (SET_SRC (x), 1)));
4790 int regno = REGNO (SET_DEST (x));
4791 rtx y = pbi->reg_next_use[regno];
4793 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4794 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4795 mode would be better. */
4796 && ! dead_or_set_p (y, SET_DEST (x))
4797 && try_pre_increment (y, SET_DEST (x), amount))
4799 /* We have found a suitable auto-increment
4800 and already changed insn Y to do it.
4801 So flush this increment-instruction. */
4802 PUT_CODE (insn, NOTE);
4803 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4804 NOTE_SOURCE_FILE (insn) = 0;
4805 /* Count a reference to this reg for the increment
4806 insn we are deleting. When a reg is incremented,
4807 spilling it is worse, so we want to make that
4809 if (regno >= FIRST_PSEUDO_REGISTER)
4811 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4812 REG_N_SETS (regno)++;
4819 /* Try to change INSN so that it does pre-increment or pre-decrement
4820 addressing on register REG in order to add AMOUNT to REG.
4821 AMOUNT is negative for pre-decrement.
4822 Returns 1 if the change could be made.
4823 This checks all aspects of the validity of the result of modifying INSN. */
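/* A small illustration (a sketch only, assuming a 32-bit target that
   defines HAVE_PRE_INCREMENT): given the insns

	(set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4)))	<- increment
	(set (reg:SI 2) (mem:SI (reg:SI 1)))			<- next use

   try_pre_increment applied to the second insn with REG = (reg:SI 1) and
   AMOUNT = 4 rewrites its address to (pre_inc:SI (reg:SI 1)), attaches a
   REG_INC note for the register, and returns 1; the caller
   (try_pre_increment_1 above) then turns the separate increment insn
   into a deleted note.  */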
4826 try_pre_increment (insn, reg, amount)
4828 HOST_WIDE_INT amount;
4832 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4833 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4835 /* Nonzero if we can try to make a post-increment or post-decrement.
4836 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4837 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4838 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4841 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4844 /* From the sign of increment, see which possibilities are conceivable
4845 on this target machine. */
4846 if (HAVE_PRE_INCREMENT && amount > 0)
4848 if (HAVE_POST_INCREMENT && amount > 0)
4851 if (HAVE_PRE_DECREMENT && amount < 0)
4853 if (HAVE_POST_DECREMENT && amount < 0)
4856 if (! (pre_ok || post_ok))
4859 /* It is not safe to add a side effect to a jump insn
4860 because if the incremented register is spilled and must be reloaded
4861 there would be no way to store the incremented value back in memory. */
4863 if (GET_CODE (insn) == JUMP_INSN)
4868 use = find_use_as_address (PATTERN (insn), reg, 0);
4869 if (post_ok && (use == 0 || use == (rtx) 1))
4871 use = find_use_as_address (PATTERN (insn), reg, -amount);
4875 if (use == 0 || use == (rtx) 1)
4878 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4881 /* See if this combination of instruction and addressing mode exists. */
4882 if (! validate_change (insn, &XEXP (use, 0),
4883 gen_rtx_fmt_e (amount > 0
4884 ? (do_post ? POST_INC : PRE_INC)
4885 : (do_post ? POST_DEC : PRE_DEC),
4889 /* Record that this insn now has an implicit side effect on REG. */
4890 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4894 #endif /* AUTO_INC_DEC */
4896 /* Find the place in the rtx X where REG is used as a memory address.
4897 Return the MEM rtx that so uses it.
4898 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4899 (plus REG (const_int PLUSCONST)).
4901 If such an address does not appear, return 0.
4902 If REG appears more than once, or is used other than in such an address,
4906 find_use_as_address (x, reg, plusconst)
4909 HOST_WIDE_INT plusconst;
4911 enum rtx_code code = GET_CODE (x);
4912 const char *fmt = GET_RTX_FORMAT (code);
4914 register rtx value = 0;
4917 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4920 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4921 && XEXP (XEXP (x, 0), 0) == reg
4922 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4923 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4926 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4928 /* If REG occurs inside a MEM used in a bit-field reference,
4929 that is unacceptable. */
4930 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4931 return (rtx) (HOST_WIDE_INT) 1;
4935 return (rtx) (HOST_WIDE_INT) 1;
4937 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4941 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4945 return (rtx) (HOST_WIDE_INT) 1;
4947 else if (fmt[i] == 'E')
4950 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4952 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4956 return (rtx) (HOST_WIDE_INT) 1;
4964 /* Write information about registers and basic blocks into FILE.
4965 This is part of making a debugging dump. */
4968 dump_regset (r, outf)
4975 fputs (" (nil)", outf);
4979 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4981 fprintf (outf, " %d", i);
4982 if (i < FIRST_PSEUDO_REGISTER)
4983 fprintf (outf, " [%s]",
4992 dump_regset (r, stderr);
4993 putc ('\n', stderr);
4997 dump_flow_info (file)
5001 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5003 fprintf (file, "%d registers.\n", max_regno);
5004 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5007 enum reg_class class, altclass;
5008 fprintf (file, "\nRegister %d used %d times across %d insns",
5009 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5010 if (REG_BASIC_BLOCK (i) >= 0)
5011 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5013 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5014 (REG_N_SETS (i) == 1) ? "" : "s");
5015 if (REG_USERVAR_P (regno_reg_rtx[i]))
5016 fprintf (file, "; user var");
5017 if (REG_N_DEATHS (i) != 1)
5018 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5019 if (REG_N_CALLS_CROSSED (i) == 1)
5020 fprintf (file, "; crosses 1 call");
5021 else if (REG_N_CALLS_CROSSED (i))
5022 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5023 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5024 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5025 class = reg_preferred_class (i);
5026 altclass = reg_alternate_class (i);
5027 if (class != GENERAL_REGS || altclass != ALL_REGS)
5029 if (altclass == ALL_REGS || class == ALL_REGS)
5030 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5031 else if (altclass == NO_REGS)
5032 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5034 fprintf (file, "; pref %s, else %s",
5035 reg_class_names[(int) class],
5036 reg_class_names[(int) altclass]);
5038 if (REGNO_POINTER_FLAG (i))
5039 fprintf (file, "; pointer");
5040 fprintf (file, ".\n");
5043 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5044 for (i = 0; i < n_basic_blocks; i++)
5046 register basic_block bb = BASIC_BLOCK (i);
5049 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5050 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5052 fprintf (file, "Predecessors: ");
5053 for (e = bb->pred; e ; e = e->pred_next)
5054 dump_edge_info (file, e, 0);
5056 fprintf (file, "\nSuccessors: ");
5057 for (e = bb->succ; e ; e = e->succ_next)
5058 dump_edge_info (file, e, 1);
5060 fprintf (file, "\nRegisters live at start:");
5061 dump_regset (bb->global_live_at_start, file);
5063 fprintf (file, "\nRegisters live at end:");
5064 dump_regset (bb->global_live_at_end, file);
5075 dump_flow_info (stderr);
5079 dump_edge_info (file, e, do_succ)
5084 basic_block side = (do_succ ? e->dest : e->src);
5086 if (side == ENTRY_BLOCK_PTR)
5087 fputs (" ENTRY", file);
5088 else if (side == EXIT_BLOCK_PTR)
5089 fputs (" EXIT", file);
5091 fprintf (file, " %d", side->index);
5095 static const char * const bitnames[] = {
5096 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5099 int i, flags = e->flags;
5103 for (i = 0; flags; i++)
5104 if (flags & (1 << i))
5110 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5111 fputs (bitnames[i], file);
5113 fprintf (file, "%d", i);
5121 /* Print out one basic block with live information at start and end. */
5131 fprintf (outf, ";; Basic block %d, loop depth %d",
5132 bb->index, bb->loop_depth);
5133 if (bb->eh_beg != -1 || bb->eh_end != -1)
5134 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5137 fputs (";; Predecessors: ", outf);
5138 for (e = bb->pred; e ; e = e->pred_next)
5139 dump_edge_info (outf, e, 0);
5142 fputs (";; Registers live at start:", outf);
5143 dump_regset (bb->global_live_at_start, outf);
5146 for (insn = bb->head, last = NEXT_INSN (bb->end);
5148 insn = NEXT_INSN (insn))
5149 print_rtl_single (outf, insn);
5151 fputs (";; Registers live at end:", outf);
5152 dump_regset (bb->global_live_at_end, outf);
5155 fputs (";; Successors: ", outf);
5156 for (e = bb->succ; e; e = e->succ_next)
5157 dump_edge_info (outf, e, 1);
5165 dump_bb (bb, stderr);
5172 dump_bb (BASIC_BLOCK(n), stderr);
5175 /* Like print_rtl, but also print out live information for the start of each
5179 print_rtl_with_bb (outf, rtx_first)
5183 register rtx tmp_rtx;
5186 fprintf (outf, "(nil)\n");
5190 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5191 int max_uid = get_max_uid ();
5192 basic_block *start = (basic_block *)
5193 xcalloc (max_uid, sizeof (basic_block));
5194 basic_block *end = (basic_block *)
5195 xcalloc (max_uid, sizeof (basic_block));
5196 enum bb_state *in_bb_p = (enum bb_state *)
5197 xcalloc (max_uid, sizeof (enum bb_state));
5199 for (i = n_basic_blocks - 1; i >= 0; i--)
5201 basic_block bb = BASIC_BLOCK (i);
5204 start[INSN_UID (bb->head)] = bb;
5205 end[INSN_UID (bb->end)] = bb;
5206 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5208 enum bb_state state = IN_MULTIPLE_BB;
5209 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5211 in_bb_p[INSN_UID(x)] = state;
5218 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5223 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5225 fprintf (outf, ";; Start of basic block %d, registers live:",
5227 dump_regset (bb->global_live_at_start, outf);
5231 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5232 && GET_CODE (tmp_rtx) != NOTE
5233 && GET_CODE (tmp_rtx) != BARRIER)
5234 fprintf (outf, ";; Insn is not within a basic block\n");
5235 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5236 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5238 did_output = print_rtl_single (outf, tmp_rtx);
5240 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5242 fprintf (outf, ";; End of basic block %d, registers live:\n",
5244 dump_regset (bb->global_live_at_end, outf);
5257 if (current_function_epilogue_delay_list != 0)
5259 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5260 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5261 tmp_rtx = XEXP (tmp_rtx, 1))
5262 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5266 /* Compute dominator relationships using new flow graph structures. */
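/* A small worked example (illustrative only): for the diamond CFG

	0 -> 1,  0 -> 2,  1 -> 3,  2 -> 3

   the fixed point computed below is dom(0) = {0}, dom(1) = {0,1},
   dom(2) = {0,2} and dom(3) = {0,3}; neither 1 nor 2 dominates 3
   because neither lies on every path from the entry block to 3.  */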
5268 compute_flow_dominators (dominators, post_dominators)
5269 sbitmap *dominators;
5270 sbitmap *post_dominators;
5273 sbitmap *temp_bitmap;
5275 basic_block *worklist, *workend, *qin, *qout;
5278 /* Allocate a worklist array/queue. Entries are only added to the
5279 list if they were not already on the list. So the size is
5280 bounded by the number of basic blocks. */
5281 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5282 workend = &worklist[n_basic_blocks];
5284 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5285 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5289 /* The optimistic setting of dominators requires us to put every
5290 block on the work list initially. */
5291 qin = qout = worklist;
5292 for (bb = 0; bb < n_basic_blocks; bb++)
5294 *qin++ = BASIC_BLOCK (bb);
5295 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5297 qlen = n_basic_blocks;
5300 /* We want a maximal solution, so initially assume everything dominates
5302 sbitmap_vector_ones (dominators, n_basic_blocks);
5304 /* Mark successors of the entry block so we can identify them below. */
5305 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5306 e->dest->aux = ENTRY_BLOCK_PTR;
5308 /* Iterate until the worklist is empty. */
5311 /* Take the first entry off the worklist. */
5312 basic_block b = *qout++;
5313 if (qout >= workend)
5319 /* Compute the intersection of the dominators of all the
5322 If one of the predecessor blocks is the ENTRY block, then the
5323 intersection of the dominators of the predecessor blocks is
5324 defined as the null set. We can identify such blocks by the
5325 special value in the AUX field in the block structure. */
5326 if (b->aux == ENTRY_BLOCK_PTR)
5328 /* Do not clear the aux field for blocks which are
5329 successors of the ENTRY block. That way we never
5330 add them to the worklist again.
5332 The intersect of dominators of the preds of this block is
5333 defined as the null set. */
5334 sbitmap_zero (temp_bitmap[bb]);
5338 /* Clear the aux field of this block so it can be added to
5339 the worklist again if necessary. */
5341 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5344 /* Make sure each block always dominates itself. */
5345 SET_BIT (temp_bitmap[bb], bb);
5347 /* If the out state of this block changed, then we need to
5348 add the successors of this block to the worklist if they
5349 are not already on the worklist. */
5350 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5352 for (e = b->succ; e; e = e->succ_next)
5354 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5368 if (post_dominators)
5370 /* The optimistic setting of dominators requires us to put every
5371 block on the work list initially. */
5372 qin = qout = worklist;
5373 for (bb = 0; bb < n_basic_blocks; bb++)
5375 *qin++ = BASIC_BLOCK (bb);
5376 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5378 qlen = n_basic_blocks;
5381 /* We want a maximal solution, so initially assume everything post
5382 dominates everything else. */
5383 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5385 /* Mark predecessors of the exit block so we can identify them below. */
5386 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5387 e->src->aux = EXIT_BLOCK_PTR;
5389 /* Iterate until the worklist is empty. */
5392 /* Take the first entry off the worklist. */
5393 basic_block b = *qout++;
5394 if (qout >= workend)
5400 /* Compute the intersection of the post dominators of all the
5403 If one of the successor blocks is the EXIT block, then the
5404 intersection of the dominators of the successor blocks is
5405 defined as the null set. We can identify such blocks by the
5406 special value in the AUX field in the block structure. */
5407 if (b->aux == EXIT_BLOCK_PTR)
5409 /* Do not clear the aux field for blocks which are
5410 predecessors of the EXIT block. That way we never
5411 add them to the worklist again.
5413 The intersect of dominators of the succs of this block is
5414 defined as the null set. */
5415 sbitmap_zero (temp_bitmap[bb]);
5419 /* Clear the aux field of this block so it can be added to
5420 the worklist again if necessary. */
5422 sbitmap_intersection_of_succs (temp_bitmap[bb],
5423 post_dominators, bb);
5426 /* Make sure each block always post dominates itself. */
5427 SET_BIT (temp_bitmap[bb], bb);
5429 /* If the out state of this block changed, then we need to
5430 add the predecessors of this block to the worklist if they
5431 are not already on the worklist. */
5432 if (sbitmap_a_and_b (post_dominators[bb],
5433 post_dominators[bb],
5436 for (e = b->pred; e; e = e->pred_next)
5438 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5456 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
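/* Worked example (illustrative only): in a simple chain 0 -> 1 -> 2 we
   have dom(2) = {0,1,2}, so tmp(2) starts as {0,1}; subtracting
   tmp(1) = {0} removes block 0 and leaves the single bit for block 1,
   hence idom[2] = 1.  */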
5459 compute_immediate_dominators (idom, dominators)
5461 sbitmap *dominators;
5466 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5468 /* Begin with tmp(n) = dom(n) - { n }. */
5469 for (b = n_basic_blocks; --b >= 0; )
5471 sbitmap_copy (tmp[b], dominators[b]);
5472 RESET_BIT (tmp[b], b);
5475 /* Subtract out all of our dominators' dominators. */
5476 for (b = n_basic_blocks; --b >= 0; )
5478 sbitmap tmp_b = tmp[b];
5481 for (s = n_basic_blocks; --s >= 0; )
5482 if (TEST_BIT (tmp_b, s))
5483 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5486 /* Find the one bit set in the bitmap and put it in the output array. */
5487 for (b = n_basic_blocks; --b >= 0; )
5490 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5493 sbitmap_vector_free (tmp);
5496 /* Count for a single SET rtx, X. */
5499 count_reg_sets_1 (x, loop_depth)
5504 register rtx reg = SET_DEST (x);
5506 /* Find the register that's set/clobbered. */
5507 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5508 || GET_CODE (reg) == SIGN_EXTRACT
5509 || GET_CODE (reg) == STRICT_LOW_PART)
5510 reg = XEXP (reg, 0);
5512 if (GET_CODE (reg) == PARALLEL
5513 && GET_MODE (reg) == BLKmode)
5516 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5517 count_reg_sets_1 (XVECEXP (reg, 0, i), loop_depth);
5521 if (GET_CODE (reg) == REG)
5523 regno = REGNO (reg);
5524 if (regno >= FIRST_PSEUDO_REGISTER)
5526 /* Count (weighted) references, stores, etc. This counts a
5527 register twice if it is modified, but that is correct. */
5528 REG_N_SETS (regno)++;
5529 REG_N_REFS (regno) += loop_depth + 1;
5534 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5535 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5538 count_reg_sets (x, loop_depth)
5542 register RTX_CODE code = GET_CODE (x);
5544 if (code == SET || code == CLOBBER)
5545 count_reg_sets_1 (x, loop_depth);
5546 else if (code == PARALLEL)
5549 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5551 code = GET_CODE (XVECEXP (x, 0, i));
5552 if (code == SET || code == CLOBBER)
5553 count_reg_sets_1 (XVECEXP (x, 0, i), loop_depth);
5558 /* Increment REG_N_REFS by the current loop depth for each register reference
5562 count_reg_references (x, loop_depth)
5566 register RTX_CODE code;
5569 code = GET_CODE (x);
5589 /* If we are clobbering a MEM, mark any registers inside the address
5591 if (GET_CODE (XEXP (x, 0)) == MEM)
5592 count_reg_references (XEXP (XEXP (x, 0), 0), loop_depth);
5596 /* While we're here, optimize this case. */
5599 /* In case the SUBREG is not of a register, don't optimize */
5600 if (GET_CODE (x) != REG)
5602 count_reg_references (x, loop_depth);
5606 /* ... fall through ... */
5609 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5610 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5615 register rtx testreg = SET_DEST (x);
5618 /* If storing into MEM, don't show it as being used. But do
5619 show the address as being used. */
5620 if (GET_CODE (testreg) == MEM)
5622 count_reg_references (XEXP (testreg, 0), loop_depth);
5623 count_reg_references (SET_SRC (x), loop_depth);
5627 /* Storing in STRICT_LOW_PART is like storing in a reg
5628 in that this SET might be dead, so ignore it in TESTREG,
5629 but in some other ways it is like using the reg.
5631 Storing in a SUBREG or a bit field is like storing the entire
5632 register in that if the register's value is not used
5633 then this SET is not needed. */
5634 while (GET_CODE (testreg) == STRICT_LOW_PART
5635 || GET_CODE (testreg) == ZERO_EXTRACT
5636 || GET_CODE (testreg) == SIGN_EXTRACT
5637 || GET_CODE (testreg) == SUBREG)
5639 /* Modifying a single register in an alternate mode
5640 does not use any of the old value. But these other
5641 ways of storing in a register do use the old value. */
5642 if (GET_CODE (testreg) == SUBREG
5643 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5648 testreg = XEXP (testreg, 0);
5651 /* If this is a store into a register,
5652 recursively scan the value being stored. */
5654 if ((GET_CODE (testreg) == PARALLEL
5655 && GET_MODE (testreg) == BLKmode)
5656 || GET_CODE (testreg) == REG)
5658 count_reg_references (SET_SRC (x), loop_depth);
5660 count_reg_references (SET_DEST (x), loop_depth);
5670 /* Recursively scan the operands of this expression. */
5673 register const char *fmt = GET_RTX_FORMAT (code);
5676 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5680 /* Tail recursive case: save a function call level. */
5686 count_reg_references (XEXP (x, i), loop_depth);
5688 else if (fmt[i] == 'E')
5691 for (j = 0; j < XVECLEN (x, i); j++)
5692 count_reg_references (XVECEXP (x, i, j), loop_depth);
5698 /* Recompute register set/reference counts immediately prior to register
5701 This avoids problems with set/reference counts changing to/from values
5702 which have special meanings to the register allocators.
5704 Additionally, the reference counts are the primary component used by the
5705 register allocators to prioritize pseudos for allocation to hard regs.
5706 More accurate reference counts generally lead to better register allocation.
5708 F is the first insn to be scanned.
5710 LOOP_STEP denotes how much loop_depth should be incremented per
5711 loop nesting level in order to increase the ref count more for
5712 references in a loop.
5714 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5715 possibly other information which is used by the register allocators. */
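/* A minimal usage sketch (hypothetical caller; the real call sites are
   outside this file):

	recompute_reg_usage (get_insns (), 1);

   Note that in this version both arguments are ATTRIBUTE_UNUSED: the
   scan below walks the basic blocks directly and weights each reference
   by bb->loop_depth rather than by F or LOOP_STEP.  */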
5718 recompute_reg_usage (f, loop_step)
5719 rtx f ATTRIBUTE_UNUSED;
5720 int loop_step ATTRIBUTE_UNUSED;
5727 /* Clear out the old data. */
5728 max_reg = max_reg_num ();
5729 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5735 /* Scan each insn in the chain and count how many times each register is
5737 for (index = 0; index < n_basic_blocks; index++)
5739 basic_block bb = BASIC_BLOCK (index);
5740 loop_depth = bb->loop_depth;
5741 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5743 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5747 /* This call will increment REG_N_SETS for each SET or CLOBBER
5748 of a register in INSN. It will also increment REG_N_REFS
5749 by the loop depth for each set of a register in INSN. */
5750 count_reg_sets (PATTERN (insn), loop_depth);
5752 /* count_reg_sets does not detect autoincrement address modes, so
5753 detect them here by looking at the notes attached to INSN. */
5754 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5756 if (REG_NOTE_KIND (links) == REG_INC)
5757 /* Count (weighted) references, stores, etc. This
5758 counts a register twice if it is modified, but
5760 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5763 /* This call will increment REG_N_REFS by the current loop depth
5764 for each reference to a register in INSN. */
5765 count_reg_references (PATTERN (insn), loop_depth);
5767 /* count_reg_references will not include counts for arguments to
5768 function calls, so detect them here by examining the
5769 CALL_INSN_FUNCTION_USAGE data. */
5770 if (GET_CODE (insn) == CALL_INSN)
5774 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5776 note = XEXP (note, 1))
5777 if (GET_CODE (XEXP (note, 0)) == USE)
5778 count_reg_references (XEXP (XEXP (note, 0), 0),
5782 if (insn == bb->end)
5788 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5789 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5790 of the number of registers that died. */
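/* Illustrative call (hypothetical): a pass that is about to rebuild the
   death notes for the whole function might do

	ndead = count_or_remove_death_notes (NULL, 1);

   passing NULL for BLOCKS to mean "every block" and a nonzero KILL so
   that the REG_DEAD/REG_UNUSED notes are removed as they are counted.  */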
5793 count_or_remove_death_notes (blocks, kill)
5799 for (i = n_basic_blocks - 1; i >= 0; --i)
5804 if (blocks && ! TEST_BIT (blocks, i))
5807 bb = BASIC_BLOCK (i);
5809 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5811 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5813 rtx *pprev = &REG_NOTES (insn);
5818 switch (REG_NOTE_KIND (link))
5821 if (GET_CODE (XEXP (link, 0)) == REG)
5823 rtx reg = XEXP (link, 0);
5826 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5829 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5837 rtx next = XEXP (link, 1);
5838 free_EXPR_LIST_node (link);
5839 *pprev = link = next;
5845 pprev = &XEXP (link, 1);
5852 if (insn == bb->end)
5860 /* Record INSN's block as BB. */
5863 set_block_for_insn (insn, bb)
5867 size_t uid = INSN_UID (insn);
5868 if (uid >= basic_block_for_insn->num_elements)
5872 /* Add one-eighth the size so we don't keep calling xrealloc. */
5873 new_size = uid + (uid + 7) / 8;
5875 VARRAY_GROW (basic_block_for_insn, new_size);
5877 VARRAY_BB (basic_block_for_insn, uid) = bb;
5880 /* Record INSN's block number as BB. */
5881 /* ??? This has got to go. */
5884 set_block_num (insn, bb)
5888 set_block_for_insn (insn, BASIC_BLOCK (bb));
5891 /* Verify the CFG consistency. This function checks some CFG invariants and
5892 aborts when something is wrong. The hope is that this function will help
5893 to convert many optimization passes to preserve the CFG consistently.
5895 Currently it does the following checks:
5897 - test head/end pointers
5898 - overlapping of basic blocks
5899 - edge list correctness
5900 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5901 - tails of basic blocks (ensure that the boundary is necessary)
5902 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5903 and NOTE_INSN_BASIC_BLOCK
5904 - check that all insns are in the basic blocks
5905 (except the switch handling code, barriers and notes)
5906 - check that all returns are followed by barriers
5908 In the future it can be extended to check a lot of other stuff as well
5909 (reachability of basic blocks, life information, etc. etc.). */
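/* Typical use (an illustrative sketch, not a requirement of this file):
   a CFG-modifying pass can call

	verify_flow_info ();

   immediately after its transformation; the checks below report the
   first inconsistency through error () or fatal_insn (), so a corrupted
   head/end pointer or edge list is caught close to the pass that broke it.  */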
5914 const int max_uid = get_max_uid ();
5915 const rtx rtx_first = get_insns ();
5916 basic_block *bb_info;
5920 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5922 /* First pass check head/end pointers and set bb_info array used by
5924 for (i = n_basic_blocks - 1; i >= 0; i--)
5926 basic_block bb = BASIC_BLOCK (i);
5928 /* Check the head pointer and make sure that it is pointing into
5930 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5935 error ("Head insn %d for block %d not found in the insn stream.",
5936 INSN_UID (bb->head), bb->index);
5940 /* Check the end pointer and make sure that it is pointing into
5942 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5944 if (bb_info[INSN_UID (x)] != NULL)
5946 error ("Insn %d is in multiple basic blocks (%d and %d)",
5947 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5950 bb_info[INSN_UID (x)] = bb;
5957 error ("End insn %d for block %d not found in the insn stream.",
5958 INSN_UID (bb->end), bb->index);
5963 /* Now check the basic blocks (boundaries etc.) */
5964 for (i = n_basic_blocks - 1; i >= 0; i--)
5966 basic_block bb = BASIC_BLOCK (i);
5967 /* Check correctness of edge lists. */
5975 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5977 fprintf (stderr, "Predecessor: ");
5978 dump_edge_info (stderr, e, 0);
5979 fprintf (stderr, "\nSuccessor: ");
5980 dump_edge_info (stderr, e, 1);
5984 if (e->dest != EXIT_BLOCK_PTR)
5986 edge e2 = e->dest->pred;
5987 while (e2 && e2 != e)
5991 error ("Basic block %i edge lists are corrupted", bb->index);
6003 error ("Basic block %d pred edge is corrupted", bb->index);
6004 fputs ("Predecessor: ", stderr);
6005 dump_edge_info (stderr, e, 0);
6006 fputs ("\nSuccessor: ", stderr);
6007 dump_edge_info (stderr, e, 1);
6008 fputc ('\n', stderr);
6011 if (e->src != ENTRY_BLOCK_PTR)
6013 edge e2 = e->src->succ;
6014 while (e2 && e2 != e)
6018 error ("Basic block %i edge lists are corrupted", bb->index);
6025 /* OK, the pointers are correct. Now check the header of the basic
6026 block. It ought to contain an optional CODE_LABEL followed
6027 by the NOTE_INSN_BASIC_BLOCK note. */
6029 if (GET_CODE (x) == CODE_LABEL)
6033 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6039 if (GET_CODE (x) != NOTE
6040 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6041 || NOTE_BASIC_BLOCK (x) != bb)
6043 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6050 /* Do checks for empty blocks here */
6057 if (GET_CODE (x) == NOTE
6058 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6060 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6061 INSN_UID (x), bb->index);
6068 if (GET_CODE (x) == JUMP_INSN
6069 || GET_CODE (x) == CODE_LABEL
6070 || GET_CODE (x) == BARRIER)
6072 error ("In basic block %d:", bb->index);
6073 fatal_insn ("Flow control insn inside a basic block", x);
6084 if (!bb_info[INSN_UID (x)])
6086 switch (GET_CODE (x))
6093 /* An addr_vec is placed outside any basic block. */
6095 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6096 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6097 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6102 /* But in any case, non-deletable labels can appear anywhere. */
6106 fatal_insn ("Insn outside basic block", x);
6110 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6111 && GET_CODE (x) == JUMP_INSN
6112 && returnjump_p (x) && ! condjump_p (x)
6113 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6114 fatal_insn ("Return not followed by barrier", x);
6126 /* Functions to access an edge list with a vector representation.
6127 Enough data is kept such that given an index number, the
6128 pred and succ that edge represents can be determined, or
6129 given a pred and a succ, its index number can be returned.
6130 This allows algorithms which consume a lot of memory to
6131 represent the normally full matrix of edge (pred,succ) with a
6132 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6133 wasted space in the client code due to sparse flow graphs. */
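/* A sketch of how a client might use the vector (the constructor name
   create_edge_list below is assumed; the other names appear in this
   file):

	struct edge_list *elist = create_edge_list ();
	sbitmap interesting = sbitmap_alloc (NUM_EDGES (elist));
	int ix = EDGE_INDEX (elist, BASIC_BLOCK (0), BASIC_BLOCK (1));

	sbitmap_zero (interesting);
	if (ix != EDGE_INDEX_NO_EDGE)
	  SET_BIT (interesting, ix);
	...
	free_edge_list (elist);

   so a per-edge property needs only one bit per edge instead of a full
   pred x succ matrix.  */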
6135 /* This function initializes the edge list. Basically the entire
6136 flowgraph is processed, and all edges are assigned a number,
6137 and the data structure is filled in. */
6141 struct edge_list *elist;
6147 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6151 /* Determine the number of edges in the flow graph by counting successor
6152 edges on each basic block. */
6153 for (x = 0; x < n_basic_blocks; x++)
6155 basic_block bb = BASIC_BLOCK (x);
6157 for (e = bb->succ; e; e = e->succ_next)
6160 /* Don't forget successors of the entry block. */
6161 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6164 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6165 elist->num_blocks = block_count;
6166 elist->num_edges = num_edges;
6167 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6171 /* Follow successors of the entry block, and register these edges. */
6172 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6174 elist->index_to_edge[num_edges] = e;
6178 for (x = 0; x < n_basic_blocks; x++)
6180 basic_block bb = BASIC_BLOCK (x);
6182 /* Follow all successors of blocks, and register these edges. */
6183 for (e = bb->succ; e; e = e->succ_next)
6185 elist->index_to_edge[num_edges] = e;
6192 /* This function frees memory associated with an edge list. */
6194 free_edge_list (elist)
6195 struct edge_list *elist;
6199 free (elist->index_to_edge);
6204 /* This function provides debug output showing an edge list. */
6206 print_edge_list (f, elist)
6208 struct edge_list *elist;
6211 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6212 elist->num_blocks - 2, elist->num_edges);
6214 for (x = 0; x < elist->num_edges; x++)
6216 fprintf (f, " %-4d - edge(", x);
6217 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6218 fprintf (f,"entry,");
6220 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6222 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6223 fprintf (f,"exit)\n");
6225 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6229 /* This function provides an internal consistency check of an edge list,
6230 verifying that all edges are present, and that there are no
6233 verify_edge_list (f, elist)
6235 struct edge_list *elist;
6237 int x, pred, succ, index;
6240 for (x = 0; x < n_basic_blocks; x++)
6242 basic_block bb = BASIC_BLOCK (x);
6244 for (e = bb->succ; e; e = e->succ_next)
6246 pred = e->src->index;
6247 succ = e->dest->index;
6248 index = EDGE_INDEX (elist, e->src, e->dest);
6249 if (index == EDGE_INDEX_NO_EDGE)
6251 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6254 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6255 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6256 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6257 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6258 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6259 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6262 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6264 pred = e->src->index;
6265 succ = e->dest->index;
6266 index = EDGE_INDEX (elist, e->src, e->dest);
6267 if (index == EDGE_INDEX_NO_EDGE)
6269 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6272 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6273 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6274 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6275 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6276 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6277 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6279 /* We've verified that all the edges are in the list, now let's make sure
6280 there are no spurious edges in the list. */
6282 for (pred = 0 ; pred < n_basic_blocks; pred++)
6283 for (succ = 0 ; succ < n_basic_blocks; succ++)
6285 basic_block p = BASIC_BLOCK (pred);
6286 basic_block s = BASIC_BLOCK (succ);
6290 for (e = p->succ; e; e = e->succ_next)
6296 for (e = s->pred; e; e = e->pred_next)
6302 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6303 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6304 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6306 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6307 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6308 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6309 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6310 BASIC_BLOCK (succ)));
6312 for (succ = 0 ; succ < n_basic_blocks; succ++)
6314 basic_block p = ENTRY_BLOCK_PTR;
6315 basic_block s = BASIC_BLOCK (succ);
6319 for (e = p->succ; e; e = e->succ_next)
6325 for (e = s->pred; e; e = e->pred_next)
6331 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6332 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6333 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6335 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6336 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6337 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6338 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6339 BASIC_BLOCK (succ)));
6341 for (pred = 0 ; pred < n_basic_blocks; pred++)
6343 basic_block p = BASIC_BLOCK (pred);
6344 basic_block s = EXIT_BLOCK_PTR;
6348 for (e = p->succ; e; e = e->succ_next)
6354 for (e = s->pred; e; e = e->pred_next)
6360 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6361 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6362 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6364 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6365 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6366 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6367 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6372 /* This routine will determine what, if any, edge there is between
6373 a specified predecessor and successor. */
6376 find_edge_index (edge_list, pred, succ)
6377 struct edge_list *edge_list;
6378 basic_block pred, succ;
6381 for (x = 0; x < NUM_EDGES (edge_list); x++)
6383 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6384 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6387 return (EDGE_INDEX_NO_EDGE);
6390 /* This function will remove an edge from the flow graph. */
6395 edge last_pred = NULL;
6396 edge last_succ = NULL;
6398 basic_block src, dest;
6401 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6407 last_succ->succ_next = e->succ_next;
6409 src->succ = e->succ_next;
6411 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6417 last_pred->pred_next = e->pred_next;
6419 dest->pred = e->pred_next;
6425 /* This routine will remove any fake successor edges for a basic block.
6426 When the edge is removed, it is also removed from whatever predecessor
6429 remove_fake_successors (bb)
6433 for (e = bb->succ; e ; )
6437 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6442 /* This routine will remove all fake edges from the flow graph. If
6443 we remove all fake successors, it will automatically remove all
6444 fake predecessors. */
6446 remove_fake_edges ()
6450 for (x = 0; x < n_basic_blocks; x++)
6451 remove_fake_successors (BASIC_BLOCK (x));
6453 /* We've handled all successors except the entry block's. */
6454 remove_fake_successors (ENTRY_BLOCK_PTR);
6457 /* This function will add a fake edge between any block which has no
6458 successors, and the exit block. Some data flow equations require these
6461 add_noreturn_fake_exit_edges ()
6465 for (x = 0; x < n_basic_blocks; x++)
6466 if (BASIC_BLOCK (x)->succ == NULL)
6467 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6470 /* Dump the list of basic blocks in the bitmap NODES. */
6472 flow_nodes_print (str, nodes, file)
6474 const sbitmap nodes;
6479 fprintf (file, "%s { ", str);
6480 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6481 fputs ("}\n", file);
6485 /* Dump the list of exiting edges in the array EDGES. */
6487 flow_exits_print (str, edges, num_edges, file)
6495 fprintf (file, "%s { ", str);
6496 for (i = 0; i < num_edges; i++)
6497 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6498 fputs ("}\n", file);
6502 /* Dump loop related CFG information. */
6504 flow_loops_cfg_dump (loops, file)
6505 const struct loops *loops;
6510 if (! loops->num || ! file || ! loops->cfg.dom)
6513 for (i = 0; i < n_basic_blocks; i++)
6517 fprintf (file, ";; %d succs { ", i);
6518 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6519 fprintf (file, "%d ", succ->dest->index);
6520 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6524 /* Dump the DFS node order. */
6525 if (loops->cfg.dfs_order)
6527 fputs (";; DFS order: ", file);
6528 for (i = 0; i < n_basic_blocks; i++)
6529 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6535 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6537 flow_loop_nested_p (outer, loop)
6541 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6545 /* Dump the loop information specified by LOOPS to the stream FILE. */
6547 flow_loops_dump (loops, file, verbose)
6548 const struct loops *loops;
6555 num_loops = loops->num;
6556 if (! num_loops || ! file)
6559 fprintf (file, ";; %d loops found, %d levels\n",
6560 num_loops, loops->levels);
6562 for (i = 0; i < num_loops; i++)
6564 struct loop *loop = &loops->array[i];
6566 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6567 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6568 loop->header->index, loop->latch->index,
6569 loop->pre_header ? loop->pre_header->index : -1,
6570 loop->depth, loop->level,
6571 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6572 fprintf (file, ";; %d", loop->num_nodes);
6573 flow_nodes_print (" nodes", loop->nodes, file);
6574 fprintf (file, ";; %d", loop->num_exits);
6575 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6581 for (j = 0; j < i; j++)
6583 struct loop *oloop = &loops->array[j];
6585 if (loop->header == oloop->header)
6590 smaller = loop->num_nodes < oloop->num_nodes;
6592 /* If the union of LOOP and OLOOP is different than
6593 the larger of LOOP and OLOOP then LOOP and OLOOP
6594 must be disjoint. */
6595 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6596 smaller ? oloop : loop);
6597 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6598 loop->header->index, i, j,
6599 disjoint ? "disjoint" : "nested");
6606 /* Print diagnostics to compare our concept of a loop with
6607 what the loop notes say. */
6608 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6609 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6610 != NOTE_INSN_LOOP_BEG)
6611 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6612 INSN_UID (PREV_INSN (loop->first->head)));
6613 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6614 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6615 != NOTE_INSN_LOOP_END)
6616 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6617 INSN_UID (NEXT_INSN (loop->last->end)));
6622 flow_loops_cfg_dump (loops, file);
6626 /* Free all the memory allocated for LOOPS. */
6628 flow_loops_free (loops)
6629 struct loops *loops;
6638 /* Free the loop descriptors. */
6639 for (i = 0; i < loops->num; i++)
6641 struct loop *loop = &loops->array[i];
6644 sbitmap_free (loop->nodes);
6648 free (loops->array);
6649 loops->array = NULL;
6652 sbitmap_vector_free (loops->cfg.dom);
6653 if (loops->cfg.dfs_order)
6654 free (loops->cfg.dfs_order);
6656 sbitmap_free (loops->shared_headers);
6661 /* Find the exits from the loop using the bitmap of loop nodes NODES
6662 and store in EXITS array. Return the number of exits from the
6665 flow_loop_exits_find (nodes, exits)
6666 const sbitmap nodes;
6675 /* Check all nodes within the loop to see if there are any
6676 successors not in the loop. Note that a node may have multiple
6679 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6680 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6682 basic_block dest = e->dest;
6684 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6692 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6694 /* Store all exiting edges into an array. */
6696 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6697 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6699 basic_block dest = e->dest;
6701 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6702 (*exits)[num_exits++] = e;
6710 /* Find the nodes contained within the loop with header HEADER and
6711 latch LATCH and store in NODES. Return the number of nodes within
6714 flow_loop_nodes_find (header, latch, nodes)
6723 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6726 /* Start with only the loop header in the set of loop nodes. */
6727 sbitmap_zero (nodes);
6728 SET_BIT (nodes, header->index);
6730 header->loop_depth++;
6732 /* Push the loop latch on to the stack. */
6733 if (! TEST_BIT (nodes, latch->index))
6735 SET_BIT (nodes, latch->index);
6736 latch->loop_depth++;
6738 stack[sp++] = latch;
6747 for (e = node->pred; e; e = e->pred_next)
6749 basic_block ancestor = e->src;
6751 /* If this ancestor is not already marked as part of the loop, add it
6752 to the set of loop nodes and push it onto the stack. */
6753 if (ancestor != ENTRY_BLOCK_PTR
6754 && ! TEST_BIT (nodes, ancestor->index))
6756 SET_BIT (nodes, ancestor->index);
6757 ancestor->loop_depth++;
6759 stack[sp++] = ancestor;
6768 /* Compute the depth first search order and store in the array
6769 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6770 number of nodes visited. */
6772 flow_depth_first_order_compute (dfs_order)
6781 /* Allocate stack for back-tracking up CFG. */
6782 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6785 /* Allocate bitmap to track nodes that have been visited. */
6786 visited = sbitmap_alloc (n_basic_blocks);
6788 /* None of the nodes in the CFG have been visited yet. */
6789 sbitmap_zero (visited);
6791 /* Start with the first successor edge from the entry block. */
6792 e = ENTRY_BLOCK_PTR->succ;
6795 basic_block src = e->src;
6796 basic_block dest = e->dest;
6798 /* Mark that we have visited this node. */
6799 if (src != ENTRY_BLOCK_PTR)
6800 SET_BIT (visited, src->index);
6802 /* If this node has not been visited before, push the current
6803 edge on to the stack and proceed with the first successor
6804 edge of this node. */
6805 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6813 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6816 /* DEST has no successors (for example, a non-returning
6817 function is called) so do not push the current edge
6818 but carry on with its next successor. */
6819 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6820 SET_BIT (visited, dest->index);
6823 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6825 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6827 /* Pop edge off stack. */
6835 sbitmap_free (visited);
6837 /* The number of nodes visited should not be greater than
6839 if (dfsnum > n_basic_blocks)
6842 /* There are some nodes left in the CFG that are unreachable. */
6843 if (dfsnum < n_basic_blocks)
6849 /* Return the block for the pre-header of the loop with header
6850 HEADER where DOM specifies the dominator information. Return NULL if
6851 there is no pre-header. */
6853 flow_loop_pre_header_find (header, dom)
6857 basic_block pre_header;
6860 /* If block p is a predecessor of the header and is the only block
6861 that the header does not dominate, then it is the pre-header. */
6863 for (e = header->pred; e; e = e->pred_next)
6865 basic_block node = e->src;
6867 if (node != ENTRY_BLOCK_PTR
6868 && ! TEST_BIT (dom[node->index], header->index))
6870 if (pre_header == NULL)
6874 /* There are multiple edges into the header from outside
6875 the loop so there is no pre-header block. */
6885 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6886 previously added. The insertion algorithm assumes that the loops
6887 are added in the order found by a depth first search of the CFG. */
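/* Example (illustrative only): if the depth first search finds an outer
   loop L0 first, then a loop L1 nested inside it, then a second loop L2
   also inside L0 but disjoint from L1, the successive calls give

	L0->inner = L1, L1->outer = L0
	L1->next  = L2, L2->outer = L0

   i.e. children hang off `inner' and siblings are chained through `next'.  */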
6889 flow_loop_tree_node_add (prevloop, loop)
6890 struct loop *prevloop;
6894 if (flow_loop_nested_p (prevloop, loop))
6896 prevloop->inner = loop;
6897 loop->outer = prevloop;
6901 while (prevloop->outer)
6903 if (flow_loop_nested_p (prevloop->outer, loop))
6905 prevloop->next = loop;
6906 loop->outer = prevloop->outer;
6909 prevloop = prevloop->outer;
6912 prevloop->next = loop;
6917 /* Build the loop hierarchy tree for LOOPS. */
6919 flow_loops_tree_build (loops)
6920 struct loops *loops;
6925 num_loops = loops->num;
6929 /* Root the loop hierarchy tree with the first loop found.
6930 Since we used a depth first search this should be the
6932 loops->tree = &loops->array[0];
6933 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6935 /* Add the remaining loops to the tree. */
6936 for (i = 1; i < num_loops; i++)
6937 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6941 /* Helper function to compute loop nesting depth and enclosed loop level
6942 for the natural loop specified by LOOP at the loop depth DEPTH.
6943 Returns the loop level. */
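/* Example (illustrative only): for a doubly nested loop pair the inner
   loop gets depth 2 and level 1, while the loop enclosing it gets
   depth 1 and level 2; depth counts enclosing loops from the outside in,
   whereas level counts enclosed loop levels from the inside out.  */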
6945 flow_loop_level_compute (loop, depth)
6955 /* Traverse loop tree assigning depth and computing level as the
6956 maximum level of all the inner loops of this loop. The loop
6957 level is equivalent to the height of the loop in the loop tree
6958 and corresponds to the number of enclosed loop levels (including
6960 for (inner = loop->inner; inner; inner = inner->next)
6964 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6969 loop->level = level;
6970 loop->depth = depth;
6975 /* Compute the loop nesting depth and enclosed loop level for the loop
6976 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
6980 flow_loops_level_compute (loops)
6981 struct loops *loops;
6987 /* Traverse all the outer level loops. */
6988 for (loop = loops->tree; loop; loop = loop->next)
6990 level = flow_loop_level_compute (loop, 1);

/* Find all the natural loops in the function and save in LOOPS structure
   and recalculate loop_depth information in basic block structures.
   Return the number of natural loops found.  */

int
flow_loops_find (loops)
     struct loops *loops;
{
  int i, b, num_loops;
  edge e;
  sbitmap headers;
  sbitmap *dom;
  int *dfs_order = NULL;

  loops->num = 0;
  loops->array = NULL;

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks == 0)
    return 0;

  /* Compute the dominators.  */
  dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  compute_flow_dominators (dom, NULL);
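
  /* Each element dom[I] of the dominator vector is a bitmap in which
     bit J is set when block J dominates block I; the TEST_BIT calls
     below therefore ask whether a candidate loop header dominates a
     candidate latch block.  */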

  /* Count the number of loop edges (back edges).  This should be the
     same as the number of natural loops.  Also clear each block's
     loop_depth here; the calls to flow_loop_nodes_find below increment
     loop_depth for every node inside a loop, so a block ends up with a
     loop_depth equal to the number of loops enclosing it.  */
  num_loops = 0;
  for (b = 0; b < n_basic_blocks; b++)
    {
      BASIC_BLOCK (b)->loop_depth = 0;
      for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
        {
          basic_block latch = e->src;

          /* Look for back edges where a predecessor is dominated
             by this block.  A natural loop has a single entry
             node (header) that dominates all the nodes in the
             loop.  It also has a single back edge to the header
             from a latch node.  Note that multiple natural loops
             may share the same header.  */
          if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
            num_loops++;
        }
    }

  if (num_loops)
    {
      /* Compute depth first search order of the CFG so that outer
         natural loops will be found before inner natural loops.  */
      dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      flow_depth_first_order_compute (dfs_order);

      /* Allocate loop structures.  */
      loops->array
        = (struct loop *) xcalloc (num_loops, sizeof (struct loop));

      headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (headers);

      loops->shared_headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (loops->shared_headers);

      /* Find and record information about all the natural loops
         in the CFG.  */
      num_loops = 0;
      for (b = 0; b < n_basic_blocks; b++)
        {
          basic_block header;

          /* Search the nodes of the CFG in DFS order so that we can
             find outer loops first.  */
          header = BASIC_BLOCK (dfs_order[b]);

          /* Look for all the possible latch blocks for this header.  */
          for (e = header->pred; e; e = e->pred_next)
            {
              basic_block latch = e->src;

              /* Look for back edges where a predecessor is dominated
                 by this block.  A natural loop has a single entry
                 node (header) that dominates all the nodes in the
                 loop.  It also has a single back edge to the header
                 from a latch node.  Note that multiple natural loops
                 may share the same header.  */
              if (latch != ENTRY_BLOCK_PTR
                  && TEST_BIT (dom[latch->index], header->index))
                {
                  struct loop *loop = loops->array + num_loops;

                  loop->header = header;
                  loop->latch = latch;

                  /* Keep track of blocks that are loop headers so
                     that we can tell which loops should be merged.  */
                  if (TEST_BIT (headers, header->index))
                    SET_BIT (loops->shared_headers, header->index);
                  SET_BIT (headers, header->index);

                  /* Find nodes contained within the loop.  */
                  loop->nodes = sbitmap_alloc (n_basic_blocks);
                  loop->num_nodes
                    = flow_loop_nodes_find (header, latch, loop->nodes);

                  /* Compute first and last blocks within the loop.
                     These are often the same as the loop header and
                     loop latch respectively, but this is not always
                     the case.  */
                  loop->first
                    = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
                  loop->last
                    = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));

                  /* Find edges which exit the loop.  Note that a node
                     may have several exit edges.  */
                  loop->num_exits
                    = flow_loop_exits_find (loop->nodes, &loop->exits);

                  /* Look to see if the loop has a pre-header node.  */
                  loop->pre_header
                    = flow_loop_pre_header_find (header, dom);

                  num_loops++;
                }
            }
        }

      /* Natural loops with shared headers may either be disjoint or
         nested.  Disjoint loops with shared headers cannot be inner
         loops and should be merged.  For now just mark loops that share
         headers.  */
      for (i = 0; i < num_loops; i++)
        if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
          loops->array[i].shared = 1;

      sbitmap_free (headers);
    }

  loops->num = num_loops;

  /* Save CFG derived information to avoid recomputing it.  */
  loops->cfg.dom = dom;
  loops->cfg.dfs_order = dfs_order;

  /* Build the loop hierarchy tree.  */
  flow_loops_tree_build (loops);

  /* Assign the loop nesting depth and enclosed loop level for each
     loop in the hierarchy tree.  */
  loops->levels = flow_loops_level_compute (loops);

  return num_loops;
}
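
/* As a concrete illustration of the terms used above, consider a
   single loop compiled into four blocks with edges 0 -> 1, 1 -> 2,
   2 -> 1 and 1 -> 3.  Block 1 dominates block 2, so 2 -> 1 is the back
   edge that identifies the loop: block 1 is the header, block 2 the
   latch, blocks 1 and 2 make up loop->nodes, the edge 1 -> 3 is
   recorded as an exit edge, and block 0 is found as the pre-header.  */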

/* Return non-zero if edge E enters header of LOOP from outside of LOOP.  */

int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     const edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}
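
/* Viewed from the header, this splits its incoming edges into two
   kinds: edges whose source lies in loop->nodes are back edges
   belonging to the loop, while edges from the entry block or from
   blocks outside loop->nodes enter the loop from outside.  */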

/* Clear LOG_LINKS fields of insns in a chain.  */

static void
clear_log_links (insns)
     rtx insns;
{
  rtx i;

  for (i = insns; i; i = NEXT_INSN (i))
    if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
      LOG_LINKS (i) = 0;
}