/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
   (A sketch of such a scan follows this comment.)

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
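
/* Illustrative sketch (not part of the pass): the forward scan described
   above, made concrete.  Everything here is an assumption layered on the
   documented note conventions -- the helper names are invented for the
   example, multi-word hard registers and SUBREGs are ignored, and the
   scratch regset is supplied by the caller.  */
#if 0
static void
example_mark_stored_reg (dest, setter, data)
     rtx dest;
     rtx setter ATTRIBUTE_UNUSED;
     void *data;
{
  /* A register stored by this insn becomes live after it.  */
  if (GET_CODE (dest) == REG)
    SET_REGNO_REG_SET ((regset) data, REGNO (dest));
}

static void
example_scan_block_liveness (bb, live)
     basic_block bb;
     regset live;
{
  rtx insn = bb->head;

  /* Start from the registers live at the beginning of the block.  */
  COPY_REG_SET (live, bb->global_live_at_start);

  while (1)
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx note;

	  /* Registers with a REG_DEAD note die here: live before the
	     insn, not live after it.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD)
	      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));

	  /* Registers set by the insn become live ...  */
	  note_stores (PATTERN (insn), example_mark_stored_reg,
		       (void *) live);

	  /* ... except those the insn sets but nothing ever uses.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_UNUSED)
	      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));

	  /* LIVE now holds the registers live after INSN.  */
	}
      if (insn == bb->end)
	break;
      insn = NEXT_INSN (insn);
    }
}
#endif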
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
#include "basic-block.h"
#include "insn-config.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;
/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
  },
  {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
  }
};
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;
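
/* Illustrative sketch (not part of the pass): walking the pair list just
   described.  The helper is hypothetical; only the EXPR_LIST layout --
   two consecutive elements per pair -- comes from the comment above.  */
#if 0
static void
example_walk_shared_pairs ()
{
  rtx x;

  for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
    {
      rtx first = XEXP (x, 0);
      rtx second = XEXP (XEXP (x, 1), 0);

      /* FIRST and SECOND are pseudos that must receive the same
	 hard register.  */
    }
}
#endif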
/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is unconditionally dead this insn.  */
  regset new_dead;

  /* Bit N is set if register N is live this insn.  */
  regset new_live;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set in the basic block.  */
  regset local_set;

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};
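
/* Illustrative sketch (not part of the pass): FLAGS is assumed to be a
   mask of the PROP_* bits from basic-block.h; a caller wanting death
   notes and register statistics, but no code removal, might do:  */
#if 0
  propagate_block (bb, live, NULL, PROP_DEATH_NOTES | PROP_REG_INFO);
#endif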
/* Forward declarations */
static int count_basic_blocks		PARAMS ((rtx));
static rtx find_basic_blocks_1		PARAMS ((rtx));
static void clear_edges			PARAMS ((void));
static void make_edges			PARAMS ((rtx));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, eh_nesting_info *,
						 basic_block, rtx, int));
static void mark_critical_edges		PARAMS ((void));
static void move_stray_eh_region_notes	PARAMS ((void));
static void record_active_eh_regions	PARAMS ((rtx));
static void commit_one_edge_insertion	PARAMS ((edge));
static void delete_unreachable_blocks	PARAMS ((void));
static void delete_eh_regions		PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks			PARAMS ((edge, basic_block, basic_block));
static void try_merge_blocks		PARAMS ((void));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static int set_noop_p			PARAMS ((rtx));
static int noop_move_p			PARAMS ((rtx));
static void delete_noop_moves		PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static int set_phi_alternative_reg	PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static void propagate_block		PARAMS ((basic_block, regset,
						 regset, int));
static int insn_dead_p			PARAMS ((struct propagate_block_info *,
						 rtx, int, rtx));
static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_set_regs		PARAMS ((struct propagate_block_info *,
						 regset, rtx, rtx));
static void mark_set_1			PARAMS ((struct propagate_block_info *,
						 regset, rtx, rtx, rtx));
static int mark_set_reg			PARAMS ((struct propagate_block_info *,
						 rtx, rtx, int *, int *));
static void find_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
						 rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
static void mark_used_reg		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_used_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void dump_edge_info		PARAMS ((FILE *, edge, int));
static void count_reg_sets_1		PARAMS ((rtx, int));
static void count_reg_sets		PARAMS ((rtx, int));
static void count_reg_references	PARAMS ((rtx, int));
static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print		PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print		PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find		PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find		PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else if (nonlocal_goto_handler_labels && region >= 0)
	    /* If there is a nonlocal goto label and the specified
	       region number isn't -1, we have an edge.  (0 means
	       no throw, but might have a nonlocal goto).  */
	    call_had_abnormal_edge = 1;
	}

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;

		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;

		next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		head = end = insn;
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	    int call_has_abnormal_edge = 0;

	    /* If there is an EH region or rethrow, we have an edge.  */
	    if ((eh_list && region > 0)
		|| find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	      call_has_abnormal_edge = 1;
	    else if (nonlocal_goto_handler_labels && region >= 0)
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      call_has_abnormal_edge = 1;

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if (call_has_abnormal_edge)
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);

  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      if (GET_CODE (head) == CODE_LABEL)
	add_insn_after (bb_note, head);
      else
	{
	  add_insn_before (bb_note, head);
	  head = bb_note;
	}
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
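
/* Illustrative sketch (not part of the pass): with the map filled in,
   BLOCK_FOR_INSN gives O(1) insn-to-block lookup.  The helper is
   hypothetical.  */
#if 0
static int
example_same_block_p (x, y)
     rtx x, y;
{
  return BLOCK_FOR_INSN (x) == BLOCK_FOR_INSN (y);
}
#endif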
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* Add any appropriate EH edges.  We do this unconditionally
	     since there may be a REG_EH_REGION or REG_EH_RETHROW note
	     on the call, and this needn't be within an EH region.  */
	  make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst && e->flags == flags)
	return;

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		     EDGE_ABNORMAL | EDGE_EH | is_call);
}
/* EH_REGION notes appearing between basic blocks is ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
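
/* Illustrative sketch (not part of the pass): conditions (1) and (2)
   above, restated as a predicate on a single edge.  The helper is
   hypothetical.  */
#if 0
static int
example_edge_is_critical_p (e)
     edge e;
{
  /* Source has multiple successors, destination has multiple
     predecessors.  */
  return (e->src->succ->succ_next != NULL
	  && e->dest->pred->pred_next != NULL);
}
#endif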
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

			E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	before = bb->end;
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;
    }
  else
    {
      rtx last = emit_insns_after (insns, after);
      if (after == bb->end)
	{
	  bb->end = last;

	  if (GET_CODE (last) == JUMP_INSN)
	    {
	      if (returnjump_p (last))
		{
		  /* ??? Remove all outgoing edges from BB and add one
		     for EXIT.  This is not currently a problem because
		     this only happens for the (single) epilogue, which
		     already has a fallthru edge to EXIT.  */

		  e = bb->succ;
		  if (e->dest != EXIT_BLOCK_PTR
		      || e->succ_next != NULL
		      || (e->flags & EDGE_FALLTHRU) == 0)
		    abort ();
		  e->flags &= ~EDGE_FALLTHRU;

		  emit_barrier_after (last);
		}
	      else
		abort ();
	    }
	}
    }
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
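
/* Illustrative usage sketch (not part of the pass): queue a pattern on
   an edge E, then update the CFG for everything queued.  The USE of
   const0_rtx is only a placeholder pattern.  */
#if 0
  insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
  /* ... queue more insns on this or other edges ...  */
  commit_edge_insertions ();
#endif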
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in flow_delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= flow_delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	    || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static int
flow_delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}

      /* This label may be referenced by code solely for its value, or
	 referenced by static data, or something.  We have determined
	 that it is not reachable, but cannot delete the label itself.
	 Save code space and continue to delete the balance of the block,
	 along with properly updating the cfg.  */
      if (!can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     insns thing.  */
	  if (insn == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (GET_CODE (end) == JUMP_INSN
      && (tmp = JUMP_LABEL (end)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  flow_delete_insn_chain (insn, end);

 no_delete_insns:

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e ; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e ; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}
/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      BASIC_BLOCK (i) = x;
      x->index = i;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}
/* Delete INSN by patching it out.  Return the next insn.  */

rtx
flow_delete_insn (insn)
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  rtx next = NEXT_INSN (insn);
  rtx note;

  PREV_INSN (insn) = NULL_RTX;
  NEXT_INSN (insn) = NULL_RTX;

  if (prev)
    NEXT_INSN (prev) = next;
  if (next)
    PREV_INSN (next) = prev;
  else
    set_last_insn (prev);

  if (GET_CODE (insn) == CODE_LABEL)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
    LABEL_NUSES (XEXP (note, 0))--;

  return next;
}
/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  rtx x;

  if (LABEL_PRESERVE_P (label))
    return 0;

  for (x = forced_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = label_value_list; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;

  /* User declared labels must be preserved.  */
  if (LABEL_NAME (label) != 0)
    return 0;

  return 1;
}
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous, hence `nomove'.  */

void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  edge e;
  rtx b_head, b_end, a_end;
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  b_head = b->head;
  b_end = b->end;
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;
      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note.  */
  if (GET_CODE (b_head) == NOTE
      && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_first)
	del_first = b_head;
      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  a_end = a->end;
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      prev = prev_nonnote_insn (a_end);
      if (!prev)
	prev = a->head;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (prev && sets_cc0_p (prev))
	{
	  rtx tmp = prev;
	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  del_first = tmp;
	}
#endif

      a_end = prev;
    }

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  flow_delete_insn_chain (del_first, del_last);

  /* Normally there should only be one successor of A and that is B, but
     partway though the merge of blocks for conditional_execution we'll
     be merging a TEST block with THEN and ELSE successors.  Free the
     whole lot of them and hope the caller knows what they're doing.  */
  while (a->succ)
    remove_edge (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e ; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;

  /* B hasn't quite yet ceased to exist.  Attempt to prevent mishap.  */
  b->pred = b->succ = NULL;

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      if (basic_block_for_insn)
	{
	  BLOCK_FOR_INSN (b_head) = a;
	  while (b_head != b_end)
	    {
	      b_head = NEXT_INSN (b_head);
	      BLOCK_FOR_INSN (b_head) = a;
	    }
	}
      a_end = b_end;
    }
  a->end = a_end;

  expunge_block (b);
}
/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_predecessor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;
  int index;

  start = a->head;
  end = a->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  if (end != PREV_INSN (b->head))
    reorder_insns (start, end, PREV_INSN (b->head));

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
	       a->index, b->index);
    }

  /* Swap the records for the two blocks around.  Although we are deleting B,
     A is now where B was and we want to compact the BB array from where
     A used to be.  */
  BASIC_BLOCK (a->index) = b;
  BASIC_BLOCK (b->index) = a;
  index = a->index;
  a->index = b->index;
  b->index = index;

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  return 1;
}
/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_successor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;

  start = b->head;
  end = b->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  reorder_insns (start, end, a->end);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
	       b->index, a->index);
    }

  return 1;
}
2257 /* Attempt to merge basic blocks that are potentially non-adjacent.
2258 Return true iff the attempt succeeded. */
2261 merge_blocks (e, b, c)
2265 /* If B has a fallthru edge to C, no need to move anything. */
2266 if (e->flags & EDGE_FALLTHRU)
2268 /* If a label still appears somewhere and we cannot delete the label,
2269 then we cannot merge the blocks. The edge was tidied already. */
2271 rtx insn, stop = NEXT_INSN (c->head);
2272 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2273 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2276 merge_blocks_nomove (b, c);
2280 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2281 b->index, c->index);
2290 int c_has_outgoing_fallthru;
2291 int b_has_incoming_fallthru;
2293 /* We must make sure to not munge nesting of exception regions,
2294 lexical blocks, and loop notes.
2296 The first is taken care of by requiring that the active eh
2297 region at the end of one block always matches the active eh
2298 region at the beginning of the next block.
2300 The latter two are taken care of by squeezing out all the notes. */
2302 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2303 executed and we may want to treat blocks which have two out
2304 edges, one normal, one abnormal as only having one edge for
2305 block merging purposes. */
2307 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2308 if (tmp_edge->flags & EDGE_FALLTHRU)
2310 c_has_outgoing_fallthru = (tmp_edge != NULL);
2312 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2313 if (tmp_edge->flags & EDGE_FALLTHRU)
2315 b_has_incoming_fallthru = (tmp_edge != NULL);
2317 /* If B does not have an incoming fallthru, and the exception regions
2318 match, then it can be moved immediately before C without introducing or modifying jumps.
2321 C cannot be the first block, so we do not have to worry about
2322 accessing a non-existent block. */
2323 d = BASIC_BLOCK (c->index - 1);
2324 if (! b_has_incoming_fallthru
2325 && d->eh_end == b->eh_beg
2326 && b->eh_end == c->eh_beg)
2327 return merge_blocks_move_predecessor_nojumps (b, c);
2329 /* Otherwise, we're going to try to move C after B. Make sure the
2330 exception regions match.
2332 If B is the last basic block, then we must not try to access the
2333 block structure for block B + 1. Luckily in that case we do not
2334 need to worry about matching exception regions. */
2335 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2336 if (b->eh_end == c->eh_beg
2337 && (d == NULL || c->eh_end == d->eh_beg))
2339 /* If C does not have an outgoing fallthru, then it can be moved
2340 immediately after B without introducing or modifying jumps. */
2341 if (! c_has_outgoing_fallthru)
2342 return merge_blocks_move_successor_nojumps (b, c);
2344 /* Otherwise, we'll need to insert an extra jump, and possibly
2345 a new block to contain it. */
2346 /* ??? Not implemented yet. */
2353 /* Top level driver for merge_blocks. */
2360 /* Attempt to merge blocks as made possible by edge removal. If a block
2361 has only one successor, and the successor has only one predecessor,
2362 they may be combined. */
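/* A sketch of the shape being looked for (block letters are
   illustrative, not from any particular function):

       B --(sole outgoing edge)--> C,  where C's sole predecessor is B.

   B and C are then fused, and the inner loop below re-tests the fused
   block, since chains of such pairs can collapse one by one.  */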
2364 for (i = 0; i < n_basic_blocks; )
2366 basic_block c, b = BASIC_BLOCK (i);
2369 /* A loop because chains of blocks might be combinable. */
2370 while ((s = b->succ) != NULL
2371 && s->succ_next == NULL
2372 && (s->flags & EDGE_EH) == 0
2373 && (c = s->dest) != EXIT_BLOCK_PTR
2374 && c->pred->pred_next == NULL
2375 /* If the jump insn has side effects, we can't kill the edge. */
2376 && (GET_CODE (b->end) != JUMP_INSN
2377 || onlyjump_p (b->end))
2378 && merge_blocks (s, b, c))
2381 /* Don't get confused by the index shift caused by deleting blocks. */
2386 /* The given edge should potentially be a fallthru edge. If that is in
2387 fact true, delete the jump and barriers that are in the way. */
2390 tidy_fallthru_edge (e, b, c)
2396 /* ??? In a late-running flow pass, other folks may have deleted basic
2397 blocks by nopping out blocks, leaving multiple BARRIERs between here
2398 and the target label. They ought to be chastised and fixed.
2400 We can also wind up with a sequence of undeletable labels between
2401 one block and the next.
2403 So search through a sequence of barriers, labels, and notes for
2404 the head of block C and assert that we really do fall through. */
2406 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2409 /* Remove what will soon cease being the jump insn from the source block.
2410 If block B consisted only of this single jump, turn it into a deleted note. */
2413 if (GET_CODE (q) == JUMP_INSN)
2416 /* If this was a conditional jump, we need to also delete
2417 the insn that set cc0. */
2418 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2425 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2426 NOTE_SOURCE_FILE (q) = 0;
2429 b->end = q = PREV_INSN (q);
2432 /* Selectively unlink the sequence. */
2433 if (q != PREV_INSN (c->head))
2434 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2436 e->flags |= EDGE_FALLTHRU;
2439 /* Fix up edges that now fall through, or rather should now fall through
2440 but previously required a jump around now deleted blocks. Simplify
2441 the search by only examining blocks numerically adjacent, since this
2442 is how find_basic_blocks created them. */
2445 tidy_fallthru_edges ()
2449 for (i = 1; i < n_basic_blocks; ++i)
2451 basic_block b = BASIC_BLOCK (i - 1);
2452 basic_block c = BASIC_BLOCK (i);
2455 /* We care about simple conditional or unconditional jumps with a single successor.
2458 If we had a conditional branch to the next instruction when
2459 find_basic_blocks was called, then there will only be one
2460 out edge for the block which ended with the conditional
2461 branch (since we do not create duplicate edges).
2463 Furthermore, the edge will be marked as a fallthru because we
2464 merge the flags for the duplicate edges. So we do not want to
2465 check that the edge is not a FALLTHRU edge. */
2466 if ((s = b->succ) != NULL
2467 && s->succ_next == NULL
2469 /* If the jump insn has side effects, we can't tidy the edge. */
2470 && (GET_CODE (b->end) != JUMP_INSN
2471 || onlyjump_p (b->end)))
2472 tidy_fallthru_edge (s, b, c);
2476 /* Discover and record the loop depth at the head of each basic block. */
2479 calculate_loop_depth (dump)
2484 /* The loop infrastructure does the real job for us. */
2485 flow_loops_find (&loops);
2488 flow_loops_dump (&loops, dump, 0);
2490 flow_loops_free (&loops);
2493 /* Perform data flow analysis.
2494 F is the first insn of the function; FLAGS is a set of PROP_* flags
2495 to be used in accumulating flow info. */
2498 life_analysis (f, file, flags)
2503 #ifdef ELIMINABLE_REGS
2505 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2509 /* Record which registers will be eliminated. We use this in
2512 CLEAR_HARD_REG_SET (elim_reg_set);
2514 #ifdef ELIMINABLE_REGS
2515 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2516 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2518 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2522 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2524 /* The post-reload life analysis has (on a global basis) the same
2525 registers live as were computed by reload itself; otherwise
2526 elimination offsets and such may be incorrect.
2528 Reload will mark some registers as live even though they do not
2529 appear in the rtl.
2530 if (reload_completed)
2531 flags &= ~PROP_REG_INFO;
2533 /* We want alias analysis information for local dead store elimination. */
2534 if (flags & PROP_SCAN_DEAD_CODE)
2535 init_alias_analysis ();
2537 max_regno = max_reg_num ();
2539 /* Always remove no-op moves. Do this before other processing so
2540 that we don't have to keep re-scanning them. */
2541 delete_noop_moves (f);
2543 /* Some targets can emit simpler epilogues if they know that sp was
2544 not ever modified during the function. After reload, of course,
2545 we've already emitted the epilogue so there's no sense searching. */
2546 if (! reload_completed)
2547 notice_stack_pointer_modification (f);
2549 /* Allocate and zero out data structures that will record the
2550 data from lifetime analysis. */
2551 allocate_reg_life_data ();
2552 allocate_bb_life_data ();
2553 all_blocks = sbitmap_alloc (n_basic_blocks);
2554 sbitmap_ones (all_blocks);
2556 /* Find the set of registers live on function exit. */
2557 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2559 /* "Update" life info from zero. It'd be nice to begin the
2560 relaxation with just the exit and noreturn blocks, but that set
2561 is not immediately handy. */
2563 if (flags & PROP_REG_INFO)
2564 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2565 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2568 sbitmap_free (all_blocks);
2570 if (flags & PROP_SCAN_DEAD_CODE)
2571 end_alias_analysis ();
2574 dump_flow_info (file);
2576 free_basic_block_vars (1);
2579 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2580 Search for REGNO. If found, abort if it is not wider than word_mode. */
2583 verify_wide_reg_1 (px, pregno)
2588 unsigned int regno = *(int *) pregno;
2590 if (GET_CODE (x) == REG && REGNO (x) == regno)
2592 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2599 /* A subroutine of verify_local_live_at_start. Search through insns
2600 between HEAD and END looking for register REGNO. */
2603 verify_wide_reg (regno, head, end)
2609 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2610 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2614 head = NEXT_INSN (head);
2617 /* We didn't find the register at all. Something's way screwy. */
2621 /* A subroutine of update_life_info. Verify that there are no untoward
2622 changes in live_at_start during a local update. */
2625 verify_local_live_at_start (new_live_at_start, bb)
2626 regset new_live_at_start;
2629 if (reload_completed)
2631 /* After reload, there are no pseudos, nor subregs of multi-word
2632 registers. The regsets should exactly match. */
2633 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2640 /* Find the set of changed registers. */
2641 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2643 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2645 /* No registers should die. */
2646 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2648 /* Verify that the now-live register is wider than word_mode. */
2649 verify_wide_reg (i, bb->head, bb->end);
2654 /* Updates life information starting with the basic blocks set in BLOCKS.
2656 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2657 expecting local modifications to basic blocks. If we find extra
2658 registers live at the beginning of a block, then we either killed
2659 useful data, or we have a broken split that wants data not provided.
2660 If we find registers removed from live_at_start, that means we have
2661 a broken peephole that is killing a register it shouldn't.
2663 ??? This is not true in one situation -- when a pre-reload splitter
2664 generates subregs of a multi-word pseudo, current life analysis will
2665 lose the kill. So we _can_ have a pseudo go live. How irritating.
2667 BLOCK_FOR_INSN is assumed to be correct.
2669 Including PROP_REG_INFO does not properly refresh regs_ever_live
2670 unless the caller resets it to zero. */
2673 update_life_info (blocks, extent, prop_flags)
2675 enum update_life_extent extent;
2679 regset_head tmp_head;
2682 tmp = INITIALIZE_REG_SET (tmp_head);
2684 /* For a global update, we go through the relaxation process again. */
2685 if (extent != UPDATE_LIFE_LOCAL)
2687 calculate_global_regs_live (blocks, blocks,
2688 prop_flags & PROP_SCAN_DEAD_CODE);
2690 /* If asked, remove notes from the blocks we'll update. */
2691 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2692 count_or_remove_death_notes (blocks, 1);
2695 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2697 basic_block bb = BASIC_BLOCK (i);
2699 COPY_REG_SET (tmp, bb->global_live_at_end);
2700 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2702 if (extent == UPDATE_LIFE_LOCAL)
2703 verify_local_live_at_start (tmp, bb);
2708 if (prop_flags & PROP_REG_INFO)
2710 /* The only pseudos that are live at the beginning of the function
2711 are those that were not set anywhere in the function. local-alloc
2712 doesn't know how to handle these correctly, so mark them as not
2713 local to any one basic block. */
2714 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2715 FIRST_PSEUDO_REGISTER, i,
2716 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2718 /* We have a problem with any pseudoreg that lives across the setjmp.
2719 ANSI says that if a user variable does not change in value between
2720 the setjmp and the longjmp, then the longjmp preserves it. This
2721 includes longjmp from a place where the pseudo appears dead.
2722 (In principle, the value still exists if it is in scope.)
2723 If the pseudo goes in a hard reg, some other value may occupy
2724 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2725 Conclusion: such a pseudo must not go in a hard reg. */
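/* An illustrative sketch of the hazard (user-level code, not part of
   this file; all names made up):

       int v = compute ();          (v lives in pseudo P)
       if (setjmp (env) == 0)
         other ();                  (may longjmp back here)
       use (v);                     (ANSI: v must keep its value)

   If P were given a call-clobbered hard reg at a point where P looked
   dead, the longjmp path could resume with that reg holding another
   value; the -1 live length below is the marker that keeps P out of
   hard regs.  */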
2726 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2727 FIRST_PSEUDO_REGISTER, i,
2729 if (regno_reg_rtx[i] != 0)
2731 REG_LIVE_LENGTH (i) = -1;
2732 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2738 /* Free the variables allocated by find_basic_blocks.
2740 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2743 free_basic_block_vars (keep_head_end_p)
2744 int keep_head_end_p;
2746 if (basic_block_for_insn)
2748 VARRAY_FREE (basic_block_for_insn);
2749 basic_block_for_insn = NULL;
2752 if (! keep_head_end_p)
2755 VARRAY_FREE (basic_block_info);
2758 ENTRY_BLOCK_PTR->aux = NULL;
2759 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2760 EXIT_BLOCK_PTR->aux = NULL;
2761 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2765 /* Return nonzero if the destination of SET equals the source. */
2770 rtx src = SET_SRC (set);
2771 rtx dst = SET_DEST (set);
2773 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2775 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2777 src = SUBREG_REG (src);
2778 dst = SUBREG_REG (dst);
2781 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2782 && REGNO (src) == REGNO (dst));
2785 /* Return nonzero if an insn consists only of SETs, each of which only sets a dead register. */
2791 rtx pat = PATTERN (insn);
2793 /* Insns carrying these notes are useful later on. */
2794 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2797 if (GET_CODE (pat) == SET && set_noop_p (pat))
2800 if (GET_CODE (pat) == PARALLEL)
2803 /* If nothing but SETs of registers to themselves,
2804 this insn can also be deleted. */
2805 for (i = 0; i < XVECLEN (pat, 0); i++)
2807 rtx tem = XVECEXP (pat, 0, i);
2809 if (GET_CODE (tem) == USE
2810 || GET_CODE (tem) == CLOBBER)
2813 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2822 /* Delete any insns that copy a register to itself. */
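/* For example, an insn whose entire pattern is

       (set (reg:SI 42) (reg:SI 42))

   (register number illustrative) is a no-op move; the loop below turns
   such an insn into a NOTE_INSN_DELETED note rather than unlinking it.  */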
2825 delete_noop_moves (f)
2829 for (insn = f; insn; insn = NEXT_INSN (insn))
2831 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2833 PUT_CODE (insn, NOTE);
2834 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2835 NOTE_SOURCE_FILE (insn) = 0;
2840 /* Determine if the stack pointer is constant over the life of the function.
2841 Only useful before prologues have been emitted. */
2844 notice_stack_pointer_modification_1 (x, pat, data)
2846 rtx pat ATTRIBUTE_UNUSED;
2847 void *data ATTRIBUTE_UNUSED;
2849 if (x == stack_pointer_rtx
2850 /* The stack pointer is only modified indirectly as the result
2851 of a push until later in flow. See the comments in rtl.texi
2852 regarding Embedded Side-Effects on Addresses. */
2853 || (GET_CODE (x) == MEM
2854 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2855 || GET_CODE (XEXP (x, 0)) == PRE_INC
2856 || GET_CODE (XEXP (x, 0)) == POST_DEC
2857 || GET_CODE (XEXP (x, 0)) == POST_INC)
2858 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2859 current_function_sp_is_unchanging = 0;
2863 notice_stack_pointer_modification (f)
2868 /* Assume that the stack pointer is unchanging if alloca hasn't been used. */
2870 current_function_sp_is_unchanging = !current_function_calls_alloca;
2871 if (! current_function_sp_is_unchanging)
2874 for (insn = f; insn; insn = NEXT_INSN (insn))
2876 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2878 /* Check if insn modifies the stack pointer. */
2879 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2881 if (! current_function_sp_is_unchanging)
2887 /* Mark a register in SET. Hard registers in large modes get all
2888 of their component registers set as well. */
2890 mark_reg (reg, xset)
2894 regset set = (regset) xset;
2895 int regno = REGNO (reg);
2897 if (GET_MODE (reg) == BLKmode)
2900 SET_REGNO_REG_SET (set, regno);
2901 if (regno < FIRST_PSEUDO_REGISTER)
2903 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2905 SET_REGNO_REG_SET (set, regno + n);
2909 /* Mark those regs which are needed at the end of the function as live
2910 at the end of the last basic block. */
2912 mark_regs_live_at_end (set)
2917 /* If exiting needs the right stack value, consider the stack pointer
2918 live at the end of the function. */
2919 if ((HAVE_epilogue && reload_completed)
2920 || ! EXIT_IGNORE_STACK
2921 || (! FRAME_POINTER_REQUIRED
2922 && ! current_function_calls_alloca
2923 && flag_omit_frame_pointer)
2924 || current_function_sp_is_unchanging)
2926 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2929 /* Mark the frame pointer if needed at the end of the function. If
2930 we end up eliminating it, it will be removed from the live list
2931 of each basic block by reload. */
2933 if (! reload_completed || frame_pointer_needed)
2935 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2936 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2937 /* If they are different, also mark the hard frame pointer as live. */
2938 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2942 #ifdef PIC_OFFSET_TABLE_REGNUM
2943 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2944 /* Many architectures have a GP register even without flag_pic.
2945 Assume the pic register is not in use, or will be handled by
2946 other means, if it is not fixed. */
2947 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2948 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2952 /* Mark all global registers, and all registers used by the epilogue
2953 as being live at the end of the function since they may be
2954 referenced by our caller. */
2955 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2957 #ifdef EPILOGUE_USES
2958 || EPILOGUE_USES (i)
2961 SET_REGNO_REG_SET (set, i);
2963 /* Mark all call-saved registers that we actually used. */
2964 if (HAVE_epilogue && reload_completed)
2966 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2967 if (! call_used_regs[i] && regs_ever_live[i])
2968 SET_REGNO_REG_SET (set, i);
2971 /* Mark function return value. */
2972 diddle_return_value (mark_reg, set);
2975 /* Callback function for for_each_successor_phi. DATA is a regset.
2976 Sets the SRC_REGNO, the regno of the phi alternative for phi node
2977 INSN, in the regset. */
2980 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2981 rtx insn ATTRIBUTE_UNUSED;
2982 int dest_regno ATTRIBUTE_UNUSED;
2986 regset live = (regset) data;
2987 SET_REGNO_REG_SET (live, src_regno);
2991 /* Propagate global life info around the graph of basic blocks. Begin
2992 considering blocks with their corresponding bit set in BLOCKS_IN.
2993 BLOCKS_OUT is set for every block that was changed. */
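/* In dataflow terms this is the standard backward liveness relaxation.
   A sketch in set notation (not literal code):

       live_at_end (B)   = union over successors S of live_at_start (S)
       live_at_start (B) = USE (B) | (live_at_end (B) & ~DEF (B))

   propagate_block evaluates the second line insn by insn; the worklist
   below iterates the first line until no live_at_start changes.  */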
2996 calculate_global_regs_live (blocks_in, blocks_out, flags)
2997 sbitmap blocks_in, blocks_out;
3000 basic_block *queue, *qhead, *qtail, *qend;
3001 regset tmp, new_live_at_end;
3002 regset_head tmp_head;
3003 regset_head new_live_at_end_head;
3006 tmp = INITIALIZE_REG_SET (tmp_head);
3007 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3009 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3010 because the `head == tail' style test for an empty queue doesn't
3011 work with a full queue. */
3012 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3014 qhead = qend = queue + n_basic_blocks + 2;
3016 /* Clear out the garbage that might be hanging out in bb->aux. */
3017 for (i = n_basic_blocks - 1; i >= 0; --i)
3018 BASIC_BLOCK (i)->aux = NULL;
3020 /* Queue the blocks set in the initial mask. Do this in reverse block
3021 number order so that the first round through the worklist is more
3022 likely to do useful work. We use AUX non-null to flag that the block is queued. */
3023 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3025 basic_block bb = BASIC_BLOCK (i);
3030 sbitmap_zero (blocks_out);
3032 while (qhead != qtail)
3034 int rescan, changed;
3043 /* Begin by propagating live_at_start from the successor blocks. */
3044 CLEAR_REG_SET (new_live_at_end);
3045 for (e = bb->succ; e ; e = e->succ_next)
3047 basic_block sb = e->dest;
3048 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3051 /* Regs used in phi nodes are not included in
3052 global_live_at_start, since they are live only along a
3053 particular edge. Set those regs that are live because of a
3054 phi node alternative corresponding to this particular block. */
3055 for_each_successor_phi (bb, &set_phi_alternative_reg,
3058 if (bb == ENTRY_BLOCK_PTR)
3060 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3064 /* On our first pass through this block, we'll go ahead and continue.
3065 Recognize first pass by local_set NULL. On subsequent passes, we
3066 get to skip out early if live_at_end wouldn't have changed. */
3068 if (bb->local_set == NULL)
3070 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3075 /* If any bits were removed from live_at_end, we'll have to
3076 rescan the block. This wouldn't be necessary if we had
3077 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3078 local_live is really dependent on live_at_end. */
3079 CLEAR_REG_SET (tmp);
3080 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3081 new_live_at_end, BITMAP_AND_COMPL);
3085 /* Find the set of changed bits. Take this opportunity
3086 to notice when this set is empty and exit early. */
3087 CLEAR_REG_SET (tmp);
3088 changed = bitmap_operation (tmp, bb->global_live_at_end,
3089 new_live_at_end, BITMAP_XOR);
3093 /* If any of the changed bits overlap with local_set,
3094 we'll have to rescan the block. Detect overlap by
3095 the AND with ~local_set turning off bits. */
3096 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3101 /* Let our caller know that BB changed enough to require its
3102 death notes updated. */
3103 SET_BIT (blocks_out, bb->index);
3107 /* Add to live_at_start the set of all registers in
3108 new_live_at_end that aren't in the old live_at_end. */
3110 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3112 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3114 changed = bitmap_operation (bb->global_live_at_start,
3115 bb->global_live_at_start,
3122 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3124 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3125 into live_at_start. */
3126 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3128 /* If live_at_start didn't change, no need to go farther. */
3129 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3132 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3135 /* Queue all predecessors of BB so that we may re-examine
3136 their live_at_end. */
3137 for (e = bb->pred; e ; e = e->pred_next)
3139 basic_block pb = e->src;
3140 if (pb->aux == NULL)
3151 FREE_REG_SET (new_live_at_end);
3153 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3155 basic_block bb = BASIC_BLOCK (i);
3156 FREE_REG_SET (bb->local_set);
3162 /* Subroutines of life analysis. */
3164 /* Allocate the permanent data structures that represent the results
3165 of life analysis. Not static since used also for stupid life analysis. */
3168 allocate_bb_life_data ()
3172 for (i = 0; i < n_basic_blocks; i++)
3174 basic_block bb = BASIC_BLOCK (i);
3176 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3177 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3180 ENTRY_BLOCK_PTR->global_live_at_end
3181 = OBSTACK_ALLOC_REG_SET (function_obstack);
3182 EXIT_BLOCK_PTR->global_live_at_start
3183 = OBSTACK_ALLOC_REG_SET (function_obstack);
3185 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3189 allocate_reg_life_data ()
3193 /* Recalculate the register space, in case it has grown. Old style
3194 vector oriented regsets would set regset_{size,bytes} here also. */
3195 allocate_reg_info (max_regno, FALSE, FALSE);
3197 /* Reset all the data we'll collect in propagate_block and its
3199 for (i = 0; i < max_regno; i++)
3203 REG_N_DEATHS (i) = 0;
3204 REG_N_CALLS_CROSSED (i) = 0;
3205 REG_LIVE_LENGTH (i) = 0;
3206 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3210 /* Delete dead instructions for propagate_block. */
3213 propagate_block_delete_insn (bb, insn)
3217 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3219 /* If the insn referred to a label, and that label was attached to
3220 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3221 pretty much mandatory to delete it, because the ADDR_VEC may be
3222 referencing labels that no longer exist. */
3226 rtx label = XEXP (inote, 0);
3229 if (LABEL_NUSES (label) == 1
3230 && (next = next_nonnote_insn (label)) != NULL
3231 && GET_CODE (next) == JUMP_INSN
3232 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3233 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3235 rtx pat = PATTERN (next);
3236 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3237 int len = XVECLEN (pat, diff_vec_p);
3240 for (i = 0; i < len; i++)
3241 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3243 flow_delete_insn (next);
3247 if (bb->end == insn)
3248 bb->end = PREV_INSN (insn);
3249 flow_delete_insn (insn);
3252 /* Delete dead libcalls for propagate_block. Return the insn
3253 before the libcall. */
3256 propagate_block_delete_libcall (bb, insn, note)
3260 rtx first = XEXP (note, 0);
3261 rtx before = PREV_INSN (first);
3263 if (insn == bb->end)
3266 flow_delete_insn_chain (first, insn);
3270 /* Compute the registers live at the beginning of a basic block BB from
3271 those live at the end.
3273 When called, REG_LIVE contains those live at the end. On return, it
3274 contains those live at the beginning.
3276 LOCAL_SET, if non-null, will be set with all registers killed by
3277 this basic block. */
3280 propagate_block (bb, live, local_set, flags)
3286 struct propagate_block_info pbi;
3288 regset_head new_live_head, new_dead_head;
3291 pbi.reg_live = live;
3292 pbi.mem_set_list = NULL_RTX;
3293 pbi.local_set = local_set;
3297 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3298 pbi.reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3300 pbi.reg_next_use = NULL;
3302 pbi.new_live = INITIALIZE_REG_SET (new_live_head);
3303 pbi.new_dead = INITIALIZE_REG_SET (new_dead_head);
3305 if (flags & PROP_REG_INFO)
3309 /* Process the regs live at the end of the block.
3310 Mark them as not local to any one basic block. */
3311 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3313 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3317 /* Scan the block an insn at a time from end to beginning. */
3319 for (insn = bb->end; ; insn = prev)
3321 prev = PREV_INSN (insn);
3323 if (GET_CODE (insn) == NOTE)
3325 /* If this is a call to `setjmp' et al,
3326 warn if any non-volatile datum is live. */
3328 if ((flags & PROP_REG_INFO)
3329 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3330 IOR_REG_SET (regs_live_at_setjmp, pbi.reg_live);
3333 /* Update the life-status of regs for this insn.
3334 First DEAD gets which regs are set in this insn
3335 then LIVE gets which regs are used in this insn.
3336 Then the regs live before the insn
3337 are those live after, with DEAD regs turned off,
3338 and then LIVE regs turned on. */
3340 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3343 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3344 int insn_is_dead = 0;
3345 int libcall_is_dead = 0;
3347 if (flags & PROP_SCAN_DEAD_CODE)
3349 insn_is_dead = insn_dead_p (&pbi, PATTERN (insn), 0,
3351 libcall_is_dead = (insn_is_dead && note != 0
3352 && libcall_dead_p (&pbi, PATTERN (insn),
3356 /* We almost certainly don't want to delete prologue or epilogue
3357 instructions. Warn about probable compiler lossage. */
3360 && (((HAVE_epilogue || HAVE_prologue)
3361 && prologue_epilogue_contains (insn))
3362 || (HAVE_sibcall_epilogue
3363 && sibcall_epilogue_contains (insn))))
3365 if (flags & PROP_KILL_DEAD_CODE)
3367 warning ("ICE: would have deleted prologue/epilogue insn");
3368 if (!inhibit_warnings)
3371 libcall_is_dead = insn_is_dead = 0;
3374 /* If an instruction consists of just dead store(s) on final pass, delete it. */
3376 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3378 if (libcall_is_dead)
3380 prev = propagate_block_delete_libcall (bb, insn, note);
3381 insn = NEXT_INSN (prev);
3384 propagate_block_delete_insn (bb, insn);
3386 /* CC0 is now known to be dead. Either this insn used it,
3387 in which case it doesn't anymore, or clobbered it,
3388 so the next insn can't use it. */
3394 /* See if this is an increment or decrement that can be
3395 merged into a following memory address. */
3398 register rtx x = single_set (insn);
3400 /* Does this instruction increment or decrement a register? */
3401 if (!reload_completed
3402 && (flags & PROP_AUTOINC)
3404 && GET_CODE (SET_DEST (x)) == REG
3405 && (GET_CODE (SET_SRC (x)) == PLUS
3406 || GET_CODE (SET_SRC (x)) == MINUS)
3407 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3408 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3409 /* Ok, look for a following memory ref we can combine with.
3410 If one is found, change the memory ref to a PRE_INC
3411 or PRE_DEC, cancel this insn, and return 1.
3412 Return 0 if nothing has been done. */
3413 && try_pre_increment_1 (&pbi, insn))
3416 #endif /* AUTO_INC_DEC */
3418 CLEAR_REG_SET (pbi.new_live);
3419 CLEAR_REG_SET (pbi.new_dead);
3421 /* If this is not the final pass, and this insn is copying the
3422 value of a library call and it's dead, don't scan the
3423 insns that perform the library call, so that the call's
3424 arguments are not marked live. */
3425 if (libcall_is_dead)
3427 /* Record the death of the dest reg. */
3428 mark_set_regs (&pbi, PATTERN (insn), insn);
3430 insn = XEXP (note, 0);
3431 prev = PREV_INSN (insn);
3433 else if (GET_CODE (PATTERN (insn)) == SET
3434 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3435 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3436 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3437 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3438 /* We have an insn to pop a constant amount off the stack.
3439 (Such insns use PLUS regardless of the direction of the stack,
3440 and any insn to adjust the stack by a constant is always a pop.)
3441 These insns, if not dead stores, have no effect on life. */
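/* An illustrative sketch of such a pop, for a four-byte adjustment
   (mode and constant vary by target):

       (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int 4)))  */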
3445 /* Any regs live at the time of a call instruction
3446 must not go in a register clobbered by calls.
3447 Find all regs now live and record this for them. */
3449 if (GET_CODE (insn) == CALL_INSN
3450 && (flags & PROP_REG_INFO))
3451 EXECUTE_IF_SET_IN_REG_SET (pbi.reg_live, 0, i,
3452 { REG_N_CALLS_CROSSED (i)++; });
3454 /* Record sets. Do this even for dead instructions,
3455 since they would have killed the values if they hadn't
3457 mark_set_regs (&pbi, PATTERN (insn), insn);
3459 /* If an insn doesn't use CC0, it becomes dead since we
3460 assume that every insn clobbers it. So show it dead here;
3461 mark_used_regs will set it live if it is referenced. */
3466 mark_used_regs (&pbi, PATTERN (insn), NULL_RTX, insn);
3468 /* Sometimes we may have inserted something before INSN
3469 (such as a move) when we make an auto-inc. So ensure
3470 we will scan those insns. */
3472 prev = PREV_INSN (insn);
3475 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3481 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3482 cond = COND_EXEC_TEST (PATTERN (insn));
3484 /* Non-constant calls clobber memory. */
3485 if (! CONST_CALL_P (insn))
3486 free_EXPR_LIST_list (&pbi.mem_set_list);
3488 /* There may be extra registers to be clobbered. */
3489 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3491 note = XEXP (note, 1))
3492 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3493 mark_set_1 (&pbi, XEXP (XEXP (note, 0), 0),
3496 /* Calls change all call-used and global registers. */
3497 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3498 if (call_used_regs[i] && ! global_regs[i]
3502 mark_set_reg (&pbi, gen_rtx_REG (reg_raw_mode[i], i),
3503 cond, &dummy, &dummy);
3506 /* Calls use their arguments. */
3507 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3509 note = XEXP (note, 1))
3510 if (GET_CODE (XEXP (note, 0)) == USE)
3511 mark_used_regs (&pbi, XEXP (XEXP (note, 0), 0),
3514 /* The stack ptr is used (honorarily) by a CALL insn. */
3515 SET_REGNO_REG_SET (pbi.new_live, STACK_POINTER_REGNUM);
3517 /* Calls may also reference any of the global registers,
3518 so they are made live. */
3519 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3521 mark_used_reg (&pbi, gen_rtx_REG (reg_raw_mode[i], i),
3526 /* Update reg_live for the registers killed and used. */
3527 AND_COMPL_REG_SET (pbi.reg_live, pbi.new_dead);
3528 IOR_REG_SET (pbi.reg_live, pbi.new_live);
3530 /* On final pass, update counts of how many insns in which
3531 each reg is live. */
3532 if (flags & PROP_REG_INFO)
3533 EXECUTE_IF_SET_IN_REG_SET (pbi.reg_live, 0, i,
3534 { REG_LIVE_LENGTH (i)++; });
3537 if (insn == bb->head)
3541 FREE_REG_SET (pbi.new_live);
3542 FREE_REG_SET (pbi.new_dead);
3543 free_EXPR_LIST_list (&pbi.mem_set_list);
3545 if (pbi.reg_next_use)
3546 free (pbi.reg_next_use);
3550 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3551 (SET expressions whose destinations are registers dead after the insn).
3552 NEEDED is the regset that says which regs are alive after the insn.
3554 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3556 If X is the entire body of an insn, NOTES contains the reg notes
3557 pertaining to the insn. */
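/* For example (pseudo number made up): if (reg:SI 77) is not in
   PBI->reg_live after the insn, then

       (set (reg:SI 77) (plus:SI (reg:SI 78) (const_int 1)))

   is just a dead store and we return 1; a SET whose source is a CALL
   or a volatile reference is never considered dead below.  */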
3560 insn_dead_p (pbi, x, call_ok, notes)
3561 struct propagate_block_info *pbi;
3564 rtx notes ATTRIBUTE_UNUSED;
3566 enum rtx_code code = GET_CODE (x);
3569 /* If flow is invoked after reload, we must take existing AUTO_INC
3570 expressions into account. */
3571 if (reload_completed)
3573 for ( ; notes; notes = XEXP (notes, 1))
3575 if (REG_NOTE_KIND (notes) == REG_INC)
3577 int regno = REGNO (XEXP (notes, 0));
3579 /* Don't delete insns to set global regs. */
3580 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3581 || REGNO_REG_SET_P (pbi->reg_live, regno))
3588 /* If setting something that's a reg or part of one,
3589 see if that register's altered value will be live. */
3593 rtx r = SET_DEST (x);
3596 if (GET_CODE (r) == CC0)
3597 return ! pbi->cc0_live;
3600 /* A SET that is a subroutine call cannot be dead. */
3601 if (GET_CODE (SET_SRC (x)) == CALL)
3607 /* Don't eliminate loads from volatile memory or volatile asms. */
3608 else if (volatile_refs_p (SET_SRC (x)))
3611 if (GET_CODE (r) == MEM)
3615 if (MEM_VOLATILE_P (r))
3618 /* Walk the set of memory locations we are currently tracking
3619 and see if one is an identical match to this memory location.
3620 If so, this memory write is dead (remember, we're walking
3621 backwards from the end of the block to the start). */
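/* Schematic example (pseudo numbers made up), shown in program order:

       (set (mem:SI (reg:SI 90)) (reg:SI 91))   ;; earlier store: dead
       ... no intervening load or aliasing store ...
       (set (mem:SI (reg:SI 90)) (reg:SI 92))   ;; later store: tracked

   Scanning backwards, the later store is already on mem_set_list when
   the earlier one is reached, so the earlier one can be deleted.  */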
3622 temp = pbi->mem_set_list;
3625 if (rtx_equal_p (XEXP (temp, 0), r))
3627 temp = XEXP (temp, 1);
3632 while (GET_CODE (r) == SUBREG
3633 || GET_CODE (r) == STRICT_LOW_PART
3634 || GET_CODE (r) == ZERO_EXTRACT)
3637 if (GET_CODE (r) == REG)
3639 int regno = REGNO (r);
3642 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3645 /* If this is a hard register, verify that subsequent
3646 words are not needed. */
3647 if (regno < FIRST_PSEUDO_REGISTER)
3649 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3652 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3656 /* Don't delete insns to set global regs. */
3657 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3660 /* Make sure insns to set the stack pointer aren't deleted. */
3661 if (regno == STACK_POINTER_REGNUM)
3664 /* Make sure insns to set the frame pointer aren't deleted. */
3665 if (regno == FRAME_POINTER_REGNUM
3666 && (! reload_completed || frame_pointer_needed))
3668 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3669 if (regno == HARD_FRAME_POINTER_REGNUM
3670 && (! reload_completed || frame_pointer_needed))
3674 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3675 /* Make sure insns to set arg pointer are never deleted
3676 (if the arg pointer isn't fixed, there will be a USE
3677 for it, so we can treat it normally). */
3678 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3682 /* Otherwise, the set is dead. */
3688 /* If performing several activities, insn is dead if each activity
3689 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3690 CLOBBER or USE that's inside a PARALLEL doesn't make the insn needed. */
3692 else if (code == PARALLEL)
3694 int i = XVECLEN (x, 0);
3696 for (i--; i >= 0; i--)
3697 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3698 && GET_CODE (XVECEXP (x, 0, i)) != USE
3699 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3705 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3706 is not necessarily true for hard registers. */
3707 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3708 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3709 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3712 /* We do not check other CLOBBER or USE here. An insn consisting of just
3713 a CLOBBER or just a USE should not be deleted. */
3717 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3718 return 1 if the entire library call is dead.
3719 This is true if X copies a register (hard or pseudo)
3720 and if the hard return reg of the call insn is dead.
3721 (The caller should have tested the destination of X already for death.)
3723 If this insn doesn't just copy a register, then we don't
3724 have an ordinary libcall. In that case, cse could not have
3725 managed to substitute the source for the dest later on,
3726 so we can assume the libcall is dead.
3728 NEEDED is the bit vector of pseudoregs live before this insn.
3729 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3732 libcall_dead_p (pbi, x, note, insn)
3733 struct propagate_block_info *pbi;
3738 register RTX_CODE code = GET_CODE (x);
3742 register rtx r = SET_SRC (x);
3743 if (GET_CODE (r) == REG)
3745 rtx call = XEXP (note, 0);
3749 /* Find the call insn. */
3750 while (call != insn && GET_CODE (call) != CALL_INSN)
3751 call = NEXT_INSN (call);
3753 /* If there is none, do nothing special,
3754 since ordinary death handling can understand these insns. */
3758 /* See if the hard reg holding the value is dead.
3759 If this is a PARALLEL, find the call within it. */
3760 call_pat = PATTERN (call);
3761 if (GET_CODE (call_pat) == PARALLEL)
3763 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3764 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3765 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3768 /* This may be a library call that is returning a value
3769 via invisible pointer. Do nothing special, since
3770 ordinary death handling can understand these insns. */
3774 call_pat = XVECEXP (call_pat, 0, i);
3777 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3783 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3784 live at function entry. Don't count global register variables, variables
3785 in registers that can be used for function arg passing, or variables in
3786 fixed hard registers. */
3789 regno_uninitialized (regno)
3792 if (n_basic_blocks == 0
3793 || (regno < FIRST_PSEUDO_REGISTER
3794 && (global_regs[regno]
3795 || fixed_regs[regno]
3796 || FUNCTION_ARG_REGNO_P (regno))))
3799 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3802 /* 1 if register REGNO was alive at a place where `setjmp' was called
3803 and was set more than once or is an argument.
3804 Such regs may be clobbered by `longjmp'. */
3807 regno_clobbered_at_setjmp (regno)
3810 if (n_basic_blocks == 0)
3813 return ((REG_N_SETS (regno) > 1
3814 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3815 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3818 /* INSN references memory, possibly using autoincrement addressing modes.
3819 Find any entries on the mem_set_list that need to be invalidated due
3820 to an address change. */
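/* E.g. (values illustrative): a REG_INC note for (reg:SI 100) on INSN
   means any tracked entry mentioning that register, such as
   (mem:SI (plus:SI (reg:SI 100) (const_int 4))), must be dropped from
   mem_set_list, since the address it named has changed.  */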
3822 invalidate_mems_from_autoinc (pbi, insn)
3823 struct propagate_block_info *pbi;
3826 rtx note;
3827 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3829 if (REG_NOTE_KIND (note) == REG_INC)
3831 rtx temp = pbi->mem_set_list;
3832 rtx prev = NULL_RTX;
3837 next = XEXP (temp, 1);
3838 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3840 /* Splice temp out of list. */
3842 XEXP (prev, 1) = next;
3844 pbi->mem_set_list = next;
3845 free_EXPR_LIST_node (temp);
3855 /* Process the registers that are set within X. Their bits are set to
3856 1 in the regset DEAD, because they are dead prior to this insn.
3858 If INSN is nonzero, it is the insn being processed.
3860 FLAGS is the set of operations to perform. */
3863 mark_set_regs (pbi, x, insn)
3864 struct propagate_block_info *pbi;
3867 rtx cond = NULL_RTX;
3870 switch (GET_CODE (x))
3874 mark_set_1 (pbi, SET_DEST (x), cond, insn);
3878 cond = COND_EXEC_TEST (x);
3879 x = COND_EXEC_CODE (x);
3885 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3887 rtx sub = XVECEXP (x, 0, i);
3888 switch (GET_CODE (sub))
3891 if (cond != NULL_RTX)
3894 cond = COND_EXEC_TEST (sub);
3895 sub = COND_EXEC_CODE (sub);
3896 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
3902 mark_set_1 (pbi, SET_DEST (sub), cond, insn);
3917 /* Process a single SET rtx, X. */
3920 mark_set_1 (pbi, reg, cond, insn)
3921 struct propagate_block_info *pbi;
3922 rtx reg, cond, insn;
3924 register int regno = -1;
3925 int flags = pbi->flags;
3927 /* Some targets place small structures in registers for
3928 return values of functions. We have to detect this
3929 case specially here to get correct flow information. */
3930 if (GET_CODE (reg) == PARALLEL
3931 && GET_MODE (reg) == BLKmode)
3935 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3936 mark_set_1 (pbi, XVECEXP (reg, 0, i), cond, insn);
3940 /* Modifying just one hardware register of a multi-reg value or just a
3941 byte field of a register does not mean the value from before this insn
3942 is now dead. But it does mean liveness of that register at the end of
3943 the block is significant.
3945 Within mark_set_1, however, we treat it as if the register is indeed
3946 modified. mark_used_regs will, however, also treat this register as
3947 being used. Thus, we treat these insns as setting a new value for the
3948 register as a function of its old value. This causes LOG_LINKS to be
3949 made appropriately and this will help combine.
3951 ??? This is all done incorrectly. We should not be setting bits in
3952 new_dead for these registers, since, as we just explained, they are
3953 not dead. We should be setting bits in local_set, and updating
3954 LOG_LINKS, but that is different. */
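/* A schematic instance of such a partial set (pseudo number made up):

       (set (strict_low_part (subreg:QI (reg:SI 60) 0)) (reg:QI 61))

   Only the low byte of pseudo 60 is replaced; the rest of its old
   value survives the insn, which is why treating this as a full set,
   as described above, is not quite right.  */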
3956 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3957 || GET_CODE (reg) == SIGN_EXTRACT
3958 || GET_CODE (reg) == STRICT_LOW_PART)
3959 reg = XEXP (reg, 0);
3961 /* If this set is a MEM, then it kills any aliased writes.
3962 If this set is a REG, then it kills any MEMs which use the reg. */
3963 if (flags & PROP_SCAN_DEAD_CODE)
3965 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
3967 rtx temp = pbi->mem_set_list;
3968 rtx prev = NULL_RTX;
3973 next = XEXP (temp, 1);
3974 if ((GET_CODE (reg) == MEM
3975 && output_dependence (XEXP (temp, 0), reg))
3976 || (GET_CODE (reg) == REG
3977 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3979 /* Splice this entry out of the list. */
3981 XEXP (prev, 1) = next;
3983 pbi->mem_set_list = next;
3984 free_EXPR_LIST_node (temp);
3992 /* If the memory reference had embedded side effects (autoincrement
3993 address modes), then we may need to kill some entries on the memory set list. */
3995 if (insn && GET_CODE (reg) == MEM)
3996 invalidate_mems_from_autoinc (pbi, insn);
3998 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3999 /* We do not know the size of a BLKmode store, so we do not track
4000 them for redundant store elimination. */
4001 && GET_MODE (reg) != BLKmode
4002 /* There are no REG_INC notes for SP, so we can't assume we'll see
4003 everything that invalidates it. To be safe, don't eliminate any
4004 stores through SP; none of them should be redundant anyway. */
4005 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4006 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4009 if (GET_CODE (reg) == REG
4010 && (regno = REGNO (reg),
4011 ! (regno == FRAME_POINTER_REGNUM
4012 && (! reload_completed || frame_pointer_needed)))
4013 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4014 && ! (regno == HARD_FRAME_POINTER_REGNUM
4015 && (! reload_completed || frame_pointer_needed))
4017 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4018 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4022 int some_was_live, some_was_dead;
4024 /* Perform the pbi datastructure update. */
4025 if (! mark_set_reg (pbi, reg, cond, &some_was_live, &some_was_dead))
4028 /* Additional data to record if this is the final pass. */
4029 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4030 | PROP_DEATH_NOTES | PROP_AUTOINC))
4033 register int blocknum = pbi->bb->index;
4036 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4037 y = pbi->reg_next_use[regno];
4039 /* If this is a hard reg, record this function uses the reg. */
4041 if (regno < FIRST_PSEUDO_REGISTER)
4044 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4046 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4047 for (i = regno; i < endregno; i++)
4049 /* The next use is no longer "next", since a store intervenes. */
4051 pbi->reg_next_use[i] = 0;
4054 if (flags & PROP_REG_INFO)
4055 for (i = regno; i < endregno; i++)
4057 regs_ever_live[i] = 1;
4063 /* The next use is no longer "next", since a store intervenes. */
4065 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4066 pbi->reg_next_use[regno] = 0;
4068 /* Keep track of which basic blocks each reg appears in. */
4070 if (flags & PROP_REG_INFO)
4072 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4073 REG_BASIC_BLOCK (regno) = blocknum;
4074 else if (REG_BASIC_BLOCK (regno) != blocknum)
4075 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4077 /* Count (weighted) references, stores, etc. This counts a
4078 register twice if it is modified, but that is correct. */
4079 REG_N_SETS (regno)++;
4080 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4082 /* The insns where a reg is live are normally counted
4083 elsewhere, but we want the count to include the insn
4084 where the reg is set, and the normal counting mechanism
4085 would not count it. */
4086 REG_LIVE_LENGTH (regno)++;
4090 if (! some_was_dead)
4092 if (flags & PROP_LOG_LINKS)
4094 /* Make a logical link from the next following insn
4095 that uses this register, back to this insn.
4096 The following insns have already been processed.
4098 We don't build a LOG_LINK for hard registers containing
4099 in ASM_OPERANDs. If these registers get replaced,
4100 we might wind up changing the semantics of the insn,
4101 even if reload can make what appear to be valid
4102 assignments later. */
4103 if (y && (BLOCK_NUM (y) == blocknum)
4104 && (regno >= FIRST_PSEUDO_REGISTER
4105 || asm_noperands (PATTERN (y)) < 0))
4106 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4109 else if (! some_was_live)
4111 if (flags & PROP_REG_INFO)
4112 REG_N_DEATHS (REGNO (reg))++;
4114 if (flags & PROP_DEATH_NOTES)
4116 /* Note that dead stores have already been deleted
4117 when possible. If we get here, we have found a
4118 dead store that cannot be eliminated (because the
4119 same insn does something useful). Indicate this
4120 by marking the reg being set as dying here. */
4122 REG_NOTES (insn) = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4127 if (flags & PROP_DEATH_NOTES)
4129 /* This is a case where we have a multi-word hard register
4130 and some, but not all, of the words of the register are
4131 needed in subsequent insns. Write REG_UNUSED notes
4132 for those parts that were not needed. This case should be rare. */
4137 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4139 if (! REGNO_REG_SET_P (pbi->reg_live, regno + i))
4143 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4149 else if (GET_CODE (reg) == REG)
4151 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4152 pbi->reg_next_use[regno] = 0;
4155 /* If this is the last pass and this is a SCRATCH, show it will be dying
4156 here and count it. */
4157 else if (GET_CODE (reg) == SCRATCH)
4159 if (flags & PROP_DEATH_NOTES)
4161 REG_NOTES (insn) = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4165 /* Update data structures for a (possibly conditional) store into REG.
4166 Return true if REG is now unconditionally dead. */
4169 mark_set_reg (pbi, reg, cond, p_some_was_live, p_some_was_dead)
4170 struct propagate_block_info *pbi;
4172 rtx cond ATTRIBUTE_UNUSED;
4173 int *p_some_was_live, *p_some_was_dead;
4175 int regno = REGNO (reg);
4176 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4177 int some_was_dead = ! some_was_live;
4179 /* Mark it as a significant register for this basic block. */
4181 SET_REGNO_REG_SET (pbi->local_set, regno);
4183 /* A hard reg in a wide mode may really be multiple registers.
4184 If so, mark all of them just like the first. */
4185 if (regno < FIRST_PSEUDO_REGISTER)
4187 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4190 int regno_n = regno + n;
4191 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4193 SET_REGNO_REG_SET (pbi->local_set, regno_n);
4195 some_was_live |= needed_regno;
4196 some_was_dead |= ! needed_regno;
4200 *p_some_was_live = some_was_live;
4201 *p_some_was_dead = some_was_dead;
4203 /* The stack pointer is never dead. Well, not strictly true, but it's
4204 very difficult to tell from here. Hopefully combine_stack_adjustments
4205 will fix up the most egregious errors. */
4206 if (regno == STACK_POINTER_REGNUM)
4209 /* Mark it as dead before this insn. */
4210 SET_REGNO_REG_SET (pbi->new_dead, regno);
4211 if (regno < FIRST_PSEUDO_REGISTER)
4213 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4215 SET_REGNO_REG_SET (pbi->new_dead, regno + n);
4218 /* Unconditionally dead. */
4224 /* X is a MEM found in INSN. See if we can convert it into an auto-increment reference. */
4228 find_auto_inc (pbi, x, insn)
4229 struct propagate_block_info *pbi;
4233 rtx addr = XEXP (x, 0);
4234 HOST_WIDE_INT offset = 0;
4237 /* Here we detect use of an index register which might be good for
4238 postincrement, postdecrement, preincrement, or predecrement. */
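/* A hedged sketch of what a successful match below produces, on a
   target with HAVE_POST_INCREMENT (register number made up):

       before:  ... (mem:SI (reg:SI 100)) ...
                (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

       after:   ... (mem:SI (post_inc:SI (reg:SI 100))) ...
                ;; the increment insn becomes a NOTE_INSN_DELETED note

   INSN also gains a REG_INC note for the register.  */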
4240 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4241 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4243 if (GET_CODE (addr) == REG)
4246 register int size = GET_MODE_SIZE (GET_MODE (x));
4249 int regno = REGNO (addr);
4251 /* Is the next use an increment that might make auto-increment? */
4252 if ((incr = pbi->reg_next_use[regno]) != 0
4253 && (set = single_set (incr)) != 0
4254 && GET_CODE (set) == SET
4255 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4256 /* Can't add side effects to jumps; if reg is spilled and
4257 reloaded, there's no way to store back the altered value. */
4258 && GET_CODE (insn) != JUMP_INSN
4259 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4260 && XEXP (y, 0) == addr
4261 && GET_CODE (XEXP (y, 1)) == CONST_INT
4262 && ((HAVE_POST_INCREMENT
4263 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4264 || (HAVE_POST_DECREMENT
4265 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4266 || (HAVE_PRE_INCREMENT
4267 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4268 || (HAVE_PRE_DECREMENT
4269 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4270 /* Make sure this reg appears only once in this insn. */
4271 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4272 use != 0 && use != (rtx) 1))
4274 rtx q = SET_DEST (set);
4275 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4276 ? (offset ? PRE_INC : POST_INC)
4277 : (offset ? PRE_DEC : POST_DEC));
4279 if (dead_or_set_p (incr, addr)
4280 /* Mustn't autoinc an eliminable register. */
4281 && (regno >= FIRST_PSEUDO_REGISTER
4282 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4284 /* This is the simple case. Try to make the auto-inc. If
4285 we can't, we are done. Otherwise, we will do any
4286 needed updates below. */
4287 if (! validate_change (insn, &XEXP (x, 0),
4288 gen_rtx_fmt_e (inc_code, Pmode, addr),
4292 else if (GET_CODE (q) == REG
4293 /* PREV_INSN used here to check the semi-open interval
4295 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4296 /* We must also check for sets of q as q may be
4297 a call clobbered hard register and there may
4298 be a call between PREV_INSN (insn) and incr. */
4299 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4301 /* We have *p followed sometime later by q = p+size.
4302 Both p and q must be live afterward,
4303 and q is not used between INSN and its assignment.
4304 Change it to q = p, ...*q..., q = q+size.
4305 Then fall into the usual case. */
4310 emit_move_insn (q, addr);
4311 insns = get_insns ();
4314 bb = BLOCK_FOR_INSN (insn);
4315 for (temp = insns; temp; temp = NEXT_INSN (temp))
4316 set_block_for_insn (temp, bb);
4318 /* If we can't make the auto-inc, or can't make the
4319 replacement into Y, exit. There's no point in making
4320 the change below if we can't do the auto-inc and doing
4321 so is not correct in the pre-inc case. */
4323 validate_change (insn, &XEXP (x, 0),
4324 gen_rtx_fmt_e (inc_code, Pmode, q),
4326 validate_change (incr, &XEXP (y, 0), q, 1);
4327 if (! apply_change_group ())
4330 /* We now know we'll be doing this change, so emit the
4331 new insn(s) and do the updates. */
4332 emit_insns_before (insns, insn);
4334 if (BLOCK_FOR_INSN (insn)->head == insn)
4335 BLOCK_FOR_INSN (insn)->head = insns;
4337 /* INCR will become a NOTE and INSN won't contain a
4338 use of ADDR. If a use of ADDR was just placed in
4339 the insn before INSN, make that the next use.
4340 Otherwise, invalidate it. */
4341 if (GET_CODE (PREV_INSN (insn)) == INSN
4342 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4343 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4344 pbi->reg_next_use[regno] = PREV_INSN (insn);
4346 pbi->reg_next_use[regno] = 0;
4351 /* REGNO is now used in INCR which is below INSN, but it
4352 previously wasn't live here. If we don't mark it as
4353 live, we'll put a REG_DEAD note for it on this insn,
4354 which is incorrect. */
4355 SET_REGNO_REG_SET (pbi->reg_live, regno);
4357 /* If there are any calls between INSN and INCR, show
4358 that REGNO now crosses them. */
4359 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4360 if (GET_CODE (temp) == CALL_INSN)
4361 REG_N_CALLS_CROSSED (regno)++;
4366 /* If we haven't returned, it means we were able to make the
4367 auto-inc, so update the status. First, record that this insn
4368 has an implicit side effect. */
4371 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4373 /* Modify the old increment-insn to simply copy
4374 the already-incremented value of our register. */
4375 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4378 /* If that makes it a no-op (copying the register into itself) delete
4379 it so it won't appear to be a "use" and a "set" of this register. */
4381 if (SET_DEST (set) == addr)
4383 /* If the original source was dead, it's dead now. */
4384 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4385 if (note && XEXP (note, 0) != addr)
4386 SET_REGNO_REG_SET (pbi->new_dead, REGNO (XEXP (note, 0)));
4388 PUT_CODE (incr, NOTE);
4389 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4390 NOTE_SOURCE_FILE (incr) = 0;
4393 if (regno >= FIRST_PSEUDO_REGISTER)
4395 /* Count an extra reference to the reg. When a reg is
4396 incremented, spilling it is worse, so we want to make
4397 that less likely. */
4398 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4400 /* Count the increment as a setting of the register,
4401 even though it isn't a SET in rtl. */
4402 REG_N_SETS (regno)++;
4407 #endif /* AUTO_INC_DEC */
4410 mark_used_reg (pbi, reg, cond, insn)
4411 struct propagate_block_info *pbi;
4413 rtx cond ATTRIBUTE_UNUSED;
4416 int regno = REGNO (reg);
4417 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4418 int some_was_dead = ! some_was_live;
4420 SET_REGNO_REG_SET (pbi->new_live, regno);
4422 /* A hard reg in a wide mode may really be multiple registers.
4423 If so, mark all of them just like the first. */
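/* Illustration (hypothetical 32-bit target): a DImode value living in
   hard reg 0 has HARD_REGNO_NREGS (0, DImode) == 2, so the loop below
   marks hard regs 0 and 1 live together.  */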
4424 if (regno < FIRST_PSEUDO_REGISTER)
4426 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4429 int regno_n = regno + n;
4430 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4432 SET_REGNO_REG_SET (pbi->new_live, regno_n);
4433 some_was_live |= needed_regno;
4434 some_was_dead |= ! needed_regno;
4438 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4440 /* Record where each reg is used, so when the reg is set we know
4441 the next insn that uses it. */
4442 pbi->reg_next_use[regno] = insn;
4445 if (pbi->flags & PROP_REG_INFO)
4447 if (regno < FIRST_PSEUDO_REGISTER)
4449 /* If this is a register we are going to try to eliminate,
4450 don't mark it live here. If we are successful in
4451 eliminating it, it need not be live unless it is used for
4452 pseudos, in which case it will have been set live when it
4453 was allocated to the pseudos. If the register will not
4454 be eliminated, reload will set it live at that point.
4456 Otherwise, record that this function uses this register. */
4457 /* ??? The PPC backend tries to "eliminate" on the pic
4458 register to itself. This should be fixed. In the meantime,
4459 hack around it. */
4461 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
4462 && (regno == FRAME_POINTER_REGNUM
4463 || regno == ARG_POINTER_REGNUM)))
4465 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4467 regs_ever_live[regno + --n] = 1;
4473 /* Keep track of which basic block each reg appears in. */
4475 register int blocknum = pbi->bb->index;
4476 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4477 REG_BASIC_BLOCK (regno) = blocknum;
4478 else if (REG_BASIC_BLOCK (regno) != blocknum)
4479 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4481 /* Count (weighted) number of uses of each reg. */
4482 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4486 /* Record and count the insns in which a reg dies. If it is used in
4487 this insn and was dead below the insn then it dies in this insn.
4488 If it was set in this insn, we do not make a REG_DEAD note;
4489 likewise if we already made such a note.
4491 ??? This could be done better. In new_dead we have a record of
4492 which registers are set or clobbered this insn (which in itself is
4493 slightly incorrect, see the commentary near strict_low_part in
4494 mark_set_1), which should be the set of registers that we do not
4495 wish to create death notes for under the above rule. Note that
4496 we have not yet processed the call-clobbered/call-used registers,
4497 and they do not quite follow the above rule, since we do want death
4498 notes for call-clobbered register arguments. Which begs the whole
4499 question of whether we should in fact have death notes for registers
4500 used and clobbered (but not set) in the same insn. The only useful
4501 thing we ought to be getting from dead_or_set_p is detection of
4502 duplicate death notes. */
4504 if ((pbi->flags & PROP_DEATH_NOTES)
4506 && ! dead_or_set_p (insn, reg))
4510 /* Check for the case where the register dying partially
4511 overlaps the register set by this insn. */
4512 if (regno < FIRST_PSEUDO_REGISTER
4513 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4515 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4517 some_was_live |= dead_or_set_regno_p (insn, regno + n);
4520 /* If none of the words in X is needed, make a REG_DEAD note.
4521 Otherwise, we must make partial REG_DEAD notes. */
4522 if (! some_was_live)
4525 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4526 REG_N_DEATHS (regno)++;
4530 /* Don't make a REG_DEAD note for a part of a register
4531 that is set in the insn. */
4533 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4534 for (; n >= regno; n--)
4535 if (!REGNO_REG_SET_P (pbi->reg_live, n)
4536 && ! dead_or_set_regno_p (insn, n))
4538 = alloc_EXPR_LIST (REG_DEAD,
4539 gen_rtx_REG (reg_raw_mode[n], n),
4545 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
4546 This is done assuming the registers needed from X are those that
4547 have 1-bits in PBI->REG_LIVE.
4549 INSN is the containing instruction. If INSN is dead, this function is not called. */
4553 mark_used_regs (pbi, x, cond, insn)
4554 struct propagate_block_info *pbi;
4557 register RTX_CODE code;
4559 int flags = pbi->flags;
4562 code = GET_CODE (x);
4582 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
4584 if (GET_CODE (XEXP (x, 0)) == MEM)
4585 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
4589 /* Don't bother watching stores to mems if this is not the
4590 final pass. We'll not be deleting dead stores this round. */
4591 if (flags & PROP_SCAN_DEAD_CODE)
4593 /* Invalidate the data for the last MEM stored, but only if MEM is
4594 something that can be stored into. */
4595 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4596 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4597 ; /* needn't clear the memory set list */
4600 rtx temp = pbi->mem_set_list;
4601 rtx prev = NULL_RTX;
4606 next = XEXP (temp, 1);
4607 if (anti_dependence (XEXP (temp, 0), x))
4609 /* Splice temp out of the list. */
4611 XEXP (prev, 1) = next;
4613 pbi->mem_set_list = next;
4614 free_EXPR_LIST_node (temp);
4622 /* If the memory reference had embedded side effects (autoincrement
4623 address modes), then we may need to kill some entries on the memory set list. */
4626 invalidate_mems_from_autoinc (pbi, insn);
4630 if (flags & PROP_AUTOINC)
4631 find_auto_inc (pbi, x, insn);
4636 if (GET_CODE (SUBREG_REG (x)) == REG
4637 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4638 && (GET_MODE_SIZE (GET_MODE (x))
4639 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4640 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4642 /* While we're here, optimize this case. */
4644 if (GET_CODE (x) != REG)
4649 /* We see a register being used, rather than set: mark it as needed. */
4650 mark_used_reg (pbi, x, cond, insn);
4655 register rtx testreg = SET_DEST (x);
4658 /* If storing into MEM, don't show it as being used. But do
4659 show the address as being used. */
4660 if (GET_CODE (testreg) == MEM)
4663 if (flags & PROP_AUTOINC)
4664 find_auto_inc (pbi, testreg, insn);
4666 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
4667 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4671 /* Storing in STRICT_LOW_PART is like storing in a reg
4672 in that this SET might be dead, so ignore it in TESTREG,
4673 but in some other ways it is like using the reg.
4675 Storing in a SUBREG or a bit field is like storing the entire
4676 register in that if the register's value is not used
4677 then this SET is not needed. */
4678 while (GET_CODE (testreg) == STRICT_LOW_PART
4679 || GET_CODE (testreg) == ZERO_EXTRACT
4680 || GET_CODE (testreg) == SIGN_EXTRACT
4681 || GET_CODE (testreg) == SUBREG)
4683 if (GET_CODE (testreg) == SUBREG
4684 && GET_CODE (SUBREG_REG (testreg)) == REG
4685 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4686 && (GET_MODE_SIZE (GET_MODE (testreg))
4687 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4688 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4690 /* Modifying a single register in an alternate mode
4691 does not use any of the old value. But these other
4692 ways of storing in a register do use the old value. */
4693 if (GET_CODE (testreg) == SUBREG
4694 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4699 testreg = XEXP (testreg, 0);
4702 /* If this is a store into a register, recursively scan the
4703 value being stored. */
4705 if ((GET_CODE (testreg) == PARALLEL
4706 && GET_MODE (testreg) == BLKmode)
4707 || (GET_CODE (testreg) == REG
4708 && (regno = REGNO (testreg),
4709 ! (regno == FRAME_POINTER_REGNUM
4710 && (! reload_completed || frame_pointer_needed)))
4711 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4712 && ! (regno == HARD_FRAME_POINTER_REGNUM
4713 && (! reload_completed || frame_pointer_needed))
4715 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4716 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4721 mark_used_regs (pbi, SET_DEST (x), cond, insn);
4722 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4729 case UNSPEC_VOLATILE:
4733 /* Traditional and volatile asm instructions must be considered to use
4734 and clobber all hard registers, all pseudo-registers and all of
4735 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4737 Consider for instance a volatile asm that changes the fpu rounding
4738 mode. An insn should not be moved across this even if it only uses
4739 pseudo-regs because it might give an incorrectly rounded result.
4741 ?!? Unfortunately, marking all hard registers as live causes massive
4742 problems for the register allocator and marking all pseudos as live
4743 creates mountains of uninitialized variable warnings.
4745 So for now, just clear the memory set list and mark any regs
4746 we can find in ASM_OPERANDS as used. */
4747 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4748 free_EXPR_LIST_list (&pbi->mem_set_list);
4750 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4751 We cannot just fall through here, since then we would be confused
4752 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
4753 a traditional asm, unlike its normal usage. */
4754 if (code == ASM_OPERANDS)
4758 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4759 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
4765 if (cond != NULL_RTX)
4768 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
4770 cond = COND_EXEC_TEST (x);
4771 x = COND_EXEC_CODE (x);
4775 /* We _do_not_ want to scan operands of phi nodes. Operands of
4776 a phi function are evaluated only when control reaches this
4777 block along a particular edge. Therefore, regs that appear
4778 as arguments to a phi should not be added to the global live-at-start of the block. */
4786 /* Recursively scan the operands of this expression. */
4789 register const char *fmt = GET_RTX_FORMAT (code);
4792 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4796 /* Tail recursive case: save a function call level. */
4802 mark_used_regs (pbi, XEXP (x, i), cond, insn);
4804 else if (fmt[i] == 'E')
4807 for (j = 0; j < XVECLEN (x, i); j++)
4808 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
4817 try_pre_increment_1 (pbi, insn)
4818 struct propagate_block_info *pbi;
4821 /* Find the next use of this reg. If in same basic block,
4822 make it do pre-increment or pre-decrement if appropriate. */
4823 rtx x = single_set (insn);
4824 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4825 * INTVAL (XEXP (SET_SRC (x), 1)));
4826 int regno = REGNO (SET_DEST (x));
4827 rtx y = pbi->reg_next_use[regno];
4829 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4830 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4831 mode would be better. */
4832 && ! dead_or_set_p (y, SET_DEST (x))
4833 && try_pre_increment (y, SET_DEST (x), amount))
4835 /* We have found a suitable auto-increment
4836 and already changed insn Y to do it.
4837 So flush this increment-instruction. */
4838 PUT_CODE (insn, NOTE);
4839 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4840 NOTE_SOURCE_FILE (insn) = 0;
4841 /* Count a reference to this reg for the increment
4842 insn we are deleting. When a reg is incremented,
4843 spilling it is worse, so we want to make that less likely. */
4845 if (regno >= FIRST_PSEUDO_REGISTER)
4847 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4848 REG_N_SETS (regno)++;
4855 /* Try to change INSN so that it does pre-increment or pre-decrement
4856 addressing on register REG in order to add AMOUNT to REG.
4857 AMOUNT is negative for pre-decrement.
4858 Returns 1 if the change could be made.
4859 This checks all about the validity of the result of modifying INSN. */
4862 try_pre_increment (insn, reg, amount)
4864 HOST_WIDE_INT amount;
4868 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4869 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4871 /* Nonzero if we can try to make a post-increment or post-decrement.
4872 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4873 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4874 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4877 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4880 /* From the sign of increment, see which possibilities are conceivable
4881 on this target machine. */
4882 if (HAVE_PRE_INCREMENT && amount > 0)
4884 if (HAVE_POST_INCREMENT && amount > 0)
4887 if (HAVE_PRE_DECREMENT && amount < 0)
4889 if (HAVE_POST_DECREMENT && amount < 0)
4892 if (! (pre_ok || post_ok))
4895 /* It is not safe to add a side effect to a jump insn
4896 because if the incremented register is spilled and must be reloaded
4897 there would be no way to store the incremented value back in memory. */
4899 if (GET_CODE (insn) == JUMP_INSN)
4904 use = find_use_as_address (PATTERN (insn), reg, 0);
4905 if (post_ok && (use == 0 || use == (rtx) 1))
4907 use = find_use_as_address (PATTERN (insn), reg, -amount);
4911 if (use == 0 || use == (rtx) 1)
4914 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4917 /* See if this combination of instruction and addressing mode exists. */
4918 if (! validate_change (insn, &XEXP (use, 0),
4919 gen_rtx_fmt_e (amount > 0
4920 ? (do_post ? POST_INC : PRE_INC)
4921 : (do_post ? POST_DEC : PRE_DEC),
4925 /* Record that this insn now has an implicit side effect on X. */
4926 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4930 #endif /* AUTO_INC_DEC */
4932 /* Find the place in the rtx X where REG is used as a memory address.
4933 Return the MEM rtx that so uses it.
4934 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4935 (plus REG (const_int PLUSCONST)).
4937 If such an address does not appear, return 0.
4938 If REG appears more than once, or is used other than in such an address, return (rtx) 1. */
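/* Illustrative examples (assumed RTL, for exposition only): with
   X = (set (reg 3) (mem (reg 4))), find_use_as_address (X, reg 4, 0)
   returns that MEM; with X = (set (reg 3) (mem (plus (reg 4)
   (const_int 8)))), the MEM is returned only for PLUSCONST == 8.  */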
4942 find_use_as_address (x, reg, plusconst)
4945 HOST_WIDE_INT plusconst;
4947 enum rtx_code code = GET_CODE (x);
4948 const char *fmt = GET_RTX_FORMAT (code);
4950 register rtx value = 0;
4953 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4956 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4957 && XEXP (XEXP (x, 0), 0) == reg
4958 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4959 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4962 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4964 /* If REG occurs inside a MEM used in a bit-field reference,
4965 that is unacceptable. */
4966 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4967 return (rtx) (HOST_WIDE_INT) 1;
4971 return (rtx) (HOST_WIDE_INT) 1;
4973 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4977 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4981 return (rtx) (HOST_WIDE_INT) 1;
4983 else if (fmt[i] == 'E')
4986 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4988 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4992 return (rtx) (HOST_WIDE_INT) 1;
5000 /* Write information about registers and basic blocks into FILE.
5001 This is part of making a debugging dump. */
5004 dump_regset (r, outf)
5011 fputs (" (nil)", outf);
5015 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5017 fprintf (outf, " %d", i);
5018 if (i < FIRST_PSEUDO_REGISTER)
5019 fprintf (outf, " [%s]",
5028 dump_regset (r, stderr);
5029 putc ('\n', stderr);
5033 dump_flow_info (file)
5037 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5039 fprintf (file, "%d registers.\n", max_regno);
5040 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5043 enum reg_class class, altclass;
5044 fprintf (file, "\nRegister %d used %d times across %d insns",
5045 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5046 if (REG_BASIC_BLOCK (i) >= 0)
5047 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5049 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5050 (REG_N_SETS (i) == 1) ? "" : "s");
5051 if (REG_USERVAR_P (regno_reg_rtx[i]))
5052 fprintf (file, "; user var");
5053 if (REG_N_DEATHS (i) != 1)
5054 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5055 if (REG_N_CALLS_CROSSED (i) == 1)
5056 fprintf (file, "; crosses 1 call");
5057 else if (REG_N_CALLS_CROSSED (i))
5058 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5059 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5060 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5061 class = reg_preferred_class (i);
5062 altclass = reg_alternate_class (i);
5063 if (class != GENERAL_REGS || altclass != ALL_REGS)
5065 if (altclass == ALL_REGS || class == ALL_REGS)
5066 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5067 else if (altclass == NO_REGS)
5068 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5070 fprintf (file, "; pref %s, else %s",
5071 reg_class_names[(int) class],
5072 reg_class_names[(int) altclass]);
5074 if (REGNO_POINTER_FLAG (i))
5075 fprintf (file, "; pointer");
5076 fprintf (file, ".\n");
5079 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5080 for (i = 0; i < n_basic_blocks; i++)
5082 register basic_block bb = BASIC_BLOCK (i);
5085 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5086 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5088 fprintf (file, "Predecessors: ");
5089 for (e = bb->pred; e ; e = e->pred_next)
5090 dump_edge_info (file, e, 0);
5092 fprintf (file, "\nSuccessors: ");
5093 for (e = bb->succ; e ; e = e->succ_next)
5094 dump_edge_info (file, e, 1);
5096 fprintf (file, "\nRegisters live at start:");
5097 dump_regset (bb->global_live_at_start, file);
5099 fprintf (file, "\nRegisters live at end:");
5100 dump_regset (bb->global_live_at_end, file);
5111 dump_flow_info (stderr);
5115 dump_edge_info (file, e, do_succ)
5120 basic_block side = (do_succ ? e->dest : e->src);
5122 if (side == ENTRY_BLOCK_PTR)
5123 fputs (" ENTRY", file);
5124 else if (side == EXIT_BLOCK_PTR)
5125 fputs (" EXIT", file);
5127 fprintf (file, " %d", side->index);
5131 static const char * const bitnames[] = {
5132 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5135 int i, flags = e->flags;
5139 for (i = 0; flags; i++)
5140 if (flags & (1 << i))
5146 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5147 fputs (bitnames[i], file);
5149 fprintf (file, "%d", i);
5157 /* Print out one basic block with live information at start and end. */
5167 fprintf (outf, ";; Basic block %d, loop depth %d",
5168 bb->index, bb->loop_depth);
5169 if (bb->eh_beg != -1 || bb->eh_end != -1)
5170 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5173 fputs (";; Predecessors: ", outf);
5174 for (e = bb->pred; e ; e = e->pred_next)
5175 dump_edge_info (outf, e, 0);
5178 fputs (";; Registers live at start:", outf);
5179 dump_regset (bb->global_live_at_start, outf);
5182 for (insn = bb->head, last = NEXT_INSN (bb->end);
5184 insn = NEXT_INSN (insn))
5185 print_rtl_single (outf, insn);
5187 fputs (";; Registers live at end:", outf);
5188 dump_regset (bb->global_live_at_end, outf);
5191 fputs (";; Successors: ", outf);
5192 for (e = bb->succ; e; e = e->succ_next)
5193 dump_edge_info (outf, e, 1);
5201 dump_bb (bb, stderr);
5208 dump_bb (BASIC_BLOCK(n), stderr);
5211 /* Like print_rtl, but also print out live information for the start of each basic block. */
5215 print_rtl_with_bb (outf, rtx_first)
5219 register rtx tmp_rtx;
5222 fprintf (outf, "(nil)\n");
5226 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5227 int max_uid = get_max_uid ();
5228 basic_block *start = (basic_block *)
5229 xcalloc (max_uid, sizeof (basic_block));
5230 basic_block *end = (basic_block *)
5231 xcalloc (max_uid, sizeof (basic_block));
5232 enum bb_state *in_bb_p = (enum bb_state *)
5233 xcalloc (max_uid, sizeof (enum bb_state));
5235 for (i = n_basic_blocks - 1; i >= 0; i--)
5237 basic_block bb = BASIC_BLOCK (i);
5240 start[INSN_UID (bb->head)] = bb;
5241 end[INSN_UID (bb->end)] = bb;
5242 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5244 enum bb_state state = IN_MULTIPLE_BB;
5245 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5247 in_bb_p[INSN_UID(x)] = state;
5254 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5259 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5261 fprintf (outf, ";; Start of basic block %d, registers live:",
5263 dump_regset (bb->global_live_at_start, outf);
5267 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5268 && GET_CODE (tmp_rtx) != NOTE
5269 && GET_CODE (tmp_rtx) != BARRIER)
5270 fprintf (outf, ";; Insn is not within a basic block\n");
5271 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5272 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5274 did_output = print_rtl_single (outf, tmp_rtx);
5276 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5278 fprintf (outf, ";; End of basic block %d, registers live:\n",
5280 dump_regset (bb->global_live_at_end, outf);
5293 if (current_function_epilogue_delay_list != 0)
5295 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5296 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5297 tmp_rtx = XEXP (tmp_rtx, 1))
5298 print_rtl_single (outf, XEXP (tmp_rtx, 0));
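/* Before the real implementation below, a minimal, compiled-out sketch
   of the same optimistic dominator dataflow (an illustration only; the
   toy CFG here is an assumption, not GCC's data structures): blocks
   are numbered 0..N-1 with block 0 the entry block, each block's
   predecessors are listed in PREDS, and a dominator set is a 32-bit
   mask.  We iterate

     dom (b) = {b} | intersection over p in preds (b) of dom (p)

   from an optimistic all-ones seed down to the maximal fixed point,
   which is the dataflow problem the worklist code below solves with
   sbitmaps.  */
#if 0
static void
toy_compute_dominators (dom, n, npreds, preds)
     unsigned dom[];
     int n;
     int npreds[];
     int preds[][4];
{
  int b, i, changed = 1;

  /* Optimistic seed: everything dominates everything, except that
     the entry block dominates only itself.  */
  for (b = 0; b < n; b++)
    dom[b] = ~0U;
  dom[0] = 1U << 0;

  /* Simple round-robin iteration; the real code below instead uses a
     worklist so a block is revisited only when a predecessor's
     solution changes.  */
  while (changed)
    {
      changed = 0;
      for (b = 1; b < n; b++)
	{
	  unsigned d = ~0U;

	  for (i = 0; i < npreds[b]; i++)
	    d &= dom[preds[b][i]];
	  d |= 1U << b;
	  if (d != dom[b])
	    dom[b] = d, changed = 1;
	}
    }
}
#endif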
5302 /* Compute dominator relationships using new flow graph structures. */
5304 compute_flow_dominators (dominators, post_dominators)
5305 sbitmap *dominators;
5306 sbitmap *post_dominators;
5309 sbitmap *temp_bitmap;
5311 basic_block *worklist, *workend, *qin, *qout;
5314 /* Allocate a worklist array/queue. Entries are only added to the
5315 list if they were not already on the list. So the size is
5316 bounded by the number of basic blocks. */
5317 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5318 workend = &worklist[n_basic_blocks];
5320 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5321 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5325 /* The optimistic setting of dominators requires us to put every
5326 block on the work list initially. */
5327 qin = qout = worklist;
5328 for (bb = 0; bb < n_basic_blocks; bb++)
5330 *qin++ = BASIC_BLOCK (bb);
5331 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5333 qlen = n_basic_blocks;
5336 /* We want a maximal solution, so initially assume everything dominates everything else. */
5338 sbitmap_vector_ones (dominators, n_basic_blocks);
5340 /* Mark successors of the entry block so we can identify them below. */
5341 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5342 e->dest->aux = ENTRY_BLOCK_PTR;
5344 /* Iterate until the worklist is empty. */
5347 /* Take the first entry off the worklist. */
5348 basic_block b = *qout++;
5349 if (qout >= workend)
5355 /* Compute the intersection of the dominators of all the predecessor blocks.
5358 If one of the predecessor blocks is the ENTRY block, then the
5359 intersection of the dominators of the predecessor blocks is
5360 defined as the null set. We can identify such blocks by the
5361 special value in the AUX field in the block structure. */
5362 if (b->aux == ENTRY_BLOCK_PTR)
5364 /* Do not clear the aux field for blocks which are
5365 successors of the ENTRY block. That way we never
5366 add them to the worklist again.
5368 The intersect of dominators of the preds of this block is
5369 defined as the null set. */
5370 sbitmap_zero (temp_bitmap[bb]);
5374 /* Clear the aux field of this block so it can be added to
5375 the worklist again if necessary. */
5377 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5380 /* Make sure each block always dominates itself. */
5381 SET_BIT (temp_bitmap[bb], bb);
5383 /* If the out state of this block changed, then we need to
5384 add the successors of this block to the worklist if they
5385 are not already on the worklist. */
5386 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5388 for (e = b->succ; e; e = e->succ_next)
5390 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5404 if (post_dominators)
5406 /* The optimistic setting of dominators requires us to put every
5407 block on the work list initially. */
5408 qin = qout = worklist;
5409 for (bb = 0; bb < n_basic_blocks; bb++)
5411 *qin++ = BASIC_BLOCK (bb);
5412 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5414 qlen = n_basic_blocks;
5417 /* We want a maximal solution, so initially assume everything post
5418 dominates everything else. */
5419 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5421 /* Mark predecessors of the exit block so we can identify them below. */
5422 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5423 e->src->aux = EXIT_BLOCK_PTR;
5425 /* Iterate until the worklist is empty. */
5428 /* Take the first entry off the worklist. */
5429 basic_block b = *qout++;
5430 if (qout >= workend)
5436 /* Compute the intersection of the post dominators of all the successor blocks.
5439 If one of the successor blocks is the EXIT block, then the
5440 intersection of the dominators of the successor blocks is
5441 defined as the null set. We can identify such blocks by the
5442 special value in the AUX field in the block structure. */
5443 if (b->aux == EXIT_BLOCK_PTR)
5445 /* Do not clear the aux field for blocks which are
5446 predecessors of the EXIT block. That way we never
5447 add them to the worklist again.
5449 The intersect of dominators of the succs of this block is
5450 defined as the null set. */
5451 sbitmap_zero (temp_bitmap[bb]);
5455 /* Clear the aux field of this block so it can be added to
5456 the worklist again if necessary. */
5458 sbitmap_intersection_of_succs (temp_bitmap[bb],
5459 post_dominators, bb);
5462 /* Make sure each block always post dominates itself. */
5463 SET_BIT (temp_bitmap[bb], bb);
5465 /* If the out state of this block changed, then we need to
5466 add the successors of this block to the worklist if they
5467 are not already on the worklist. */
5468 if (sbitmap_a_and_b (post_dominators[bb],
5469 post_dominators[bb],
5472 for (e = b->pred; e; e = e->pred_next)
5474 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5492 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5495 compute_immediate_dominators (idom, dominators)
5497 sbitmap *dominators;
5502 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5504 /* Begin with tmp(n) = dom(n) - { n }. */
5505 for (b = n_basic_blocks; --b >= 0; )
5507 sbitmap_copy (tmp[b], dominators[b]);
5508 RESET_BIT (tmp[b], b);
5511 /* Subtract out all of our dominator's dominators. */
5512 for (b = n_basic_blocks; --b >= 0; )
5514 sbitmap tmp_b = tmp[b];
5517 for (s = n_basic_blocks; --s >= 0; )
5518 if (TEST_BIT (tmp_b, s))
5519 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5522 /* Find the one bit set in the bitmap and put it in the output array. */
5523 for (b = n_basic_blocks; --b >= 0; )
5526 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5529 sbitmap_vector_free (tmp);
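/* Worked example (made up for illustration): in the straight-line CFG
   0 -> 1 -> 2, dom(2) = {0,1,2}, so tmp(2) starts as {0,1}; subtracting
   tmp(1) = {0}, the dominators of dominator 1, leaves the single bit
   {1}, giving idom(2) = 1.  */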
5532 /* Count for a single SET rtx, X. */
5535 count_reg_sets_1 (x, loop_depth)
5540 register rtx reg = SET_DEST (x);
5542 /* Find the register that's set/clobbered. */
5543 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5544 || GET_CODE (reg) == SIGN_EXTRACT
5545 || GET_CODE (reg) == STRICT_LOW_PART)
5546 reg = XEXP (reg, 0);
5548 if (GET_CODE (reg) == PARALLEL
5549 && GET_MODE (reg) == BLKmode)
5552 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5553 count_reg_sets_1 (XVECEXP (reg, 0, i), loop_depth);
5557 if (GET_CODE (reg) == REG)
5559 regno = REGNO (reg);
5560 if (regno >= FIRST_PSEUDO_REGISTER)
5562 /* Count (weighted) references, stores, etc. This counts a
5563 register twice if it is modified, but that is correct. */
5564 REG_N_SETS (regno)++;
5565 REG_N_REFS (regno) += loop_depth + 1;
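/* E.g. (illustration): a single SET of a pseudo at loop_depth == 2
   adds 2 + 1 == 3 to REG_N_REFS, so registers used in inner loops
   look more profitable to keep in hard registers.  */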
5570 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5571 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5574 count_reg_sets (x, loop_depth)
5578 register RTX_CODE code = GET_CODE (x);
5580 if (code == SET || code == CLOBBER)
5581 count_reg_sets_1 (x, loop_depth);
5582 else if (code == PARALLEL)
5585 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5587 code = GET_CODE (XVECEXP (x, 0, i));
5588 if (code == SET || code == CLOBBER)
5589 count_reg_sets_1 (XVECEXP (x, 0, i), loop_depth);
5594 /* Increment REG_N_REFS by the current loop depth for each register reference found in X. */
5598 count_reg_references (x, loop_depth)
5602 register RTX_CODE code;
5605 code = GET_CODE (x);
5625 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
5627 if (GET_CODE (XEXP (x, 0)) == MEM)
5628 count_reg_references (XEXP (XEXP (x, 0), 0), loop_depth);
5632 /* While we're here, optimize this case. */
5635 /* If the SUBREG is not of a register, don't optimize. */
5636 if (GET_CODE (x) != REG)
5638 count_reg_references (x, loop_depth);
5642 /* ... fall through ... */
5645 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5646 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5651 register rtx testreg = SET_DEST (x);
5654 /* If storing into MEM, don't show it as being used. But do
5655 show the address as being used. */
5656 if (GET_CODE (testreg) == MEM)
5658 count_reg_references (XEXP (testreg, 0), loop_depth);
5659 count_reg_references (SET_SRC (x), loop_depth);
5663 /* Storing in STRICT_LOW_PART is like storing in a reg
5664 in that this SET might be dead, so ignore it in TESTREG,
5665 but in some other ways it is like using the reg.
5667 Storing in a SUBREG or a bit field is like storing the entire
5668 register in that if the register's value is not used
5669 then this SET is not needed. */
5670 while (GET_CODE (testreg) == STRICT_LOW_PART
5671 || GET_CODE (testreg) == ZERO_EXTRACT
5672 || GET_CODE (testreg) == SIGN_EXTRACT
5673 || GET_CODE (testreg) == SUBREG)
5675 /* Modifying a single register in an alternate mode
5676 does not use any of the old value. But these other
5677 ways of storing in a register do use the old value. */
5678 if (GET_CODE (testreg) == SUBREG
5679 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5684 testreg = XEXP (testreg, 0);
5687 /* If this is a store into a register,
5688 recursively scan the value being stored. */
5690 if ((GET_CODE (testreg) == PARALLEL
5691 && GET_MODE (testreg) == BLKmode)
5692 || GET_CODE (testreg) == REG)
5694 count_reg_references (SET_SRC (x), loop_depth);
5696 count_reg_references (SET_DEST (x), loop_depth);
5706 /* Recursively scan the operands of this expression. */
5709 register const char *fmt = GET_RTX_FORMAT (code);
5712 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5716 /* Tail recursive case: save a function call level. */
5722 count_reg_references (XEXP (x, i), loop_depth);
5724 else if (fmt[i] == 'E')
5727 for (j = 0; j < XVECLEN (x, i); j++)
5728 count_reg_references (XVECEXP (x, i, j), loop_depth);
5734 /* Recompute register set/reference counts immediately prior to register allocation.
5737 This avoids problems with set/reference counts changing to/from values
5738 which have special meanings to the register allocators.
5740 Additionally, the reference counts are the primary component used by the
5741 register allocators to prioritize pseudos for allocation to hard regs.
5742 More accurate reference counts generally lead to better register allocation.
5744 F is the first insn to be scanned.
5746 LOOP_STEP denotes how much loop_depth should be incremented per
5747 loop nesting level in order to increase the ref count more for
5748 references in a loop.
5750 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5751 possibly other information which is used by the register allocators. */
5754 recompute_reg_usage (f, loop_step)
5755 rtx f ATTRIBUTE_UNUSED;
5756 int loop_step ATTRIBUTE_UNUSED;
5763 /* Clear out the old data. */
5764 max_reg = max_reg_num ();
5765 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5771 /* Scan each insn in the chain and count how many times each register is set/used. */
5773 for (index = 0; index < n_basic_blocks; index++)
5775 basic_block bb = BASIC_BLOCK (index);
5776 loop_depth = bb->loop_depth;
5777 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5779 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5783 /* This call will increment REG_N_SETS for each SET or CLOBBER
5784 of a register in INSN. It will also increment REG_N_REFS
5785 by the loop depth for each set of a register in INSN. */
5786 count_reg_sets (PATTERN (insn), loop_depth);
5788 /* count_reg_sets does not detect autoincrement address modes, so
5789 detect them here by looking at the notes attached to INSN. */
5790 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5792 if (REG_NOTE_KIND (links) == REG_INC)
5793 /* Count (weighted) references, stores, etc. This
5794 counts a register twice if it is modified, but that is correct. */
5796 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5799 /* This call will increment REG_N_REFS by the current loop depth
5800 for each reference to a register in INSN. */
5801 count_reg_references (PATTERN (insn), loop_depth);
5803 /* count_reg_references will not include counts for arguments to
5804 function calls, so detect them here by examining the
5805 CALL_INSN_FUNCTION_USAGE data. */
5806 if (GET_CODE (insn) == CALL_INSN)
5810 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5812 note = XEXP (note, 1))
5813 if (GET_CODE (XEXP (note, 0)) == USE)
5814 count_reg_references (XEXP (XEXP (note, 0), 0),
5818 if (insn == bb->end)
5824 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5825 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5826 of the number of registers that died. */
5829 count_or_remove_death_notes (blocks, kill)
5835 for (i = n_basic_blocks - 1; i >= 0; --i)
5840 if (blocks && ! TEST_BIT (blocks, i))
5843 bb = BASIC_BLOCK (i);
5845 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5847 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5849 rtx *pprev = &REG_NOTES (insn);
5854 switch (REG_NOTE_KIND (link))
5857 if (GET_CODE (XEXP (link, 0)) == REG)
5859 rtx reg = XEXP (link, 0);
5862 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5865 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5873 rtx next = XEXP (link, 1);
5874 free_EXPR_LIST_node (link);
5875 *pprev = link = next;
5881 pprev = &XEXP (link, 1);
5888 if (insn == bb->end)
5896 /* Record INSN's block as BB. */
5899 set_block_for_insn (insn, bb)
5903 size_t uid = INSN_UID (insn);
5904 if (uid >= basic_block_for_insn->num_elements)
5908 /* Add one-eighth the size so we don't keep calling xrealloc. */
5909 new_size = uid + (uid + 7) / 8;
5911 VARRAY_GROW (basic_block_for_insn, new_size);
5913 VARRAY_BB (basic_block_for_insn, uid) = bb;
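/* E.g. (illustration): a uid of 100 just past the end grows the map to
   100 + (100 + 7) / 8 == 113 elements, so a run of freshly made insns
   causes only occasional reallocations instead of one per insn.  */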
5916 /* Record INSN's block number as BB. */
5917 /* ??? This has got to go. */
5920 set_block_num (insn, bb)
5924 set_block_for_insn (insn, BASIC_BLOCK (bb));
5927 /* Verify the CFG consistency. This function checks some CFG invariants and
5928 aborts when something is wrong. The hope is that this function will help
5929 to convert many optimization passes to preserve CFG consistency.
5931 Currently it does the following checks:
5933 - test head/end pointers
5934 - overlapping of basic blocks
5935 - edge list correctness
5936 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5937 - tails of basic blocks (ensure that the boundary is necessary)
5938 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5939 and NOTE_INSN_BASIC_BLOCK
5940 - check that all insns are in the basic blocks
5941 (except the switch handling code, barriers and notes)
5942 - check that all returns are followed by barriers
5944 In the future it can be extended to check a lot of other stuff as well
5945 (reachability of basic blocks, life information, etc. etc.). */
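/* A typical (illustrative) use is to run the check after any pass that
   edits the insn chain or the CFG, e.g.

     #ifdef ENABLE_CHECKING
     verify_flow_info ();
     #endif

   so that corruption is reported close to the pass that caused it.  */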
5950 const int max_uid = get_max_uid ();
5951 const rtx rtx_first = get_insns ();
5952 basic_block *bb_info;
5956 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5958 /* First pass checks head/end pointers and sets the bb_info array used by later passes. */
5960 for (i = n_basic_blocks - 1; i >= 0; i--)
5962 basic_block bb = BASIC_BLOCK (i);
5964 /* Check the head pointer and make sure that it is pointing into the insn list. */
5966 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5971 error ("Head insn %d for block %d not found in the insn stream.",
5972 INSN_UID (bb->head), bb->index);
5976 /* Check the end pointer and make sure that it is pointing into the insn list. */
5978 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5980 if (bb_info[INSN_UID (x)] != NULL)
5982 error ("Insn %d is in multiple basic blocks (%d and %d)",
5983 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5986 bb_info[INSN_UID (x)] = bb;
5993 error ("End insn %d for block %d not found in the insn stream.",
5994 INSN_UID (bb->end), bb->index);
5999 /* Now check the basic blocks (boundaries etc.) */
6000 for (i = n_basic_blocks - 1; i >= 0; i--)
6002 basic_block bb = BASIC_BLOCK (i);
6003 /* Check correctness of edge lists. */
6011 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6013 fprintf (stderr, "Predecessor: ");
6014 dump_edge_info (stderr, e, 0);
6015 fprintf (stderr, "\nSuccessor: ");
6016 dump_edge_info (stderr, e, 1);
6020 if (e->dest != EXIT_BLOCK_PTR)
6022 edge e2 = e->dest->pred;
6023 while (e2 && e2 != e)
6027 error ("Basic block %i edge lists are corrupted", bb->index);
6039 error ("Basic block %d pred edge is corrupted", bb->index);
6040 fputs ("Predecessor: ", stderr);
6041 dump_edge_info (stderr, e, 0);
6042 fputs ("\nSuccessor: ", stderr);
6043 dump_edge_info (stderr, e, 1);
6044 fputc ('\n', stderr);
6047 if (e->src != ENTRY_BLOCK_PTR)
6049 edge e2 = e->src->succ;
6050 while (e2 && e2 != e)
6054 error ("Basic block %i edge lists are corrupted", bb->index);
6061 /* OK, the pointers are correct. Now check the header of the basic
6062 block. It ought to contain an optional CODE_LABEL followed
6063 by the NOTE_INSN_BASIC_BLOCK note. */
6065 if (GET_CODE (x) == CODE_LABEL)
6069 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6075 if (GET_CODE (x) != NOTE
6076 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6077 || NOTE_BASIC_BLOCK (x) != bb)
6079 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6086 /* Do checks for empty blocks here */
6093 if (GET_CODE (x) == NOTE
6094 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6096 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6097 INSN_UID (x), bb->index);
6104 if (GET_CODE (x) == JUMP_INSN
6105 || GET_CODE (x) == CODE_LABEL
6106 || GET_CODE (x) == BARRIER)
6108 error ("In basic block %d:", bb->index);
6109 fatal_insn ("Flow control insn inside a basic block", x);
6120 if (!bb_info[INSN_UID (x)])
6122 switch (GET_CODE (x))
6129 /* An addr_vec is placed outside any basic block. */
6131 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6132 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6133 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6138 /* But in any case, non-deletable labels can appear anywhere. */
6142 fatal_insn ("Insn outside basic block", x);
6146 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6147 && GET_CODE (x) == JUMP_INSN
6148 && returnjump_p (x) && ! condjump_p (x)
6149 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6150 fatal_insn ("Return not followed by barrier", x);
6162 /* Functions to access an edge list with a vector representation.
6163 Enough data is kept such that given an index number, the
6164 pred and succ that edge represents can be determined, or
6165 given a pred and a succ, its index number can be returned.
6166 This allows algorithms which consume a lot of memory to
6167 represent the normally full matrix of edge (pred,succ) with a
6168 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6169 wasted space in the client code due to sparse flow graphs. */
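/* For example (illustrative): given an edge from block 2 to block 5,
   EDGE_INDEX (elist, BASIC_BLOCK (2), BASIC_BLOCK (5)) yields its slot
   in the vector, or EDGE_INDEX_NO_EDGE if no such edge exists, and
   INDEX_EDGE_PRED_BB / INDEX_EDGE_SUCC_BB map a slot back to its two
   blocks, so per-edge dataflow data can live in a dense array.  */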
6171 /* This function initializes the edge list. Basically the entire
6172 flowgraph is processed, and all edges are assigned a number,
6173 and the data structure is filled in. */
6177 struct edge_list *elist;
6183 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6187 /* Determine the number of edges in the flow graph by counting successor
6188 edges on each basic block. */
6189 for (x = 0; x < n_basic_blocks; x++)
6191 basic_block bb = BASIC_BLOCK (x);
6193 for (e = bb->succ; e; e = e->succ_next)
6196 /* Don't forget successors of the entry block. */
6197 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6200 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6201 elist->num_blocks = block_count;
6202 elist->num_edges = num_edges;
6203 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6207 /* Follow successors of the entry block, and register these edges. */
6208 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6210 elist->index_to_edge[num_edges] = e;
6214 for (x = 0; x < n_basic_blocks; x++)
6216 basic_block bb = BASIC_BLOCK (x);
6218 /* Follow all successors of blocks, and register these edges. */
6219 for (e = bb->succ; e; e = e->succ_next)
6221 elist->index_to_edge[num_edges] = e;
6228 /* This function frees the memory associated with an edge list. */
6230 free_edge_list (elist)
6231 struct edge_list *elist;
6235 free (elist->index_to_edge);
6240 /* This function provides debug output showing an edge list. */
6242 print_edge_list (f, elist)
6244 struct edge_list *elist;
6247 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6248 elist->num_blocks - 2, elist->num_edges);
6250 for (x = 0; x < elist->num_edges; x++)
6252 fprintf (f, " %-4d - edge(", x);
6253 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6254 fprintf (f,"entry,");
6256 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6258 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6259 fprintf (f,"exit)\n");
6261 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6265 /* This function provides an internal consistency check of an edge list,
6266 verifying that all edges are present, and that there are no
6269 verify_edge_list (f, elist)
6271 struct edge_list *elist;
6273 int x, pred, succ, index;
6276 for (x = 0; x < n_basic_blocks; x++)
6278 basic_block bb = BASIC_BLOCK (x);
6280 for (e = bb->succ; e; e = e->succ_next)
6282 pred = e->src->index;
6283 succ = e->dest->index;
6284 index = EDGE_INDEX (elist, e->src, e->dest);
6285 if (index == EDGE_INDEX_NO_EDGE)
6287 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6290 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6291 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6292 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6293 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6294 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6295 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6298 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6300 pred = e->src->index;
6301 succ = e->dest->index;
6302 index = EDGE_INDEX (elist, e->src, e->dest);
6303 if (index == EDGE_INDEX_NO_EDGE)
6305 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6308 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6309 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6310 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6311 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6312 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6313 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6315 /* We've verified that all the edges are in the list; now let's make sure
6316 there are no spurious edges in the list. */
6318 for (pred = 0 ; pred < n_basic_blocks; pred++)
6319 for (succ = 0 ; succ < n_basic_blocks; succ++)
6321 basic_block p = BASIC_BLOCK (pred);
6322 basic_block s = BASIC_BLOCK (succ);
6326 for (e = p->succ; e; e = e->succ_next)
6332 for (e = s->pred; e; e = e->pred_next)
6338 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6339 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6340 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6342 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6343 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6344 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6345 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6346 BASIC_BLOCK (succ)));
6348 for (succ = 0 ; succ < n_basic_blocks; succ++)
6350 basic_block p = ENTRY_BLOCK_PTR;
6351 basic_block s = BASIC_BLOCK (succ);
6355 for (e = p->succ; e; e = e->succ_next)
6361 for (e = s->pred; e; e = e->pred_next)
6367 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6368 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6369 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6371 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6372 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6373 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6374 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6375 BASIC_BLOCK (succ)));
6377 for (pred = 0 ; pred < n_basic_blocks; pred++)
6379 basic_block p = BASIC_BLOCK (pred);
6380 basic_block s = EXIT_BLOCK_PTR;
6384 for (e = p->succ; e; e = e->succ_next)
6390 for (e = s->pred; e; e = e->pred_next)
6396 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6397 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6398 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6400 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6401 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6402 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6403 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6408 /* This routine will determine what, if any, edge there is between
6409 a specified predecessor and successor. */
6412 find_edge_index (edge_list, pred, succ)
6413 struct edge_list *edge_list;
6414 basic_block pred, succ;
6417 for (x = 0; x < NUM_EDGES (edge_list); x++)
6419 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6420 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6423 return (EDGE_INDEX_NO_EDGE);
6426 /* This function will remove an edge from the flow graph. */
6431 edge last_pred = NULL;
6432 edge last_succ = NULL;
6434 basic_block src, dest;
6437 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6443 last_succ->succ_next = e->succ_next;
6445 src->succ = e->succ_next;
6447 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6453 last_pred->pred_next = e->pred_next;
6455 dest->pred = e->pred_next;
6461 /* This routine will remove any fake successor edges for a basic block.
6462 When the edge is removed, it is also removed from whatever predecessor list it is in. */
6465 remove_fake_successors (bb)
6469 for (e = bb->succ; e ; )
6473 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6478 /* This routine will remove all fake edges from the flow graph. If
6479 we remove all fake successors, it will automatically remove all
6480 fake predecessors. */
6482 remove_fake_edges ()
6486 for (x = 0; x < n_basic_blocks; x++)
6487 remove_fake_successors (BASIC_BLOCK (x));
6489 /* We've handled all successors except the entry block's. */
6490 remove_fake_successors (ENTRY_BLOCK_PTR);
6493 /* This function will add a fake edge between any block which has no
6494 successors, and the exit block. Some data flow equations require these edges to exist. */
6497 add_noreturn_fake_exit_edges ()
6501 for (x = 0; x < n_basic_blocks; x++)
6502 if (BASIC_BLOCK (x)->succ == NULL)
6503 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
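/* E.g. (illustration): a block ending in a call to a non-returning
   function such as abort has no successors; without the fake edge to
   EXIT_BLOCK_PTR, backward problems like the post-dominator
   computation above could never propagate information out of it.  */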
6506 /* Dump the list of basic blocks in the bitmap NODES. */
6508 flow_nodes_print (str, nodes, file)
6510 const sbitmap nodes;
6515 fprintf (file, "%s { ", str);
6516 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6517 fputs ("}\n", file);
6521 /* Dump the list of exiting edges in the array EDGES. */
6523 flow_exits_print (str, edges, num_edges, file)
6531 fprintf (file, "%s { ", str);
6532 for (i = 0; i < num_edges; i++)
6533 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6534 fputs ("}\n", file);
6538 /* Dump loop related CFG information. */
6540 flow_loops_cfg_dump (loops, file)
6541 const struct loops *loops;
6546 if (! loops->num || ! file || ! loops->cfg.dom)
6549 for (i = 0; i < n_basic_blocks; i++)
6553 fprintf (file, ";; %d succs { ", i);
6554 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6555 fprintf (file, "%d ", succ->dest->index);
6556 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6560 /* Dump the DFS node order. */
6561 if (loops->cfg.dfs_order)
6563 fputs (";; DFS order: ", file);
6564 for (i = 0; i < n_basic_blocks; i++)
6565 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6571 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6573 flow_loop_nested_p (outer, loop)
6577 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6581 /* Dump the loop information specified by LOOPS to the stream FILE. */
6583 flow_loops_dump (loops, file, verbose)
6584 const struct loops *loops;
6591 num_loops = loops->num;
6592 if (! num_loops || ! file)
6595 fprintf (file, ";; %d loops found, %d levels\n",
6596 num_loops, loops->levels);
6598 for (i = 0; i < num_loops; i++)
6600 struct loop *loop = &loops->array[i];
6602 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6603 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6604 loop->header->index, loop->latch->index,
6605 loop->pre_header ? loop->pre_header->index : -1,
6606 loop->depth, loop->level,
6607 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6608 fprintf (file, ";; %d", loop->num_nodes);
6609 flow_nodes_print (" nodes", loop->nodes, file);
6610 fprintf (file, ";; %d", loop->num_exits);
6611 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6617 for (j = 0; j < i; j++)
6619 struct loop *oloop = &loops->array[j];
6621 if (loop->header == oloop->header)
6626 smaller = loop->num_nodes < oloop->num_nodes;
6628 /* If the union of LOOP and OLOOP is different from
6629 the larger of LOOP and OLOOP then LOOP and OLOOP
6630 must be disjoint. */
6631 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6632 smaller ? oloop : loop);
6633 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6634 loop->header->index, i, j,
6635 disjoint ? "disjoint" : "nested");
6642 /* Print diagnostics to compare our concept of a loop with
6643 what the loop notes say. */
6644 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6645 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6646 != NOTE_INSN_LOOP_BEG)
6647 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6648 INSN_UID (PREV_INSN (loop->first->head)));
6649 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6650 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6651 != NOTE_INSN_LOOP_END)
6652 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6653 INSN_UID (NEXT_INSN (loop->last->end)));
6658 flow_loops_cfg_dump (loops, file);
6662 /* Free all the memory allocated for LOOPS. */
6664 flow_loops_free (loops)
6665 struct loops *loops;
6674 /* Free the loop descriptors. */
6675 for (i = 0; i < loops->num; i++)
6677 struct loop *loop = &loops->array[i];
6680 sbitmap_free (loop->nodes);
6684 free (loops->array);
6685 loops->array = NULL;
6688 sbitmap_vector_free (loops->cfg.dom);
6689 if (loops->cfg.dfs_order)
6690 free (loops->cfg.dfs_order);
6692 sbitmap_free (loops->shared_headers);
6697 /* Find the exits from the loop using the bitmap of loop nodes NODES
6698 and store them in the EXITS array. Return the number of exits from the loop. */
6701 flow_loop_exits_find (nodes, exits)
6702 const sbitmap nodes;
6711 /* Check all nodes within the loop to see if there are any
6712 successors not in the loop. Note that a node may have multiple exiting edges. */
6715 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6716 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6718 basic_block dest = e->dest;
6720 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6728 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6730 /* Store all exiting edges into an array. */
6732 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6733 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6735 basic_block dest = e->dest;
6737 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6738 (*exits)[num_exits++] = e;
6746 /* Find the nodes contained within the loop with header HEADER and
6747 latch LATCH and store in NODES. Return the number of nodes within the loop. */
6750 flow_loop_nodes_find (header, latch, nodes)
6759 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6762 /* Start with only the loop header in the set of loop nodes. */
6763 sbitmap_zero (nodes);
6764 SET_BIT (nodes, header->index);
6766 header->loop_depth++;
6768 /* Push the loop latch onto the stack. */
6769 if (! TEST_BIT (nodes, latch->index))
6771 SET_BIT (nodes, latch->index);
6772 latch->loop_depth++;
6774 stack[sp++] = latch;
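/* Illustrative example (made-up block numbers): for a loop
   2 -> 3 -> 4 -> 2 with header 2 and latch 4, the walk below follows
   predecessor edges from the latch, stops at the already-marked
   header, and collects the node set {2, 3, 4}.  */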
6783 for (e = node->pred; e; e = e->pred_next)
6785 basic_block ancestor = e->src;
6787 /* If the ancestor is not marked as part of the loop, add it to the
6788 set of loop nodes and push it onto the stack. */
6789 if (ancestor != ENTRY_BLOCK_PTR
6790 && ! TEST_BIT (nodes, ancestor->index))
6792 SET_BIT (nodes, ancestor->index);
6793 ancestor->loop_depth++;
6795 stack[sp++] = ancestor;
6804 /* Compute the depth first search order and store in the array
6805 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6806 number of nodes visited. */
6808 flow_depth_first_order_compute (dfs_order)
6817 /* Allocate stack for back-tracking up CFG. */
6818 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6821 /* Allocate bitmap to track nodes that have been visited. */
6822 visited = sbitmap_alloc (n_basic_blocks);
6824 /* None of the nodes in the CFG have been visited yet. */
6825 sbitmap_zero (visited);
6827 /* Start with the first successor edge from the entry block. */
6828 e = ENTRY_BLOCK_PTR->succ;
6831 basic_block src = e->src;
6832 basic_block dest = e->dest;
6834 /* Mark that we have visited this node. */
6835 if (src != ENTRY_BLOCK_PTR)
6836 SET_BIT (visited, src->index);
6838 /* If this node has not been visited before, push the current
6839 edge on to the stack and proceed with the first successor
6840 edge of this node. */
6841 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6849 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6852 /* DEST has no successors (for example, a non-returning
6853 function is called) so do not push the current edge
6854 but carry on with its next successor. */
6855 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6856 SET_BIT (visited, dest->index);
6859 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6861 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6863 /* Pop edge off stack. */
6871 sbitmap_free (visited);
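/* Illustrative example: for the chain 0 -> 1 -> 2 the blocks finish in
   the order 2, 1, 0, so the completion counter above yields
   dfs_order = {0, 1, 2}, i.e. reverse completion (reverse postorder)
   order, which is why outer loop headers are seen before the loops
   they enclose.  */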
6873 /* The number of nodes visited should not be greater than n_basic_blocks. */
6875 if (dfsnum > n_basic_blocks)
6878 /* There are some nodes left in the CFG that are unreachable. */
6879 if (dfsnum < n_basic_blocks)
6885 /* Return the block for the pre-header of the loop with header
6886 HEADER where DOM specifies the dominator information. Return NULL if
6887 there is no pre-header. */
6889 flow_loop_pre_header_find (header, dom)
6893 basic_block pre_header;
6896 /* If block p is a predecessor of the header and is the only block
6897 that the header does not dominate, then it is the pre-header. */
6899 for (e = header->pred; e; e = e->pred_next)
6901 basic_block node = e->src;
6903 if (node != ENTRY_BLOCK_PTR
6904 && ! TEST_BIT (dom[node->index], header->index))
6906 if (pre_header == NULL)
6910 /* There are multiple edges into the header from outside
6911 the loop so there is no pre-header block. */
6921 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6922 previously added. The insertion algorithm assumes that the loops
6923 are added in the order found by a depth first search of the CFG. */
6925 flow_loop_tree_node_add (prevloop, loop)
6926 struct loop *prevloop;
6930 if (flow_loop_nested_p (prevloop, loop))
6932 prevloop->inner = loop;
6933 loop->outer = prevloop;
6937 while (prevloop->outer)
6939 if (flow_loop_nested_p (prevloop->outer, loop))
6941 prevloop->next = loop;
6942 loop->outer = prevloop->outer;
6945 prevloop = prevloop->outer;
6948 prevloop->next = loop;
6953 /* Build the loop hierarchy tree for LOOPS. */
6955 flow_loops_tree_build (loops)
6956 struct loops *loops;
6961 num_loops = loops->num;
6965 /* Root the loop hierarchy tree with the first loop found.
6966 Since we used a depth first search this should be the outermost loop. */
6968 loops->tree = &loops->array[0];
6969 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6971 /* Add the remaining loops to the tree. */
6972 for (i = 1; i < num_loops; i++)
6973 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
/* Helper function to compute loop nesting depth and enclosed loop level
   for the natural loop specified by LOOP at the loop depth DEPTH.
   Returns the loop level.  */

static int
flow_loop_level_compute (loop, depth)
     struct loop *loop;
     int depth;
{
  struct loop *inner;
  int level = 1;

  if (! loop)
    return 0;

  /* Traverse loop tree assigning depth and computing level as the
     maximum level of all the inner loops of this loop.  The loop
     level is equivalent to the height of the loop in the loop tree
     and corresponds to the number of enclosed loop levels (including
     itself).  */
  for (inner = loop->inner; inner; inner = inner->next)
    {
      int ilevel;

      ilevel = flow_loop_level_compute (inner, depth + 1) + 1;

      if (ilevel > level)
	level = ilevel;
    }
  loop->level = level;
  loop->depth = depth;
  return level;
}
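
/* Example (added commentary): for a triply nested loop L1 containing
   L2 containing L3, the depths assigned are 1, 2 and 3 from outermost
   to innermost, while the levels are 3, 2 and 1.  Depth counts the
   loops enclosing a loop from the top of the tree; level counts the
   loop levels it encloses, including itself.  */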
/* Compute the loop nesting depth and enclosed loop level for the loop
   hierarchy tree specified by LOOPS.  Return the maximum enclosed loop
   level.  */

static int
flow_loops_level_compute (loops)
     struct loops *loops;
{
  struct loop *loop;
  int level;
  int levels = 0;

  /* Traverse all the outer level loops.  */
  for (loop = loops->tree; loop; loop = loop->next)
    {
      level = flow_loop_level_compute (loop, 1);
      if (level > levels)
	levels = level;
    }

  return levels;
}
/* Find all the natural loops in the function and save in LOOPS structure
   and recalculate loop_depth information in basic block structures.
   Return the number of natural loops found.  */

int
flow_loops_find (loops)
     struct loops *loops;
{
  int i;
  int b;
  int num_loops;
  edge e;
  sbitmap headers;
  sbitmap *dom;
  int *dfs_order;

  loops->num = 0;
  loops->array = NULL;
  loops->tree = NULL;
  dfs_order = NULL;

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks == 0)
    return 0;

  /* Compute the dominators.  */
  dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  compute_flow_dominators (dom, NULL);
  /* Count the number of loop edges (back edges).  This should be the
     same as the number of natural loops.  Also clear the loop_depth
     fields: since we work from inner to outer loops in a nest,
     flow_loop_nodes_find increments loop_depth for the nodes within
     the current loop, which happens to enclose inner loops.  */

  num_loops = 0;
  for (b = 0; b < n_basic_blocks; b++)
    {
      BASIC_BLOCK (b)->loop_depth = 0;
      for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
	{
	  basic_block latch = e->src;

	  /* Look for back edges where a predecessor is dominated
	     by this block.  A natural loop has a single entry
	     node (header) that dominates all the nodes in the
	     loop.  It also has a single back edge to the header
	     from a latch node.  Note that multiple natural loops
	     may share the same header.  */
	  if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
	    num_loops++;
	}
    }
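
  /* Concrete example (added commentary): the fragment

	 do { body; } while (cond);

     typically yields one block B holding the body and the test, with
     an edge from B back to itself.  Since every block dominates
     itself, the edge B -> B is counted as one back edge and thus one
     natural loop, with B as both header and latch.  */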
  if (num_loops)
    {
      /* Compute depth first search order of the CFG so that outer
	 natural loops will be found before inner natural loops.  */
      dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      flow_depth_first_order_compute (dfs_order);

      /* Allocate loop structures.  */
      loops->array
	= (struct loop *) xcalloc (num_loops, sizeof (struct loop));

      headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (headers);

      loops->shared_headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (loops->shared_headers);
      /* Find and record information about all the natural loops
	 in the CFG.  */
      num_loops = 0;
      for (b = 0; b < n_basic_blocks; b++)
	{
	  basic_block header;

	  /* Search the nodes of the CFG in DFS order so that we can
	     find outer loops first.  */
	  header = BASIC_BLOCK (dfs_order[b]);

	  /* Look for all the possible latch blocks for this header.  */
	  for (e = header->pred; e; e = e->pred_next)
	    {
	      basic_block latch = e->src;

	      /* Look for back edges where a predecessor is dominated
		 by this block.  A natural loop has a single entry
		 node (header) that dominates all the nodes in the
		 loop.  It also has a single back edge to the header
		 from a latch node.  Note that multiple natural loops
		 may share the same header.  */
	      if (latch != ENTRY_BLOCK_PTR
		  && TEST_BIT (dom[latch->index], header->index))
		{
		  struct loop *loop;

		  loop = loops->array + num_loops;

		  loop->header = header;
		  loop->latch = latch;

		  /* Keep track of blocks that are loop headers so
		     that we can tell which loops should be merged.  */
		  if (TEST_BIT (headers, header->index))
		    SET_BIT (loops->shared_headers, header->index);
		  SET_BIT (headers, header->index);

		  /* Find nodes contained within the loop.  */
		  loop->nodes = sbitmap_alloc (n_basic_blocks);
		  loop->num_nodes
		    = flow_loop_nodes_find (header, latch, loop->nodes);

		  /* Compute first and last blocks within the loop.
		     These are often the same as the loop header and
		     loop latch respectively, but this is not always
		     the case.  */
		  loop->first
		    = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
		  loop->last
		    = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));

		  /* Find edges which exit the loop.  Note that a node
		     may have several exit edges.  */
		  loop->num_exits
		    = flow_loop_exits_find (loop->nodes, &loop->exits);

		  /* Look to see if the loop has a pre-header node.  */
		  loop->pre_header
		    = flow_loop_pre_header_find (header, dom);

		  num_loops++;
		}
	    }
	}
      /* Natural loops with shared headers may either be disjoint or
	 nested.  Disjoint loops with shared headers cannot be inner
	 loops and should be merged.  For now just mark loops that share
	 headers.  */
      for (i = 0; i < num_loops; i++)
	if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
	  loops->array[i].shared = 1;

      sbitmap_free (headers);
    }
  loops->num = num_loops;

  /* Save CFG derived information to avoid recomputing it.  */
  loops->cfg.dom = dom;
  loops->cfg.dfs_order = dfs_order;

  /* Build the loop hierarchy tree.  */
  flow_loops_tree_build (loops);

  /* Assign the loop nesting depth and enclosed loop level for each
     loop.  */
  loops->levels = flow_loops_level_compute (loops);

  return num_loops;
}
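
/* Minimal usage sketch (added commentary; the caller shown is
   illustrative, not part of this file):

       struct loops loops;

       if (flow_loops_find (&loops))
	 {
	   ... each loops.array[i] now records the header, latch,
	   ... node bitmap, exit edges and pre-header (if any) of
	   ... one natural loop ...
	 }

   The dominators and DFS ordering are saved in loops.cfg so that
   consumers of the loop information need not recompute them.  */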
/* Return non-zero if edge E enters header of LOOP from outside of LOOP.  */

int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     const edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}
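
/* This predicate is useful, for example, when walking the predecessor
   edges of a loop header to separate loop entry edges from the back
   edge (added commentary).  */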
/* Clear LOG_LINKS fields of insns in a chain.  */

static void
clear_log_links (insns)
     rtx insns;
{
  rtx i;

  for (i = insns; i; i = NEXT_INSN (i))
    if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
      LOG_LINKS (i) = 0;
}