/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.
   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.
   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
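   For instance, a later pass can test liveness at a block boundary with
   the REGNO_REG_SET_P regset accessor (an illustrative sketch, not code
   from this file; REGNO and BB stand for the register number and block
   in question):

	if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
	  ...		register REGNO is live on entry to block BB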
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.
   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
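   In outline, such a scan looks like this (an illustrative sketch only:
   it handles just single-register SETs, ignores REG_UNUSED notes, and
   assumes LIVE is a regset and BB the block being scanned).  Registers
   named in REG_DEAD notes die at the insn; a register written by the
   insn becomes live after it:

	COPY_REG_SET (live, bb->global_live_at_start);
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	      {
		rtx note, set;

		for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
		  if (REG_NOTE_KIND (note) == REG_DEAD)
		    CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));

		if ((set = single_set (insn)) != NULL_RTX
		    && GET_CODE (SET_DEST (set)) == REG)
		  SET_REGNO_REG_SET (live, REGNO (SET_DEST (set)));
	      }
	    if (insn == bb->end)
	      break;
	  }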
   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.
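   For example (an illustrative sketch of the transformation, with
   register 65 standing in for any suitable pseudo), an insn using
   (mem (reg 65)) that is followed by

	(set (reg 65) (plus (reg 65) (const_int 4)))

   has its address rewritten to (mem (post_inc (reg 65))) and the
   separate add insn is deleted.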
   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
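   A later pass can consult these through the register-info accessors
   (REG_N_SETS, REG_BASIC_BLOCK, and so on); for instance (an
   illustrative use, not taken from this file), the test

	if (REG_N_SETS (regno) == 1 && REG_BASIC_BLOCK (regno) >= 0)
	  ...

   picks out a pseudo that is set exactly once and referenced only
   within a single basic block.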
   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
/* Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- pre/post modify transformation  */
#include "basic-block.h"
#include "insn-config.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif

#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif

#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
164 /* The contents of the current function definition are allocated
165 in this obstack, and all are freed at the end of the function.
166 For top-level functions, this is temporary_obstack.
167 Separate obstacks are made for nested functions. */
169 extern struct obstack *function_obstack;
171 /* Number of basic blocks in the current function. */
175 /* Number of edges in the current function. */
179 /* The basic block array. */
181 varray_type basic_block_info;
183 /* The special entry and exit blocks. */
185 struct basic_block_def entry_exit_blocks[2]
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
194 ENTRY_BLOCK, /* index */
196 -1, -1 /* eh_beg, eh_end */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
207 EXIT_BLOCK, /* index */
209 -1, -1 /* eh_beg, eh_end */
213 /* Nonzero if the second flow pass has completed. */
216 /* Maximum register number used in this function, plus one. */
220 /* Indexed by n, giving various register information */
222 varray_type reg_n_info;
224 /* Size of a regset for the current function,
225 in (1) bytes and (2) elements. */
230 /* Regset of regs live when calls to `setjmp'-like functions happen. */
231 /* ??? Does this exist only for the setjmp-clobbered warning message? */
233 regset regs_live_at_setjmp;
235 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
236 that have to go in the same hard reg.
237 The first two regs in the list are a pair, and the next two
238 are another pair, etc. */
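/* For instance (an illustrative walk, not code from this file;
   use_pair is a hypothetical consumer of one such pairing), the
   pairs can be visited two list elements at a time:

	for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
	  use_pair (XEXP (x, 0), XEXP (XEXP (x, 1), 0));
*/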
241 /* Set of registers that may be eliminable. These are handled specially
242 in updating regs_ever_live. */
244 static HARD_REG_SET elim_reg_set;
246 /* The basic block structure for every insn, indexed by uid. */
248 varray_type basic_block_for_insn;
250 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
251 /* ??? Should probably be using LABEL_NUSES instead. It would take a
252 bit of surgery to be able to use or co-opt the routines in jump. */
254 static rtx label_value_list;
256 /* For use in communicating between propagate_block and its subroutines.
257 Holds all information needed to compute life and def-use information. */
259 struct propagate_block_info
261 /* The basic block we're considering. */
264 /* Bit N is set if register N is conditionally or unconditionally live. */
267 /* Bit N is set if register N is unconditionally dead this insn. */
270 /* Bit N is set if register N is live this insn. */
273 /* Element N is the next insn that uses (hard or pseudo) register N
274 within the current basic block; or zero, if there is no such insn. */
277 /* Contains a list of all the MEMs we are tracking for dead store
281 /* If non-null, record the set of registers set in the basic block. */
284 /* Non-zero if the value of CC0 is live. */
/* Flags controlling the set of information propagate_block collects.  */
291 /* Forward declarations */
292 static int count_basic_blocks PARAMS ((rtx));
293 static rtx find_basic_blocks_1 PARAMS ((rtx));
294 static void clear_edges PARAMS ((void));
295 static void make_edges PARAMS ((rtx));
296 static void make_label_edge PARAMS ((sbitmap *, basic_block,
298 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
299 basic_block, rtx, int));
300 static void mark_critical_edges PARAMS ((void));
301 static void move_stray_eh_region_notes PARAMS ((void));
302 static void record_active_eh_regions PARAMS ((rtx));
304 static void commit_one_edge_insertion PARAMS ((edge));
306 static void delete_unreachable_blocks PARAMS ((void));
307 static void delete_eh_regions PARAMS ((void));
308 static int can_delete_note_p PARAMS ((rtx));
309 static void expunge_block PARAMS ((basic_block));
310 static int can_delete_label_p PARAMS ((rtx));
311 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
313 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
315 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
316 static void try_merge_blocks PARAMS ((void));
317 static void tidy_fallthru_edges PARAMS ((void));
318 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
319 static void verify_wide_reg PARAMS ((int, rtx, rtx));
320 static void verify_local_live_at_start PARAMS ((regset, basic_block));
321 static int set_noop_p PARAMS ((rtx));
322 static int noop_move_p PARAMS ((rtx));
323 static void delete_noop_moves PARAMS ((rtx));
324 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
325 static void notice_stack_pointer_modification PARAMS ((rtx));
326 static void mark_reg PARAMS ((rtx, void *));
327 static void mark_regs_live_at_end PARAMS ((regset));
328 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
329 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
330 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
331 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
332 static void propagate_block PARAMS ((basic_block, regset,
334 static int insn_dead_p PARAMS ((struct propagate_block_info *,
336 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
338 static void mark_set_regs PARAMS ((struct propagate_block_info *,
340 static void mark_set_1 PARAMS ((struct propagate_block_info *,
342 static int mark_set_reg PARAMS ((struct propagate_block_info *,
343 rtx, rtx, int *, int *));
345 static void find_auto_inc PARAMS ((struct propagate_block_info *,
347 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
349 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
351 static void mark_used_reg PARAMS ((struct propagate_block_info *,
353 static void mark_used_regs PARAMS ((struct propagate_block_info *,
355 void dump_flow_info PARAMS ((FILE *));
356 void debug_flow_info PARAMS ((void));
357 static void dump_edge_info PARAMS ((FILE *, edge, int));
359 static void count_reg_sets_1 PARAMS ((rtx, int));
360 static void count_reg_sets PARAMS ((rtx, int));
361 static void count_reg_references PARAMS ((rtx, int));
362 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
364 static void remove_fake_successors PARAMS ((basic_block));
365 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
366 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
367 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
368 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
369 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
370 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
371 static int flow_depth_first_order_compute PARAMS ((int *));
372 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
373 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
374 static void flow_loops_tree_build PARAMS ((struct loops *));
375 static int flow_loop_level_compute PARAMS ((struct loop *, int));
376 static int flow_loops_level_compute PARAMS ((struct loops *));
/* Find basic blocks of the current function.
F is the first insn of the function and NREGS the number of register
numbers in use.  */
383 find_basic_blocks (f, nregs, file)
385 int nregs ATTRIBUTE_UNUSED;
386 FILE *file ATTRIBUTE_UNUSED;
390 /* Flush out existing data. */
391 if (basic_block_info != NULL)
397 /* Clear bb->aux on all extant basic blocks. We'll use this as a
398 tag for reuse during create_basic_block, just in case some pass
399 copies around basic block notes improperly. */
400 for (i = 0; i < n_basic_blocks; ++i)
401 BASIC_BLOCK (i)->aux = NULL;
403 VARRAY_FREE (basic_block_info);
406 n_basic_blocks = count_basic_blocks (f);
408 /* Size the basic block table. The actual structures will be allocated
409 by find_basic_blocks_1, since we want to keep the structure pointers
410 stable across calls to find_basic_blocks. */
411 /* ??? This whole issue would be much simpler if we called find_basic_blocks
412 exactly once, and thereafter we don't have a single long chain of
413 instructions at all until close to the end of compilation when we
414 actually lay them out. */
416 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
418 label_value_list = find_basic_blocks_1 (f);
420 /* Record the block to which an insn belongs. */
421 /* ??? This should be done another way, by which (perhaps) a label is
422 tagged directly with the basic block that it starts. It is used for
423 more than that currently, but IMO that is the only valid use. */
425 max_uid = get_max_uid ();
427 /* Leave space for insns life_analysis makes in some cases for auto-inc.
428 These cases are rare, so we don't need too much space. */
429 max_uid += max_uid / 10;
432 compute_bb_for_insn (max_uid);
434 /* Discover the edges of our cfg. */
435 record_active_eh_regions (f);
436 make_edges (label_value_list);
438 /* Do very simple cleanup now, for the benefit of code that runs between
439 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
440 tidy_fallthru_edges ();
442 mark_critical_edges ();
444 #ifdef ENABLE_CHECKING
449 /* Count the basic blocks of the function. */
452 count_basic_blocks (f)
456 register RTX_CODE prev_code;
457 register int count = 0;
459 int call_had_abnormal_edge = 0;
461 prev_code = JUMP_INSN;
462 for (insn = f; insn; insn = NEXT_INSN (insn))
464 register RTX_CODE code = GET_CODE (insn);
466 if (code == CODE_LABEL
467 || (GET_RTX_CLASS (code) == 'i'
468 && (prev_code == JUMP_INSN
469 || prev_code == BARRIER
470 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
473 /* Record whether this call created an edge. */
474 if (code == CALL_INSN)
476 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
477 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
479 call_had_abnormal_edge = 0;
481 /* If there is an EH region or rethrow, we have an edge. */
482 if ((eh_region && region > 0)
483 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
484 call_had_abnormal_edge = 1;
485 else if (nonlocal_goto_handler_labels && region >= 0)
486 /* If there is a nonlocal goto label and the specified
487 region number isn't -1, we have an edge. (0 means
488 no throw, but might have a nonlocal goto). */
489 call_had_abnormal_edge = 1;
494 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
496 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
/* The rest of the compiler works a bit more smoothly when we don't have to
501 check for the edge case of do-nothing functions with no basic blocks. */
504 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
511 /* Find all basic blocks of the function whose first insn is F.
513 Collect and return a list of labels whose addresses are taken. This
514 will be used in make_edges for use with computed gotos. */
517 find_basic_blocks_1 (f)
520 register rtx insn, next;
522 rtx bb_note = NULL_RTX;
523 rtx eh_list = NULL_RTX;
524 rtx label_value_list = NULL_RTX;
528 /* We process the instructions in a slightly different way than we did
529 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
530 closed out the previous block, so that it gets attached at the proper
531 place. Since this form should be equivalent to the previous,
532 count_basic_blocks continues to use the old form as a check. */
534 for (insn = f; insn; insn = next)
536 enum rtx_code code = GET_CODE (insn);
538 next = NEXT_INSN (insn);
544 int kind = NOTE_LINE_NUMBER (insn);
546 /* Keep a LIFO list of the currently active exception notes. */
547 if (kind == NOTE_INSN_EH_REGION_BEG)
548 eh_list = alloc_INSN_LIST (insn, eh_list);
549 else if (kind == NOTE_INSN_EH_REGION_END)
553 eh_list = XEXP (eh_list, 1);
554 free_INSN_LIST_node (t);
557 /* Look for basic block notes with which to keep the
558 basic_block_info pointers stable. Unthread the note now;
559 we'll put it back at the right place in create_basic_block.
560 Or not at all if we've already found a note in this block. */
561 else if (kind == NOTE_INSN_BASIC_BLOCK)
563 if (bb_note == NULL_RTX)
566 next = flow_delete_insn (insn);
572 /* A basic block starts at a label. If we've closed one off due
573 to a barrier or some such, no need to do it again. */
574 if (head != NULL_RTX)
576 /* While we now have edge lists with which other portions of
577 the compiler might determine a call ending a basic block
578 does not imply an abnormal edge, it will be a bit before
579 everything can be updated. So continue to emit a noop at
580 the end of such a block. */
581 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
583 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
584 end = emit_insn_after (nop, end);
587 create_basic_block (i++, head, end, bb_note);
595 /* A basic block ends at a jump. */
596 if (head == NULL_RTX)
600 /* ??? Make a special check for table jumps. The way this
601 happens is truly and amazingly gross. We are about to
602 create a basic block that contains just a code label and
603 an addr*vec jump insn. Worse, an addr_diff_vec creates
604 its own natural loop.
606 Prevent this bit of brain damage, pasting things together
607 correctly in make_edges.
609 The correct solution involves emitting the table directly
610 on the tablejump instruction as a note, or JUMP_LABEL. */
612 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
613 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
621 goto new_bb_inclusive;
624 /* A basic block ends at a barrier. It may be that an unconditional
625 jump already closed the basic block -- no need to do it again. */
626 if (head == NULL_RTX)
629 /* While we now have edge lists with which other portions of the
630 compiler might determine a call ending a basic block does not
631 imply an abnormal edge, it will be a bit before everything can
632 be updated. So continue to emit a noop at the end of such a
634 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
636 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
637 end = emit_insn_after (nop, end);
639 goto new_bb_exclusive;
643 /* Record whether this call created an edge. */
644 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
645 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
646 int call_has_abnormal_edge = 0;
648 /* If there is an EH region or rethrow, we have an edge. */
649 if ((eh_list && region > 0)
650 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
651 call_has_abnormal_edge = 1;
652 else if (nonlocal_goto_handler_labels && region >= 0)
653 /* If there is a nonlocal goto label and the specified
654 region number isn't -1, we have an edge. (0 means
655 no throw, but might have a nonlocal goto). */
656 call_has_abnormal_edge = 1;
658 /* A basic block ends at a call that can either throw or
659 do a non-local goto. */
660 if (call_has_abnormal_edge)
663 if (head == NULL_RTX)
668 create_basic_block (i++, head, end, bb_note);
669 head = end = NULL_RTX;
677 if (GET_RTX_CLASS (code) == 'i')
679 if (head == NULL_RTX)
686 if (GET_RTX_CLASS (code) == 'i')
690 /* Make a list of all labels referred to other than by jumps
691 (which just don't have the REG_LABEL notes).
693 Make a special exception for labels followed by an ADDR*VEC,
694 as this would be a part of the tablejump setup code.
696 Make a special exception for the eh_return_stub_label, which
697 we know isn't part of any otherwise visible control flow. */
699 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
700 if (REG_NOTE_KIND (note) == REG_LABEL)
702 rtx lab = XEXP (note, 0), next;
704 if (lab == eh_return_stub_label)
706 else if ((next = next_nonnote_insn (lab)) != NULL
707 && GET_CODE (next) == JUMP_INSN
708 && (GET_CODE (PATTERN (next)) == ADDR_VEC
709 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
713 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
718 if (head != NULL_RTX)
719 create_basic_block (i++, head, end, bb_note);
721 if (i != n_basic_blocks)
724 return label_value_list;
727 /* Tidy the CFG by deleting unreachable code and whatnot. */
733 delete_unreachable_blocks ();
734 move_stray_eh_region_notes ();
735 record_active_eh_regions (f);
737 mark_critical_edges ();
739 /* Kill the data we won't maintain. */
740 label_value_list = NULL_RTX;
743 /* Create a new basic block consisting of the instructions between
744 HEAD and END inclusive. Reuses the note and basic block struct
745 in BB_NOTE, if any. */
748 create_basic_block (index, head, end, bb_note)
750 rtx head, end, bb_note;
755 && ! RTX_INTEGRATED_P (bb_note)
756 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
759 /* If we found an existing note, thread it back onto the chain. */
761 if (GET_CODE (head) == CODE_LABEL)
762 add_insn_after (bb_note, head);
765 add_insn_before (bb_note, head);
771 /* Otherwise we must create a note and a basic block structure.
772 Since we allow basic block structs in rtl, give the struct
773 the same lifetime by allocating it off the function obstack
774 rather than using malloc. */
776 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
777 memset (bb, 0, sizeof (*bb));
779 if (GET_CODE (head) == CODE_LABEL)
780 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
783 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
786 NOTE_BASIC_BLOCK (bb_note) = bb;
789 /* Always include the bb note in the block. */
790 if (NEXT_INSN (end) == bb_note)
796 BASIC_BLOCK (index) = bb;
798 /* Tag the block so that we know it has been used when considering
799 other basic block notes. */
803 /* Records the basic block struct in BB_FOR_INSN, for every instruction
804 indexed by INSN_UID. MAX is the size of the array. */
807 compute_bb_for_insn (max)
812 if (basic_block_for_insn)
813 VARRAY_FREE (basic_block_for_insn);
814 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
816 for (i = 0; i < n_basic_blocks; ++i)
818 basic_block bb = BASIC_BLOCK (i);
825 int uid = INSN_UID (insn);
827 VARRAY_BB (basic_block_for_insn, uid) = bb;
830 insn = NEXT_INSN (insn);
835 /* Free the memory associated with the edge structures. */
843 for (i = 0; i < n_basic_blocks; ++i)
845 basic_block bb = BASIC_BLOCK (i);
847 for (e = bb->succ; e ; e = n)
857 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
863 ENTRY_BLOCK_PTR->succ = 0;
864 EXIT_BLOCK_PTR->pred = 0;
869 /* Identify the edges between basic blocks.
871 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
872 that are otherwise unreachable may be reachable with a non-local goto.
874 BB_EH_END is an array indexed by basic block number in which we record
875 the list of exception regions active at the end of the basic block. */
878 make_edges (label_value_list)
879 rtx label_value_list;
882 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
883 sbitmap *edge_cache = NULL;
885 /* Assume no computed jump; revise as we create edges. */
886 current_function_has_computed_jump = 0;
888 /* Heavy use of computed goto in machine-generated code can lead to
889 nearly fully-connected CFGs. In that case we spend a significant
890 amount of time searching the edge lists for duplicates. */
891 if (forced_labels || label_value_list)
893 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
894 sbitmap_vector_zero (edge_cache, n_basic_blocks);
897 /* By nature of the way these get numbered, block 0 is always the entry. */
898 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
900 for (i = 0; i < n_basic_blocks; ++i)
902 basic_block bb = BASIC_BLOCK (i);
905 int force_fallthru = 0;
907 /* Examine the last instruction of the block, and discover the
908 ways we can leave the block. */
911 code = GET_CODE (insn);
914 if (code == JUMP_INSN)
918 /* ??? Recognize a tablejump and do the right thing. */
919 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
920 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
921 && GET_CODE (tmp) == JUMP_INSN
922 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
923 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
928 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
929 vec = XVEC (PATTERN (tmp), 0);
931 vec = XVEC (PATTERN (tmp), 1);
933 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
934 make_label_edge (edge_cache, bb,
935 XEXP (RTVEC_ELT (vec, j), 0), 0);
/* Some targets (e.g., ARM) emit a conditional jump that also
938 contains the out-of-range target. Scan for these and
939 add an edge if necessary. */
940 if ((tmp = single_set (insn)) != NULL
941 && SET_DEST (tmp) == pc_rtx
942 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
943 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
944 make_label_edge (edge_cache, bb,
945 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
947 #ifdef CASE_DROPS_THROUGH
948 /* Silly VAXen. The ADDR_VEC is going to be in the way of
949 us naturally detecting fallthru into the next block. */
954 /* If this is a computed jump, then mark it as reaching
955 everything on the label_value_list and forced_labels list. */
956 else if (computed_jump_p (insn))
958 current_function_has_computed_jump = 1;
960 for (x = label_value_list; x; x = XEXP (x, 1))
961 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
963 for (x = forced_labels; x; x = XEXP (x, 1))
964 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
/* A return insn creates an edge to the exit block.  */
968 else if (returnjump_p (insn))
969 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
971 /* Otherwise, we have a plain conditional or unconditional jump. */
974 if (! JUMP_LABEL (insn))
976 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
980 /* If this is a sibling call insn, then this is in effect a
981 combined call and return, and so we need an edge to the
982 exit block. No need to worry about EH edges, since we
983 wouldn't have created the sibling call in the first place. */
985 if (code == CALL_INSN && SIBLING_CALL_P (insn))
986 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
989 /* If this is a CALL_INSN, then mark it as reaching the active EH
990 handler for this CALL_INSN. If we're handling asynchronous
991 exceptions then any insn can reach any of the active handlers.
993 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
995 if (code == CALL_INSN || asynchronous_exceptions)
997 /* Add any appropriate EH edges. We do this unconditionally
998 since there may be a REG_EH_REGION or REG_EH_RETHROW note
999 on the call, and this needn't be within an EH region. */
1000 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1002 /* If we have asynchronous exceptions, do the same for *all*
1003 exception regions active in the block. */
1004 if (asynchronous_exceptions
1005 && bb->eh_beg != bb->eh_end)
1007 if (bb->eh_beg >= 0)
1008 make_eh_edge (edge_cache, eh_nest_info, bb,
1009 NULL_RTX, bb->eh_beg);
1011 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1012 if (GET_CODE (x) == NOTE
1013 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1014 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1016 int region = NOTE_EH_HANDLER (x);
1017 make_eh_edge (edge_cache, eh_nest_info, bb,
1022 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1024 /* ??? This could be made smarter: in some cases it's possible
1025 to tell that certain calls will not do a nonlocal goto.
1027 For example, if the nested functions that do the nonlocal
1028 gotos do not have their addresses taken, then only calls to
1029 those functions or to other nested functions that use them
1030 could possibly do nonlocal gotos. */
1031 /* We do know that a REG_EH_REGION note with a value less
1032 than 0 is guaranteed not to perform a non-local goto. */
1033 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1034 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1035 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1036 make_label_edge (edge_cache, bb, XEXP (x, 0),
1037 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1041 /* We know something about the structure of the function __throw in
1042 libgcc2.c. It is the only function that ever contains eh_stub
1043 labels. It modifies its return address so that the last block
1044 returns to one of the eh_stub labels within it. So we have to
1045 make additional edges in the flow graph. */
1046 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1047 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1049 /* Find out if we can drop through to the next block. */
1050 insn = next_nonnote_insn (insn);
1051 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1052 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1053 else if (i + 1 < n_basic_blocks)
1055 rtx tmp = BLOCK_HEAD (i + 1);
1056 if (GET_CODE (tmp) == NOTE)
1057 tmp = next_nonnote_insn (tmp);
1058 if (force_fallthru || insn == tmp)
1059 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1063 free_eh_nesting_info (eh_nest_info);
1065 sbitmap_vector_free (edge_cache);
1068 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1069 about the edge that is accumulated between calls. */
1072 make_edge (edge_cache, src, dst, flags)
1073 sbitmap *edge_cache;
1074 basic_block src, dst;
1080 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1081 many edges to them, and we didn't allocate memory for it. */
1082 use_edge_cache = (edge_cache
1083 && src != ENTRY_BLOCK_PTR
1084 && dst != EXIT_BLOCK_PTR);
1086 /* Make sure we don't add duplicate edges. */
1087 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1088 for (e = src->succ; e ; e = e->succ_next)
1095 e = (edge) xcalloc (1, sizeof (*e));
1098 e->succ_next = src->succ;
1099 e->pred_next = dst->pred;
1108 SET_BIT (edge_cache[src->index], dst->index);
1111 /* Create an edge from a basic block to a label. */
1114 make_label_edge (edge_cache, src, label, flags)
1115 sbitmap *edge_cache;
1120 if (GET_CODE (label) != CODE_LABEL)
/* If the label was never emitted, this insn is junk, but avoid a
crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
as a result of a syntax error and a diagnostic has already been
printed.  */
1128 if (INSN_UID (label) == 0)
1131 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1134 /* Create the edges generated by INSN in REGION. */
1137 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1138 sbitmap *edge_cache;
1139 eh_nesting_info *eh_nest_info;
1144 handler_info **handler_list;
1147 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1148 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1151 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1152 EDGE_ABNORMAL | EDGE_EH | is_call);
/* EH_REGION notes appearing between basic blocks are ambiguous, and even
dangerous if we intend to move basic blocks around.  Move such notes
into the following block.  */
1161 move_stray_eh_region_notes ()
1166 if (n_basic_blocks < 2)
1169 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1170 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1172 rtx insn, next, list = NULL_RTX;
1174 b1 = BASIC_BLOCK (i);
1175 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1177 next = NEXT_INSN (insn);
1178 if (GET_CODE (insn) == NOTE
1179 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1180 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1182 /* Unlink from the insn chain. */
1183 NEXT_INSN (PREV_INSN (insn)) = next;
1184 PREV_INSN (next) = PREV_INSN (insn);
1187 NEXT_INSN (insn) = list;
1192 if (list == NULL_RTX)
1195 /* Find where to insert these things. */
1197 if (GET_CODE (insn) == CODE_LABEL)
1198 insn = NEXT_INSN (insn);
1202 next = NEXT_INSN (list);
1203 add_insn_after (list, insn);
1209 /* Recompute eh_beg/eh_end for each basic block. */
1212 record_active_eh_regions (f)
1215 rtx insn, eh_list = NULL_RTX;
1217 basic_block bb = BASIC_BLOCK (0);
1219 for (insn = f; insn ; insn = NEXT_INSN (insn))
1221 if (bb->head == insn)
1222 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1224 if (GET_CODE (insn) == NOTE)
1226 int kind = NOTE_LINE_NUMBER (insn);
1227 if (kind == NOTE_INSN_EH_REGION_BEG)
1228 eh_list = alloc_INSN_LIST (insn, eh_list);
1229 else if (kind == NOTE_INSN_EH_REGION_END)
1231 rtx t = XEXP (eh_list, 1);
1232 free_INSN_LIST_node (eh_list);
1237 if (bb->end == insn)
1239 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1241 if (i == n_basic_blocks)
1243 bb = BASIC_BLOCK (i);
1248 /* Identify critical edges and set the bits appropriately. */
1251 mark_critical_edges ()
1253 int i, n = n_basic_blocks;
/* We begin with the entry block.  This is not terribly important now,
but could be if a front end (Fortran) implemented alternate entry
points.  */
1259 bb = ENTRY_BLOCK_PTR;
1266 /* (1) Critical edges must have a source with multiple successors. */
1267 if (bb->succ && bb->succ->succ_next)
1269 for (e = bb->succ; e ; e = e->succ_next)
1271 /* (2) Critical edges must have a destination with multiple
1272 predecessors. Note that we know there is at least one
1273 predecessor -- the edge we followed to get here. */
1274 if (e->dest->pred->pred_next)
1275 e->flags |= EDGE_CRITICAL;
1277 e->flags &= ~EDGE_CRITICAL;
1282 for (e = bb->succ; e ; e = e->succ_next)
1283 e->flags &= ~EDGE_CRITICAL;
1288 bb = BASIC_BLOCK (i);
1292 /* Split a (typically critical) edge. Return the new block.
1293 Abort on abnormal edges.
1295 ??? The code generally expects to be called on critical edges.
1296 The case of a block ending in an unconditional jump to a
1297 block with multiple predecessors is not handled optimally. */
1300 split_edge (edge_in)
1303 basic_block old_pred, bb, old_succ;
1308 /* Abnormal edges cannot be split. */
1309 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1312 old_pred = edge_in->src;
1313 old_succ = edge_in->dest;
1315 /* Remove the existing edge from the destination's pred list. */
1318 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1320 *pp = edge_in->pred_next;
1321 edge_in->pred_next = NULL;
1324 /* Create the new structures. */
1325 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1326 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1329 memset (bb, 0, sizeof (*bb));
1330 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1331 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1333 /* ??? This info is likely going to be out of date very soon. */
1334 if (old_succ->global_live_at_start)
1336 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1337 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1341 CLEAR_REG_SET (bb->global_live_at_start);
1342 CLEAR_REG_SET (bb->global_live_at_end);
1347 bb->succ = edge_out;
1350 edge_in->flags &= ~EDGE_CRITICAL;
1352 edge_out->pred_next = old_succ->pred;
1353 edge_out->succ_next = NULL;
1355 edge_out->dest = old_succ;
1356 edge_out->flags = EDGE_FALLTHRU;
1357 edge_out->probability = REG_BR_PROB_BASE;
1359 old_succ->pred = edge_out;
1361 /* Tricky case -- if there existed a fallthru into the successor
1362 (and we're not it) we must add a new unconditional jump around
1363 the new block we're actually interested in.
1365 Further, if that edge is critical, this means a second new basic
1366 block must be created to hold it. In order to simplify correct
1367 insn placement, do this before we touch the existing basic block
1368 ordering for the block we were really wanting. */
1369 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1372 for (e = edge_out->pred_next; e ; e = e->pred_next)
1373 if (e->flags & EDGE_FALLTHRU)
1378 basic_block jump_block;
1381 if ((e->flags & EDGE_CRITICAL) == 0
1382 && e->src != ENTRY_BLOCK_PTR)
/* Non-critical -- we can simply add a jump to the end
1385 of the existing predecessor. */
1386 jump_block = e->src;
/* We need a new block to hold the jump.  The simplest
way to do the bulk of the work here is to recursively
invoke ourselves.  */
1393 jump_block = split_edge (e);
1394 e = jump_block->succ;
1397 /* Now add the jump insn ... */
1398 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1400 jump_block->end = pos;
1401 if (basic_block_for_insn)
1402 set_block_for_insn (pos, jump_block);
1403 emit_barrier_after (pos);
1405 /* ... let jump know that label is in use, ... */
1406 JUMP_LABEL (pos) = old_succ->head;
1407 ++LABEL_NUSES (old_succ->head);
1409 /* ... and clear fallthru on the outgoing edge. */
1410 e->flags &= ~EDGE_FALLTHRU;
1412 /* Continue splitting the interesting edge. */
1416 /* Place the new block just in front of the successor. */
1417 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1418 if (old_succ == EXIT_BLOCK_PTR)
1419 j = n_basic_blocks - 1;
1421 j = old_succ->index;
1422 for (i = n_basic_blocks - 1; i > j; --i)
1424 basic_block tmp = BASIC_BLOCK (i - 1);
1425 BASIC_BLOCK (i) = tmp;
1428 BASIC_BLOCK (i) = bb;
1431 /* Create the basic block note.
Where we place the note can have a noticeable impact on the generated
1434 code. Consider this cfg:
1445 If we need to insert an insn on the edge from block 0 to block 1,
1446 we want to ensure the instructions we insert are outside of any
1447 loop notes that physically sit between block 0 and block 1. Otherwise
1448 we confuse the loop optimizer into thinking the loop is a phony. */
1449 if (old_succ != EXIT_BLOCK_PTR
1450 && PREV_INSN (old_succ->head)
1451 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1452 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1453 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1454 PREV_INSN (old_succ->head));
1455 else if (old_succ != EXIT_BLOCK_PTR)
1456 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1458 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1459 NOTE_BASIC_BLOCK (bb_note) = bb;
1460 bb->head = bb->end = bb_note;
1462 /* Not quite simple -- for non-fallthru edges, we must adjust the
1463 predecessor's jump instruction to target our new block. */
1464 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1466 rtx tmp, insn = old_pred->end;
1467 rtx old_label = old_succ->head;
1468 rtx new_label = gen_label_rtx ();
1470 if (GET_CODE (insn) != JUMP_INSN)
1473 /* ??? Recognize a tablejump and adjust all matching cases. */
1474 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1475 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1476 && GET_CODE (tmp) == JUMP_INSN
1477 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1478 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1483 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1484 vec = XVEC (PATTERN (tmp), 0);
1486 vec = XVEC (PATTERN (tmp), 1);
1488 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1489 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1491 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1492 --LABEL_NUSES (old_label);
1493 ++LABEL_NUSES (new_label);
1496 /* Handle casesi dispatch insns */
1497 if ((tmp = single_set (insn)) != NULL
1498 && SET_DEST (tmp) == pc_rtx
1499 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1500 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1501 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1503 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1505 --LABEL_NUSES (old_label);
1506 ++LABEL_NUSES (new_label);
1511 /* This would have indicated an abnormal edge. */
1512 if (computed_jump_p (insn))
1515 /* A return instruction can't be redirected. */
1516 if (returnjump_p (insn))
1519 /* If the insn doesn't go where we think, we're confused. */
1520 if (JUMP_LABEL (insn) != old_label)
1523 redirect_jump (insn, new_label);
1526 emit_label_before (new_label, bb_note);
1527 bb->head = new_label;
1533 /* Queue instructions for insertion on an edge between two basic blocks.
1534 The new instructions and basic blocks (if any) will not appear in the
1535 CFG until commit_edge_insertions is called. */
1538 insert_insn_on_edge (pattern, e)
1542 /* We cannot insert instructions on an abnormal critical edge.
1543 It will be easier to find the culprit if we die now. */
1544 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1545 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1548 if (e->insns == NULL_RTX)
1551 push_to_sequence (e->insns);
1553 emit_insn (pattern);
1555 e->insns = get_insns ();
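/* As an illustrative usage sketch (not taken from this file): a pass
   that wants PATTERN executed whenever control flows along edge E can
   queue it and later flush all queued insns into the insn stream:

	insert_insn_on_edge (pattern, e);
	...
	commit_edge_insertions ();

   commit_edge_insertions places each queued sequence in the predecessor
   or successor block when that is unambiguous, and otherwise splits the
   edge so the insns execute only on that path.  */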
1559 /* Update the CFG for the instructions queued on edge E. */
1562 commit_one_edge_insertion (e)
1565 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1568 /* Pull the insns off the edge now since the edge might go away. */
1570 e->insns = NULL_RTX;
1572 /* Figure out where to put these things. If the destination has
1573 one predecessor, insert there. Except for the exit block. */
1574 if (e->dest->pred->pred_next == NULL
1575 && e->dest != EXIT_BLOCK_PTR)
1579 /* Get the location correct wrt a code label, and "nice" wrt
1580 a basic block note, and before everything else. */
1582 if (GET_CODE (tmp) == CODE_LABEL)
1583 tmp = NEXT_INSN (tmp);
1584 if (GET_CODE (tmp) == NOTE
1585 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1586 tmp = NEXT_INSN (tmp);
1587 if (tmp == bb->head)
1590 after = PREV_INSN (tmp);
1593 /* If the source has one successor and the edge is not abnormal,
1594 insert there. Except for the entry block. */
1595 else if ((e->flags & EDGE_ABNORMAL) == 0
1596 && e->src->succ->succ_next == NULL
1597 && e->src != ENTRY_BLOCK_PTR)
1600 /* It is possible to have a non-simple jump here. Consider a target
1601 where some forms of unconditional jumps clobber a register. This
1602 happens on the fr30 for example.
1604 We know this block has a single successor, so we can just emit
1605 the queued insns before the jump. */
1606 if (GET_CODE (bb->end) == JUMP_INSN)
1612 /* We'd better be fallthru, or we've lost track of what's what. */
1613 if ((e->flags & EDGE_FALLTHRU) == 0)
1620 /* Otherwise we must split the edge. */
1623 bb = split_edge (e);
1627 /* Now that we've found the spot, do the insertion. */
1629 /* Set the new block number for these insns, if structure is allocated. */
1630 if (basic_block_for_insn)
1633 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1634 set_block_for_insn (i, bb);
1639 emit_insns_before (insns, before);
1640 if (before == bb->head)
1645 rtx last = emit_insns_after (insns, after);
1646 if (after == bb->end)
1650 if (GET_CODE (last) == JUMP_INSN)
1652 if (returnjump_p (last))
1654 /* ??? Remove all outgoing edges from BB and add one
1655 for EXIT. This is not currently a problem because
1656 this only happens for the (single) epilogue, which
1657 already has a fallthru edge to EXIT. */
1660 if (e->dest != EXIT_BLOCK_PTR
1661 || e->succ_next != NULL
1662 || (e->flags & EDGE_FALLTHRU) == 0)
1664 e->flags &= ~EDGE_FALLTHRU;
1666 emit_barrier_after (last);
1675 /* Update the CFG for all queued instructions. */
1678 commit_edge_insertions ()
1683 #ifdef ENABLE_CHECKING
1684 verify_flow_info ();
1688 bb = ENTRY_BLOCK_PTR;
1693 for (e = bb->succ; e ; e = next)
1695 next = e->succ_next;
1697 commit_one_edge_insertion (e);
1700 if (++i >= n_basic_blocks)
1702 bb = BASIC_BLOCK (i);
1706 /* Delete all unreachable basic blocks. */
1709 delete_unreachable_blocks ()
1711 basic_block *worklist, *tos;
1712 int deleted_handler;
1717 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1719 /* Use basic_block->aux as a marker. Clear them all. */
1721 for (i = 0; i < n; ++i)
1722 BASIC_BLOCK (i)->aux = NULL;
/* Add our starting points to the worklist.  Almost always there will
be only one.  It isn't inconceivable that we might one day directly
support Fortran alternate entry points.  */
1728 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1732 /* Mark the block with a handy non-null value. */
1736 /* Iterate: find everything reachable from what we've already seen. */
1738 while (tos != worklist)
1740 basic_block b = *--tos;
1742 for (e = b->succ; e ; e = e->succ_next)
1750 /* Delete all unreachable basic blocks. Count down so that we don't
1751 interfere with the block renumbering that happens in flow_delete_block. */
1753 deleted_handler = 0;
1755 for (i = n - 1; i >= 0; --i)
1757 basic_block b = BASIC_BLOCK (i);
1760 /* This block was found. Tidy up the mark. */
1763 deleted_handler |= flow_delete_block (b);
1766 tidy_fallthru_edges ();
1768 /* If we deleted an exception handler, we may have EH region begin/end
1769 blocks to remove as well. */
1770 if (deleted_handler)
1771 delete_eh_regions ();
1776 /* Find EH regions for which there is no longer a handler, and delete them. */
1779 delete_eh_regions ()
1783 update_rethrow_references ();
1785 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1786 if (GET_CODE (insn) == NOTE)
1788 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1789 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1791 int num = NOTE_EH_HANDLER (insn);
1792 /* A NULL handler indicates a region is no longer needed,
1793 as long as its rethrow label isn't used. */
1794 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1796 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1797 NOTE_SOURCE_FILE (insn) = 0;
1803 /* Return true if NOTE is not one of the ones that must be kept paired,
1804 so that we may simply delete them. */
1807 can_delete_note_p (note)
1810 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1811 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1814 /* Unlink a chain of insns between START and FINISH, leaving notes
1815 that must be paired. */
1818 flow_delete_insn_chain (start, finish)
1821 /* Unchain the insns one by one. It would be quicker to delete all
1822 of these with a single unchaining, rather than one at a time, but
1823 we need to keep the NOTE's. */
1829 next = NEXT_INSN (start);
1830 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1832 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1835 next = flow_delete_insn (start);
1837 if (start == finish)
1843 /* Delete the insns in a (non-live) block. We physically delete every
1844 non-deleted-note insn, and update the flow graph appropriately.
1846 Return nonzero if we deleted an exception handler. */
1848 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1849 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1852 flow_delete_block (b)
1855 int deleted_handler = 0;
1858 /* If the head of this block is a CODE_LABEL, then it might be the
1859 label for an exception handler which can't be reached.
1861 We need to remove the label from the exception_handler_label list
1862 and remove the associated NOTE_INSN_EH_REGION_BEG and
1863 NOTE_INSN_EH_REGION_END notes. */
1867 never_reached_warning (insn);
1869 if (GET_CODE (insn) == CODE_LABEL)
1871 rtx x, *prev = &exception_handler_labels;
1873 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1875 if (XEXP (x, 0) == insn)
1877 /* Found a match, splice this label out of the EH label list. */
1878 *prev = XEXP (x, 1);
1879 XEXP (x, 1) = NULL_RTX;
1880 XEXP (x, 0) = NULL_RTX;
1882 /* Remove the handler from all regions */
1883 remove_handler (insn);
1884 deleted_handler = 1;
1887 prev = &XEXP (x, 1);
1890 /* This label may be referenced by code solely for its value, or
1891 referenced by static data, or something. We have determined
1892 that it is not reachable, but cannot delete the label itself.
1893 Save code space and continue to delete the balance of the block,
1894 along with properly updating the cfg. */
1895 if (!can_delete_label_p (insn))
/* If we've only got one of these, skip the whole deleting
insns thing.  */
1900 goto no_delete_insns;
1901 insn = NEXT_INSN (insn);
1905 /* Include any jump table following the basic block. */
1907 if (GET_CODE (end) == JUMP_INSN
1908 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1909 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1910 && GET_CODE (tmp) == JUMP_INSN
1911 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1912 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1915 /* Include any barrier that may follow the basic block. */
1916 tmp = next_nonnote_insn (end);
1917 if (tmp && GET_CODE (tmp) == BARRIER)
1920 /* Selectively delete the entire chain. */
1921 flow_delete_insn_chain (insn, end);
1925 /* Remove the edges into and out of this block. Note that there may
1926 indeed be edges in, if we are removing an unreachable loop. */
1930 for (e = b->pred; e ; e = next)
1932 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1935 next = e->pred_next;
1939 for (e = b->succ; e ; e = next)
1941 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1944 next = e->succ_next;
1953 /* Remove the basic block from the array, and compact behind it. */
1956 return deleted_handler;
1959 /* Remove block B from the basic block array and compact behind it. */
1965 int i, n = n_basic_blocks;
1967 for (i = b->index; i + 1 < n; ++i)
1969 basic_block x = BASIC_BLOCK (i + 1);
1970 BASIC_BLOCK (i) = x;
1974 basic_block_info->num_elements--;
1978 /* Delete INSN by patching it out. Return the next insn. */
1981 flow_delete_insn (insn)
1984 rtx prev = PREV_INSN (insn);
1985 rtx next = NEXT_INSN (insn);
1988 PREV_INSN (insn) = NULL_RTX;
1989 NEXT_INSN (insn) = NULL_RTX;
1992 NEXT_INSN (prev) = next;
1994 PREV_INSN (next) = prev;
1996 set_last_insn (prev);
1998 if (GET_CODE (insn) == CODE_LABEL)
1999 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2001 /* If deleting a jump, decrement the use count of the label. Deleting
2002 the label itself should happen in the normal course of block merging. */
2003 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2004 LABEL_NUSES (JUMP_LABEL (insn))--;
2006 /* Also if deleting an insn that references a label. */
2007 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2008 LABEL_NUSES (XEXP (note, 0))--;
2013 /* True if a given label can be deleted. */
2016 can_delete_label_p (label)
2021 if (LABEL_PRESERVE_P (label))
2024 for (x = forced_labels; x ; x = XEXP (x, 1))
2025 if (label == XEXP (x, 0))
2027 for (x = label_value_list; x ; x = XEXP (x, 1))
2028 if (label == XEXP (x, 0))
2030 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2031 if (label == XEXP (x, 0))
2034 /* User declared labels must be preserved. */
2035 if (LABEL_NAME (label) != 0)
2041 /* Blocks A and B are to be merged into a single block A. The insns
2042 are already contiguous, hence `nomove'. */
2045 merge_blocks_nomove (a, b)
2049 rtx b_head, b_end, a_end;
2050 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2053 /* If there was a CODE_LABEL beginning B, delete it. */
2056 if (GET_CODE (b_head) == CODE_LABEL)
2058 /* Detect basic blocks with nothing but a label. This can happen
2059 in particular at the end of a function. */
2060 if (b_head == b_end)
2062 del_first = del_last = b_head;
2063 b_head = NEXT_INSN (b_head);
2066 /* Delete the basic block note. */
2067 if (GET_CODE (b_head) == NOTE
2068 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2070 if (b_head == b_end)
2075 b_head = NEXT_INSN (b_head);
2078 /* If there was a jump out of A, delete it. */
2080 if (GET_CODE (a_end) == JUMP_INSN)
2084 prev = prev_nonnote_insn (a_end);
2091 /* If this was a conditional jump, we need to also delete
2092 the insn that set cc0. */
2093 if (prev && sets_cc0_p (prev))
2096 prev = prev_nonnote_insn (prev);
2106 /* Delete everything marked above as well as crap that might be
2107 hanging out between the two blocks. */
2108 flow_delete_insn_chain (del_first, del_last);
2110 /* Normally there should only be one successor of A and that is B, but
partway through the merge of blocks for conditional_execution we'll
2112 be merging a TEST block with THEN and ELSE successors. Free the
2113 whole lot of them and hope the caller knows what they're doing. */
2115 remove_edge (a->succ);
2117 /* Adjust the edges out of B for the new owner. */
2118 for (e = b->succ; e ; e = e->succ_next)
2122 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2123 b->pred = b->succ = NULL;
2125 /* Reassociate the insns of B with A. */
2128 if (basic_block_for_insn)
2130 BLOCK_FOR_INSN (b_head) = a;
2131 while (b_head != b_end)
2133 b_head = NEXT_INSN (b_head);
2134 BLOCK_FOR_INSN (b_head) = a;
2144 /* Blocks A and B are to be merged into a single block. A has no incoming
2145 fallthru edge, so it can be moved before B without adding or modifying
2146 any jumps (aside from the jump from A to B). */
2149 merge_blocks_move_predecessor_nojumps (a, b)
2152 rtx start, end, barrier;
2158 /* We want to delete the BARRIER after the end of the insns we are
2159 going to move. If we don't find a BARRIER, then do nothing. This
2160 can happen in some cases if we have labels we can not delete.
2162 Similarly, do nothing if we can not delete the label at the start
2163 of the target block. */
2164 barrier = next_nonnote_insn (end);
2165 if (GET_CODE (barrier) != BARRIER
2166 || (GET_CODE (b->head) == CODE_LABEL
2167 && ! can_delete_label_p (b->head)))
2170 flow_delete_insn (barrier);
2172 /* Move block and loop notes out of the chain so that we do not
2173 disturb their order.
2175 ??? A better solution would be to squeeze out all the non-nested notes
2176 and adjust the block trees appropriately. Even better would be to have
a tighter connection between block trees and rtl so that this is not
necessary.  */
2179 start = squeeze_notes (start, end);
2181 /* Scramble the insn chain. */
2182 if (end != PREV_INSN (b->head))
2183 reorder_insns (start, end, PREV_INSN (b->head));
2187 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2188 a->index, b->index);
2191 /* Swap the records for the two blocks around. Although we are deleting B,
A is now where B was and we want to compact the BB array from where
A used to be.  */
2194 BASIC_BLOCK(a->index) = b;
2195 BASIC_BLOCK(b->index) = a;
2197 a->index = b->index;
2200 /* Now blocks A and B are contiguous. Merge them. */
2201 merge_blocks_nomove (a, b);
2206 /* Blocks A and B are to be merged into a single block. B has no outgoing
2207 fallthru edge, so it can be moved after A without adding or modifying
2208 any jumps (aside from the jump from A to B). */
2211 merge_blocks_move_successor_nojumps (a, b)
2214 rtx start, end, barrier;
2219 /* We want to delete the BARRIER after the end of the insns we are
2220 going to move. If we don't find a BARRIER, then do nothing. This
2221 can happen in some cases if we have labels we can not delete.
2223 Similarly, do nothing if we can not delete the label at the start
2224 of the target block. */
2225 barrier = next_nonnote_insn (end);
2226 if (GET_CODE (barrier) != BARRIER
2227 || (GET_CODE (b->head) == CODE_LABEL
2228 && ! can_delete_label_p (b->head)))
2231 flow_delete_insn (barrier);
2233 /* Move block and loop notes out of the chain so that we do not
2234 disturb their order.
2236 ??? A better solution would be to squeeze out all the non-nested notes
2237 and adjust the block trees appropriately. Even better would be to have
a tighter connection between block trees and rtl so that this is not
necessary.  */
2240 start = squeeze_notes (start, end);
2242 /* Scramble the insn chain. */
2243 reorder_insns (start, end, a->end);
2245 /* Now blocks A and B are contiguous. Merge them. */
2246 merge_blocks_nomove (a, b);
2250 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2251 b->index, a->index);
2257 /* Attempt to merge basic blocks that are potentially non-adjacent.
2258 Return true iff the attempt succeeded. */
2261 merge_blocks (e, b, c)
2265 /* If B has a fallthru edge to C, no need to move anything. */
2266 if (e->flags & EDGE_FALLTHRU)
2268 /* If a label still appears somewhere and we cannot delete the label,
2269 then we cannot merge the blocks. The edge was tidied already. */
2271 rtx insn, stop = NEXT_INSN (c->head);
2272 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2273 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2276 merge_blocks_nomove (b, c);
2280 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2281 b->index, c->index);
2290 int c_has_outgoing_fallthru;
2291 int b_has_incoming_fallthru;
2293 /* We must make sure to not munge nesting of exception regions,
2294 lexical blocks, and loop notes.
2296 The first is taken care of by requiring that the active eh
2297 region at the end of one block always matches the active eh
2298 region at the beginning of the next block.
2300 The latter two are taken care of by squeezing out all the notes. */
2302 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2303 executed and we may want to treat blocks which have two out
2304 edges, one normal, one abnormal as only having one edge for
2305 block merging purposes. */
2307 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2308 if (tmp_edge->flags & EDGE_FALLTHRU)
2310 c_has_outgoing_fallthru = (tmp_edge != NULL);
2312 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2313 if (tmp_edge->flags & EDGE_FALLTHRU)
2315 b_has_incoming_fallthru = (tmp_edge != NULL);
2317 /* If B does not have an incoming fallthru, and the exception regions
2318 match, then it can be moved immediately before C without introducing or modifying jumps.
2321 C can not be the first block, so we do not have to worry about
2322 accessing a non-existent block. */
2323 d = BASIC_BLOCK (c->index - 1);
2324 if (! b_has_incoming_fallthru
2325 && d->eh_end == b->eh_beg
2326 && b->eh_end == c->eh_beg)
2327 return merge_blocks_move_predecessor_nojumps (b, c);
2329 /* Otherwise, we're going to try to move C after B. Make sure the
2330 exception regions match.
2332 If B is the last basic block, then we must not try to access the
2333 block structure for block B + 1. Luckily in that case we do not
2334 need to worry about matching exception regions. */
2335 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2336 if (b->eh_end == c->eh_beg
2337 && (d == NULL || c->eh_end == d->eh_beg))
2339 /* If C does not have an outgoing fallthru, then it can be moved
2340 immediately after B without introducing or modifying jumps. */
2341 if (! c_has_outgoing_fallthru)
2342 return merge_blocks_move_successor_nojumps (b, c);
2344 /* Otherwise, we'll need to insert an extra jump, and possibly
2345 a new block to contain it. */
2346 /* ??? Not implemented yet. */
2353 /* Top level driver for merge_blocks. */
2360 /* Attempt to merge blocks as made possible by edge removal. If a block
2361 has only one successor, and the successor has only one predecessor,
2362 they may be combined. */
2364 for (i = 0; i < n_basic_blocks; )
2366 basic_block c, b = BASIC_BLOCK (i);
2369 /* A loop because chains of blocks might be combinable. */
2370 while ((s = b->succ) != NULL
2371 && s->succ_next == NULL
2372 && (s->flags & EDGE_EH) == 0
2373 && (c = s->dest) != EXIT_BLOCK_PTR
2374 && c->pred->pred_next == NULL
2375 /* If the jump insn has side effects, we can't kill the edge. */
2376 && (GET_CODE (b->end) != JUMP_INSN
2377 || onlyjump_p (b->end))
2378 && merge_blocks (s, b, c))
2381 /* Don't get confused by the index shift caused by deleting blocks. */
2386 /* The given edge should potentially be a fallthru edge. If that is in
2387 fact true, delete the jump and barriers that are in the way. */
2390 tidy_fallthru_edge (e, b, c)
2396 /* ??? In a late-running flow pass, other folks may have deleted basic
2397 blocks by nopping out blocks, leaving multiple BARRIERs between here
2398 and the target label. They ought to be chastised and fixed.
2400 We can also wind up with a sequence of undeletable labels between
2401 one block and the next.
2403 So search through a sequence of barriers, labels, and notes for
2404 the head of block C and assert that we really do fall through. */
2406 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2409 /* Remove what will soon cease being the jump insn from the source block.
2410 If block B consisted only of this single jump, turn it into a deleted note. */
2413 if (GET_CODE (q) == JUMP_INSN)
2416 /* If this was a conditional jump, we need to also delete
2417 the insn that set cc0. */
2418 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2425 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2426 NOTE_SOURCE_FILE (q) = 0;
2429 b->end = q = PREV_INSN (q);
2432 /* Selectively unlink the sequence. */
2433 if (q != PREV_INSN (c->head))
2434 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2436 e->flags |= EDGE_FALLTHRU;
2439 /* Fix up edges that now fall through, or rather should now fall through
2440 but previously required a jump around now deleted blocks. Simplify
2441 the search by only examining blocks numerically adjacent, since this
2442 is how find_basic_blocks created them. */
2445 tidy_fallthru_edges ()
2449 for (i = 1; i < n_basic_blocks; ++i)
2451 basic_block b = BASIC_BLOCK (i - 1);
2452 basic_block c = BASIC_BLOCK (i);
2455 /* We care about simple conditional or unconditional jumps with a single successor.
2458 If we had a conditional branch to the next instruction when
2459 find_basic_blocks was called, then there will only be one
2460 out edge for the block which ended with the conditional
2461 branch (since we do not create duplicate edges).
2463 Furthermore, the edge will be marked as a fallthru because we
2464 merge the flags for the duplicate edges. So we do not want to
2465 check that the edge is not a FALLTHRU edge. */
2466 if ((s = b->succ) != NULL
2467 && s->succ_next == NULL
2469 /* If the jump insn has side effects, we can't tidy the edge. */
2470 && (GET_CODE (b->end) != JUMP_INSN
2471 || onlyjump_p (b->end)))
2472 tidy_fallthru_edge (s, b, c);
2476 /* Discover and record the loop depth at the head of each basic block. */
2479 calculate_loop_depth (dump)
2484 /* The loop infrastructure does the real job for us. */
2485 flow_loops_find (&loops);
2488 flow_loops_dump (&loops, dump, 0);
2490 flow_loops_free (&loops);
2493 /* Perform data flow analysis.
2494 F is the first insn of the function and NREGS the number of register numbers
2498 life_analysis (f, nregs, file, remove_dead_code)
2502 int remove_dead_code;
2504 #ifdef ELIMINABLE_REGS
2506 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2511 /* Dead code elimination changes basic block structure and therefore
2512 breaks the SSA phi representation. Particularly, a phi node
2513 can have an alternative value for each incoming block, referenced
2514 by the block number. Removing dead code can bump entire blocks
2515 and therefore cause blocks to be renumbered, invalidating the
2516 numbering of phi alternatives. */
2517 if (remove_dead_code && in_ssa_form)
2520 /* Record which registers will be eliminated. We use this in mark_used_regs. */
2523 CLEAR_HARD_REG_SET (elim_reg_set);
2525 #ifdef ELIMINABLE_REGS
2526 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2527 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2529 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2532 /* We want alias analysis information for local dead store elimination. */
2533 init_alias_analysis ();
2536 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2540 if (! remove_dead_code)
2541 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2544 /* The post-reload life analysis has (on a global basis) the same
2545 registers live as were computed by reload itself; otherwise
2546 elimination offsets and such may be incorrect.
2548 Reload will mark some registers as live even though they do not
2549 appear in the rtl. */
2550 if (reload_completed)
2551 flags &= ~PROP_REG_INFO;
2555 /* Always remove no-op moves. Do this before other processing so
2556 that we don't have to keep re-scanning them. */
2557 delete_noop_moves (f);
2559 /* Some targets can emit simpler epilogues if they know that sp was
2560 not ever modified during the function. After reload, of course,
2561 we've already emitted the epilogue so there's no sense searching. */
2562 if (! reload_completed)
2563 notice_stack_pointer_modification (f);
2565 /* Allocate and zero out data structures that will record the
2566 data from lifetime analysis. */
2567 allocate_reg_life_data ();
2568 allocate_bb_life_data ();
2569 all_blocks = sbitmap_alloc (n_basic_blocks);
2570 sbitmap_ones (all_blocks);
2572 /* Find the set of registers live on function exit. */
2573 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2575 /* "Update" life info from zero. It'd be nice to begin the
2576 relaxation with just the exit and noreturn blocks, but that set
2577 is not immediately handy. */
2579 if (flags & PROP_REG_INFO)
2580 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2581 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2584 sbitmap_free (all_blocks);
2585 end_alias_analysis ();
2588 dump_flow_info (file);
2590 free_basic_block_vars (1);
2593 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2594 Search for REGNO. If found, abort if it is not wider than word_mode. */
2597 verify_wide_reg_1 (px, pregno)
2602 unsigned int regno = *(int *) pregno;
2604 if (GET_CODE (x) == REG && REGNO (x) == regno)
2606 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2613 /* A subroutine of verify_local_live_at_start. Search through insns
2614 between HEAD and END looking for register REGNO. */
2617 verify_wide_reg (regno, head, end)
2623 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2624 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2628 head = NEXT_INSN (head);
2631 /* We didn't find the register at all. Something's way screwy. */
2635 /* A subroutine of update_life_info. Verify that there are no untoward
2636 changes in live_at_start during a local update. */
2639 verify_local_live_at_start (new_live_at_start, bb)
2640 regset new_live_at_start;
2643 if (reload_completed)
2645 /* After reload, there are no pseudos, nor subregs of multi-word
2646 registers. The regsets should exactly match. */
2647 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2654 /* Find the set of changed registers. */
2655 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2657 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2659 /* No registers should die. */
2660 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2662 /* Verify that the now-live register is wider than word_mode. */
2663 verify_wide_reg (i, bb->head, bb->end);
2668 /* Updates life information starting with the basic blocks set in BLOCKS.
2670 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2671 expecting local modifications to basic blocks. If we find extra
2672 registers live at the beginning of a block, then we either killed
2673 useful data, or we have a broken split that wants data not provided.
2674 If we find registers removed from live_at_start, that means we have
2675 a broken peephole that is killing a register it shouldn't.
2677 ??? This is not true in one situation -- when a pre-reload splitter
2678 generates subregs of a multi-word pseudo, current life analysis will
2679 lose the kill. So we _can_ have a pseudo go live. How irritating.
2681 BLOCK_FOR_INSN is assumed to be correct.
2683 Including PROP_REG_INFO does not properly refresh regs_ever_live
2684 unless the caller resets it to zero. */
2687 update_life_info (blocks, extent, prop_flags)
2689 enum update_life_extent extent;
2693 regset_head tmp_head;
2696 tmp = INITIALIZE_REG_SET (tmp_head);
2698 /* For a global update, we go through the relaxation process again. */
2699 if (extent != UPDATE_LIFE_LOCAL)
2701 calculate_global_regs_live (blocks, blocks,
2702 prop_flags & PROP_SCAN_DEAD_CODE);
2704 /* If asked, remove notes from the blocks we'll update. */
2705 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2706 count_or_remove_death_notes (blocks, 1);
2709 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2711 basic_block bb = BASIC_BLOCK (i);
2713 COPY_REG_SET (tmp, bb->global_live_at_end);
2714 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2716 if (extent == UPDATE_LIFE_LOCAL)
2717 verify_local_live_at_start (tmp, bb);
2722 if (prop_flags & PROP_REG_INFO)
2724 /* The only pseudos that are live at the beginning of the function
2725 are those that were not set anywhere in the function. local-alloc
2726 doesn't know how to handle these correctly, so mark them as not
2727 local to any one basic block. */
2728 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2729 FIRST_PSEUDO_REGISTER, i,
2730 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2732 /* We have a problem with any pseudoreg that lives across the setjmp.
2733 ANSI says that if a user variable does not change in value between
2734 the setjmp and the longjmp, then the longjmp preserves it. This
2735 includes longjmp from a place where the pseudo appears dead.
2736 (In principle, the value still exists if it is in scope.)
2737 If the pseudo goes in a hard reg, some other value may occupy
2738 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2739 Conclusion: such a pseudo must not go in a hard reg. */
2740 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2741 FIRST_PSEUDO_REGISTER, i,
2743 if (regno_reg_rtx[i] != 0)
2745 REG_LIVE_LENGTH (i) = -1;
2746 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2752 /* Free the variables allocated by find_basic_blocks.
2754 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2757 free_basic_block_vars (keep_head_end_p)
2758 int keep_head_end_p;
2760 if (basic_block_for_insn)
2762 VARRAY_FREE (basic_block_for_insn);
2763 basic_block_for_insn = NULL;
2766 if (! keep_head_end_p)
2769 VARRAY_FREE (basic_block_info);
2772 ENTRY_BLOCK_PTR->aux = NULL;
2773 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2774 EXIT_BLOCK_PTR->aux = NULL;
2775 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2779 /* Return nonzero if the destination of SET equals the source. */
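/* For example, (set (reg 42) (reg 42)) is such a no-op, as is a SET whose
   source and destination are SUBREGs of the same register selecting the
   same word; register number 42 here is purely illustrative. */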
2784 rtx src = SET_SRC (set);
2785 rtx dst = SET_DEST (set);
2787 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2789 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2791 src = SUBREG_REG (src);
2792 dst = SUBREG_REG (dst);
2795 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2796 && REGNO (src) == REGNO (dst));
2799 /* Return nonzero if an insn consists only of SETs, each of which only sets a value to itself. */
2805 rtx pat = PATTERN (insn);
2807 /* Insns carrying these notes are useful later on. */
2808 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2811 if (GET_CODE (pat) == SET && set_noop_p (pat))
2814 if (GET_CODE (pat) == PARALLEL)
2817 /* If nothing but SETs of registers to themselves,
2818 this insn can also be deleted. */
2819 for (i = 0; i < XVECLEN (pat, 0); i++)
2821 rtx tem = XVECEXP (pat, 0, i);
2823 if (GET_CODE (tem) == USE
2824 || GET_CODE (tem) == CLOBBER)
2827 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2836 /* Delete any insns that copy a register to itself. */
2839 delete_noop_moves (f)
2843 for (insn = f; insn; insn = NEXT_INSN (insn))
2845 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2847 PUT_CODE (insn, NOTE);
2848 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2849 NOTE_SOURCE_FILE (insn) = 0;
2854 /* Determine if the stack pointer is constant over the life of the function.
2855 Only useful before prologues have been emitted. */
2858 notice_stack_pointer_modification_1 (x, pat, data)
2860 rtx pat ATTRIBUTE_UNUSED;
2861 void *data ATTRIBUTE_UNUSED;
2863 if (x == stack_pointer_rtx
2864 /* The stack pointer is only modified indirectly as the result
2865 of a push until later in flow. See the comments in rtl.texi
2866 regarding Embedded Side-Effects on Addresses. */
2867 || (GET_CODE (x) == MEM
2868 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2869 || GET_CODE (XEXP (x, 0)) == PRE_INC
2870 || GET_CODE (XEXP (x, 0)) == POST_DEC
2871 || GET_CODE (XEXP (x, 0)) == POST_INC)
2872 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2873 current_function_sp_is_unchanging = 0;
2877 notice_stack_pointer_modification (f)
2882 /* Assume that the stack pointer is unchanging if alloca hasn't been used. */
2884 current_function_sp_is_unchanging = !current_function_calls_alloca;
2885 if (! current_function_sp_is_unchanging)
2888 for (insn = f; insn; insn = NEXT_INSN (insn))
2890 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2892 /* Check if insn modifies the stack pointer. */
2893 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2895 if (! current_function_sp_is_unchanging)
2901 /* Mark a register in SET. Hard registers in large modes get all
2902 of their component registers set as well. */
2904 mark_reg (reg, xset)
2908 regset set = (regset) xset;
2909 int regno = REGNO (reg);
2911 if (GET_MODE (reg) == BLKmode)
2914 SET_REGNO_REG_SET (set, regno);
2915 if (regno < FIRST_PSEUDO_REGISTER)
2917 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2919 SET_REGNO_REG_SET (set, regno + n);
2923 /* Mark those regs which are needed at the end of the function as live
2924 at the end of the last basic block. */
2926 mark_regs_live_at_end (set)
2931 /* If exiting needs the right stack value, consider the stack pointer
2932 live at the end of the function. */
2933 if ((HAVE_epilogue && reload_completed)
2934 || ! EXIT_IGNORE_STACK
2935 || (! FRAME_POINTER_REQUIRED
2936 && ! current_function_calls_alloca
2937 && flag_omit_frame_pointer)
2938 || current_function_sp_is_unchanging)
2940 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2943 /* Mark the frame pointer if needed at the end of the function. If
2944 we end up eliminating it, it will be removed from the live list
2945 of each basic block by reload. */
2947 if (! reload_completed || frame_pointer_needed)
2949 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2950 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2951 /* If they are different, also mark the hard frame pointer as live */
2952 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2956 #ifdef PIC_OFFSET_TABLE_REGNUM
2957 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2958 /* Many architectures have a GP register even without flag_pic.
2959 Assume the pic register is not in use, or will be handled by
2960 other means, if it is not fixed. */
2961 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2962 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2966 /* Mark all global registers, and all registers used by the epilogue
2967 as being live at the end of the function since they may be
2968 referenced by our caller. */
2969 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2971 #ifdef EPILOGUE_USES
2972 || EPILOGUE_USES (i)
2975 SET_REGNO_REG_SET (set, i);
2977 /* Mark all call-saved registers that we actually used. */
2978 if (HAVE_epilogue && reload_completed)
2980 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2981 if (! call_used_regs[i] && regs_ever_live[i])
2982 SET_REGNO_REG_SET (set, i);
2985 /* Mark function return value. */
2986 diddle_return_value (mark_reg, set);
2989 /* Callback function for for_each_successor_phi. DATA is a regset.
2990 Sets the SRC_REGNO, the regno of the phi alternative for phi node
2991 INSN, in the regset. */
2994 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2995 rtx insn ATTRIBUTE_UNUSED;
2996 int dest_regno ATTRIBUTE_UNUSED;
3000 regset live = (regset) data;
3001 SET_REGNO_REG_SET (live, src_regno);
3005 /* Propagate global life info around the graph of basic blocks. Begin
3006 considering blocks with their corresponding bit set in BLOCKS_IN.
3007 BLOCKS_OUT is set for every block that was changed. */
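/* In dataflow terms this is the usual backward liveness problem, iterated
   over a worklist until it reaches a fixed point:

	live_at_end (B)   = union over all successors S of live_at_start (S)
	live_at_start (B) = live_at_end (B) propagated backward through the
			    insns of B (see propagate_block)

   A block is re-queued whenever the live_at_start of one of its successors
   changes. */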
3010 calculate_global_regs_live (blocks_in, blocks_out, flags)
3011 sbitmap blocks_in, blocks_out;
3014 basic_block *queue, *qhead, *qtail, *qend;
3015 regset tmp, new_live_at_end;
3016 regset_head tmp_head;
3017 regset_head new_live_at_end_head;
3020 tmp = INITIALIZE_REG_SET (tmp_head);
3021 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3023 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3024 because the `head == tail' style test for an empty queue doesn't
3025 work with a full queue. */
3026 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3028 qhead = qend = queue + n_basic_blocks + 2;
3030 /* Clear out the garbage that might be hanging out in bb->aux. */
3031 for (i = n_basic_blocks - 1; i >= 0; --i)
3032 BASIC_BLOCK (i)->aux = NULL;
3034 /* Queue the blocks set in the initial mask. Do this in reverse block
3035 number order so that the first round is more likely to do
3036 useful work. We use AUX non-null to flag that the block is queued. */
3037 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3039 basic_block bb = BASIC_BLOCK (i);
3044 sbitmap_zero (blocks_out);
3046 while (qhead != qtail)
3048 int rescan, changed;
3057 /* Begin by propagating live_at_start from the successor blocks. */
3058 CLEAR_REG_SET (new_live_at_end);
3059 for (e = bb->succ; e ; e = e->succ_next)
3061 basic_block sb = e->dest;
3062 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3065 /* Regs used in phi nodes are not included in
3066 global_live_at_start, since they are live only along a
3067 particular edge. Set those regs that are live because of a
3068 phi node alternative corresponding to this particular block. */
3069 for_each_successor_phi (bb, &set_phi_alternative_reg,
3072 if (bb == ENTRY_BLOCK_PTR)
3074 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3078 /* On our first pass through this block, we'll go ahead and continue.
3079 Recognize first pass by local_set NULL. On subsequent passes, we
3080 get to skip out early if live_at_end wouldn't have changed. */
3082 if (bb->local_set == NULL)
3084 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3089 /* If any bits were removed from live_at_end, we'll have to
3090 rescan the block. This wouldn't be necessary if we had
3091 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3092 local_live is really dependent on live_at_end. */
3093 CLEAR_REG_SET (tmp);
3094 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3095 new_live_at_end, BITMAP_AND_COMPL);
3099 /* Find the set of changed bits. Take this opportunity
3100 to notice when this set is empty and bail out early. */
3101 CLEAR_REG_SET (tmp);
3102 changed = bitmap_operation (tmp, bb->global_live_at_end,
3103 new_live_at_end, BITMAP_XOR);
3107 /* If any of the changed bits overlap with local_set,
3108 we'll have to rescan the block. Detect overlap by
3109 the AND with ~local_set turning off bits. */
3110 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3115 /* Let our caller know that BB changed enough to require its
3116 death notes updated. */
3117 SET_BIT (blocks_out, bb->index);
3121 /* Add to live_at_start the set of all registers in
3122 new_live_at_end that aren't in the old live_at_end. */
3124 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3126 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3128 changed = bitmap_operation (bb->global_live_at_start,
3129 bb->global_live_at_start,
3136 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3138 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3139 into live_at_start. */
3140 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3142 /* If live_at_start didn't change, no need to go farther. */
3143 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3146 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3149 /* Queue all predecessors of BB so that we may re-examine
3150 their live_at_end. */
3151 for (e = bb->pred; e ; e = e->pred_next)
3153 basic_block pb = e->src;
3154 if (pb->aux == NULL)
3165 FREE_REG_SET (new_live_at_end);
3167 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3169 basic_block bb = BASIC_BLOCK (i);
3170 FREE_REG_SET (bb->local_set);
3176 /* Subroutines of life analysis. */
3178 /* Allocate the permanent data structures that represent the results
3179 of life analysis. Not static since used also for stupid life analysis. */
3182 allocate_bb_life_data ()
3186 for (i = 0; i < n_basic_blocks; i++)
3188 basic_block bb = BASIC_BLOCK (i);
3190 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3191 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3194 ENTRY_BLOCK_PTR->global_live_at_end
3195 = OBSTACK_ALLOC_REG_SET (function_obstack);
3196 EXIT_BLOCK_PTR->global_live_at_start
3197 = OBSTACK_ALLOC_REG_SET (function_obstack);
3199 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3203 allocate_reg_life_data ()
3207 /* Recalculate the register space, in case it has grown. Old style
3208 vector oriented regsets would set regset_{size,bytes} here also. */
3209 allocate_reg_info (max_regno, FALSE, FALSE);
3211 /* Reset all the data we'll collect in propagate_block and its subroutines. */
3213 for (i = 0; i < max_regno; i++)
3217 REG_N_DEATHS (i) = 0;
3218 REG_N_CALLS_CROSSED (i) = 0;
3219 REG_LIVE_LENGTH (i) = 0;
3220 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3224 /* Delete dead instructions for propagate_block. */
3227 propagate_block_delete_insn (bb, insn)
3231 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3233 /* If the insn referred to a label, and that label was attached to
3234 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3235 pretty much mandatory to delete it, because the ADDR_VEC may be
3236 referencing labels that no longer exist. */
3240 rtx label = XEXP (inote, 0);
3243 if (LABEL_NUSES (label) == 1
3244 && (next = next_nonnote_insn (label)) != NULL
3245 && GET_CODE (next) == JUMP_INSN
3246 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3247 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3249 rtx pat = PATTERN (next);
3250 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3251 int len = XVECLEN (pat, diff_vec_p);
3254 for (i = 0; i < len; i++)
3255 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3257 flow_delete_insn (next);
3261 if (bb->end == insn)
3262 bb->end = PREV_INSN (insn);
3263 flow_delete_insn (insn);
3266 /* Delete dead libcalls for propagate_block. Return the insn
3267 before the libcall. */
3270 propagate_block_delete_libcall (bb, insn, note)
3274 rtx first = XEXP (note, 0);
3275 rtx before = PREV_INSN (first);
3277 if (insn == bb->end)
3280 flow_delete_insn_chain (first, insn);
3284 /* Compute the registers live at the beginning of a basic block BB from
3285 those live at the end.
3287 When called, REG_LIVE contains those live at the end. On return, it
3288 contains those live at the beginning.
3290 LOCAL_SET, if non-null, will be set with all registers killed by
3291 this basic block. */
3294 propagate_block (bb, live, local_set, flags)
3300 struct propagate_block_info pbi;
3302 regset_head new_live_head, new_dead_head;
3305 pbi.reg_live = live;
3306 pbi.mem_set_list = NULL_RTX;
3307 pbi.local_set = local_set;
3311 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3312 pbi.reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3314 pbi.reg_next_use = NULL;
3316 pbi.new_live = INITIALIZE_REG_SET (new_live_head);
3317 pbi.new_dead = INITIALIZE_REG_SET (new_dead_head);
3319 if (flags & PROP_REG_INFO)
3323 /* Process the regs live at the end of the block.
3324 Mark them as not local to any one basic block. */
3325 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3327 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3331 /* Scan the block an insn at a time from end to beginning. */
3333 for (insn = bb->end; ; insn = prev)
3335 prev = PREV_INSN (insn);
3337 if (GET_CODE (insn) == NOTE)
3339 /* If this is a call to `setjmp' et al,
3340 warn if any non-volatile datum is live. */
3342 if ((flags & PROP_REG_INFO)
3343 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3344 IOR_REG_SET (regs_live_at_setjmp, pbi.reg_live);
3347 /* Update the life-status of regs for this insn.
3348 First DEAD gets which regs are set in this insn
3349 then LIVE gets which regs are used in this insn.
3350 Then the regs live before the insn
3351 are those live after, with DEAD regs turned off,
3352 and then LIVE regs turned on. */
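      /* In set terms, the update performed after scanning each insn is

	     reg_live = (reg_live & ~new_dead) | new_live

	 where new_dead holds the registers set by the insn and new_live
	 the registers it uses. */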
3354 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3357 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3358 int insn_is_dead = 0;
3359 int libcall_is_dead = 0;
3361 if (flags & PROP_SCAN_DEAD_CODE)
3363 insn_is_dead = insn_dead_p (&pbi, PATTERN (insn), 0,
3365 libcall_is_dead = (insn_is_dead && note != 0
3366 && libcall_dead_p (&pbi, PATTERN (insn),
3370 /* We almost certainly don't want to delete prologue or epilogue
3371 instructions. Warn about probable compiler lossage. */
3374 && (((HAVE_epilogue || HAVE_prologue)
3375 && prologue_epilogue_contains (insn))
3376 || (HAVE_sibcall_epilogue
3377 && sibcall_epilogue_contains (insn))))
3379 if (flags & PROP_KILL_DEAD_CODE)
3381 warning ("ICE: would have deleted prologue/epilogue insn");
3382 if (!inhibit_warnings)
3385 libcall_is_dead = insn_is_dead = 0;
3388 /* If an instruction consists of just dead store(s) on final pass, delete them. */
3390 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3392 if (libcall_is_dead)
3394 prev = propagate_block_delete_libcall (bb, insn, note);
3395 insn = NEXT_INSN (prev);
3398 propagate_block_delete_insn (bb, insn);
3400 /* CC0 is now known to be dead. Either this insn used it,
3401 in which case it doesn't anymore, or clobbered it,
3402 so the next insn can't use it. */
3408 /* See if this is an increment or decrement that can be
3409 merged into a following memory address. */
3412 register rtx x = single_set (insn);
3414 /* Does this instruction increment or decrement a register? */
3415 if (!reload_completed
3416 && (flags & PROP_AUTOINC)
3418 && GET_CODE (SET_DEST (x)) == REG
3419 && (GET_CODE (SET_SRC (x)) == PLUS
3420 || GET_CODE (SET_SRC (x)) == MINUS)
3421 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3422 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3423 /* Ok, look for a following memory ref we can combine with.
3424 If one is found, change the memory ref to a PRE_INC
3425 or PRE_DEC, cancel this insn, and return 1.
3426 Return 0 if nothing has been done. */
3427 && try_pre_increment_1 (&pbi, insn))
3430 #endif /* AUTO_INC_DEC */
3432 CLEAR_REG_SET (pbi.new_live);
3433 CLEAR_REG_SET (pbi.new_dead);
3435 /* If this is not the final pass, and this insn is copying the
3436 value of a library call and it's dead, don't scan the
3437 insns that perform the library call, so that the call's
3438 arguments are not marked live. */
3439 if (libcall_is_dead)
3441 /* Record the death of the dest reg. */
3442 mark_set_regs (&pbi, PATTERN (insn), insn);
3444 insn = XEXP (note, 0);
3445 prev = PREV_INSN (insn);
3447 else if (GET_CODE (PATTERN (insn)) == SET
3448 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3449 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3450 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3451 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3452 /* We have an insn to pop a constant amount off the stack.
3453 (Such insns use PLUS regardless of the direction of the stack,
3454 and any insn to adjust the stack by a constant is always a pop.)
3455 These insns, if not dead stores, have no effect on life. */
3459 /* Any regs live at the time of a call instruction
3460 must not go in a register clobbered by calls.
3461 Find all regs now live and record this for them. */
3463 if (GET_CODE (insn) == CALL_INSN
3464 && (flags & PROP_REG_INFO))
3465 EXECUTE_IF_SET_IN_REG_SET (pbi.reg_live, 0, i,
3466 { REG_N_CALLS_CROSSED (i)++; });
3468 /* Record sets. Do this even for dead instructions,
3469 since they would have killed the values if they hadn't been deleted. */
3471 mark_set_regs (&pbi, PATTERN (insn), insn);
3473 /* If an insn doesn't use CC0, it becomes dead since we
3474 assume that every insn clobbers it. So show it dead here;
3475 mark_used_regs will set it live if it is referenced. */
3480 mark_used_regs (&pbi, PATTERN (insn), NULL_RTX, insn);
3482 /* Sometimes we may have inserted something before INSN
3483 (such as a move) when we make an auto-inc. So ensure
3484 we will scan those insns. */
3486 prev = PREV_INSN (insn);
3489 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3495 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3496 cond = COND_EXEC_TEST (PATTERN (insn));
3498 /* Non-constant calls clobber memory. */
3499 if (! CONST_CALL_P (insn))
3500 free_EXPR_LIST_list (&pbi.mem_set_list);
3502 /* There may be extra registers to be clobbered. */
3503 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3505 note = XEXP (note, 1))
3506 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3507 mark_set_1 (&pbi, XEXP (XEXP (note, 0), 0),
3510 /* Calls change all call-used and global registers. */
3511 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3512 if (call_used_regs[i] && ! global_regs[i]
3516 mark_set_reg (&pbi, gen_rtx_REG (reg_raw_mode[i], i),
3517 cond, &dummy, &dummy);
3520 /* Calls use their arguments. */
3521 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3523 note = XEXP (note, 1))
3524 if (GET_CODE (XEXP (note, 0)) == USE)
3525 mark_used_regs (&pbi, XEXP (XEXP (note, 0), 0),
3528 /* The stack ptr is used (honorarily) by a CALL insn. */
3529 SET_REGNO_REG_SET (pbi.new_live, STACK_POINTER_REGNUM);
3531 /* Calls may also reference any of the global registers,
3532 so they are made live. */
3533 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3535 mark_used_reg (&pbi, gen_rtx_REG (reg_raw_mode[i], i),
3540 /* Update reg_live for the registers killed and used. */
3541 AND_COMPL_REG_SET (pbi.reg_live, pbi.new_dead);
3542 IOR_REG_SET (pbi.reg_live, pbi.new_live);
3544 /* On final pass, update counts of how many insns in which
3545 each reg is live. */
3546 if (flags & PROP_REG_INFO)
3547 EXECUTE_IF_SET_IN_REG_SET (pbi.reg_live, 0, i,
3548 { REG_LIVE_LENGTH (i)++; });
3551 if (insn == bb->head)
3555 FREE_REG_SET (pbi.new_live);
3556 FREE_REG_SET (pbi.new_dead);
3557 free_EXPR_LIST_list (&pbi.mem_set_list);
3559 if (pbi.reg_next_use)
3560 free (pbi.reg_next_use);
3564 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3565 (SET expressions whose destinations are registers dead after the insn).
3566 NEEDED is the regset that says which regs are alive after the insn.
3568 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3570 If X is the entire body of an insn, NOTES contains the reg notes
3571 pertaining to the insn. */
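/* Briefly: a SET is considered dead only when its destination register (or
   tracked memory location) is not live afterward and is none of the
   registers that must always be preserved (global regs, the stack pointer,
   a still-needed frame or arg pointer). A CLOBBER of a dead pseudo is also
   dead. */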
3574 insn_dead_p (pbi, x, call_ok, notes)
3575 struct propagate_block_info *pbi;
3578 rtx notes ATTRIBUTE_UNUSED;
3580 enum rtx_code code = GET_CODE (x);
3583 /* If flow is invoked after reload, we must take existing AUTO_INC
3584 expressions into account. */
3585 if (reload_completed)
3587 for ( ; notes; notes = XEXP (notes, 1))
3589 if (REG_NOTE_KIND (notes) == REG_INC)
3591 int regno = REGNO (XEXP (notes, 0));
3593 /* Don't delete insns to set global regs. */
3594 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3595 || REGNO_REG_SET_P (pbi->reg_live, regno))
3602 /* If setting something that's a reg or part of one,
3603 see if that register's altered value will be live. */
3607 rtx r = SET_DEST (x);
3610 if (GET_CODE (r) == CC0)
3611 return ! pbi->cc0_live;
3614 /* A SET that is a subroutine call cannot be dead. */
3615 if (GET_CODE (SET_SRC (x)) == CALL)
3621 /* Don't eliminate loads from volatile memory or volatile asms. */
3622 else if (volatile_refs_p (SET_SRC (x)))
3625 if (GET_CODE (r) == MEM)
3629 if (MEM_VOLATILE_P (r))
3632 /* Walk the set of memory locations we are currently tracking
3633 and see if one is an identical match to this memory location.
3634 If so, this memory write is dead (remember, we're walking
3635 backwards from the end of the block to the start). */
3636 temp = pbi->mem_set_list;
3639 if (rtx_equal_p (XEXP (temp, 0), r))
3641 temp = XEXP (temp, 1);
3646 while (GET_CODE (r) == SUBREG
3647 || GET_CODE (r) == STRICT_LOW_PART
3648 || GET_CODE (r) == ZERO_EXTRACT)
3651 if (GET_CODE (r) == REG)
3653 int regno = REGNO (r);
3656 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3659 /* If this is a hard register, verify that subsequent
3660 words are not needed. */
3661 if (regno < FIRST_PSEUDO_REGISTER)
3663 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3666 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3670 /* Don't delete insns to set global regs. */
3671 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3674 /* Make sure insns to set the stack pointer aren't deleted. */
3675 if (regno == STACK_POINTER_REGNUM)
3678 /* Make sure insns to set the frame pointer aren't deleted. */
3679 if (regno == FRAME_POINTER_REGNUM
3680 && (! reload_completed || frame_pointer_needed))
3682 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3683 if (regno == HARD_FRAME_POINTER_REGNUM
3684 && (! reload_completed || frame_pointer_needed))
3688 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3689 /* Make sure insns to set arg pointer are never deleted
3690 (if the arg pointer isn't fixed, there will be a USE
3691 for it, so we can treat it normally). */
3692 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3696 /* Otherwise, the set is dead. */
3702 /* If performing several activities, insn is dead if each activity
3703 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3704 CLOBBER or USE that's inside a PARALLEL doesn't make the insn worth keeping. */
3706 else if (code == PARALLEL)
3708 int i = XVECLEN (x, 0);
3710 for (i--; i >= 0; i--)
3711 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3712 && GET_CODE (XVECEXP (x, 0, i)) != USE
3713 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3719 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3720 is not necessarily true for hard registers. */
3721 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3722 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3723 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3726 /* We do not check other CLOBBER or USE here. An insn consisting of just
3727 a CLOBBER or just a USE should not be deleted. */
3731 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3732 return 1 if the entire library call is dead.
3733 This is true if X copies a register (hard or pseudo)
3734 and if the hard return reg of the call insn is dead.
3735 (The caller should have tested the destination of X already for death.)
3737 If this insn doesn't just copy a register, then we don't
3738 have an ordinary libcall. In that case, cse could not have
3739 managed to substitute the source for the dest later on,
3740 so we can assume the libcall is dead.
3742 NEEDED is the bit vector of pseudoregs live before this insn.
3743 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3746 libcall_dead_p (pbi, x, note, insn)
3747 struct propagate_block_info *pbi;
3752 register RTX_CODE code = GET_CODE (x);
3756 register rtx r = SET_SRC (x);
3757 if (GET_CODE (r) == REG)
3759 rtx call = XEXP (note, 0);
3763 /* Find the call insn. */
3764 while (call != insn && GET_CODE (call) != CALL_INSN)
3765 call = NEXT_INSN (call);
3767 /* If there is none, do nothing special,
3768 since ordinary death handling can understand these insns. */
3772 /* See if the hard reg holding the value is dead.
3773 If this is a PARALLEL, find the call within it. */
3774 call_pat = PATTERN (call);
3775 if (GET_CODE (call_pat) == PARALLEL)
3777 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3778 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3779 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3782 /* This may be a library call that is returning a value
3783 via invisible pointer. Do nothing special, since
3784 ordinary death handling can understand these insns. */
3788 call_pat = XVECEXP (call_pat, 0, i);
3791 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3797 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3798 live at function entry. Don't count global register variables, variables
3799 in registers that can be used for function arg passing, or variables in
3800 fixed hard registers. */
3803 regno_uninitialized (regno)
3806 if (n_basic_blocks == 0
3807 || (regno < FIRST_PSEUDO_REGISTER
3808 && (global_regs[regno]
3809 || fixed_regs[regno]
3810 || FUNCTION_ARG_REGNO_P (regno))))
3813 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3816 /* 1 if register REGNO was alive at a place where `setjmp' was called
3817 and was set more than once or is an argument.
3818 Such regs may be clobbered by `longjmp'. */
3821 regno_clobbered_at_setjmp (regno)
3824 if (n_basic_blocks == 0)
3827 return ((REG_N_SETS (regno) > 1
3828 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3829 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3832 /* INSN references memory, possibly using autoincrement addressing modes.
3833 Find any entries on the mem_set_list that need to be invalidated due
3834 to an address change. */
3836 invalidate_mems_from_autoinc (pbi, insn)
3837 struct propagate_block_info *pbi;
3840 rtx note = REG_NOTES (insn);
3841 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3843 if (REG_NOTE_KIND (note) == REG_INC)
3845 rtx temp = pbi->mem_set_list;
3846 rtx prev = NULL_RTX;
3851 next = XEXP (temp, 1);
3852 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3854 /* Splice temp out of list. */
3856 XEXP (prev, 1) = next;
3858 pbi->mem_set_list = next;
3859 free_EXPR_LIST_node (temp);
3869 /* Process the registers that are set within X. Their bits are set to
3870 1 in the regset DEAD, because they are dead prior to this insn.
3872 If INSN is nonzero, it is the insn being processed.
3874 FLAGS is the set of operations to perform. */
3877 mark_set_regs (pbi, x, insn)
3878 struct propagate_block_info *pbi;
3881 rtx cond = NULL_RTX;
3884 switch (GET_CODE (x))
3888 mark_set_1 (pbi, SET_DEST (x), cond, insn);
3892 cond = COND_EXEC_TEST (x);
3893 x = COND_EXEC_CODE (x);
3899 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3901 rtx sub = XVECEXP (x, 0, i);
3902 switch (GET_CODE (sub))
3905 if (cond != NULL_RTX)
3908 cond = COND_EXEC_TEST (sub);
3909 sub = COND_EXEC_CODE (sub);
3910 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
3916 mark_set_1 (pbi, SET_DEST (sub), cond, insn);
3931 /* Process a single SET rtx, X. */
3934 mark_set_1 (pbi, reg, cond, insn)
3935 struct propagate_block_info *pbi;
3936 rtx reg, cond, insn;
3938 register int regno = -1;
3939 int flags = pbi->flags;
3941 /* Some targets place small structures in registers for
3942 return values of functions. We have to detect this
3943 case specially here to get correct flow information. */
3944 if (GET_CODE (reg) == PARALLEL
3945 && GET_MODE (reg) == BLKmode)
3949 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3950 mark_set_1 (pbi, XVECEXP (reg, 0, i), cond, insn);
3954 /* Modifying just one hardware register of a multi-reg value or just a
3955 byte field of a register does not mean the value from before this insn
3956 is now dead. But it does mean liveness of that register at the end of
3957 the block is significant.
3959 Within mark_set_1, however, we treat it as if the register is indeed
3960 modified. mark_used_regs will, however, also treat this register as
3961 being used. Thus, we treat these insns as setting a new value for the
3962 register as a function of its old value. This causes LOG_LINKS to be
3963 made appropriately and this will help combine.
3965 ??? This is all done incorrectly. We should not be setting bits in
3966 new_dead for these registers, since, as we just explained, they are
3967 not dead. We should be setting bits in local_set, and updating
3968 LOG_LINKS, but that is different. */
3970 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3971 || GET_CODE (reg) == SIGN_EXTRACT
3972 || GET_CODE (reg) == STRICT_LOW_PART)
3973 reg = XEXP (reg, 0);
3975 /* If this set is a MEM, then it kills any aliased writes.
3976 If this set is a REG, then it kills any MEMs which use the reg. */
3977 if (flags & PROP_SCAN_DEAD_CODE)
3979 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
3981 rtx temp = pbi->mem_set_list;
3982 rtx prev = NULL_RTX;
3987 next = XEXP (temp, 1);
3988 if ((GET_CODE (reg) == MEM
3989 && output_dependence (XEXP (temp, 0), reg))
3990 || (GET_CODE (reg) == REG
3991 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3993 /* Splice this entry out of the list. */
3995 XEXP (prev, 1) = next;
3997 pbi->mem_set_list = next;
3998 free_EXPR_LIST_node (temp);
4006 /* If the memory reference had embedded side effects (autoincrement
4007 address modes), then we may need to kill some entries on the memory set list. */
4009 if (insn && GET_CODE (reg) == MEM)
4010 invalidate_mems_from_autoinc (pbi, insn);
4012 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4013 /* We do not know the size of a BLKmode store, so we do not track
4014 them for redundant store elimination. */
4015 && GET_MODE (reg) != BLKmode
4016 /* There are no REG_INC notes for SP, so we can't assume we'll see
4017 everything that invalidates it. To be safe, don't eliminate any
4018 stores through SP; none of them should be redundant anyway. */
4019 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4020 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4023 if (GET_CODE (reg) == REG
4024 && (regno = REGNO (reg),
4025 ! (regno == FRAME_POINTER_REGNUM
4026 && (! reload_completed || frame_pointer_needed)))
4027 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4028 && ! (regno == HARD_FRAME_POINTER_REGNUM
4029 && (! reload_completed || frame_pointer_needed))
4031 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4032 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4036 int some_was_live, some_was_dead;
4038 /* Perform the pbi datastructure update. */
4039 if (! mark_set_reg (pbi, reg, cond, &some_was_live, &some_was_dead))
4042 /* Additional data to record if this is the final pass. */
4043 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4044 | PROP_DEATH_NOTES | PROP_AUTOINC))
4047 register int blocknum = pbi->bb->index;
4050 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4051 y = pbi->reg_next_use[regno];
4053 /* If this is a hard reg, record that this function uses the reg. */
4055 if (regno < FIRST_PSEUDO_REGISTER)
4058 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4060 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4061 for (i = regno; i < endregno; i++)
4063 /* The next use is no longer "next", since a store intervenes. */
4065 pbi->reg_next_use[i] = 0;
4068 if (flags & PROP_REG_INFO)
4069 for (i = regno; i < endregno; i++)
4071 regs_ever_live[i] = 1;
4077 /* The next use is no longer "next", since a store intervenes. */
4079 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4080 pbi->reg_next_use[regno] = 0;
4082 /* Keep track of which basic blocks each reg appears in. */
4084 if (flags & PROP_REG_INFO)
4086 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4087 REG_BASIC_BLOCK (regno) = blocknum;
4088 else if (REG_BASIC_BLOCK (regno) != blocknum)
4089 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4091 /* Count (weighted) references, stores, etc. This counts a
4092 register twice if it is modified, but that is correct. */
4093 REG_N_SETS (regno)++;
4094 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4096 /* The insns where a reg is live are normally counted
4097 elsewhere, but we want the count to include the insn
4098 where the reg is set, and the normal counting mechanism
4099 would not count it. */
4100 REG_LIVE_LENGTH (regno)++;
4104 if (! some_was_dead)
4106 if (flags & PROP_LOG_LINKS)
4108 /* Make a logical link from the next following insn
4109 that uses this register, back to this insn.
4110 The following insns have already been processed.
4112 We don't build a LOG_LINK for hard registers contained
4113 in ASM_OPERANDs. If these registers get replaced,
4114 we might wind up changing the semantics of the insn,
4115 even if reload can make what appear to be valid
4116 assignments later. */
4117 if (y && (BLOCK_NUM (y) == blocknum)
4118 && (regno >= FIRST_PSEUDO_REGISTER
4119 || asm_noperands (PATTERN (y)) < 0))
4120 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4123 else if (! some_was_live)
4125 if (flags & PROP_REG_INFO)
4126 REG_N_DEATHS (REGNO (reg))++;
4128 if (flags & PROP_DEATH_NOTES)
4130 /* Note that dead stores have already been deleted
4131 when possible. If we get here, we have found a
4132 dead store that cannot be eliminated (because the
4133 same insn does something useful). Indicate this
4134 by marking the reg being set as dying here. */
4136 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4141 if (flags & PROP_DEATH_NOTES)
4143 /* This is a case where we have a multi-word hard register
4144 and some, but not all, of the words of the register are
4145 needed in subsequent insns. Write REG_UNUSED notes
4146 for those parts that were not needed. This case should be rare. */
4151 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4153 if (! REGNO_REG_SET_P (pbi->reg_live, regno + i))
4157 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4163 else if (GET_CODE (reg) == REG)
4165 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4166 pbi->reg_next_use[regno] = 0;
4169 /* If this is the last pass and this is a SCRATCH, show it will be dying
4170 here and count it. */
4171 else if (GET_CODE (reg) == SCRATCH)
4173 if (flags & PROP_DEATH_NOTES)
4175 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4179 /* Update data structures for a (possibly conditional) store into REG.
4180 Return true if REG is now unconditionally dead. */
4183 mark_set_reg (pbi, reg, cond, p_some_was_live, p_some_was_dead)
4184 struct propagate_block_info *pbi;
4186 rtx cond ATTRIBUTE_UNUSED;
4187 int *p_some_was_live, *p_some_was_dead;
4189 int regno = REGNO (reg);
4190 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4191 int some_was_dead = ! some_was_live;
4193 /* Mark it as a significant register for this basic block. */
4195 SET_REGNO_REG_SET (pbi->local_set, regno);
4197 /* A hard reg in a wide mode may really be multiple registers.
4198 If so, mark all of them just like the first. */
4199 if (regno < FIRST_PSEUDO_REGISTER)
4201 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4204 int regno_n = regno + n;
4205 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4207 SET_REGNO_REG_SET (pbi->local_set, regno_n);
4209 some_was_live |= needed_regno;
4210 some_was_dead |= ! needed_regno;
4214 *p_some_was_live = some_was_live;
4215 *p_some_was_dead = some_was_dead;
4217 /* The stack pointer is never dead. Well, not strictly true, but it's
4218 very difficult to tell from here. Hopefully combine_stack_adjustments
4219 will fix up the most egregious errors. */
4220 if (regno == STACK_POINTER_REGNUM)
4223 /* Mark it as dead before this insn. */
4224 SET_REGNO_REG_SET (pbi->new_dead, regno);
4225 if (regno < FIRST_PSEUDO_REGISTER)
4227 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4229 SET_REGNO_REG_SET (pbi->new_dead, regno + n);
4232 /* Unconditionally dead. */
4238 /* X is a MEM found in INSN. See if we can convert it into an auto-increment address. */
4242 find_auto_inc (pbi, x, insn)
4243 struct propagate_block_info *pbi;
4247 rtx addr = XEXP (x, 0);
4248 HOST_WIDE_INT offset = 0;
4251 /* Here we detect use of an index register which might be good for
4252 postincrement, postdecrement, preincrement, or predecrement. */
4254 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4255 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4257 if (GET_CODE (addr) == REG)
4260 register int size = GET_MODE_SIZE (GET_MODE (x));
4263 int regno = REGNO (addr);
4265 /* Is the next use an increment that might make auto-increment? */
4266 if ((incr = pbi->reg_next_use[regno]) != 0
4267 && (set = single_set (incr)) != 0
4268 && GET_CODE (set) == SET
4269 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4270 /* Can't add side effects to jumps; if reg is spilled and
4271 reloaded, there's no way to store back the altered value. */
4272 && GET_CODE (insn) != JUMP_INSN
4273 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4274 && XEXP (y, 0) == addr
4275 && GET_CODE (XEXP (y, 1)) == CONST_INT
4276 && ((HAVE_POST_INCREMENT
4277 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4278 || (HAVE_POST_DECREMENT
4279 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4280 || (HAVE_PRE_INCREMENT
4281 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4282 || (HAVE_PRE_DECREMENT
4283 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4284 /* Make sure this reg appears only once in this insn. */
4285 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4286 use != 0 && use != (rtx) 1))
4288 rtx q = SET_DEST (set);
4289 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4290 ? (offset ? PRE_INC : POST_INC)
4291 : (offset ? PRE_DEC : POST_DEC));
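      /* The (offset, increment) pairs accepted above correspond to the
	 new addressing modes as follows:

	     *p         ... p += size   ->  POST_INC
	     *p         ... p -= size   ->  POST_DEC
	     *(p+size)  ... p += size   ->  PRE_INC
	     *(p-size)  ... p -= size   ->  PRE_DEC  */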
4293 if (dead_or_set_p (incr, addr)
4294 /* Mustn't autoinc an eliminable register. */
4295 && (regno >= FIRST_PSEUDO_REGISTER
4296 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4298 /* This is the simple case. Try to make the auto-inc. If
4299 we can't, we are done. Otherwise, we will do any
4300 needed updates below. */
4301 if (! validate_change (insn, &XEXP (x, 0),
4302 gen_rtx_fmt_e (inc_code, Pmode, addr),
4306 else if (GET_CODE (q) == REG
4307 /* PREV_INSN used here to check the semi-open interval [insn, incr). */
4309 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4310 /* We must also check for sets of q as q may be
4311 a call clobbered hard register and there may
4312 be a call between PREV_INSN (insn) and incr. */
4313 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4315 /* We have *p followed sometime later by q = p+size.
4316 Both p and q must be live afterward,
4317 and q is not used between INSN and its assignment.
4318 Change it to q = p, ...*q..., q = q+size.
4319 Then fall into the usual case. */
4324 emit_move_insn (q, addr);
4325 insns = get_insns ();
4328 bb = BLOCK_FOR_INSN (insn);
4329 for (temp = insns; temp; temp = NEXT_INSN (temp))
4330 set_block_for_insn (temp, bb);
4332 /* If we can't make the auto-inc, or can't make the
4333 replacement into Y, exit. There's no point in making
4334 the change below if we can't do the auto-inc and doing
4335 so is not correct in the pre-inc case. */
4337 validate_change (insn, &XEXP (x, 0),
4338 gen_rtx_fmt_e (inc_code, Pmode, q),
4340 validate_change (incr, &XEXP (y, 0), q, 1);
4341 if (! apply_change_group ())
4344 /* We now know we'll be doing this change, so emit the
4345 new insn(s) and do the updates. */
4346 emit_insns_before (insns, insn);
4348 if (BLOCK_FOR_INSN (insn)->head == insn)
4349 BLOCK_FOR_INSN (insn)->head = insns;
4351 /* INCR will become a NOTE and INSN won't contain a
4352 use of ADDR. If a use of ADDR was just placed in
4353 the insn before INSN, make that the next use.
4354 Otherwise, invalidate it. */
4355 if (GET_CODE (PREV_INSN (insn)) == INSN
4356 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4357 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4358 pbi->reg_next_use[regno] = PREV_INSN (insn);
4360 pbi->reg_next_use[regno] = 0;
4365 /* REGNO is now used in INCR which is below INSN, but it
4366 previously wasn't live here. If we don't mark it as
4367 live, we'll put a REG_DEAD note for it on this insn,
4368 which is incorrect. */
4369 SET_REGNO_REG_SET (pbi->reg_live, regno);
4371 /* If there are any calls between INSN and INCR, show
4372 that REGNO now crosses them. */
4373 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4374 if (GET_CODE (temp) == CALL_INSN)
4375 REG_N_CALLS_CROSSED (regno)++;
4380 /* If we haven't returned, it means we were able to make the
4381 auto-inc, so update the status. First, record that this insn
4382 has an implicit side effect. */
4385 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4387 /* Modify the old increment-insn to simply copy
4388 the already-incremented value of our register. */
4389 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4392 /* If that makes it a no-op (copying the register into itself) delete
4393 it so it won't appear to be a "use" and a "set" of this register. */
4395 if (SET_DEST (set) == addr)
4397 /* If the original source was dead, it's dead now. */
4398 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4399 if (note && XEXP (note, 0) != addr)
4400 SET_REGNO_REG_SET (pbi->new_dead, REGNO (XEXP (note, 0)));
4402 PUT_CODE (incr, NOTE);
4403 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4404 NOTE_SOURCE_FILE (incr) = 0;
4407 if (regno >= FIRST_PSEUDO_REGISTER)
4409 /* Count an extra reference to the reg. When a reg is
4410 incremented, spilling it is worse, so we want to make
4411 that less likely. */
4412 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4414 /* Count the increment as a setting of the register,
4415 even though it isn't a SET in rtl. */
4416 REG_N_SETS (regno)++;
4421 #endif /* AUTO_INC_DEC */
4424 mark_used_reg (pbi, reg, cond, insn)
4425 struct propagate_block_info *pbi;
4427 rtx cond ATTRIBUTE_UNUSED;
4430 int regno = REGNO (reg);
4431 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4432 int some_was_dead = ! some_was_live;
4434 SET_REGNO_REG_SET (pbi->new_live, regno);
4436 /* A hard reg in a wide mode may really be multiple registers.
4437 If so, mark all of them just like the first. */
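     /* Illustrative example (not from the original sources): on a typical
	32-bit target a DImode value in hard register 2 spans hard
	registers 2 and 3, so HARD_REGNO_NREGS returns 2 and both
	registers are marked live below.  */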
4438 if (regno < FIRST_PSEUDO_REGISTER)
4440 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4443 int regno_n = regno + n;
4444 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4446 SET_REGNO_REG_SET (pbi->new_live, regno_n);
4447 some_was_live |= needed_regno;
4448 some_was_dead |= ! needed_regno;
4452 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4454 /* Record where each reg is used, so when the reg is set we know
4455 the next insn that uses it. */
4456 pbi->reg_next_use[regno] = insn;
4459 if (pbi->flags & PROP_REG_INFO)
4461 if (regno < FIRST_PSEUDO_REGISTER)
4463 /* If this is a register we are going to try to eliminate,
4464 don't mark it live here. If we are successful in
4465 eliminating it, it need not be live unless it is used for
4466 pseudos, in which case it will have been set live when it
4467 was allocated to the pseudos. If the register will not
4468 be eliminated, reload will set it live at that point.
4470 Otherwise, record that this function uses this register. */
4471 /* ??? The PPC backend tries to "eliminate" on the pic
4472 register to itself. This should be fixed. In the mean
4473 time, hack around it. */
4475 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
4476 && (regno == FRAME_POINTER_REGNUM
4477 || regno == ARG_POINTER_REGNUM)))
4479 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4481 regs_ever_live[regno + --n] = 1;
4487 /* Keep track of which basic block each reg appears in. */
4489 register int blocknum = pbi->bb->index;
4490 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4491 REG_BASIC_BLOCK (regno) = blocknum;
4492 else if (REG_BASIC_BLOCK (regno) != blocknum)
4493 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4495 /* Count (weighted) number of uses of each reg. */
4496 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4500 /* Record and count the insns in which a reg dies. If it is used in
4501 this insn and was dead below the insn then it dies in this insn.
4502 If it was set in this insn, we do not make a REG_DEAD note;
4503 likewise if we already made such a note.
4505 ??? This could be done better. In new_dead we have a record of
4506 which registers are set or clobbered this insn (which in itself is
4507 slightly incorrect, see the commentary near strict_low_part in
4508 mark_set_1), which should be the set of registers that we do not
4509 wish to create death notes for under the above rule. Note that
4510 we have not yet processed the call-clobbered/call-used registers,
4511 and they do not quite follow the above rule, since we do want death
4512 notes for call-clobbered register arguments. Which begs the whole
4513 question of whether we should in fact have death notes for registers
4514 used and clobbered (but not set) in the same insn. The only useful
4515 thing we ought to be getting from dead_or_set_p is detection of
4516 duplicate death notes. */
4518 if ((pbi->flags & PROP_DEATH_NOTES)
4520 && ! dead_or_set_p (insn, reg))
4524 /* Check for the case where the register dying partially
4525 overlaps the register set by this insn. */
4526 if (regno < FIRST_PSEUDO_REGISTER
4527 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4529 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4531 some_was_live |= dead_or_set_regno_p (insn, regno + n);
4534 /* If none of the words in X is needed, make a REG_DEAD note.
4535 Otherwise, we must make partial REG_DEAD notes. */
4536 if (! some_was_live)
4539 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4540 REG_N_DEATHS (regno)++;
4544 /* Don't make a REG_DEAD note for a part of a register
4545 that is set in the insn. */
4547 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4548 for (; n >= regno; n--)
4549 if (!REGNO_REG_SET_P (pbi->reg_live, n)
4550 && ! dead_or_set_regno_p (insn, n))
4552 = alloc_EXPR_LIST (REG_DEAD,
4553 gen_rtx_REG (reg_raw_mode[n], n),
4559 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
4560 This is done assuming the registers needed from X are those that
4561 have 1-bits in PBI->REG_LIVE.
4563 INSN is the containing instruction. If INSN is dead, this function
4567 mark_used_regs (pbi, x, cond, insn)
4568 struct propagate_block_info *pbi;
4571 register RTX_CODE code;
4573 int flags = pbi->flags;
4576 code = GET_CODE (x);
4596 /* If we are clobbering a MEM, mark any registers inside the address
4598 if (GET_CODE (XEXP (x, 0)) == MEM)
4599 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
4603 /* Don't bother watching stores to mems if this is not the
4604 final pass. We'll not be deleting dead stores this round. */
4605 if (flags & PROP_SCAN_DEAD_CODE)
4607 /* Invalidate the data for the last MEM stored, but only if MEM is
4608 something that can be stored into. */
4609 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4610 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4611 ; /* needn't clear the memory set list */
4614 rtx temp = pbi->mem_set_list;
4615 rtx prev = NULL_RTX;
4620 next = XEXP (temp, 1);
4621 if (anti_dependence (XEXP (temp, 0), x))
4623 /* Splice temp out of the list. */
4625 XEXP (prev, 1) = next;
4627 pbi->mem_set_list = next;
4628 free_EXPR_LIST_node (temp);
4636 /* If the memory reference had embedded side effects (autoincrement
4637 address modes), then we may need to kill some entries on the
4640 invalidate_mems_from_autoinc (pbi, insn);
4644 if (flags & PROP_AUTOINC)
4645 find_auto_inc (pbi, x, insn);
4650 if (GET_CODE (SUBREG_REG (x)) == REG
4651 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4652 && (GET_MODE_SIZE (GET_MODE (x))
4653 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4654 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4656 /* While we're here, optimize this case. */
4658 if (GET_CODE (x) != REG)
4663 /* See a register other than being set => mark it as needed. */
4664 mark_used_reg (pbi, x, cond, insn);
4669 register rtx testreg = SET_DEST (x);
4672 /* If storing into MEM, don't show it as being used. But do
4673 show the address as being used. */
4674 if (GET_CODE (testreg) == MEM)
4677 if (flags & PROP_AUTOINC)
4678 find_auto_inc (pbi, testreg, insn);
4680 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
4681 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4685 /* Storing in STRICT_LOW_PART is like storing in a reg
4686 in that this SET might be dead, so ignore it in TESTREG,
4687 but in some other ways it is like using the reg.
4689 Storing in a SUBREG or a bit field is like storing the entire
4690 register in that if the register's value is not used
4691 then this SET is not needed. */
4692 while (GET_CODE (testreg) == STRICT_LOW_PART
4693 || GET_CODE (testreg) == ZERO_EXTRACT
4694 || GET_CODE (testreg) == SIGN_EXTRACT
4695 || GET_CODE (testreg) == SUBREG)
4697 if (GET_CODE (testreg) == SUBREG
4698 && GET_CODE (SUBREG_REG (testreg)) == REG
4699 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4700 && (GET_MODE_SIZE (GET_MODE (testreg))
4701 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4702 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4704 /* Modifying a single register in an alternate mode
4705 does not use any of the old value. But these other
4706 ways of storing in a register do use the old value. */
4707 if (GET_CODE (testreg) == SUBREG
4708 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4713 testreg = XEXP (testreg, 0);
4716 /* If this is a store into a register, recursively scan the
4717 value being stored. */
4719 if ((GET_CODE (testreg) == PARALLEL
4720 && GET_MODE (testreg) == BLKmode)
4721 || (GET_CODE (testreg) == REG
4722 && (regno = REGNO (testreg),
4723 ! (regno == FRAME_POINTER_REGNUM
4724 && (! reload_completed || frame_pointer_needed)))
4725 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4726 && ! (regno == HARD_FRAME_POINTER_REGNUM
4727 && (! reload_completed || frame_pointer_needed))
4729 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4730 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4735 mark_used_regs (pbi, SET_DEST (x), cond, insn);
4736 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4743 case UNSPEC_VOLATILE:
4747 /* Traditional and volatile asm instructions must be considered to use
4748 and clobber all hard registers, all pseudo-registers and all of
4749 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4751 Consider for instance a volatile asm that changes the fpu rounding
4752 mode. An insn should not be moved across this even if it only uses
4753 pseudo-regs because it might give an incorrectly rounded result.
4755 ?!? Unfortunately, marking all hard registers as live causes massive
4756 problems for the register allocator and marking all pseudos as live
4757 creates mountains of uninitialized variable warnings.
4759 So for now, just clear the memory set list and mark any regs
4760 we can find in ASM_OPERANDS as used. */
4761 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4762 free_EXPR_LIST_list (&pbi->mem_set_list);
4764 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4765 We can not just fall through here since then we would be confused
4766 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4767 traditional asms unlike their normal usage. */
4768 if (code == ASM_OPERANDS)
4772 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4773 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
4779 if (cond != NULL_RTX)
4782 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
4784 cond = COND_EXEC_TEST (x);
4785 x = COND_EXEC_CODE (x);
4789 /* We _do_not_ want to scan operands of phi nodes. Operands of
4790 a phi function are evaluated only when control reaches this
4791 block along a particular edge. Therefore, regs that appear
4792 as arguments to phi should not be added to the global live at
4800 /* Recursively scan the operands of this expression. */
4803 register const char *fmt = GET_RTX_FORMAT (code);
4806 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4810 /* Tail recursive case: save a function call level. */
4816 mark_used_regs (pbi, XEXP (x, i), cond, insn);
4818 else if (fmt[i] == 'E')
4821 for (j = 0; j < XVECLEN (x, i); j++)
4822 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
4831 try_pre_increment_1 (pbi, insn)
4832 struct propagate_block_info *pbi;
4835 /* Find the next use of this reg. If in same basic block,
4836 make it do pre-increment or pre-decrement if appropriate. */
4837 rtx x = single_set (insn);
4838 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4839 * INTVAL (XEXP (SET_SRC (x), 1)));
4840 int regno = REGNO (SET_DEST (x));
4841 rtx y = pbi->reg_next_use[regno];
4843 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4844 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4845 mode would be better. */
4846 && ! dead_or_set_p (y, SET_DEST (x))
4847 && try_pre_increment (y, SET_DEST (x), amount))
4849 /* We have found a suitable auto-increment
4850 and already changed insn Y to do it.
4851 So flush this increment-instruction. */
4852 PUT_CODE (insn, NOTE);
4853 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4854 NOTE_SOURCE_FILE (insn) = 0;
4855 /* Count a reference to this reg for the increment
4856 insn we are deleting. When a reg is incremented,
4857 spilling it is worse, so we want to make that
4859 if (regno >= FIRST_PSEUDO_REGISTER)
4861 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4862 REG_N_SETS (regno)++;
4869 /* Try to change INSN so that it does pre-increment or pre-decrement
4870 addressing on register REG in order to add AMOUNT to REG.
4871 AMOUNT is negative for pre-decrement.
4872 Returns 1 if the change could be made.
4873 This checks everything about the validity of the result of modifying INSN. */
4876 try_pre_increment (insn, reg, amount)
4878 HOST_WIDE_INT amount;
4882 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4883 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4885 /* Nonzero if we can try to make a post-increment or post-decrement.
4886 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4887 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4888 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4891 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4894 /* From the sign of increment, see which possibilities are conceivable
4895 on this target machine. */
4896 if (HAVE_PRE_INCREMENT && amount > 0)
4898 if (HAVE_POST_INCREMENT && amount > 0)
4901 if (HAVE_PRE_DECREMENT && amount < 0)
4903 if (HAVE_POST_DECREMENT && amount < 0)
4906 if (! (pre_ok || post_ok))
4909 /* It is not safe to add a side effect to a jump insn
4910 because if the incremented register is spilled and must be reloaded
4911 there would be no way to store the incremented value back in memory. */
4913 if (GET_CODE (insn) == JUMP_INSN)
4918 use = find_use_as_address (PATTERN (insn), reg, 0);
4919 if (post_ok && (use == 0 || use == (rtx) 1))
4921 use = find_use_as_address (PATTERN (insn), reg, -amount);
4925 if (use == 0 || use == (rtx) 1)
4928 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4931 /* See if this combination of instruction and addressing mode exists. */
4932 if (! validate_change (insn, &XEXP (use, 0),
4933 gen_rtx_fmt_e (amount > 0
4934 ? (do_post ? POST_INC : PRE_INC)
4935 : (do_post ? POST_DEC : PRE_DEC),
4939 /* Record that this insn now has an implicit side effect on X. */
4940 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4944 #endif /* AUTO_INC_DEC */
4946 /* Find the place in the rtx X where REG is used as a memory address.
4947 Return the MEM rtx that so uses it.
4948 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4949 (plus REG (const_int PLUSCONST)).
4951 If such an address does not appear, return 0.
4952 If REG appears more than once, or is used other than in such an address,
4956 find_use_as_address (x, reg, plusconst)
4959 HOST_WIDE_INT plusconst;
4961 enum rtx_code code = GET_CODE (x);
4962 const char *fmt = GET_RTX_FORMAT (code);
4964 register rtx value = 0;
4967 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4970 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4971 && XEXP (XEXP (x, 0), 0) == reg
4972 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4973 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4976 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4978 /* If REG occurs inside a MEM used in a bit-field reference,
4979 that is unacceptable. */
4980 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4981 return (rtx) (HOST_WIDE_INT) 1;
4985 return (rtx) (HOST_WIDE_INT) 1;
4987 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4991 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4995 return (rtx) (HOST_WIDE_INT) 1;
4997 else if (fmt[i] == 'E')
5000 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5002 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5006 return (rtx) (HOST_WIDE_INT) 1;
5014 /* Write information about registers and basic blocks into FILE.
5015 This is part of making a debugging dump. */
5018 dump_regset (r, outf)
5025 fputs (" (nil)", outf);
5029 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5031 fprintf (outf, " %d", i);
5032 if (i < FIRST_PSEUDO_REGISTER)
5033 fprintf (outf, " [%s]",
5042 dump_regset (r, stderr);
5043 putc ('\n', stderr);
5047 dump_flow_info (file)
5051 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5053 fprintf (file, "%d registers.\n", max_regno);
5054 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5057 enum reg_class class, altclass;
5058 fprintf (file, "\nRegister %d used %d times across %d insns",
5059 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5060 if (REG_BASIC_BLOCK (i) >= 0)
5061 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5063 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5064 (REG_N_SETS (i) == 1) ? "" : "s");
5065 if (REG_USERVAR_P (regno_reg_rtx[i]))
5066 fprintf (file, "; user var");
5067 if (REG_N_DEATHS (i) != 1)
5068 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5069 if (REG_N_CALLS_CROSSED (i) == 1)
5070 fprintf (file, "; crosses 1 call");
5071 else if (REG_N_CALLS_CROSSED (i))
5072 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5073 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5074 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5075 class = reg_preferred_class (i);
5076 altclass = reg_alternate_class (i);
5077 if (class != GENERAL_REGS || altclass != ALL_REGS)
5079 if (altclass == ALL_REGS || class == ALL_REGS)
5080 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5081 else if (altclass == NO_REGS)
5082 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5084 fprintf (file, "; pref %s, else %s",
5085 reg_class_names[(int) class],
5086 reg_class_names[(int) altclass]);
5088 if (REGNO_POINTER_FLAG (i))
5089 fprintf (file, "; pointer");
5090 fprintf (file, ".\n");
5093 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5094 for (i = 0; i < n_basic_blocks; i++)
5096 register basic_block bb = BASIC_BLOCK (i);
5099 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5100 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5102 fprintf (file, "Predecessors: ");
5103 for (e = bb->pred; e ; e = e->pred_next)
5104 dump_edge_info (file, e, 0);
5106 fprintf (file, "\nSuccessors: ");
5107 for (e = bb->succ; e ; e = e->succ_next)
5108 dump_edge_info (file, e, 1);
5110 fprintf (file, "\nRegisters live at start:");
5111 dump_regset (bb->global_live_at_start, file);
5113 fprintf (file, "\nRegisters live at end:");
5114 dump_regset (bb->global_live_at_end, file);
5125 dump_flow_info (stderr);
5129 dump_edge_info (file, e, do_succ)
5134 basic_block side = (do_succ ? e->dest : e->src);
5136 if (side == ENTRY_BLOCK_PTR)
5137 fputs (" ENTRY", file);
5138 else if (side == EXIT_BLOCK_PTR)
5139 fputs (" EXIT", file);
5141 fprintf (file, " %d", side->index);
5145 static const char * const bitnames[] = {
5146 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5149 int i, flags = e->flags;
5153 for (i = 0; flags; i++)
5154 if (flags & (1 << i))
5160 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5161 fputs (bitnames[i], file);
5163 fprintf (file, "%d", i);
5171 /* Print out one basic block with live information at start and end. */
5181 fprintf (outf, ";; Basic block %d, loop depth %d",
5182 bb->index, bb->loop_depth);
5183 if (bb->eh_beg != -1 || bb->eh_end != -1)
5184 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5187 fputs (";; Predecessors: ", outf);
5188 for (e = bb->pred; e ; e = e->pred_next)
5189 dump_edge_info (outf, e, 0);
5192 fputs (";; Registers live at start:", outf);
5193 dump_regset (bb->global_live_at_start, outf);
5196 for (insn = bb->head, last = NEXT_INSN (bb->end);
5198 insn = NEXT_INSN (insn))
5199 print_rtl_single (outf, insn);
5201 fputs (";; Registers live at end:", outf);
5202 dump_regset (bb->global_live_at_end, outf);
5205 fputs (";; Successors: ", outf);
5206 for (e = bb->succ; e; e = e->succ_next)
5207 dump_edge_info (outf, e, 1);
5215 dump_bb (bb, stderr);
5222 dump_bb (BASIC_BLOCK(n), stderr);
5225 /* Like print_rtl, but also print out live information for the start of each
5229 print_rtl_with_bb (outf, rtx_first)
5233 register rtx tmp_rtx;
5236 fprintf (outf, "(nil)\n");
5240 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5241 int max_uid = get_max_uid ();
5242 basic_block *start = (basic_block *)
5243 xcalloc (max_uid, sizeof (basic_block));
5244 basic_block *end = (basic_block *)
5245 xcalloc (max_uid, sizeof (basic_block));
5246 enum bb_state *in_bb_p = (enum bb_state *)
5247 xcalloc (max_uid, sizeof (enum bb_state));
5249 for (i = n_basic_blocks - 1; i >= 0; i--)
5251 basic_block bb = BASIC_BLOCK (i);
5254 start[INSN_UID (bb->head)] = bb;
5255 end[INSN_UID (bb->end)] = bb;
5256 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5258 enum bb_state state = IN_MULTIPLE_BB;
5259 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5261 in_bb_p[INSN_UID(x)] = state;
5268 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5273 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5275 fprintf (outf, ";; Start of basic block %d, registers live:",
5277 dump_regset (bb->global_live_at_start, outf);
5281 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5282 && GET_CODE (tmp_rtx) != NOTE
5283 && GET_CODE (tmp_rtx) != BARRIER)
5284 fprintf (outf, ";; Insn is not within a basic block\n");
5285 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5286 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5288 did_output = print_rtl_single (outf, tmp_rtx);
5290 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5292 fprintf (outf, ";; End of basic block %d, registers live:\n",
5294 dump_regset (bb->global_live_at_end, outf);
5307 if (current_function_epilogue_delay_list != 0)
5309 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5310 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5311 tmp_rtx = XEXP (tmp_rtx, 1))
5312 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5316 /* Compute dominator relationships using new flow graph structures. */
5318 compute_flow_dominators (dominators, post_dominators)
5319 sbitmap *dominators;
5320 sbitmap *post_dominators;
5323 sbitmap *temp_bitmap;
5325 basic_block *worklist, *workend, *qin, *qout;
5328 /* Allocate a worklist array/queue. Entries are only added to the
5329 list if they were not already on the list. So the size is
5330 bounded by the number of basic blocks. */
5331 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5332 workend = &worklist[n_basic_blocks];
5334 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5335 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5339 /* The optimistic setting of dominators requires us to put every
5340 block on the work list initially. */
5341 qin = qout = worklist;
5342 for (bb = 0; bb < n_basic_blocks; bb++)
5344 *qin++ = BASIC_BLOCK (bb);
5345 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5347 qlen = n_basic_blocks;
5350 /* We want a maximal solution, so initially assume everything dominates
5352 sbitmap_vector_ones (dominators, n_basic_blocks);
5354 /* Mark successors of the entry block so we can identify them below. */
5355 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5356 e->dest->aux = ENTRY_BLOCK_PTR;
5358 /* Iterate until the worklist is empty. */
5361 /* Take the first entry off the worklist. */
5362 basic_block b = *qout++;
5363 if (qout >= workend)
5369 /* Compute the intersection of the dominators of all the
5372 If one of the predecessor blocks is the ENTRY block, then the
5373 intersection of the dominators of the predecessor blocks is
5374 defined as the null set. We can identify such blocks by the
5375 special value in the AUX field in the block structure. */
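	  /* Restating the underlying dataflow equation for clarity (a
	     standard formulation, not from the original sources):

		 dom (b) = { b } U ( intersection over p in pred (b) of dom (p) )

	     with the intersection taken as the empty set when a predecessor
	     is the ENTRY block.  Starting from all-ones bitmaps, each
	     iteration can only clear bits, so the worklist converges on the
	     maximal solution.  */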
5376 if (b->aux == ENTRY_BLOCK_PTR)
5378 /* Do not clear the aux field for blocks which are
5379 successors of the ENTRY block. That way we never
5380 add them to the worklist again.
5382 The intersect of dominators of the preds of this block is
5383 defined as the null set. */
5384 sbitmap_zero (temp_bitmap[bb]);
5388 /* Clear the aux field of this block so it can be added to
5389 the worklist again if necessary. */
5391 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5394 /* Make sure each block always dominates itself. */
5395 SET_BIT (temp_bitmap[bb], bb);
5397 /* If the out state of this block changed, then we need to
5398 add the successors of this block to the worklist if they
5399 are not already on the worklist. */
5400 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5402 for (e = b->succ; e; e = e->succ_next)
5404 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5418 if (post_dominators)
5420 /* The optimistic setting of dominators requires us to put every
5421 block on the work list initially. */
5422 qin = qout = worklist;
5423 for (bb = 0; bb < n_basic_blocks; bb++)
5425 *qin++ = BASIC_BLOCK (bb);
5426 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5428 qlen = n_basic_blocks;
5431 /* We want a maximal solution, so initially assume everything post
5432 dominates everything else. */
5433 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5435 /* Mark predecessors of the exit block so we can identify them below. */
5436 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5437 e->src->aux = EXIT_BLOCK_PTR;
5439 /* Iterate until the worklist is empty. */
5442 /* Take the first entry off the worklist. */
5443 basic_block b = *qout++;
5444 if (qout >= workend)
5450 /* Compute the intersection of the post dominators of all the
5453 If one of the successor blocks is the EXIT block, then the
5454 intersection of the dominators of the successor blocks is
5455 defined as the null set. We can identify such blocks by the
5456 special value in the AUX field in the block structure. */
5457 if (b->aux == EXIT_BLOCK_PTR)
5459 /* Do not clear the aux field for blocks which are
5460 predecessors of the EXIT block. That way we never
5461 add them to the worklist again.
5463 The intersect of dominators of the succs of this block is
5464 defined as the null set. */
5465 sbitmap_zero (temp_bitmap[bb]);
5469 /* Clear the aux field of this block so it can be added to
5470 the worklist again if necessary. */
5472 sbitmap_intersection_of_succs (temp_bitmap[bb],
5473 post_dominators, bb);
5476 /* Make sure each block always post dominates itself. */
5477 SET_BIT (temp_bitmap[bb], bb);
5479 /* If the out state of this block changed, then we need to
5480 add the predecessors of this block to the worklist if they
5481 are not already on the worklist. */
5482 if (sbitmap_a_and_b (post_dominators[bb],
5483 post_dominators[bb],
5486 for (e = b->pred; e; e = e->pred_next)
5488 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5506 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
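/* A small worked example (illustrative): for the diamond CFG
   0 -> 1, 0 -> 2, 1 -> 3, 2 -> 3 we have dom (3) = {0, 3}, so tmp (3)
   starts as {0}; block 0's own tmp is empty, nothing is subtracted,
   and the single remaining bit yields idom (3) = 0.  */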
5509 compute_immediate_dominators (idom, dominators)
5511 sbitmap *dominators;
5516 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5518 /* Begin with tmp(n) = dom(n) - { n }. */
5519 for (b = n_basic_blocks; --b >= 0; )
5521 sbitmap_copy (tmp[b], dominators[b]);
5522 RESET_BIT (tmp[b], b);
5525 /* Subtract out all of our dominator's dominators. */
5526 for (b = n_basic_blocks; --b >= 0; )
5528 sbitmap tmp_b = tmp[b];
5531 for (s = n_basic_blocks; --s >= 0; )
5532 if (TEST_BIT (tmp_b, s))
5533 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5536 /* Find the one bit set in the bitmap and put it in the output array. */
5537 for (b = n_basic_blocks; --b >= 0; )
5540 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5543 sbitmap_vector_free (tmp);
5546 /* Count for a single SET rtx, X. */
5549 count_reg_sets_1 (x, loop_depth)
5554 register rtx reg = SET_DEST (x);
5556 /* Find the register that's set/clobbered. */
5557 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5558 || GET_CODE (reg) == SIGN_EXTRACT
5559 || GET_CODE (reg) == STRICT_LOW_PART)
5560 reg = XEXP (reg, 0);
5562 if (GET_CODE (reg) == PARALLEL
5563 && GET_MODE (reg) == BLKmode)
5566 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5567 count_reg_sets_1 (XVECEXP (reg, 0, i), loop_depth);
5571 if (GET_CODE (reg) == REG)
5573 regno = REGNO (reg);
5574 if (regno >= FIRST_PSEUDO_REGISTER)
5576 /* Count (weighted) references, stores, etc. This counts a
5577 register twice if it is modified, but that is correct. */
5578 REG_N_SETS (regno)++;
5579 REG_N_REFS (regno) += loop_depth + 1;
5584 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5585 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5588 count_reg_sets (x, loop_depth)
5592 register RTX_CODE code = GET_CODE (x);
5594 if (code == SET || code == CLOBBER)
5595 count_reg_sets_1 (x, loop_depth);
5596 else if (code == PARALLEL)
5599 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5601 code = GET_CODE (XVECEXP (x, 0, i));
5602 if (code == SET || code == CLOBBER)
5603 count_reg_sets_1 (XVECEXP (x, 0, i), loop_depth);
5608 /* Increment REG_N_REFS by the current loop depth each register reference
5612 count_reg_references (x, loop_depth)
5616 register RTX_CODE code;
5619 code = GET_CODE (x);
5639 /* If we are clobbering a MEM, mark any registers inside the address
5641 if (GET_CODE (XEXP (x, 0)) == MEM)
5642 count_reg_references (XEXP (XEXP (x, 0), 0), loop_depth);
5646 /* While we're here, optimize this case. */
5649 /* In case the SUBREG is not of a register, don't optimize */
5650 if (GET_CODE (x) != REG)
5652 count_reg_references (x, loop_depth);
5656 /* ... fall through ... */
5659 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5660 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5665 register rtx testreg = SET_DEST (x);
5668 /* If storing into MEM, don't show it as being used. But do
5669 show the address as being used. */
5670 if (GET_CODE (testreg) == MEM)
5672 count_reg_references (XEXP (testreg, 0), loop_depth);
5673 count_reg_references (SET_SRC (x), loop_depth);
5677 /* Storing in STRICT_LOW_PART is like storing in a reg
5678 in that this SET might be dead, so ignore it in TESTREG,
5679 but in some other ways it is like using the reg.
5681 Storing in a SUBREG or a bit field is like storing the entire
5682 register in that if the register's value is not used
5683 then this SET is not needed. */
5684 while (GET_CODE (testreg) == STRICT_LOW_PART
5685 || GET_CODE (testreg) == ZERO_EXTRACT
5686 || GET_CODE (testreg) == SIGN_EXTRACT
5687 || GET_CODE (testreg) == SUBREG)
5689 /* Modifying a single register in an alternate mode
5690 does not use any of the old value. But these other
5691 ways of storing in a register do use the old value. */
5692 if (GET_CODE (testreg) == SUBREG
5693 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5698 testreg = XEXP (testreg, 0);
5701 /* If this is a store into a register,
5702 recursively scan the value being stored. */
5704 if ((GET_CODE (testreg) == PARALLEL
5705 && GET_MODE (testreg) == BLKmode)
5706 || GET_CODE (testreg) == REG)
5708 count_reg_references (SET_SRC (x), loop_depth);
5710 count_reg_references (SET_DEST (x), loop_depth);
5720 /* Recursively scan the operands of this expression. */
5723 register const char *fmt = GET_RTX_FORMAT (code);
5726 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5730 /* Tail recursive case: save a function call level. */
5736 count_reg_references (XEXP (x, i), loop_depth);
5738 else if (fmt[i] == 'E')
5741 for (j = 0; j < XVECLEN (x, i); j++)
5742 count_reg_references (XVECEXP (x, i, j), loop_depth);
5748 /* Recompute register set/reference counts immediately prior to register
5751 This avoids problems with set/reference counts changing to/from values
5752 which have special meanings to the register allocators.
5754 Additionally, the reference counts are the primary component used by the
5755 register allocators to prioritize pseudos for allocation to hard regs.
5756 More accurate reference counts generally lead to better register allocation.
5758 F is the first insn to be scanned.
5760 LOOP_STEP denotes how much loop_depth should be incremented per
5761 loop nesting level in order to increase the ref count more for
5762 references in a loop.
5764 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5765 possibly other information which is used by the register allocators. */
5768 recompute_reg_usage (f, loop_step)
5769 rtx f ATTRIBUTE_UNUSED;
5770 int loop_step ATTRIBUTE_UNUSED;
5777 /* Clear out the old data. */
5778 max_reg = max_reg_num ();
5779 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5785 /* Scan each insn in the chain and count how many times each register is
5787 for (index = 0; index < n_basic_blocks; index++)
5789 basic_block bb = BASIC_BLOCK (index);
5790 loop_depth = bb->loop_depth;
5791 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5793 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5797 /* This call will increment REG_N_SETS for each SET or CLOBBER
5798 of a register in INSN. It will also increment REG_N_REFS
5799 by the loop depth for each set of a register in INSN. */
5800 count_reg_sets (PATTERN (insn), loop_depth);
5802 /* count_reg_sets does not detect autoincrement address modes, so
5803 detect them here by looking at the notes attached to INSN. */
5804 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5806 if (REG_NOTE_KIND (links) == REG_INC)
5807 /* Count (weighted) references, stores, etc. This
5808 counts a register twice if it is modified, but
5810 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5813 /* This call will increment REG_N_REFS by the current loop depth
5814 for each reference to a register in INSN. */
5815 count_reg_references (PATTERN (insn), loop_depth);
5817 /* count_reg_references will not include counts for arguments to
5818 function calls, so detect them here by examining the
5819 CALL_INSN_FUNCTION_USAGE data. */
5820 if (GET_CODE (insn) == CALL_INSN)
5824 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5826 note = XEXP (note, 1))
5827 if (GET_CODE (XEXP (note, 0)) == USE)
5828 count_reg_references (XEXP (XEXP (note, 0), 0),
5832 if (insn == bb->end)
5838 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5839 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5840 of the number of registers that died. */
5843 count_or_remove_death_notes (blocks, kill)
5849 for (i = n_basic_blocks - 1; i >= 0; --i)
5854 if (blocks && ! TEST_BIT (blocks, i))
5857 bb = BASIC_BLOCK (i);
5859 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5861 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5863 rtx *pprev = &REG_NOTES (insn);
5868 switch (REG_NOTE_KIND (link))
5871 if (GET_CODE (XEXP (link, 0)) == REG)
5873 rtx reg = XEXP (link, 0);
5876 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5879 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5887 rtx next = XEXP (link, 1);
5888 free_EXPR_LIST_node (link);
5889 *pprev = link = next;
5895 pprev = &XEXP (link, 1);
5902 if (insn == bb->end)
5910 /* Record INSN's block as BB. */
5913 set_block_for_insn (insn, bb)
5917 size_t uid = INSN_UID (insn);
5918 if (uid >= basic_block_for_insn->num_elements)
5922 /* Add one-eighth the size so we don't keep calling xrealloc. */
5923 new_size = uid + (uid + 7) / 8;
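      /* For instance (illustrative arithmetic only), uid == 800 grows the
	 table to 800 + (800 + 7) / 8 == 900 entries.  */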
5925 VARRAY_GROW (basic_block_for_insn, new_size);
5927 VARRAY_BB (basic_block_for_insn, uid) = bb;
5930 /* Record INSN's block number as BB. */
5931 /* ??? This has got to go. */
5934 set_block_num (insn, bb)
5938 set_block_for_insn (insn, BASIC_BLOCK (bb));
5941 /* Verify the CFG consistency. This function checks some CFG invariants and
5942 aborts when something is wrong. The hope is that it will help to
5943 convert many optimization passes to preserve CFG consistency.
5945 Currently it does the following checks:
5947 - test head/end pointers
5948 - overlapping of basic blocks
5949 - edge list correctness
5950 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5951 - tails of basic blocks (ensure that the boundary is necessary)
5952 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5953 and NOTE_INSN_BASIC_BLOCK
5954 - check that all insns are in the basic blocks
5955 (except the switch handling code, barriers and notes)
5956 - check that all returns are followed by barriers
5958 In future it can be extended check a lot of other stuff as well
5959 (reachability of basic blocks, life information, etc. etc.). */
5964 const int max_uid = get_max_uid ();
5965 const rtx rtx_first = get_insns ();
5966 basic_block *bb_info;
5970 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5972 /* First pass check head/end pointers and set bb_info array used by
5974 for (i = n_basic_blocks - 1; i >= 0; i--)
5976 basic_block bb = BASIC_BLOCK (i);
5978 /* Check the head pointer and make sure that it is pointing into
5980 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5985 error ("Head insn %d for block %d not found in the insn stream.",
5986 INSN_UID (bb->head), bb->index);
5990 /* Check the end pointer and make sure that it is pointing into
5992 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5994 if (bb_info[INSN_UID (x)] != NULL)
5996 error ("Insn %d is in multiple basic blocks (%d and %d)",
5997 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6000 bb_info[INSN_UID (x)] = bb;
6007 error ("End insn %d for block %d not found in the insn stream.",
6008 INSN_UID (bb->end), bb->index);
6013 /* Now check the basic blocks (boundaries etc.) */
6014 for (i = n_basic_blocks - 1; i >= 0; i--)
6016 basic_block bb = BASIC_BLOCK (i);
6017 /* Check correctness of edge lists.  */
6025 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6027 fprintf (stderr, "Predecessor: ");
6028 dump_edge_info (stderr, e, 0);
6029 fprintf (stderr, "\nSuccessor: ");
6030 dump_edge_info (stderr, e, 1);
6034 if (e->dest != EXIT_BLOCK_PTR)
6036 edge e2 = e->dest->pred;
6037 while (e2 && e2 != e)
6041 error ("Basic block %i edge lists are corrupted", bb->index);
6053 error ("Basic block %d pred edge is corrupted", bb->index);
6054 fputs ("Predecessor: ", stderr);
6055 dump_edge_info (stderr, e, 0);
6056 fputs ("\nSuccessor: ", stderr);
6057 dump_edge_info (stderr, e, 1);
6058 fputc ('\n', stderr);
6061 if (e->src != ENTRY_BLOCK_PTR)
6063 edge e2 = e->src->succ;
6064 while (e2 && e2 != e)
6068 error ("Basic block %i edge lists are corrupted", bb->index);
6075 /* OK, pointers are correct. Now check the header of basic
6076 block. It ought to contain optional CODE_LABEL followed
6077 by NOTE_BASIC_BLOCK. */
6079 if (GET_CODE (x) == CODE_LABEL)
6083 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6089 if (GET_CODE (x) != NOTE
6090 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6091 || NOTE_BASIC_BLOCK (x) != bb)
6093 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6100 /* Do checks for empty blocks here */
6107 if (GET_CODE (x) == NOTE
6108 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6110 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6111 INSN_UID (x), bb->index);
6118 if (GET_CODE (x) == JUMP_INSN
6119 || GET_CODE (x) == CODE_LABEL
6120 || GET_CODE (x) == BARRIER)
6122 error ("In basic block %d:", bb->index);
6123 fatal_insn ("Flow control insn inside a basic block", x);
6134 if (!bb_info[INSN_UID (x)])
6136 switch (GET_CODE (x))
6143 /* An addr_vec is placed outside any basic block. */
6145 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6146 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6147 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6152 /* But in any case, non-deletable labels can appear anywhere. */
6156 fatal_insn ("Insn outside basic block", x);
6160 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6161 && GET_CODE (x) == JUMP_INSN
6162 && returnjump_p (x) && ! condjump_p (x)
6163 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6164 fatal_insn ("Return not followed by barrier", x);
6176 /* Functions to access an edge list with a vector representation.
6177 Enough data is kept such that given an index number, the
6178 pred and succ that edge represents can be determined, or
6179 given a pred and a succ, its index number can be returned.
6180 This allows algorithms which consume a lot of memory to
6181 represent the normally full matrix of edge (pred,succ) with a
6182 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6183 wasted space in the client code due to sparse flow graphs. */
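/* Illustrative use (a sketch, not from the original sources): a client
   can keep one counter per edge in a plain array of NUM_EDGES (elist)
   ints and bump it as

       count[EDGE_INDEX (elist, e->src, e->dest)]++;

   recovering the endpoints later with INDEX_EDGE_PRED_BB and
   INDEX_EDGE_SUCC_BB.  */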
6185 /* This function initializes the edge list. Basically the entire
6186 flowgraph is processed, and all edges are assigned a number,
6187 and the data structure is filled in. */
6191 struct edge_list *elist;
6197 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6201 /* Determine the number of edges in the flow graph by counting successor
6202 edges on each basic block. */
6203 for (x = 0; x < n_basic_blocks; x++)
6205 basic_block bb = BASIC_BLOCK (x);
6207 for (e = bb->succ; e; e = e->succ_next)
6210 /* Don't forget successors of the entry block. */
6211 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6214 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6215 elist->num_blocks = block_count;
6216 elist->num_edges = num_edges;
6217 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6221 /* Follow successors of the entry block, and register these edges. */
6222 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6224 elist->index_to_edge[num_edges] = e;
6228 for (x = 0; x < n_basic_blocks; x++)
6230 basic_block bb = BASIC_BLOCK (x);
6232 /* Follow all successors of blocks, and register these edges. */
6233 for (e = bb->succ; e; e = e->succ_next)
6235 elist->index_to_edge[num_edges] = e;
6242 /* This function frees the memory associated with an edge list. */
6244 free_edge_list (elist)
6245 struct edge_list *elist;
6249 free (elist->index_to_edge);
6254 /* This function provides debug output showing an edge list. */
6256 print_edge_list (f, elist)
6258 struct edge_list *elist;
6261 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6262 elist->num_blocks - 2, elist->num_edges);
6264 for (x = 0; x < elist->num_edges; x++)
6266 fprintf (f, " %-4d - edge(", x);
6267 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6268 fprintf (f,"entry,");
6270 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6272 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6273 fprintf (f,"exit)\n");
6275 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6279 /* This function provides an internal consistency check of an edge list,
6280 verifying that all edges are present, and that there are no
6283 verify_edge_list (f, elist)
6285 struct edge_list *elist;
6287 int x, pred, succ, index;
6290 for (x = 0; x < n_basic_blocks; x++)
6292 basic_block bb = BASIC_BLOCK (x);
6294 for (e = bb->succ; e; e = e->succ_next)
6296 pred = e->src->index;
6297 succ = e->dest->index;
6298 index = EDGE_INDEX (elist, e->src, e->dest);
6299 if (index == EDGE_INDEX_NO_EDGE)
6301 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6304 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6305 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6306 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6307 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6308 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6309 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6312 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6314 pred = e->src->index;
6315 succ = e->dest->index;
6316 index = EDGE_INDEX (elist, e->src, e->dest);
6317 if (index == EDGE_INDEX_NO_EDGE)
6319 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6322 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6323 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6324 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6325 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6326 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6327 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6329 /* We've verified that all the edges are in the list, now let's make sure
6330 there are no spurious edges in the list. */
6332 for (pred = 0 ; pred < n_basic_blocks; pred++)
6333 for (succ = 0 ; succ < n_basic_blocks; succ++)
6335 basic_block p = BASIC_BLOCK (pred);
6336 basic_block s = BASIC_BLOCK (succ);
6340 for (e = p->succ; e; e = e->succ_next)
6346 for (e = s->pred; e; e = e->pred_next)
6352 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6353 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6354 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6356 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6357 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6358 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6359 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6360 BASIC_BLOCK (succ)));
6362 for (succ = 0 ; succ < n_basic_blocks; succ++)
6364 basic_block p = ENTRY_BLOCK_PTR;
6365 basic_block s = BASIC_BLOCK (succ);
6369 for (e = p->succ; e; e = e->succ_next)
6375 for (e = s->pred; e; e = e->pred_next)
6381 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6382 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6383 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6385 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6386 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6387 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6388 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6389 BASIC_BLOCK (succ)));
6391 for (pred = 0 ; pred < n_basic_blocks; pred++)
6393 basic_block p = BASIC_BLOCK (pred);
6394 basic_block s = EXIT_BLOCK_PTR;
6398 for (e = p->succ; e; e = e->succ_next)
6404 for (e = s->pred; e; e = e->pred_next)
6410 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6411 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6412 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6414 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6415 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6416 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6417 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6422 /* This routine will determine what, if any, edge there is between
6423 a specified predecessor and successor. */
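/* Illustrative use (assuming ELIST was built by the edge list
   constructor above and BB has at least one successor):

       int ix = find_edge_index (elist, bb, bb->succ->dest);
       if (ix != EDGE_INDEX_NO_EDGE)
	 ... ix is the edge's slot in any per-edge array ...  */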
6426 find_edge_index (edge_list, pred, succ)
6427 struct edge_list *edge_list;
6428 basic_block pred, succ;
6431 for (x = 0; x < NUM_EDGES (edge_list); x++)
6433 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6434 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6437 return (EDGE_INDEX_NO_EDGE);
6440 /* This function will remove an edge from the flow graph. */
6445 edge last_pred = NULL;
6446 edge last_succ = NULL;
6448 basic_block src, dest;
6451 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6457 last_succ->succ_next = e->succ_next;
6459 src->succ = e->succ_next;
6461 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6467 last_pred->pred_next = e->pred_next;
6469 dest->pred = e->pred_next;
6475 /* This routine will remove any fake successor edges for a basic block.
6476 When the edge is removed, it is also removed from whatever predecessor
6479 remove_fake_successors (bb)
6483 for (e = bb->succ; e ; )
6487 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6492 /* This routine will remove all fake edges from the flow graph. If
6493 we remove all fake successors, it will automatically remove all
6494 fake predecessors. */
6496 remove_fake_edges ()
6500 for (x = 0; x < n_basic_blocks; x++)
6501 remove_fake_successors (BASIC_BLOCK (x));
6503 /* We've handled all successors except the entry block's. */
6504 remove_fake_successors (ENTRY_BLOCK_PTR);
6507 /* This function will add a fake edge between any block which has no
6508 successors, and the exit block. Some data flow equations require these
6511 add_noreturn_fake_exit_edges ()
6515 for (x = 0; x < n_basic_blocks; x++)
6516 if (BASIC_BLOCK (x)->succ == NULL)
6517 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6520 /* Dump the list of basic blocks in the bitmap NODES. */
6522 flow_nodes_print (str, nodes, file)
6524 const sbitmap nodes;
6529 fprintf (file, "%s { ", str);
6530 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6531 fputs ("}\n", file);
6535 /* Dump the list of exiting edges in the array EDGES. */
6537 flow_exits_print (str, edges, num_edges, file)
6545 fprintf (file, "%s { ", str);
6546 for (i = 0; i < num_edges; i++)
6547 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6548 fputs ("}\n", file);
6552 /* Dump loop related CFG information. */
6554 flow_loops_cfg_dump (loops, file)
6555 const struct loops *loops;
6560 if (! loops->num || ! file || ! loops->cfg.dom)
6563 for (i = 0; i < n_basic_blocks; i++)
6567 fprintf (file, ";; %d succs { ", i);
6568 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6569 fprintf (file, "%d ", succ->dest->index);
6570 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6574 /* Dump the DFS node order. */
6575 if (loops->cfg.dfs_order)
6577 fputs (";; DFS order: ", file);
6578 for (i = 0; i < n_basic_blocks; i++)
6579 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6585 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6587 flow_loop_nested_p (outer, loop)
6591 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6595 /* Dump the loop information specified by LOOPS to the stream FILE. */
6597 flow_loops_dump (loops, file, verbose)
6598 const struct loops *loops;
6605 num_loops = loops->num;
6606 if (! num_loops || ! file)
6609 fprintf (file, ";; %d loops found, %d levels\n",
6610 num_loops, loops->levels);
6612 for (i = 0; i < num_loops; i++)
6614 struct loop *loop = &loops->array[i];
6616 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6617 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6618 loop->header->index, loop->latch->index,
6619 loop->pre_header ? loop->pre_header->index : -1,
6620 loop->depth, loop->level,
6621 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6622 fprintf (file, ";; %d", loop->num_nodes);
6623 flow_nodes_print (" nodes", loop->nodes, file);
6624 fprintf (file, ";; %d", loop->num_exits);
6625 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6631 for (j = 0; j < i; j++)
6633 struct loop *oloop = &loops->array[j];
6635 if (loop->header == oloop->header)
6640 smaller = loop->num_nodes < oloop->num_nodes;
6642 /* If the union of LOOP and OLOOP is different than
6643 the larger of LOOP and OLOOP then LOOP and OLOOP
6644 must be disjoint. */
6645 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6646 smaller ? oloop : loop);
6647 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6648 loop->header->index, i, j,
6649 disjoint ? "disjoint" : "nested");
6656 /* Print diagnostics to compare our concept of a loop with
6657 what the loop notes say. */
6658 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6659 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6660 != NOTE_INSN_LOOP_BEG)
6661 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6662 INSN_UID (PREV_INSN (loop->first->head)));
6663 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6664 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6665 != NOTE_INSN_LOOP_END)
6666 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6667 INSN_UID (NEXT_INSN (loop->last->end)));
6672 flow_loops_cfg_dump (loops, file);
6676 /* Free all the memory allocated for LOOPS. */
6678 flow_loops_free (loops)
6679 struct loops *loops;
6688 /* Free the loop descriptors. */
6689 for (i = 0; i < loops->num; i++)
6691 struct loop *loop = &loops->array[i];
6694 sbitmap_free (loop->nodes);
6698 free (loops->array);
6699 loops->array = NULL;
6702 sbitmap_vector_free (loops->cfg.dom);
6703 if (loops->cfg.dfs_order)
6704 free (loops->cfg.dfs_order);
6706 sbitmap_free (loops->shared_headers);
6711 /* Find the exits from the loop using the bitmap of loop nodes NODES
6712 and store in EXITS array. Return the number of exits from the
6715 flow_loop_exits_find (nodes, exits)
6716 const sbitmap nodes;
6725 /* Check all nodes within the loop to see if there are any
6726 successors not in the loop. Note that a node may have multiple
6729 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6730 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6732 basic_block dest = e->dest;
6734 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6742 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6744 /* Store all exiting edges into an array. */
6746 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6747 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6749 basic_block dest = e->dest;
6751 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6752 (*exits)[num_exits++] = e;
6760 /* Find the nodes contained within the loop with header HEADER and
6761 latch LATCH and store in NODES. Return the number of nodes within
6764 flow_loop_nodes_find (header, latch, nodes)
6773 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6776 /* Start with only the loop header in the set of loop nodes. */
6777 sbitmap_zero (nodes);
6778 SET_BIT (nodes, header->index);
6780 header->loop_depth++;
6782 /* Push the loop latch on to the stack. */
6783 if (! TEST_BIT (nodes, latch->index))
6785 SET_BIT (nodes, latch->index);
6786 latch->loop_depth++;
6788 stack[sp++] = latch;
6797 for (e = node->pred; e; e = e->pred_next)
6799 basic_block ancestor = e->src;
6801 /* If this ancestor is not yet marked as part of the loop, add it
6802 to the set of loop nodes and push it onto the stack. */
6803 if (ancestor != ENTRY_BLOCK_PTR
6804 && ! TEST_BIT (nodes, ancestor->index))
6806 SET_BIT (nodes, ancestor->index);
6807 ancestor->loop_depth++;
6809 stack[sp++] = ancestor;
6818 /* Compute the depth first search order and store in the array
6819 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6820 number of nodes visited. */
6822 flow_depth_first_order_compute (dfs_order)
6831 /* Allocate stack for back-tracking up CFG. */
6832 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6835 /* Allocate bitmap to track nodes that have been visited. */
6836 visited = sbitmap_alloc (n_basic_blocks);
6838 /* None of the nodes in the CFG have been visited yet. */
6839 sbitmap_zero (visited);
6841 /* Start with the first successor edge from the entry block. */
6842 e = ENTRY_BLOCK_PTR->succ;
6845 basic_block src = e->src;
6846 basic_block dest = e->dest;
6848 /* Mark that we have visited this node. */
6849 if (src != ENTRY_BLOCK_PTR)
6850 SET_BIT (visited, src->index);
6852 /* If this node has not been visited before, push the current
6853 edge on to the stack and proceed with the first successor
6854 edge of this node. */
6855 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6863 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6866 /* DEST has no successors (for example, a non-returning
6867 function is called) so do not push the current edge
6868 but carry on with its next successor. */
6869 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6870 SET_BIT (visited, dest->index);
6873 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6875 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6877 /* Pop edge off stack. */
6885 sbitmap_free (visited);
6887 /* The number of nodes visited should not be greater than
6889 if (dfsnum > n_basic_blocks)
6892 /* There are some nodes left in the CFG that are unreachable. */
6893 if (dfsnum < n_basic_blocks)
6899 /* Return the block for the pre-header of the loop with header
6900 HEADER where DOM specifies the dominator information. Return NULL if
6901 there is no pre-header. */
6903 flow_loop_pre_header_find (header, dom)
6907 basic_block pre_header;
6910 /* If block p is a predecessor of the header and is the only block
6911 that the header does not dominate, then it is the pre-header. */
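/* For example (illustrative): if the header's predecessors are blocks
   3 and 7, and the header dominates 7 (the latch) but not 3, then
   block 3 is the pre-header.  If two or more predecessors were not
   dominated by the header, there would be no pre-header block.  */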
6913 for (e = header->pred; e; e = e->pred_next)
6915 basic_block node = e->src;
6917 if (node != ENTRY_BLOCK_PTR
6918 && ! TEST_BIT (dom[node->index], header->index))
6920 if (pre_header == NULL)
6924 /* There are multiple edges into the header from outside
6925 the loop so there is no pre-header block. */
6935 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6936 previously added. The insertion algorithm assumes that the loops
6937 are added in the order found by a depth first search of the CFG. */
6939 flow_loop_tree_node_add (prevloop, loop)
6940 struct loop *prevloop;
6944 if (flow_loop_nested_p (prevloop, loop))
6946 prevloop->inner = loop;
6947 loop->outer = prevloop;
6951 while (prevloop->outer)
6953 if (flow_loop_nested_p (prevloop->outer, loop))
6955 prevloop->next = loop;
6956 loop->outer = prevloop->outer;
6959 prevloop = prevloop->outer;
6962 prevloop->next = loop;
/* Build the loop hierarchy tree for LOOPS.  */

static void
flow_loops_tree_build (loops)
     struct loops *loops;
{
  int i;
  int num_loops;

  num_loops = loops->num;
  if (! num_loops)
    return;

  /* Root the loop hierarchy tree with the first loop found.
     Since we used a depth first search this should be the
     outermost loop.  */
  loops->tree = &loops->array[0];
  loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;

  /* Add the remaining loops to the tree.  */
  for (i = 1; i < num_loops; i++)
    flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
}
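/* Illustrative sketch only, not part of the pass: because loops are
   recorded in DFS order of their headers, each new loop is nested either
   in the previously added loop or in one of its ancestors, which is the
   property flow_loop_tree_node_add relies on.  The toy below applies the
   same insertion scheme to three loops described by block ranges; every
   name here (toy_loop, nested_p, toy_tree_add) is hypothetical.  */
#if 0
#include <stdio.h>
#include <stddef.h>

struct toy_loop
{
  const char *name;
  int lo, hi;			/* Range of block indices in the loop.  */
  struct toy_loop *inner, *next, *outer;
};

/* A loop nests inside another when its block range is contained.  */
static int
nested_p (const struct toy_loop *outer, const struct toy_loop *inner)
{
  return outer->lo <= inner->lo && inner->hi <= outer->hi;
}

/* Same scheme as flow_loop_tree_node_add: try the previous loop, then
   walk up its ancestors, otherwise add the loop as a top-level sibling.  */
static void
toy_tree_add (struct toy_loop *prev, struct toy_loop *loop)
{
  if (nested_p (prev, loop))
    {
      prev->inner = loop;
      loop->outer = prev;
      return;
    }
  while (prev->outer)
    {
      if (nested_p (prev->outer, loop))
	{
	  prev->next = loop;
	  loop->outer = prev->outer;
	  return;
	}
      prev = prev->outer;
    }
  prev->next = loop;
  loop->outer = NULL;
}

int
main (void)
{
  /* DFS order: the outer loop first, then its two disjoint inner loops.  */
  struct toy_loop a = { "outer", 0, 9, NULL, NULL, NULL };
  struct toy_loop b = { "inner1", 1, 4, NULL, NULL, NULL };
  struct toy_loop c = { "inner2", 5, 8, NULL, NULL, NULL };

  toy_tree_add (&a, &b);
  toy_tree_add (&b, &c);

  printf ("%s encloses %s and %s\n", a.name,
	  a.inner->name, a.inner->next->name);
  return 0;
}
#endif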
/* Helper function to compute loop nesting depth and enclosed loop level
   for the natural loop specified by LOOP at the loop depth DEPTH.
   Returns the loop level.  */

static int
flow_loop_level_compute (loop, depth)
     struct loop *loop;
     int depth;
{
  struct loop *inner;
  int level = 1;

  if (! loop)
    return 0;

  /* Traverse loop tree assigning depth and computing level as the
     maximum level of all the inner loops of this loop.  The loop
     level is equivalent to the height of the loop in the loop tree
     and corresponds to the number of enclosed loop levels (including
     itself).  */
  for (inner = loop->inner; inner; inner = inner->next)
    {
      int ilevel;

      ilevel = flow_loop_level_compute (inner, depth + 1) + 1;

      if (ilevel > level)
	level = ilevel;
    }

  loop->level = level;
  loop->depth = depth;
  return level;
}
/* Compute the loop nesting depth and enclosed loop level for the loop
   hierarchy tree specified by LOOPS.  Return the maximum enclosed loop
   level.  */

static int
flow_loops_level_compute (loops)
     struct loops *loops;
{
  struct loop *loop;
  int level;
  int levels = 0;

  /* Traverse all the outer level loops.  */
  for (loop = loops->tree; loop; loop = loop->next)
    {
      level = flow_loop_level_compute (loop, 1);
      if (level > levels)
	levels = level;
    }

  return levels;
}
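/* Illustrative sketch only, not part of the pass: "depth" counts how far
   a loop is from the root of the loop tree while "level" is the height
   of the subtree below it, so an innermost loop has level 1 and the
   outermost loop's level equals the deepest nesting it encloses.  The
   toy below computes both for a three-deep nest; toy_node and toy_level
   are hypothetical names used only for this example.  */
#if 0
#include <stdio.h>
#include <stddef.h>

struct toy_node
{
  struct toy_node *inner, *next;
  int depth, level;
};

/* Same recurrence as flow_loop_level_compute.  */
static int
toy_level (struct toy_node *n, int depth)
{
  struct toy_node *c;
  int level = 1;

  for (c = n->inner; c; c = c->next)
    {
      int l = toy_level (c, depth + 1) + 1;
      if (l > level)
	level = l;
    }
  n->depth = depth;
  n->level = level;
  return level;
}

int
main (void)
{
  struct toy_node innermost, middle, outermost;

  /* Three loops nested one inside the other.  */
  innermost.inner = NULL;  innermost.next = NULL;
  middle.inner = &innermost;  middle.next = NULL;
  outermost.inner = &middle;  outermost.next = NULL;

  toy_level (&outermost, 1);
  printf ("outermost: depth %d, level %d\n", outermost.depth, outermost.level);
  printf ("innermost: depth %d, level %d\n", innermost.depth, innermost.level);
  return 0;
}
#endif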
/* Find all the natural loops in the function and save in LOOPS structure
   and recalculate loop_depth information in basic block structures.
   Return the number of natural loops found.  */

int
flow_loops_find (loops)
     struct loops *loops;
{
  int i;
  int b;
  int num_loops;
  edge e;
  sbitmap headers;
  sbitmap *dom;
  int *dfs_order;

  loops->num = 0;
  loops->array = NULL;
  loops->tree = NULL;
  dfs_order = NULL;

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks == 0)
    return 0;

  /* Compute the dominators.  */
  dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  compute_flow_dominators (dom, NULL);

  /* Count the number of loop edges (back edges).  This should be the
     same as the number of natural loops.  Also clear loop_depth; as we
     work from inner to outer in a loop nest, each call to
     flow_loop_nodes_find increments loop_depth for the nodes within
     the current loop, which happens to enclose the inner loops.  */

  num_loops = 0;
  for (b = 0; b < n_basic_blocks; b++)
    {
      BASIC_BLOCK (b)->loop_depth = 0;
      for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
	{
	  basic_block latch = e->src;

	  /* Look for back edges where a predecessor is dominated
	     by this block.  A natural loop has a single entry
	     node (header) that dominates all the nodes in the
	     loop.  It also has a single back edge to the header
	     from a latch node.  Note that multiple natural loops
	     may share the same header.  */
	  if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
	    num_loops++;
	}
    }

  if (num_loops)
    {
      /* Compute depth first search order of the CFG so that outer
	 natural loops will be found before inner natural loops.  */
      dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      flow_depth_first_order_compute (dfs_order);

      /* Allocate loop structures.  */
      loops->array
	= (struct loop *) xcalloc (num_loops, sizeof (struct loop));

      headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (headers);

      loops->shared_headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (loops->shared_headers);

      /* Find and record information about all the natural loops
	 in the CFG.  */
      num_loops = 0;
      for (b = 0; b < n_basic_blocks; b++)
	{
	  basic_block header;

	  /* Search the nodes of the CFG in DFS order so that we can
	     find outer loops first.  */
	  header = BASIC_BLOCK (dfs_order[b]);

	  /* Look for all the possible latch blocks for this header.  */
	  for (e = header->pred; e; e = e->pred_next)
	    {
	      basic_block latch = e->src;

	      /* Look for back edges where a predecessor is dominated
		 by this block.  A natural loop has a single entry
		 node (header) that dominates all the nodes in the
		 loop.  It also has a single back edge to the header
		 from a latch node.  Note that multiple natural loops
		 may share the same header.  */
	      if (latch != ENTRY_BLOCK_PTR
		  && TEST_BIT (dom[latch->index], header->index))
		{
		  struct loop *loop;

		  loop = loops->array + num_loops;

		  loop->header = header;
		  loop->latch = latch;

		  /* Keep track of blocks that are loop headers so
		     that we can tell which loops should be merged.  */
		  if (TEST_BIT (headers, header->index))
		    SET_BIT (loops->shared_headers, header->index);
		  SET_BIT (headers, header->index);

		  /* Find nodes contained within the loop.  */
		  loop->nodes = sbitmap_alloc (n_basic_blocks);
		  loop->num_nodes
		    = flow_loop_nodes_find (header, latch, loop->nodes);

		  /* Compute first and last blocks within the loop.
		     These are often the same as the loop header and
		     loop latch respectively, but this is not always
		     the case.  */
		  loop->first
		    = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
		  loop->last
		    = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));

		  /* Find edges which exit the loop.  Note that a node
		     may have several exit edges.  */
		  loop->num_exits
		    = flow_loop_exits_find (loop->nodes, &loop->exits);

		  /* Look to see if the loop has a pre-header node.  */
		  loop->pre_header
		    = flow_loop_pre_header_find (header, dom);

		  num_loops++;
		}
	    }
	}

      /* Natural loops with shared headers may either be disjoint or
	 nested.  Disjoint loops with shared headers cannot be inner
	 loops and should be merged.  For now just mark loops that share
	 headers.  */
      for (i = 0; i < num_loops; i++)
	if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
	  loops->array[i].shared = 1;

      sbitmap_free (headers);
    }

  loops->num = num_loops;

  /* Save CFG derived information to avoid recomputing it.  */
  loops->cfg.dom = dom;
  loops->cfg.dfs_order = dfs_order;

  /* Build the loop hierarchy tree.  */
  flow_loops_tree_build (loops);

  /* Assign the loop nesting depth and enclosed loop level for each
     loop.  */
  loops->levels = flow_loops_level_compute (loops);

  return num_loops;
}
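/* Illustrative sketch only, not part of the pass: the discovery above
   boils down to "an edge latch->header is a back edge when the header
   dominates the latch", and each back edge identifies one natural loop.
   The toy below applies that test to a hand-written CFG whose dominator
   sets are given as bit masks; count_back_edges and the arrays are
   hypothetical names used only for this example.  */
#if 0
#include <stdio.h>

#define N_BLOCKS 4
#define N_EDGES 4

/* Edges 0->1, 1->2, 2->1 (the back edge) and 2->3.  */
static const int EDGE_SRC[N_EDGES] = { 0, 1, 2, 2 };
static const int EDGE_DST[N_EDGES] = { 1, 2, 1, 3 };

/* DOM[b] has bit d set when block d dominates block b.  */
static const unsigned int DOM[N_BLOCKS] = {
  1u << 0,
  (1u << 0) | (1u << 1),
  (1u << 0) | (1u << 1) | (1u << 2),
  (1u << 0) | (1u << 1) | (1u << 2) | (1u << 3)
};

static int
count_back_edges (void)
{
  int i, n = 0;

  for (i = 0; i < N_EDGES; i++)
    {
      int latch = EDGE_SRC[i], header = EDGE_DST[i];

      /* Same test as above: the edge's destination dominates its source.  */
      if (DOM[latch] & (1u << header))
	n++;
    }
  return n;
}

int
main (void)
{
  /* Prints 1: the single natural loop headed by block 1.  */
  printf ("natural loops found: %d\n", count_back_edges ());
  return 0;
}
#endif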
/* Return non-zero if edge E enters header of LOOP from outside of LOOP.  */

int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}
/* Clear LOG_LINKS fields of insns in a chain.  */

void
clear_log_links (insns)
     rtx insns;
{
  rtx i;

  for (i = insns; i; i = NEXT_INSN (i))
    if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
      LOG_LINKS (i) = 0;
}