/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.
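
   As an illustrative sketch (not code from this file; process_edge is a
   hypothetical consumer), a pass walks the resulting CFG using only the
   types and accessors from basic-block.h.  Each block's successor edges
   form a singly linked list, and e->dest names the target block:

	int i;
	edge e;

	for (i = 0; i < n_basic_blocks; i++)
	  {
	    basic_block bb = BASIC_BLOCK (i);
	    for (e = bb->succ; e; e = e->succ_next)
	      process_edge (e);
	  }
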
   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
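
   For example (an illustrative sketch; REGNO_REG_SET_P is the regset
   membership test from basic-block.h), a consumer asks whether register
   REGNO is live on entry to block I like so:

	basic_block bb = BASIC_BLOCK (i);

	if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
	  ...
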
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
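
   As an illustrative sketch of such a scan (not code from this file; it
   ignores multi-word hard registers for brevity, and set_reg_live is a
   hypothetical note_stores callback that sets the bit of any REG
   destination):  registers named in REG_DEAD notes die in the insn,
   registers stored by the insn become live, and REG_UNUSED notes then
   cancel sets whose values are never needed.

	static void
	scan_block_liveness (bb, live)
	     basic_block bb;
	     regset live;
	{
	  rtx insn, link;

	  COPY_REG_SET (live, bb->global_live_at_start);
	  for (insn = bb->head; ; insn = NEXT_INSN (insn))
	    {
	      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
		{
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_DEAD)
		      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (link, 0)));

		  note_stores (PATTERN (insn), set_reg_live, (void *) live);

		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_UNUSED)
		      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (link, 0)));
		}
	      if (insn == bb->end)
		break;
	    }
	}
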
   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
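
   For example, on a machine with post-increment addressing the two-insn
   sequence (the register numbers are illustrative)

	(set (mem (reg 100)) (reg 101))
	(set (reg 100) (plus (reg 100) (const_int 4)))

   becomes the single insn

	(set (mem (post_inc (reg 100))) (reg 101))

   carrying a REG_INC note for (reg 100).
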
   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
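
   For example (an illustrative sketch; the accessors are the ones
   declared in regs.h), a later pass might use these vectors to find
   pseudos that are cheap to allocate, i.e. used at least once, never
   live across a call, and local to a single basic block:

	if (REG_N_REFS (regno) > 0
	    && REG_N_CALLS_CROSSED (regno) == 0
	    && REG_BASIC_BLOCK (regno) >= 0)
	  ...
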
   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"
#include "expr.h"
#include "obstack.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;
/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
  },
  {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
  }};

/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */
int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;
/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* An EXPR_LIST of conditions under which a register is dead.  */
  rtx condition;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};

/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set in the basic block.  */
  regset local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};

/* Forward declarations */
static int count_basic_blocks PARAMS ((rtx));
static rtx find_basic_blocks_1 PARAMS ((rtx));
static void clear_edges PARAMS ((void));
static void make_edges PARAMS ((rtx));
static void make_label_edge PARAMS ((sbitmap *, basic_block,
				     rtx, int));
static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
				  basic_block, rtx, int));
static void mark_critical_edges PARAMS ((void));
static void move_stray_eh_region_notes PARAMS ((void));
static void record_active_eh_regions PARAMS ((rtx));

static void commit_one_edge_insertion PARAMS ((edge));

static void delete_unreachable_blocks PARAMS ((void));
static void delete_eh_regions PARAMS ((void));
static int can_delete_note_p PARAMS ((rtx));
static void expunge_block PARAMS ((basic_block));
static int can_delete_label_p PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks PARAMS ((edge,basic_block,basic_block));
static void try_merge_blocks PARAMS ((void));
static void tidy_fallthru_edges PARAMS ((void));
static int verify_wide_reg_1 PARAMS ((rtx *, void *));
static void verify_wide_reg PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start PARAMS ((regset, basic_block));
static int set_noop_p PARAMS ((rtx));
static int noop_move_p PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg PARAMS ((rtx, void *));
static void mark_regs_live_at_end PARAMS ((regset));
static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p PARAMS ((struct propagate_block_info *,
				rtx, int, rtx));
static int libcall_dead_p PARAMS ((struct propagate_block_info *,
				   rtx, rtx, rtx));
static void mark_set_regs PARAMS ((struct propagate_block_info *,
				   rtx, rtx));
static void mark_set_1 PARAMS ((struct propagate_block_info *,
				enum rtx_code, rtx, rtx,
				rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
					 int, rtx));
static void free_reg_cond_life_info PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
					int));
static rtx ior_reg_cond PARAMS ((rtx, rtx));
static rtx not_reg_cond PARAMS ((rtx));
static rtx nand_reg_cond PARAMS ((rtx, rtx));
#endif
#ifdef AUTO_INC_DEC
static void find_auto_inc PARAMS ((struct propagate_block_info *,
				   rtx, rtx));
static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
					rtx));
static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg PARAMS ((struct propagate_block_info *,
				   rtx, rtx, rtx));
static void mark_used_regs PARAMS ((struct propagate_block_info *,
				    rtx, rtx, rtx));
void dump_flow_info PARAMS ((FILE *));
void debug_flow_info PARAMS ((void));
static void dump_edge_info PARAMS ((FILE *, edge, int));

static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void remove_fake_successors PARAMS ((basic_block));
static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build PARAMS ((struct loops *));
static int flow_loop_level_compute PARAMS ((struct loop *, int));
static int flow_loops_level_compute PARAMS ((struct loops *));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else if (nonlocal_goto_handler_labels && region >= 0)
	    /* If there is a nonlocal goto label and the specified
	       region number isn't -1, we have an edge.  (0 means
	       no throw, but might have a nonlocal goto).  */
	    call_had_abnormal_edge = 1;
	}

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;

		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;

		next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	    int call_has_abnormal_edge = 0;

	    /* If there is an EH region or rethrow, we have an edge.  */
	    if ((eh_list && region > 0)
		|| find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	      call_has_abnormal_edge = 1;
	    else if (nonlocal_goto_handler_labels && region >= 0)
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      call_has_abnormal_edge = 1;

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if (call_has_abnormal_edge)
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);

  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;
}

/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */
      if (GET_CODE (head) == CODE_LABEL)
	add_insn_after (bb_note, head);
      else
	{
	  add_insn_before (bb_note, head);
	  head = bb_note;
	}
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */
      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}

/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}

/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* Add any appropriate EH edges.  We do this unconditionally
	     since there may be a REG_EH_REGION or REG_EH_RETHROW note
	     on the call, and this needn't be within an EH region.  */
	  make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    {
      make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		       EDGE_ABNORMAL | EDGE_EH | is_call);
    }
}

/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}

/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}

/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

			E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
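
/* For example (an illustrative sketch, not part of this file's
   interface; gen_move_insn is the expander from expr.c), a pass
   typically queues a register copy on edge E and later realizes all
   queued insertions at once:

	insert_insn_on_edge (gen_move_insn (dst_reg, src_reg), e);
	...
	commit_edge_insertions ();

   commit_edge_insertions (below) places the queued insns in an existing
   block when it can, and otherwise calls split_edge to make a block for
   them.  */
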
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  before = bb->end;
	}
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;
    }
  else
    {
      rtx last = emit_insns_after (insns, after);
      if (after == bb->end)
	{
	  bb->end = last;

	  if (GET_CODE (last) == JUMP_INSN)
	    {
	      if (returnjump_p (last))
		{
		  /* ??? Remove all outgoing edges from BB and add one
		     for EXIT.  This is not currently a problem because
		     this only happens for the (single) epilogue, which
		     already has a fallthru edge to EXIT.  */

		  e = bb->succ;
		  if (e->dest != EXIT_BLOCK_PTR
		      || e->succ_next != NULL
		      || (e->flags & EDGE_FALLTHRU) == 0)
		    abort ();
		  e->flags &= ~EDGE_FALLTHRU;

		  emit_barrier_after (last);
		}
	      else
		abort ();
	    }
	}
    }
}

/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in flow_delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= flow_delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}

/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

int
flow_delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}

      /* This label may be referenced by code solely for its value, or
	 referenced by static data, or something.  We have determined
	 that it is not reachable, but cannot delete the label itself.
	 Save code space and continue to delete the balance of the block,
	 along with properly updating the cfg.  */
      if (!can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     insns thing.  */
	  if (insn == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (GET_CODE (end) == JUMP_INSN
      && (tmp = JUMP_LABEL (end)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  flow_delete_insn_chain (insn, end);

 no_delete_insns:

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e ; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e ; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}

/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      BASIC_BLOCK (i) = x;
      x->index = i;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}

/* Delete INSN by patching it out.  Return the next insn.  */

rtx
flow_delete_insn (insn)
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  rtx next = NEXT_INSN (insn);
  rtx note;

  PREV_INSN (insn) = NULL_RTX;
  NEXT_INSN (insn) = NULL_RTX;

  if (prev)
    NEXT_INSN (prev) = next;
  if (next)
    PREV_INSN (next) = prev;
  else
    set_last_insn (prev);

  if (GET_CODE (insn) == CODE_LABEL)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
    LABEL_NUSES (XEXP (note, 0))--;

  return next;
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  rtx x;

  if (LABEL_PRESERVE_P (label))
    return 0;

  for (x = forced_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = label_value_list; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;

  /* User declared labels must be preserved.  */
  if (LABEL_NAME (label) != 0)
    return 0;

  return 1;
}
2063 /* Blocks A and B are to be merged into a single block A. The insns
2064 are already contiguous, hence `nomove'. */
2067 merge_blocks_nomove (a, b)
2071 rtx b_head, b_end, a_end;
2072 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2075 /* If there was a CODE_LABEL beginning B, delete it. */
2078 if (GET_CODE (b_head) == CODE_LABEL)
2080 /* Detect basic blocks with nothing but a label. This can happen
2081 in particular at the end of a function. */
2082 if (b_head == b_end)
2084 del_first = del_last = b_head;
2085 b_head = NEXT_INSN (b_head);
2088 /* Delete the basic block note. */
2089 if (GET_CODE (b_head) == NOTE
2090 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2092 if (b_head == b_end)
2097 b_head = NEXT_INSN (b_head);
2100 /* If there was a jump out of A, delete it. */
2102 if (GET_CODE (a_end) == JUMP_INSN)
2106 prev = prev_nonnote_insn (a_end);
2113 /* If this was a conditional jump, we need to also delete
2114 the insn that set cc0. */
2115 if (prev && sets_cc0_p (prev))
2118 prev = prev_nonnote_insn (prev);
2128 /* Delete everything marked above as well as crap that might be
2129 hanging out between the two blocks. */
2130 flow_delete_insn_chain (del_first, del_last);
2132 /* Normally there should only be one successor of A and that is B, but
2133 partway though the merge of blocks for conditional_execution we'll
2134 be merging a TEST block with THEN and ELSE successors. Free the
2135 whole lot of them and hope the caller knows what they're doing. */
2137 remove_edge (a->succ);
2139 /* Adjust the edges out of B for the new owner. */
2140 for (e = b->succ; e ; e = e->succ_next)
2144 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2145 b->pred = b->succ = NULL;
2147 /* Reassociate the insns of B with A. */
2150 if (basic_block_for_insn)
2152 BLOCK_FOR_INSN (b_head) = a;
2153 while (b_head != b_end)
2155 b_head = NEXT_INSN (b_head);
2156 BLOCK_FOR_INSN (b_head) = a;
2166 /* Blocks A and B are to be merged into a single block. A has no incoming
2167 fallthru edge, so it can be moved before B without adding or modifying
2168 any jumps (aside from the jump from A to B). */
2171 merge_blocks_move_predecessor_nojumps (a, b)
2174 rtx start, end, barrier;
2180 /* We want to delete the BARRIER after the end of the insns we are
2181 going to move. If we don't find a BARRIER, then do nothing. This
2182 can happen in some cases if we have labels we can not delete.
2184 Similarly, do nothing if we can not delete the label at the start
2185 of the target block. */
2186 barrier = next_nonnote_insn (end);
2187 if (GET_CODE (barrier) != BARRIER
2188 || (GET_CODE (b->head) == CODE_LABEL
2189 && ! can_delete_label_p (b->head)))
2192 flow_delete_insn (barrier);
2194 /* Move block and loop notes out of the chain so that we do not
2195 disturb their order.
2197 ??? A better solution would be to squeeze out all the non-nested notes
2198 and adjust the block trees appropriately. Even better would be to have
2199 a tighter connection between block trees and rtl so that this is not
2201 start = squeeze_notes (start, end);
2203 /* Scramble the insn chain. */
2204 if (end != PREV_INSN (b->head))
2205 reorder_insns (start, end, PREV_INSN (b->head));
2209 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2210 a->index, b->index);
2213 /* Swap the records for the two blocks around. Although we are deleting B,
2214 A is now where B was and we want to compact the BB array from where
2216 BASIC_BLOCK(a->index) = b;
2217 BASIC_BLOCK(b->index) = a;
2219 a->index = b->index;
2222 /* Now blocks A and B are contiguous. Merge them. */
2223 merge_blocks_nomove (a, b);
2228 /* Blocks A and B are to be merged into a single block. B has no outgoing
2229 fallthru edge, so it can be moved after A without adding or modifying
2230 any jumps (aside from the jump from A to B). */
2233 merge_blocks_move_successor_nojumps (a, b)
2236 rtx start, end, barrier;
2241 /* We want to delete the BARRIER after the end of the insns we are
2242 going to move. If we don't find a BARRIER, then do nothing. This
2243 can happen in some cases if we have labels we can not delete.
2245 Similarly, do nothing if we can not delete the label at the start
2246 of the target block. */
2247 barrier = next_nonnote_insn (end);
2248 if (GET_CODE (barrier) != BARRIER
2249 || (GET_CODE (b->head) == CODE_LABEL
2250 && ! can_delete_label_p (b->head)))
2253 flow_delete_insn (barrier);
2255 /* Move block and loop notes out of the chain so that we do not
2256 disturb their order.
2258 ??? A better solution would be to squeeze out all the non-nested notes
2259 and adjust the block trees appropriately. Even better would be to have
2260 a tighter connection between block trees and rtl so that this is not necessary. */
2262 start = squeeze_notes (start, end);
2264 /* Scramble the insn chain. */
2265 reorder_insns (start, end, a->end);
2267 /* Now blocks A and B are contiguous. Merge them. */
2268 merge_blocks_nomove (a, b);
2272 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2273 b->index, a->index);
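/* An illustrative sketch (not compiled; the node type is hypothetical)
   of the splice that reorder_insns performs on the doubly linked insn
   chain: unlink the run START..END and relink it immediately after
   AFTER.  The real chain maintains additional invariants not shown
   here, and AFTER is assumed not to lie within START..END. */
#if 0
struct node { struct node *prev, *next; };

static void
splice_after (start, end, after)
     struct node *start, *end, *after;
{
  /* Unlink START..END from its current position.  */
  if (start->prev)
    start->prev->next = end->next;
  if (end->next)
    end->next->prev = start->prev;

  /* Relink the run after AFTER.  */
  end->next = after->next;
  if (after->next)
    after->next->prev = end;
  after->next = start;
  start->prev = after;
}
#endif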
2279 /* Attempt to merge basic blocks that are potentially non-adjacent.
2280 Return true iff the attempt succeeded. */
2283 merge_blocks (e, b, c)
2287 /* If B has a fallthru edge to C, no need to move anything. */
2288 if (e->flags & EDGE_FALLTHRU)
2290 /* If a label still appears somewhere and we cannot delete the label,
2291 then we cannot merge the blocks. The edge was tidied already. */
2293 rtx insn, stop = NEXT_INSN (c->head);
2294 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2295 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2298 merge_blocks_nomove (b, c);
2302 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2303 b->index, c->index);
2312 int c_has_outgoing_fallthru;
2313 int b_has_incoming_fallthru;
2315 /* We must make sure to not munge nesting of exception regions,
2316 lexical blocks, and loop notes.
2318 The first is taken care of by requiring that the active eh
2319 region at the end of one block always matches the active eh
2320 region at the beginning of the next block.
2322 The latter two are taken care of by squeezing out all the notes. */
2324 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2325 executed and we may want to treat blocks which have two out
2326 edges, one normal, one abnormal as only having one edge for
2327 block merging purposes. */
2329 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2330 if (tmp_edge->flags & EDGE_FALLTHRU)
break;
2332 c_has_outgoing_fallthru = (tmp_edge != NULL);
2334 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2335 if (tmp_edge->flags & EDGE_FALLTHRU)
break;
2337 b_has_incoming_fallthru = (tmp_edge != NULL);
2339 /* If B does not have an incoming fallthru, and the exception regions
2340 match, then it can be moved immediately before C without introducing or modifying jumps.
2343 C can not be the first block, so we do not have to worry about
2344 accessing a non-existent block. */
2345 d = BASIC_BLOCK (c->index - 1);
2346 if (! b_has_incoming_fallthru
2347 && d->eh_end == b->eh_beg
2348 && b->eh_end == c->eh_beg)
2349 return merge_blocks_move_predecessor_nojumps (b, c);
2351 /* Otherwise, we're going to try to move C after B. Make sure the
2352 exception regions match.
2354 If B is the last basic block, then we must not try to access the
2355 block structure for block B + 1. Luckily in that case we do not
2356 need to worry about matching exception regions. */
2357 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2358 if (b->eh_end == c->eh_beg
2359 && (d == NULL || c->eh_end == d->eh_beg))
2361 /* If C does not have an outgoing fallthru, then it can be moved
2362 immediately after B without introducing or modifying jumps. */
2363 if (! c_has_outgoing_fallthru)
2364 return merge_blocks_move_successor_nojumps (b, c);
2366 /* Otherwise, we'll need to insert an extra jump, and possibly
2367 a new block to contain it. */
2368 /* ??? Not implemented yet. */
2375 /* Top level driver for merge_blocks. */
2382 /* Attempt to merge blocks as made possible by edge removal. If a block
2383 has only one successor, and the successor has only one predecessor,
2384 they may be combined. */
2386 for (i = 0; i < n_basic_blocks; )
2388 basic_block c, b = BASIC_BLOCK (i);
2391 /* A loop because chains of blocks might be combinable. */
2392 while ((s = b->succ) != NULL
2393 && s->succ_next == NULL
2394 && (s->flags & EDGE_EH) == 0
2395 && (c = s->dest) != EXIT_BLOCK_PTR
2396 && c->pred->pred_next == NULL
2397 /* If the jump insn has side effects, we can't kill the edge. */
2398 && (GET_CODE (b->end) != JUMP_INSN
2399 || onlyjump_p (b->end))
2400 && merge_blocks (s, b, c))
2403 /* Don't get confused by the index shift caused by deleting blocks. */
2408 /* The given edge should potentially be a fallthru edge. If that is in
2409 fact true, delete the jump and barriers that are in the way. */
2412 tidy_fallthru_edge (e, b, c)
2418 /* ??? In a late-running flow pass, other folks may have deleted basic
2419 blocks by nopping out blocks, leaving multiple BARRIERs between here
2420 and the target label. They ought to be chastised and fixed.
2422 We can also wind up with a sequence of undeletable labels between
2423 one block and the next.
2425 So search through a sequence of barriers, labels, and notes for
2426 the head of block C and assert that we really do fall through. */
2428 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2431 /* Remove what will soon cease being the jump insn from the source block.
2432 If block B consisted only of this single jump, turn it into a deleted note. */
2435 if (GET_CODE (q) == JUMP_INSN
2436 && (simplejump_p (q)
2437 || (b->succ == e && e->succ_next == NULL)))
2440 /* If this was a conditional jump, we need to also delete
2441 the insn that set cc0. */
2442 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2449 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2450 NOTE_SOURCE_FILE (q) = 0;
2453 b->end = q = PREV_INSN (q);
2456 /* Selectively unlink the sequence. */
2457 if (q != PREV_INSN (c->head))
2458 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2460 e->flags |= EDGE_FALLTHRU;
2463 /* Fix up edges that now fall through, or rather should now fall through
2464 but previously required a jump around now deleted blocks. Simplify
2465 the search by only examining blocks numerically adjacent, since this
2466 is how find_basic_blocks created them. */
2469 tidy_fallthru_edges ()
2473 for (i = 1; i < n_basic_blocks; ++i)
2475 basic_block b = BASIC_BLOCK (i - 1);
2476 basic_block c = BASIC_BLOCK (i);
2479 /* We care about simple conditional or unconditional jumps with a single successor.
2482 If we had a conditional branch to the next instruction when
2483 find_basic_blocks was called, then there will only be one
2484 out edge for the block which ended with the conditional
2485 branch (since we do not create duplicate edges).
2487 Furthermore, the edge will be marked as a fallthru because we
2488 merge the flags for the duplicate edges. So we do not want to
2489 check that the edge is not a FALLTHRU edge. */
2490 if ((s = b->succ) != NULL
2491 && s->succ_next == NULL
2493 /* If the jump insn has side effects, we can't tidy the edge. */
2494 && (GET_CODE (b->end) != JUMP_INSN
2495 || onlyjump_p (b->end)))
2496 tidy_fallthru_edge (s, b, c);
2500 /* Perform data flow analysis.
2501 F is the first insn of the function; FLAGS is a set of PROP_* flags
2502 to be used in accumulating flow info. */
2505 life_analysis (f, file, flags)
2510 #ifdef ELIMINABLE_REGS
2512 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2515 /* Record which registers will be eliminated. We use this in mark_used_regs. */
2518 CLEAR_HARD_REG_SET (elim_reg_set);
2520 #ifdef ELIMINABLE_REGS
2521 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2522 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2524 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2528 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2530 /* The post-reload life analysis must have (on a global basis) the same
2531 registers live as were computed by reload itself; otherwise elimination
2532 offsets and such may be incorrect.
2534 Reload will mark some registers live even though they do not
2535 appear in the rtl. */
2536 if (reload_completed)
2537 flags &= ~PROP_REG_INFO;
2539 /* We want alias analysis information for local dead store elimination. */
2540 if (flags & PROP_SCAN_DEAD_CODE)
2541 init_alias_analysis ();
2543 /* Always remove no-op moves. Do this before other processing so
2544 that we don't have to keep re-scanning them. */
2545 delete_noop_moves (f);
2547 /* Some targets can emit simpler epilogues if they know that sp was
2548 not ever modified during the function. After reload, of course,
2549 we've already emitted the epilogue so there's no sense searching. */
2550 if (! reload_completed)
2551 notice_stack_pointer_modification (f);
2553 /* Allocate and zero out data structures that will record the
2554 data from lifetime analysis. */
2555 allocate_reg_life_data ();
2556 allocate_bb_life_data ();
2558 /* Find the set of registers live on function exit. */
2559 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2561 /* "Update" life info from zero. It'd be nice to begin the
2562 relaxation with just the exit and noreturn blocks, but that set
2563 is not immediately handy. */
2565 if (flags & PROP_REG_INFO)
2566 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2567 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2570 if (flags & PROP_SCAN_DEAD_CODE)
2571 end_alias_analysis ();
2574 dump_flow_info (file);
2576 free_basic_block_vars (1);
2579 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2580 Search for REGNO. If found, abort if it is not wider than word_mode. */
2583 verify_wide_reg_1 (px, pregno)
2588 unsigned int regno = *(int *) pregno;
2590 if (GET_CODE (x) == REG && REGNO (x) == regno)
2592 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2599 /* A subroutine of verify_local_live_at_start. Search through insns
2600 between HEAD and END looking for register REGNO. */
2603 verify_wide_reg (regno, head, end)
2609 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2610 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2614 head = NEXT_INSN (head);
2617 /* We didn't find the register at all. Something's way screwy. */
2621 /* A subroutine of update_life_info. Verify that there are no untoward
2622 changes in live_at_start during a local update. */
2625 verify_local_live_at_start (new_live_at_start, bb)
2626 regset new_live_at_start;
2629 if (reload_completed)
2631 /* After reload, there are no pseudos, nor subregs of multi-word
2632 registers. The regsets should exactly match. */
2633 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2640 /* Find the set of changed registers. */
2641 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2643 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2645 /* No registers should die. */
2646 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2648 /* Verify that the now-live register is wider than word_mode. */
2649 verify_wide_reg (i, bb->head, bb->end);
2654 /* Updates life information starting with the basic blocks set in BLOCKS.
2655 If BLOCKS is null, consider it to be the universal set.
2657 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peephole optimization,
2658 we are only expecting local modifications to basic blocks. If we find
2659 extra registers live at the beginning of a block, then we either killed
2660 useful data, or we have a broken split that wants data not provided.
2661 If we find registers removed from live_at_start, that means we have
2662 a broken peephole that is killing a register it shouldn't.
2664 ??? This is not true in one situation -- when a pre-reload splitter
2665 generates subregs of a multi-word pseudo, current life analysis will
2666 lose the kill. So we _can_ have a pseudo go live. How irritating.
2668 Including PROP_REG_INFO does not properly refresh regs_ever_live
2669 unless the caller resets it to zero. */
2672 update_life_info (blocks, extent, prop_flags)
2674 enum update_life_extent extent;
2678 regset_head tmp_head;
2681 tmp = INITIALIZE_REG_SET (tmp_head);
2683 /* For a global update, we go through the relaxation process again. */
2684 if (extent != UPDATE_LIFE_LOCAL)
2686 calculate_global_regs_live (blocks, blocks,
2687 prop_flags & PROP_SCAN_DEAD_CODE);
2689 /* If asked, remove notes from the blocks we'll update. */
2690 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2691 count_or_remove_death_notes (blocks, 1);
2696 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2698 basic_block bb = BASIC_BLOCK (i);
2700 COPY_REG_SET (tmp, bb->global_live_at_end);
2701 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2703 if (extent == UPDATE_LIFE_LOCAL)
2704 verify_local_live_at_start (tmp, bb);
2709 for (i = n_basic_blocks - 1; i >= 0; --i)
2711 basic_block bb = BASIC_BLOCK (i);
2713 COPY_REG_SET (tmp, bb->global_live_at_end);
2714 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2716 if (extent == UPDATE_LIFE_LOCAL)
2717 verify_local_live_at_start (tmp, bb);
2723 if (prop_flags & PROP_REG_INFO)
2725 /* The only pseudos that are live at the beginning of the function
2726 are those that were not set anywhere in the function. local-alloc
2727 doesn't know how to handle these correctly, so mark them as not
2728 local to any one basic block. */
2729 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2730 FIRST_PSEUDO_REGISTER, i,
2731 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2733 /* We have a problem with any pseudoreg that lives across the setjmp.
2734 ANSI says that if a user variable does not change in value between
2735 the setjmp and the longjmp, then the longjmp preserves it. This
2736 includes longjmp from a place where the pseudo appears dead.
2737 (In principle, the value still exists if it is in scope.)
2738 If the pseudo goes in a hard reg, some other value may occupy
2739 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2740 Conclusion: such a pseudo must not go in a hard reg. */
2741 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2742 FIRST_PSEUDO_REGISTER, i,
2744 if (regno_reg_rtx[i] != 0)
2746 REG_LIVE_LENGTH (i) = -1;
2747 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
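/* A user-level illustration (not part of the pass; assumes only ISO C
   setjmp semantics) of the hazard the loop above guards against: if V
   were promoted to a call-clobbered hard register, the value stored
   before the longjmp could be lost.  ISO C guarantees the value of an
   automatic variable modified between setjmp and longjmp only if it is
   volatile; the pass reaches the same safety by keeping such pseudos
   out of hard registers. */
#if 0
#include <setjmp.h>

static jmp_buf env;

static int
setjmp_example ()
{
  volatile int v = 1;	/* would be unsafe in a clobbered hard reg */

  if (setjmp (env) == 0)
    {
      v = 2;
      longjmp (env, 1);	/* in real code, from deeper in the call tree */
    }

  return v;		/* 2, guaranteed only because V is volatile */
}
#endif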
2753 /* Free the variables allocated by find_basic_blocks.
2755 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2758 free_basic_block_vars (keep_head_end_p)
2759 int keep_head_end_p;
2761 if (basic_block_for_insn)
2763 VARRAY_FREE (basic_block_for_insn);
2764 basic_block_for_insn = NULL;
2767 if (! keep_head_end_p)
2770 VARRAY_FREE (basic_block_info);
2773 ENTRY_BLOCK_PTR->aux = NULL;
2774 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2775 EXIT_BLOCK_PTR->aux = NULL;
2776 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2780 /* Return nonzero if the destination of SET equals the source. */
2785 rtx src = SET_SRC (set);
2786 rtx dst = SET_DEST (set);
2788 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2790 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2792 src = SUBREG_REG (src);
2793 dst = SUBREG_REG (dst);
2796 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2797 && REGNO (src) == REGNO (dst));
2800 /* Return nonzero if an insn consists only of SETs, each of which only sets a value to itself. */
2806 rtx pat = PATTERN (insn);
2808 /* Insns carrying these notes are useful later on. */
2809 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2812 if (GET_CODE (pat) == SET && set_noop_p (pat))
2815 if (GET_CODE (pat) == PARALLEL)
2818 /* If nothing but SETs of registers to themselves,
2819 this insn can also be deleted. */
2820 for (i = 0; i < XVECLEN (pat, 0); i++)
2822 rtx tem = XVECEXP (pat, 0, i);
2824 if (GET_CODE (tem) == USE
2825 || GET_CODE (tem) == CLOBBER)
2828 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2837 /* Delete any insns that copy a register to itself. */
2840 delete_noop_moves (f)
2844 for (insn = f; insn; insn = NEXT_INSN (insn))
2846 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2848 PUT_CODE (insn, NOTE);
2849 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2850 NOTE_SOURCE_FILE (insn) = 0;
2855 /* Determine if the stack pointer is constant over the life of the function.
2856 Only useful before prologues have been emitted. */
2859 notice_stack_pointer_modification_1 (x, pat, data)
2861 rtx pat ATTRIBUTE_UNUSED;
2862 void *data ATTRIBUTE_UNUSED;
2864 if (x == stack_pointer_rtx
2865 /* The stack pointer is only modified indirectly as the result
2866 of a push until later in flow. See the comments in rtl.texi
2867 regarding Embedded Side-Effects on Addresses. */
2868 || (GET_CODE (x) == MEM
2869 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2870 || GET_CODE (XEXP (x, 0)) == PRE_INC
2871 || GET_CODE (XEXP (x, 0)) == POST_DEC
2872 || GET_CODE (XEXP (x, 0)) == POST_INC)
2873 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2874 current_function_sp_is_unchanging = 0;
2878 notice_stack_pointer_modification (f)
2883 /* Assume that the stack pointer is unchanging if alloca hasn't been used. */
2885 current_function_sp_is_unchanging = !current_function_calls_alloca;
2886 if (! current_function_sp_is_unchanging)
2889 for (insn = f; insn; insn = NEXT_INSN (insn))
2891 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2893 /* Check if insn modifies the stack pointer. */
2894 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2896 if (! current_function_sp_is_unchanging)
2902 /* Mark a register in SET. Hard registers in large modes get all
2903 of their component registers set as well. */
2905 mark_reg (reg, xset)
2909 regset set = (regset) xset;
2910 int regno = REGNO (reg);
2912 if (GET_MODE (reg) == BLKmode)
2915 SET_REGNO_REG_SET (set, regno);
2916 if (regno < FIRST_PSEUDO_REGISTER)
2918 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
while (--n > 0)
2920 SET_REGNO_REG_SET (set, regno + n);
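/* A standalone sketch (not compiled; the flat word-array bitset is
   hypothetical -- the real regsets are bitmaps) of the multi-word
   marking above: a hard register in a wide mode occupies
   HARD_REGNO_NREGS consecutive register numbers, and every one of
   them must be set. */
#if 0
#define WORD_BITS (8 * sizeof (unsigned long))

static void
set_reg_range (bits, regno, nregs)
     unsigned long *bits;
     int regno, nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    bits[(regno + i) / WORD_BITS] |= 1UL << ((regno + i) % WORD_BITS);
}
#endif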
2924 /* Mark those regs which are needed at the end of the function as live
2925 at the end of the last basic block. */
2927 mark_regs_live_at_end (set)
2932 /* If exiting needs the right stack value, consider the stack pointer
2933 live at the end of the function. */
2934 if ((HAVE_epilogue && reload_completed)
2935 || ! EXIT_IGNORE_STACK
2936 || (! FRAME_POINTER_REQUIRED
2937 && ! current_function_calls_alloca
2938 && flag_omit_frame_pointer)
2939 || current_function_sp_is_unchanging)
2941 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2944 /* Mark the frame pointer if needed at the end of the function. If
2945 we end up eliminating it, it will be removed from the live list
2946 of each basic block by reload. */
2948 if (! reload_completed || frame_pointer_needed)
2950 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2951 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2952 /* If they are different, also mark the hard frame pointer as live */
2953 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2957 #ifdef PIC_OFFSET_TABLE_REGNUM
2958 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2959 /* Many architectures have a GP register even without flag_pic.
2960 Assume the pic register is not in use, or will be handled by
2961 other means, if it is not fixed. */
2962 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2963 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2967 /* Mark all global registers, and all registers used by the epilogue
2968 as being live at the end of the function since they may be
2969 referenced by our caller. */
2970 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2972 #ifdef EPILOGUE_USES
2973 || EPILOGUE_USES (i)
2976 SET_REGNO_REG_SET (set, i);
2978 /* Mark all call-saved registers that we actually used. */
2979 if (HAVE_epilogue && reload_completed)
2981 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2982 if (! call_used_regs[i] && regs_ever_live[i])
2983 SET_REGNO_REG_SET (set, i);
2986 /* Mark function return value. */
2987 diddle_return_value (mark_reg, set);
2990 /* Callback function for for_each_successor_phi. DATA is a regset.
2991 Sets SRC_REGNO, the regno of the phi alternative for phi node
2992 INSN, in that regset. */
2995 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2996 rtx insn ATTRIBUTE_UNUSED;
2997 int dest_regno ATTRIBUTE_UNUSED;
3001 regset live = (regset) data;
3002 SET_REGNO_REG_SET (live, src_regno);
3006 /* Propagate global life info around the graph of basic blocks. Begin
3007 considering blocks with their corresponding bit set in BLOCKS_IN.
3008 If BLOCKS_IN is null, consider it the universal set.
3010 BLOCKS_OUT is set for every block that was changed. */
3013 calculate_global_regs_live (blocks_in, blocks_out, flags)
3014 sbitmap blocks_in, blocks_out;
3017 basic_block *queue, *qhead, *qtail, *qend;
3018 regset tmp, new_live_at_end;
3019 regset_head tmp_head;
3020 regset_head new_live_at_end_head;
3023 tmp = INITIALIZE_REG_SET (tmp_head);
3024 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3026 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3027 because the `head == tail' style test for an empty queue doesn't
3028 work with a full queue. */
3029 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3031 qhead = qend = queue + n_basic_blocks + 2;
3033 /* Clear out the garbage that might be hanging out in bb->aux. */
3034 for (i = n_basic_blocks - 1; i >= 0; --i)
3035 BASIC_BLOCK (i)->aux = NULL;
3037 /* Queue the blocks set in the initial mask. Do this in reverse block
3038 number order so that the first round is more likely to do useful
3039 work. We use AUX non-null to flag that the block is queued. */
3042 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3044 basic_block bb = BASIC_BLOCK (i);
3051 for (i = 0; i < n_basic_blocks; ++i)
3053 basic_block bb = BASIC_BLOCK (i);
3060 sbitmap_zero (blocks_out);
3062 while (qhead != qtail)
3064 int rescan, changed;
3073 /* Begin by propagating live_at_start from the successor blocks. */
3074 CLEAR_REG_SET (new_live_at_end);
3075 for (e = bb->succ; e ; e = e->succ_next)
3077 basic_block sb = e->dest;
3078 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3081 /* Force the stack pointer to be live -- which might not already be
3082 the case for blocks within infinite loops. */
3083 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3085 /* Regs used in phi nodes are not included in
3086 global_live_at_start, since they are live only along a
3087 particular edge. Set those regs that are live because of a
3088 phi node alternative corresponding to this particular block. */
3090 for_each_successor_phi (bb, &set_phi_alternative_reg,
3093 if (bb == ENTRY_BLOCK_PTR)
3095 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3099 /* On our first pass through this block, we'll go ahead and continue.
3100 Recognize the first pass by local_set being NULL. On subsequent passes, we
3101 get to skip out early if live_at_end wouldn't have changed. */
3103 if (bb->local_set == NULL)
3105 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3110 /* If any bits were removed from live_at_end, we'll have to
3111 rescan the block. This wouldn't be necessary if we had
3112 precalculated local_live; however, with PROP_SCAN_DEAD_CODE
3113 local_live is really dependent on live_at_end. */
3114 CLEAR_REG_SET (tmp);
3115 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3116 new_live_at_end, BITMAP_AND_COMPL);
3120 /* Find the set of changed bits. Take this opportunity
3121 to notice whether this set is empty and exit early if so. */
3122 CLEAR_REG_SET (tmp);
3123 changed = bitmap_operation (tmp, bb->global_live_at_end,
3124 new_live_at_end, BITMAP_XOR);
3128 /* If any of the changed bits overlap with local_set,
3129 we'll have to rescan the block. Detect overlap by
3130 the AND with ~local_set turning off bits. */
3131 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3136 /* Let our caller know that BB changed enough to require its
3137 death notes to be updated. */
3139 SET_BIT (blocks_out, bb->index);
3143 /* Add to live_at_start the set of all registers in
3144 new_live_at_end that aren't in the old live_at_end. */
3146 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3148 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3150 changed = bitmap_operation (bb->global_live_at_start,
3151 bb->global_live_at_start,
3158 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3160 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3161 into live_at_start. */
3162 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3164 /* If live_at_start didn't change, no need to go further. */
3165 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3168 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3171 /* Queue all predecessors of BB so that we may re-examine
3172 their live_at_end. */
3173 for (e = bb->pred; e ; e = e->pred_next)
3175 basic_block pb = e->src;
3176 if (pb->aux == NULL)
3187 FREE_REG_SET (new_live_at_end);
3191 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3193 basic_block bb = BASIC_BLOCK (i);
3194 FREE_REG_SET (bb->local_set);
3199 for (i = n_basic_blocks - 1; i >= 0; --i)
3201 basic_block bb = BASIC_BLOCK (i);
3202 FREE_REG_SET (bb->local_set);
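/* A compact, self-contained sketch (not compiled) of the relaxation
   performed by calculate_global_regs_live, on a toy CFG that limits
   register sets to one machine word.  All names here are hypothetical:
   live_in/live_out stand for global_live_at_start/global_live_at_end,
   and the USE/DEF masks stand in for the rescan that propagate_block
   does above.  Note the queue gets one spare slot so that the
   head == tail test means "empty" and never "full". */
#if 0
struct toy_block
{
  int nsucc, npred;
  int succ[4], pred[4];
  unsigned use, def;			/* upward-exposed uses; defs */
  unsigned live_in, live_out;
  int queued;
};

static void
toy_global_liveness (bb, n)
     struct toy_block *bb;
     int n;
{
  int *queue = (int *) xmalloc ((n + 1) * sizeof (int));
  int head = 0, tail = 0, i, j;

  /* Queue every block once, in reverse order as above.  */
  for (i = n - 1; i >= 0; --i)
    {
      queue[tail++] = i;
      bb[i].queued = 1;
    }

  while (head != tail)
    {
      struct toy_block *b = &bb[queue[head]];

      head = (head + 1) % (n + 1);
      b->queued = 0;

      /* live_out is the union of the successors' live_in.  */
      b->live_out = 0;
      for (j = 0; j < b->nsucc; ++j)
	b->live_out |= bb[b->succ[j]].live_in;

      /* Backward transfer function through the block.  */
      if ((b->use | (b->live_out & ~b->def)) != b->live_in)
	{
	  b->live_in = b->use | (b->live_out & ~b->def);

	  /* Requeue predecessors whose live_out may now grow.  */
	  for (j = 0; j < b->npred; ++j)
	    if (! bb[b->pred[j]].queued)
	      {
		bb[b->pred[j]].queued = 1;
		queue[tail] = b->pred[j];
		tail = (tail + 1) % (n + 1);
	      }
	}
    }

  free (queue);
}
#endif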
3209 /* Subroutines of life analysis. */
3211 /* Allocate the permanent data structures that represent the results
3212 of life analysis. Not static since used also for stupid life analysis. */
3215 allocate_bb_life_data ()
3219 for (i = 0; i < n_basic_blocks; i++)
3221 basic_block bb = BASIC_BLOCK (i);
3223 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3224 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3227 ENTRY_BLOCK_PTR->global_live_at_end
3228 = OBSTACK_ALLOC_REG_SET (function_obstack);
3229 EXIT_BLOCK_PTR->global_live_at_start
3230 = OBSTACK_ALLOC_REG_SET (function_obstack);
3232 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3236 allocate_reg_life_data ()
3240 max_regno = max_reg_num ();
3242 /* Recalculate the register space, in case it has grown. Old style
3243 vector oriented regsets would set regset_{size,bytes} here also. */
3244 allocate_reg_info (max_regno, FALSE, FALSE);
3246 /* Reset all the data we'll collect in propagate_block and its subroutines. */
3248 for (i = 0; i < max_regno; i++)
3252 REG_N_DEATHS (i) = 0;
3253 REG_N_CALLS_CROSSED (i) = 0;
3254 REG_LIVE_LENGTH (i) = 0;
3255 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3259 /* Delete dead instructions for propagate_block. */
3262 propagate_block_delete_insn (bb, insn)
3266 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3268 /* If the insn referred to a label, and that label was attached to
3269 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3270 pretty much mandatory to delete it, because the ADDR_VEC may be
3271 referencing labels that no longer exist. */
3275 rtx label = XEXP (inote, 0);
3278 if (LABEL_NUSES (label) == 1
3279 && (next = next_nonnote_insn (label)) != NULL
3280 && GET_CODE (next) == JUMP_INSN
3281 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3282 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3284 rtx pat = PATTERN (next);
3285 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3286 int len = XVECLEN (pat, diff_vec_p);
3289 for (i = 0; i < len; i++)
3290 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3292 flow_delete_insn (next);
3296 if (bb->end == insn)
3297 bb->end = PREV_INSN (insn);
3298 flow_delete_insn (insn);
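/* A toy sketch (not compiled; types hypothetical) of the bookkeeping
   above: deleting a dispatch table must drop one use from every label
   the table mentions, since any of those labels may thereby become
   deletable as well. */
#if 0
struct toy_label { int nuses; };

static void
toy_delete_table (targets, len)
     struct toy_label **targets;
     int len;
{
  int i;

  for (i = 0; i < len; i++)
    targets[i]->nuses--;	/* mirrors LABEL_NUSES (...)-- above */
}
#endif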
3301 /* Delete dead libcalls for propagate_block. Return the insn
3302 before the libcall. */
3305 propagate_block_delete_libcall (bb, insn, note)
3309 rtx first = XEXP (note, 0);
3310 rtx before = PREV_INSN (first);
3312 if (insn == bb->end)
3315 flow_delete_insn_chain (first, insn);
3319 /* Update the life-status of regs for one insn. Return the previous insn. */
3322 propagate_one_insn (pbi, insn)
3323 struct propagate_block_info *pbi;
3326 rtx prev = PREV_INSN (insn);
3327 int flags = pbi->flags;
3328 int insn_is_dead = 0;
3329 int libcall_is_dead = 0;
3333 if (! INSN_P (insn))
3336 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3337 if (flags & PROP_SCAN_DEAD_CODE)
3339 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3341 libcall_is_dead = (insn_is_dead && note != 0
3342 && libcall_dead_p (pbi, PATTERN (insn),
3346 /* We almost certainly don't want to delete prologue or epilogue
3347 instructions. Warn about probable compiler lossage. */
3350 && (((HAVE_epilogue || HAVE_prologue)
3351 && prologue_epilogue_contains (insn))
3352 || (HAVE_sibcall_epilogue
3353 && sibcall_epilogue_contains (insn))))
3355 if (flags & PROP_KILL_DEAD_CODE)
3357 warning ("ICE: would have deleted prologue/epilogue insn");
3358 if (!inhibit_warnings)
3361 libcall_is_dead = insn_is_dead = 0;
3364 /* If an instruction consists of just dead store(s) on final pass, delete it. */
3366 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3368 /* Record sets. Do this even for dead instructions, since they
3369 would have killed the values if they hadn't been deleted. */
3370 mark_set_regs (pbi, PATTERN (insn), insn);
3372 /* CC0 is now known to be dead. Either this insn used it,
3373 in which case it doesn't anymore, or clobbered it,
3374 so the next insn can't use it. */
3377 if (libcall_is_dead)
3379 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3380 insn = NEXT_INSN (prev);
3383 propagate_block_delete_insn (pbi->bb, insn);
3388 /* See if this is an increment or decrement that can be merged into
3389 a following memory address. */
3392 register rtx x = single_set (insn);
3394 /* Does this instruction increment or decrement a register? */
3395 if (!reload_completed
3396 && (flags & PROP_AUTOINC)
3398 && GET_CODE (SET_DEST (x)) == REG
3399 && (GET_CODE (SET_SRC (x)) == PLUS
3400 || GET_CODE (SET_SRC (x)) == MINUS)
3401 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3402 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3403 /* Ok, look for a following memory ref we can combine with.
3404 If one is found, change the memory ref to a PRE_INC
3405 or PRE_DEC, cancel this insn, and return 1.
3406 Return 0 if nothing has been done. */
3407 && try_pre_increment_1 (pbi, insn))
3410 #endif /* AUTO_INC_DEC */
3412 CLEAR_REG_SET (pbi->new_set);
3414 /* If this is not the final pass, and this insn is copying the value of
3415 a library call and it's dead, don't scan the insns that perform the
3416 library call, so that the call's arguments are not marked live. */
3417 if (libcall_is_dead)
3419 /* Record the death of the dest reg. */
3420 mark_set_regs (pbi, PATTERN (insn), insn);
3422 insn = XEXP (note, 0);
3423 return PREV_INSN (insn);
3425 else if (GET_CODE (PATTERN (insn)) == SET
3426 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3427 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3428 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3429 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3430 /* We have an insn to pop a constant amount off the stack.
3431 (Such insns use PLUS regardless of the direction of the stack,
3432 and any insn to adjust the stack by a constant is always a pop.)
3433 These insns, if not dead stores, have no effect on life. */
3437 /* Any regs live at the time of a call instruction must not go
3438 in a register clobbered by calls. Find all regs now live and
3439 record this for them. */
3441 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3442 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3443 { REG_N_CALLS_CROSSED (i)++; });
3445 /* Record sets. Do this even for dead instructions, since they
3446 would have killed the values if they hadn't been deleted. */
3447 mark_set_regs (pbi, PATTERN (insn), insn);
3449 if (GET_CODE (insn) == CALL_INSN)
3455 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3456 cond = COND_EXEC_TEST (PATTERN (insn));
3458 /* Non-constant calls clobber memory. */
3459 if (! CONST_CALL_P (insn))
3460 free_EXPR_LIST_list (&pbi->mem_set_list);
3462 /* There may be extra registers to be clobbered. */
3463 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3465 note = XEXP (note, 1))
3466 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3467 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3468 cond, insn, pbi->flags);
3470 /* Calls change all call-used and global registers. */
3471 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3472 if (call_used_regs[i] && ! global_regs[i]
3475 /* We do not want REG_UNUSED notes for these registers. */
3476 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3477 cond, insn, pbi->flags & ~PROP_DEATH_NOTES);
3481 /* If an insn doesn't use CC0, it becomes dead since we assume
3482 that every insn clobbers it. So show it dead here;
3483 mark_used_regs will set it live if it is referenced. */
3488 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3490 /* Sometimes we may have inserted something before INSN (such as a move)
3491 when we make an auto-inc. So ensure we will scan those insns. */
3493 prev = PREV_INSN (insn);
3496 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3502 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3503 cond = COND_EXEC_TEST (PATTERN (insn));
3505 /* Calls use their arguments. */
3506 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3508 note = XEXP (note, 1))
3509 if (GET_CODE (XEXP (note, 0)) == USE)
3510 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3513 /* The stack ptr is used (honorarily) by a CALL insn. */
3514 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3516 /* Calls may also reference any of the global registers,
3517 so they are made live. */
3518 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3520 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3525 /* On final pass, update counts of how many insns in which each reg is live. */
3527 if (flags & PROP_REG_INFO)
3528 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3529 { REG_LIVE_LENGTH (i)++; });
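/* An illustrative sketch (not compiled; the toy insn type and the
   single-word register set are hypothetical) of the per-insn step that
   propagate_one_insn performs while walking a block backward: kill the
   registers the insn sets, then make live the registers it uses. */
#if 0
struct toy_insn { unsigned use, def; };

static unsigned
toy_propagate_block (insns, n, live_at_end)
     struct toy_insn *insns;
     int n;
     unsigned live_at_end;
{
  unsigned live = live_at_end;
  int i;

  for (i = n - 1; i >= 0; --i)
    {
      /* A def of a register not live below is a dead store; the real
	 pass may delete such insns under PROP_KILL_DEAD_CODE.  */
      live &= ~insns[i].def;
      live |= insns[i].use;
    }

  return live;	/* this is live_at_start */
}
#endif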
3534 /* Initialize a propagate_block_info struct for public consumption.
3535 Note that the structure itself is opaque outside this file, but that
3536 the user can use the regsets provided here. */
3538 struct propagate_block_info *
3539 init_propagate_block_info (bb, live, local_set, flags)
3545 struct propagate_block_info *pbi = xmalloc (sizeof(*pbi));
3548 pbi->reg_live = live;
3549 pbi->mem_set_list = NULL_RTX;
3550 pbi->local_set = local_set;
3554 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3555 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3557 pbi->reg_next_use = NULL;
3559 pbi->new_set = BITMAP_XMALLOC ();
3561 #ifdef HAVE_conditional_execution
3562 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3563 free_reg_cond_life_info);
3564 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3566 /* If this block ends in a conditional branch, for each register live
3567 from one side of the branch and not the other, record the register
3568 as conditionally dead. */
3569 if (GET_CODE (bb->end) == JUMP_INSN
3570 && condjump_p (bb->end)
3571 && ! simplejump_p (bb->end))
3573 regset_head diff_head;
3574 regset diff = INITIALIZE_REG_SET (diff_head);
3575 basic_block bb_true, bb_false;
3576 rtx cond_true, cond_false;
3579 /* Identify the successor blocks. */
3580 bb_false = bb->succ->succ_next->dest;