1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
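   As a sketch, a typical invocation looks like this (the real callers
   live elsewhere, e.g. toplev.c; the arguments match the prototype
   defined below in this file):

	find_basic_blocks (get_insns (), max_reg_num (), rtl_dump_file);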
43 ** life_analysis **
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
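   For example, to test whether register REGNO is live on entry to basic
   block BB (a sketch using the standard regset accessor):

	if (REGNO_REG_SET_P (BB->global_live_at_start, REGNO))
	  ...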
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
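   As an illustration, an insn that uses a dying (reg 59) and makes an
   unused set of (reg 58) carries notes roughly like these in debugging
   dumps (a sketch; insn and register numbers are arbitrary):

	(insn 10 9 11 (set (reg 58) (plus (reg 59) (const_int 1))) ...
	   (expr_list:REG_DEAD (reg 59)
	      (expr_list:REG_UNUSED (reg 58)
		 (nil))))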
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
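   A sketch of that forward scan, simplified to single-set insns (LIVE
   is a regset initialized from bb->global_live_at_start):

	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    rtx note, set;
	    if (INSN_P (insn))
	      {
		for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
		  if (REG_NOTE_KIND (note) == REG_DEAD)
		    CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));
		if ((set = single_set (insn)) != NULL
		    && GET_CODE (SET_DEST (set)) == REG)
		  SET_REGNO_REG_SET (live, REGNO (SET_DEST (set)));
	      }
	    if (insn == bb->end)
	      break;
	  }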
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
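   For example (a sketch; the mode and register numbers are arbitrary):

	(set (mem (reg 100)) (reg 101))
	(set (reg 100) (plus (reg 100) (const_int 4)))

   becomes the single insn

	(set (mem (post_inc (reg 100))) (reg 101))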
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
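/* A typical invocation of life_analysis, as a sketch (the real caller
   lives elsewhere, e.g. toplev.c; PROP_FINAL requests the full set of
   properties):

	life_analysis (get_insns (), rtl_dump_file, PROP_FINAL);  */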
112 /* TODO:
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
132 #include "function.h"
140 #include "splay-tree.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
145 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
146 the stack pointer does not matter. The value is tested only in
147 functions that have frame pointers.
148 No definition is equivalent to always zero. */
149 #ifndef EXIT_IGNORE_STACK
150 #define EXIT_IGNORE_STACK 0
151 #endif
153 #ifndef HAVE_epilogue
154 #define HAVE_epilogue 0
155 #endif
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
158 #endif
159 #ifndef HAVE_sibcall_epilogue
160 #define HAVE_sibcall_epilogue 0
161 #endif
163 #ifndef LOCAL_REGNO
164 #define LOCAL_REGNO(REGNO) 0
165 #endif
166 #ifndef EPILOGUE_USES
167 #define EPILOGUE_USES(REGNO) 0
168 #endif
170 #ifdef HAVE_conditional_execution
171 #ifndef REVERSE_CONDEXEC_PREDICATES_P
172 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
173 #endif
174 #endif
176 /* The obstack on which the flow graph components are allocated. */
178 struct obstack flow_obstack;
179 static char *flow_firstobj;
181 /* Number of basic blocks in the current function. */
183 int n_basic_blocks;
185 /* Number of edges in the current function. */
187 int n_edges;
189 /* The basic block array. */
191 varray_type basic_block_info;
193 /* The special entry and exit blocks. */
195 struct basic_block_def entry_exit_blocks[2]
200 NULL, /* local_set */
201 NULL, /* cond_local_set */
202 NULL, /* global_live_at_start */
203 NULL, /* global_live_at_end */
205 ENTRY_BLOCK, /* index */
215 NULL, /* local_set */
216 NULL, /* cond_local_set */
217 NULL, /* global_live_at_start */
218 NULL, /* global_live_at_end */
220 EXIT_BLOCK, /* index */
227 /* Nonzero if the second flow pass has completed. */
230 /* Maximum register number used in this function, plus one. */
234 /* Indexed by n, giving various register information */
236 varray_type reg_n_info;
238 /* Size of a regset for the current function,
239 in (1) bytes and (2) elements. */
244 /* Regset of regs live when calls to `setjmp'-like functions happen. */
245 /* ??? Does this exist only for the setjmp-clobbered warning message? */
247 regset regs_live_at_setjmp;
249 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
250 that have to go in the same hard reg.
251 The first two regs in the list are a pair, and the next two
252 are another pair, etc. */
255 /* Callback that determines if it's ok for a function to have no
256 noreturn attribute. */
257 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
259 /* Set of registers that may be eliminable. These are handled specially
260 in updating regs_ever_live. */
262 static HARD_REG_SET elim_reg_set;
264 /* The basic block structure for every insn, indexed by uid. */
266 varray_type basic_block_for_insn;
268 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
269 /* ??? Should probably be using LABEL_NUSES instead. It would take a
270 bit of surgery to be able to use or co-opt the routines in jump. */
272 static rtx label_value_list;
273 static rtx tail_recursion_label_list;
275 /* Holds information for tracking conditional register life information. */
276 struct reg_cond_life_info
278 /* A boolean expression of conditions under which a register is dead. */
280 /* Conditions under which a register is dead at the basic block end. */
283 /* A boolean expression of conditions under which a register has been
284 stored into. */
287 /* ??? Could store mask of bytes that are dead, so that we could finally
288 track lifetimes of multi-word registers accessed via subregs. */
291 /* For use in communicating between propagate_block and its subroutines.
292 Holds all information needed to compute life and def-use information. */
294 struct propagate_block_info
296 /* The basic block we're considering. */
299 /* Bit N is set if register N is conditionally or unconditionally live. */
302 /* Bit N is set if register N is set this insn. */
305 /* Element N is the next insn that uses (hard or pseudo) register N
306 within the current basic block; or zero, if there is no such insn. */
309 /* Contains a list of all the MEMs we are tracking for dead store
310 elimination. */
313 /* If non-null, record the set of registers set unconditionally in the
314 block. */
315 regset local_set;
317 /* If non-null, record the set of registers set conditionally in the
318 block. */
319 regset cond_local_set;
321 #ifdef HAVE_conditional_execution
322 /* Indexed by register number, holds a reg_cond_life_info for each
323 register that is not unconditionally live or dead. */
324 splay_tree reg_cond_dead;
326 /* Bit N is set if register N is in an expression in reg_cond_dead. */
330 /* The length of mem_set_list. */
331 int mem_set_list_len;
333 /* Non-zero if the value of CC0 is live. */
336 /* Flags controlling the set of information propagate_block collects. */
340 /* Maximum length of pbi->mem_set_list before we start dropping
341 new elements on the floor. */
342 #define MAX_MEM_SET_LIST_LEN 100
344 /* Store the data structures necessary for depth-first search. */
345 struct depth_first_search_dsS {
346 /* stack for backtracking during the algorithm */
349 /* number of edges in the stack. That is, positions 0, ..., sp-1
350 have edges. */
353 /* record of basic blocks already seen by depth-first search */
354 sbitmap visited_blocks;
356 typedef struct depth_first_search_dsS *depth_first_search_ds;
358 /* Have print_rtl_and_abort give the same information that fancy_abort
359 does. */
360 #define print_rtl_and_abort() \
361 print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
363 /* Forward declarations */
364 static int count_basic_blocks PARAMS ((rtx));
365 static void find_basic_blocks_1 PARAMS ((rtx));
366 static rtx find_label_refs PARAMS ((rtx, rtx));
367 static void make_edges PARAMS ((rtx));
368 static void make_label_edge PARAMS ((sbitmap *, basic_block,
370 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
372 static void commit_one_edge_insertion PARAMS ((edge));
374 static void delete_unreachable_blocks PARAMS ((void));
375 static int can_delete_note_p PARAMS ((rtx));
376 static void expunge_block PARAMS ((basic_block));
377 static int can_delete_label_p PARAMS ((rtx));
378 static int tail_recursion_label_p PARAMS ((rtx));
379 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
381 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
383 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
384 static bool try_optimize_cfg PARAMS ((void));
385 static bool forwarder_block_p PARAMS ((basic_block));
386 static bool can_fallthru PARAMS ((basic_block, basic_block));
387 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
388 static bool try_simplify_condjump PARAMS ((basic_block));
389 static bool try_forward_edges PARAMS ((basic_block));
390 static void tidy_fallthru_edges PARAMS ((void));
391 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
392 static void verify_wide_reg PARAMS ((int, rtx, rtx));
393 static void verify_local_live_at_start PARAMS ((regset, basic_block));
394 static int noop_move_p PARAMS ((rtx));
395 static void delete_noop_moves PARAMS ((rtx));
396 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
397 static void notice_stack_pointer_modification PARAMS ((rtx));
398 static void mark_reg PARAMS ((rtx, void *));
399 static void mark_regs_live_at_end PARAMS ((regset));
400 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
401 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
402 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
403 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
404 static int insn_dead_p PARAMS ((struct propagate_block_info *,
406 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
408 static void mark_set_regs PARAMS ((struct propagate_block_info *,
410 static void mark_set_1 PARAMS ((struct propagate_block_info *,
411 enum rtx_code, rtx, rtx,
413 #ifdef HAVE_conditional_execution
414 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
416 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
417 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
418 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
420 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
421 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
422 static rtx not_reg_cond PARAMS ((rtx));
423 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
426 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
427 rtx, rtx, rtx, rtx, rtx));
428 static void find_auto_inc PARAMS ((struct propagate_block_info *,
430 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
432 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
434 static void mark_used_reg PARAMS ((struct propagate_block_info *,
436 static void mark_used_regs PARAMS ((struct propagate_block_info *,
438 void dump_flow_info PARAMS ((FILE *));
439 void debug_flow_info PARAMS ((void));
440 static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
444 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
446 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
448 static void remove_fake_successors PARAMS ((basic_block));
449 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
451 static void flow_edge_list_print PARAMS ((const char *, const edge *,
453 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
455 static int flow_loop_nested_p PARAMS ((struct loop *,
457 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
459 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
460 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
461 static void flow_dfs_compute_reverse_init
462 PARAMS ((depth_first_search_ds));
463 static void flow_dfs_compute_reverse_add_bb
464 PARAMS ((depth_first_search_ds, basic_block));
465 static basic_block flow_dfs_compute_reverse_execute
466 PARAMS ((depth_first_search_ds));
467 static void flow_dfs_compute_reverse_finish
468 PARAMS ((depth_first_search_ds));
469 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
470 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
472 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
473 static void flow_loops_tree_build PARAMS ((struct loops *));
474 static int flow_loop_level_compute PARAMS ((struct loop *, int));
475 static int flow_loops_level_compute PARAMS ((struct loops *));
476 static void allocate_bb_life_data PARAMS ((void));
477 static void find_sub_basic_blocks PARAMS ((basic_block));
478 static bool redirect_edge_and_branch PARAMS ((edge, basic_block));
479 static rtx block_label PARAMS ((basic_block));
481 /* Find basic blocks of the current function.
482 F is the first insn of the function and NREGS the number of register
483 numbers in use. */
486 find_basic_blocks (f, nregs, file)
488 int nregs ATTRIBUTE_UNUSED;
489 FILE *file ATTRIBUTE_UNUSED;
493 /* Flush out existing data. */
494 if (basic_block_info != NULL)
500 /* Clear bb->aux on all extant basic blocks. We'll use this as a
501 tag for reuse during create_basic_block, just in case some pass
502 copies around basic block notes improperly. */
503 for (i = 0; i < n_basic_blocks; ++i)
504 BASIC_BLOCK (i)->aux = NULL;
506 VARRAY_FREE (basic_block_info);
509 n_basic_blocks = count_basic_blocks (f);
511 /* Size the basic block table. The actual structures will be allocated
512 by find_basic_blocks_1, since we want to keep the structure pointers
513 stable across calls to find_basic_blocks. */
514 /* ??? This whole issue would be much simpler if we called find_basic_blocks
515 exactly once, and thereafter we don't have a single long chain of
516 instructions at all until close to the end of compilation when we
517 actually lay them out. */
519 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
521 find_basic_blocks_1 (f);
523 /* Record the block to which an insn belongs. */
524 /* ??? This should be done another way, by which (perhaps) a label is
525 tagged directly with the basic block that it starts. It is used for
526 more than that currently, but IMO that is the only valid use. */
528 max_uid = get_max_uid ();
530 /* Leave space for insns life_analysis makes in some cases for auto-inc.
531 These cases are rare, so we don't need too much space. */
532 max_uid += max_uid / 10;
535 compute_bb_for_insn (max_uid);
537 /* Discover the edges of our cfg. */
538 make_edges (label_value_list);
540 /* Do very simple cleanup now, for the benefit of code that runs between
541 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
542 tidy_fallthru_edges ();
544 mark_critical_edges ();
546 #ifdef ENABLE_CHECKING
552 check_function_return_warnings ()
554 if (warn_missing_noreturn
555 && !TREE_THIS_VOLATILE (cfun->decl)
556 && EXIT_BLOCK_PTR->pred == NULL
557 && (lang_missing_noreturn_ok_p
558 && !lang_missing_noreturn_ok_p (cfun->decl)))
559 warning ("function might be possible candidate for attribute `noreturn'");
561 /* If we have a path to EXIT, then we do return. */
562 if (TREE_THIS_VOLATILE (cfun->decl)
563 && EXIT_BLOCK_PTR->pred != NULL)
564 warning ("`noreturn' function does return");
566 /* If the clobber_return_insn appears in some basic block, then we
567 do reach the end without returning a value. */
568 else if (warn_return_type
569 && cfun->x_clobber_return_insn != NULL
570 && EXIT_BLOCK_PTR->pred != NULL)
572 int max_uid = get_max_uid ();
574 /* If clobber_return_insn was excised by jump1, then renumber_insns
575 can make max_uid smaller than the number still recorded in our rtx.
576 That's fine, since this is a quick way of verifying that the insn
577 is no longer in the chain. */
578 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
580 /* Recompute insn->block mapping, since the initial mapping is
581 set before we delete unreachable blocks. */
582 compute_bb_for_insn (max_uid);
584 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
585 warning ("control reaches end of non-void function");
590 /* Count the basic blocks of the function. */
593 count_basic_blocks (f)
597 register RTX_CODE prev_code;
598 register int count = 0;
599 int saw_abnormal_edge = 0;
601 prev_code = JUMP_INSN;
602 for (insn = f; insn; insn = NEXT_INSN (insn))
604 enum rtx_code code = GET_CODE (insn);
606 if (code == CODE_LABEL
607 || (GET_RTX_CLASS (code) == 'i'
608 && (prev_code == JUMP_INSN
609 || prev_code == BARRIER
610 || saw_abnormal_edge)))
612 saw_abnormal_edge = 0;
616 /* Record whether this insn created an edge. */
617 if (code == CALL_INSN)
621 /* If there is a nonlocal goto label and the specified
622 region number isn't -1, we have an edge. */
623 if (nonlocal_goto_handler_labels
624 && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
625 || INTVAL (XEXP (note, 0)) >= 0))
626 saw_abnormal_edge = 1;
628 else if (can_throw_internal (insn))
629 saw_abnormal_edge = 1;
631 else if (flag_non_call_exceptions
633 && can_throw_internal (insn))
634 saw_abnormal_edge = 1;
640 /* The rest of the compiler works a bit more smoothly when we don't have to
641 check for the edge case of do-nothing functions with no basic blocks. */
644 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
651 /* Scan a list of insns for labels referred to other than by jumps.
652 This is used to scan the alternatives of a call placeholder. */
654 find_label_refs (f, lvl)
660 for (insn = f; insn; insn = NEXT_INSN (insn))
661 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
665 /* Make a list of all labels referred to other than by jumps
666 (which just don't have the REG_LABEL notes).
668 Make a special exception for labels followed by an ADDR*VEC,
669 as this would be a part of the tablejump setup code.
671 Make a special exception for registers loaded with label
672 values just before jump insns that use them. */
674 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
675 if (REG_NOTE_KIND (note) == REG_LABEL)
677 rtx lab = XEXP (note, 0), next;
679 if ((next = next_nonnote_insn (lab)) != NULL
680 && GET_CODE (next) == JUMP_INSN
681 && (GET_CODE (PATTERN (next)) == ADDR_VEC
682 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
684 else if (GET_CODE (lab) == NOTE)
686 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
687 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
690 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
697 /* Assume that someone emitted code with control flow instructions into
698 the middle of basic block BB. Update the data structures accordingly. */
700 find_sub_basic_blocks (bb)
703 rtx first_insn = bb->head, insn;
705 edge succ_list = bb->succ;
706 rtx jump_insn = NULL_RTX;
710 basic_block first_bb = bb, last_bb;
713 if (GET_CODE (first_insn) == CODE_LABEL)
714 first_insn = NEXT_INSN (first_insn);
715 first_insn = NEXT_INSN (first_insn);
719 /* Scan insn chain and try to find new basic block boundaries. */
722 enum rtx_code code = GET_CODE (insn);
726 /* We need some special care for those expressions. */
727 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
728 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
737 /* On code label, split current basic block. */
739 falltru = split_block (bb, PREV_INSN (insn));
744 remove_edge (falltru);
748 if (LABEL_ALTERNATE_NAME (insn))
749 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
752 /* In case we've previously split an insn on the JUMP_INSN, move the
753 block header to the proper place. */
756 falltru = split_block (bb, PREV_INSN (insn));
766 insn = NEXT_INSN (insn);
768 /* The last basic block must end at the original BB's end. */
772 /* Wire in the original edges for last basic block. */
775 bb->succ = succ_list;
777 succ_list->src = bb, succ_list = succ_list->succ_next;
780 bb->succ = succ_list;
782 /* Now re-scan and wire in all edges. This expects simple (conditional)
783 jumps at the end of each new basic block. */
785 for (i = first_bb->index; i < last_bb->index; i++)
787 bb = BASIC_BLOCK (i);
788 if (GET_CODE (bb->end) == JUMP_INSN)
790 mark_jump_label (PATTERN (bb->end), bb->end, 0, 0);
791 make_label_edge (NULL, bb, JUMP_LABEL (bb->end), 0);
793 insn = NEXT_INSN (insn);
797 /* Find all basic blocks of the function whose first insn is F.
799 Collect and return a list of labels whose addresses are taken. This
800 will be used in make_edges for use with computed gotos. */
803 find_basic_blocks_1 (f)
806 register rtx insn, next;
808 rtx bb_note = NULL_RTX;
814 /* We process the instructions in a slightly different way than we did
815 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
816 closed out the previous block, so that it gets attached at the proper
817 place. Since this form should be equivalent to the previous,
818 count_basic_blocks continues to use the old form as a check. */
820 for (insn = f; insn; insn = next)
822 enum rtx_code code = GET_CODE (insn);
824 next = NEXT_INSN (insn);
830 int kind = NOTE_LINE_NUMBER (insn);
832 /* Look for basic block notes with which to keep the
833 basic_block_info pointers stable. Unthread the note now;
834 we'll put it back at the right place in create_basic_block.
835 Or not at all if we've already found a note in this block. */
836 if (kind == NOTE_INSN_BASIC_BLOCK)
838 if (bb_note == NULL_RTX)
841 next = flow_delete_insn (insn);
847 /* A basic block starts at a label. If we've closed one off due
848 to a barrier or some such, no need to do it again. */
849 if (head != NULL_RTX)
851 /* While we now have edge lists with which other portions of
852 the compiler might determine a call ending a basic block
853 does not imply an abnormal edge, it will be a bit before
854 everything can be updated. So continue to emit a noop at
855 the end of such a block. */
856 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
858 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
859 end = emit_insn_after (nop, end);
862 create_basic_block (i++, head, end, bb_note);
870 /* A basic block ends at a jump. */
871 if (head == NULL_RTX)
875 /* ??? Make a special check for table jumps. The way this
876 happens is truly and amazingly gross. We are about to
877 create a basic block that contains just a code label and
878 an addr*vec jump insn. Worse, an addr_diff_vec creates
879 its own natural loop.
881 Prevent this bit of brain damage by pasting things together
882 correctly in make_edges.
884 The correct solution involves emitting the table directly
885 on the tablejump instruction as a note, or JUMP_LABEL. */
887 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
888 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
896 goto new_bb_inclusive;
899 /* A basic block ends at a barrier. It may be that an unconditional
900 jump already closed the basic block -- no need to do it again. */
901 if (head == NULL_RTX)
904 /* While we now have edge lists with which other portions of the
905 compiler might determine a call ending a basic block does not
906 imply an abnormal edge, it will be a bit before everything can
907 be updated. So continue to emit a noop at the end of such a
908 block. */
909 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
911 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
912 end = emit_insn_after (nop, end);
914 goto new_bb_exclusive;
918 /* Record whether this call created an edge. */
919 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
920 int region = (note ? INTVAL (XEXP (note, 0)) : 0);
922 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
924 /* Scan each of the alternatives for label refs. */
925 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
926 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
927 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
928 /* Record its tail recursion label, if any. */
929 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
930 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
933 /* A basic block ends at a call that can either throw or
934 do a non-local goto. */
935 if ((nonlocal_goto_handler_labels && region >= 0)
936 || can_throw_internal (insn))
939 if (head == NULL_RTX)
944 create_basic_block (i++, head, end, bb_note);
945 head = end = NULL_RTX;
953 /* Non-call exceptions generate new blocks just like calls. */
954 if (flag_non_call_exceptions && can_throw_internal (insn))
955 goto new_bb_inclusive;
957 if (head == NULL_RTX)
966 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
970 /* Make a list of all labels referred to other than by jumps.
972 Make a special exception for labels followed by an ADDR*VEC,
973 as this would be a part of the tablejump setup code.
975 Make a special exception for registers loaded with label
976 values just before jump insns that use them. */
978 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
979 if (REG_NOTE_KIND (note) == REG_LABEL)
981 rtx lab = XEXP (note, 0), next;
983 if ((next = next_nonnote_insn (lab)) != NULL
984 && GET_CODE (next) == JUMP_INSN
985 && (GET_CODE (PATTERN (next)) == ADDR_VEC
986 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
988 else if (GET_CODE (lab) == NOTE)
990 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
991 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
994 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
999 if (head != NULL_RTX)
1000 create_basic_block (i++, head, end, bb_note);
1002 flow_delete_insn (bb_note);
1004 if (i != n_basic_blocks)
1007 label_value_list = lvl;
1008 tail_recursion_label_list = trll;
1011 /* Tidy the CFG by deleting unreachable code and whatnot. */
1016 delete_unreachable_blocks ();
1017 if (try_optimize_cfg ())
1018 delete_unreachable_blocks ();
1019 mark_critical_edges ();
1021 /* Kill the data we won't maintain. */
1022 free_EXPR_LIST_list (&label_value_list);
1023 free_EXPR_LIST_list (&tail_recursion_label_list);
1026 /* Create a new basic block consisting of the instructions between
1027 HEAD and END inclusive. Reuses the note and basic block struct
1028 in BB_NOTE, if any. */
1031 create_basic_block (index, head, end, bb_note)
1033 rtx head, end, bb_note;
1038 && ! RTX_INTEGRATED_P (bb_note)
1039 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
1042 /* If we found an existing note, thread it back onto the chain. */
1046 if (GET_CODE (head) == CODE_LABEL)
1050 after = PREV_INSN (head);
1054 if (after != bb_note && NEXT_INSN (after) != bb_note)
1055 reorder_insns (bb_note, bb_note, after);
1059 /* Otherwise we must create a note and a basic block structure.
1060 Since we allow basic block structs in rtl, give the struct
1061 the same lifetime by allocating it off the function obstack
1062 rather than using malloc. */
1064 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1065 memset (bb, 0, sizeof (*bb));
1067 if (GET_CODE (head) == CODE_LABEL)
1068 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
1071 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
1074 NOTE_BASIC_BLOCK (bb_note) = bb;
1077 /* Always include the bb note in the block. */
1078 if (NEXT_INSN (end) == bb_note)
1084 BASIC_BLOCK (index) = bb;
1086 /* Tag the block so that we know it has been used when considering
1087 other basic block notes. */
1091 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1092 indexed by INSN_UID. MAX is the size of the array. */
1095 compute_bb_for_insn (max)
1100 if (basic_block_for_insn)
1101 VARRAY_FREE (basic_block_for_insn);
1102 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1104 for (i = 0; i < n_basic_blocks; ++i)
1106 basic_block bb = BASIC_BLOCK (i);
1113 int uid = INSN_UID (insn);
1115 VARRAY_BB (basic_block_for_insn, uid) = bb;
1118 insn = NEXT_INSN (insn);
1123 /* Free the memory associated with the edge structures. */
1131 for (i = 0; i < n_basic_blocks; ++i)
1133 basic_block bb = BASIC_BLOCK (i);
1135 for (e = bb->succ; e; e = n)
1145 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1151 ENTRY_BLOCK_PTR->succ = 0;
1152 EXIT_BLOCK_PTR->pred = 0;
1157 /* Identify the edges between basic blocks.
1159 LABEL_VALUE_LIST is a list of labels mentioned other than by jumps;
1160 blocks that are otherwise unreachable may be reachable through these
1161 labels via a computed jump or a non-local goto. */
1166 make_edges (label_value_list)
1167 rtx label_value_list;
1170 sbitmap *edge_cache = NULL;
1172 /* Assume no computed jump; revise as we create edges. */
1173 current_function_has_computed_jump = 0;
1175 /* Heavy use of computed goto in machine-generated code can lead to
1176 nearly fully-connected CFGs. In that case we spend a significant
1177 amount of time searching the edge lists for duplicates. */
1178 if (forced_labels || label_value_list)
1180 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1181 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1184 /* By nature of the way these get numbered, block 0 is always the entry. */
1185 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1187 for (i = 0; i < n_basic_blocks; ++i)
1189 basic_block bb = BASIC_BLOCK (i);
1192 int force_fallthru = 0;
1194 if (GET_CODE (bb->head) == CODE_LABEL
1195 && LABEL_ALTERNATE_NAME (bb->head))
1196 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
1198 /* Examine the last instruction of the block, and discover the
1199 ways we can leave the block. */
1202 code = GET_CODE (insn);
1205 if (code == JUMP_INSN)
1209 /* Recognize exception handling placeholders. */
1210 if (GET_CODE (PATTERN (insn)) == RESX)
1211 make_eh_edge (edge_cache, bb, insn);
1213 /* Recognize a non-local goto as a branch outside the
1214 current function. */
1215 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1218 /* ??? Recognize a tablejump and do the right thing. */
1219 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1220 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1221 && GET_CODE (tmp) == JUMP_INSN
1222 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1223 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1228 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1229 vec = XVEC (PATTERN (tmp), 0);
1231 vec = XVEC (PATTERN (tmp), 1);
1233 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1234 make_label_edge (edge_cache, bb,
1235 XEXP (RTVEC_ELT (vec, j), 0), 0);
1237 /* Some targets (eg, ARM) emit a conditional jump that also
1238 contains the out-of-range target. Scan for these and
1239 add an edge if necessary. */
1240 if ((tmp = single_set (insn)) != NULL
1241 && SET_DEST (tmp) == pc_rtx
1242 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1243 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1244 make_label_edge (edge_cache, bb,
1245 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1247 #ifdef CASE_DROPS_THROUGH
1248 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1249 us naturally detecting fallthru into the next block. */
1254 /* If this is a computed jump, then mark it as reaching
1255 everything on the label_value_list and forced_labels list. */
1256 else if (computed_jump_p (insn))
1258 current_function_has_computed_jump = 1;
1260 for (x = label_value_list; x; x = XEXP (x, 1))
1261 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1263 for (x = forced_labels; x; x = XEXP (x, 1))
1264 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1267 /* A return insn creates an edge to the exit block. */
1268 else if (returnjump_p (insn))
1269 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1271 /* Otherwise, we have a plain conditional or unconditional jump. */
1274 if (! JUMP_LABEL (insn))
1276 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1280 /* If this is a sibling call insn, then this is in effect a
1281 combined call and return, and so we need an edge to the
1282 exit block. No need to worry about EH edges, since we
1283 wouldn't have created the sibling call in the first place. */
1285 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1286 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1287 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1289 /* If this is a CALL_INSN, then mark it as reaching the active EH
1290 handler for this CALL_INSN. If we're handling non-call
1291 exceptions then any insn can reach any of the active handlers.
1293 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1295 else if (code == CALL_INSN || flag_non_call_exceptions)
1297 /* Add any appropriate EH edges. */
1298 make_eh_edge (edge_cache, bb, insn);
1300 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1302 /* ??? This could be made smarter: in some cases it's possible
1303 to tell that certain calls will not do a nonlocal goto.
1305 For example, if the nested functions that do the nonlocal
1306 gotos do not have their addresses taken, then only calls to
1307 those functions or to other nested functions that use them
1308 could possibly do nonlocal gotos. */
1309 /* We do know that a REG_EH_REGION note with a value less
1310 than 0 is guaranteed not to perform a non-local goto. */
1311 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1312 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1313 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1314 make_label_edge (edge_cache, bb, XEXP (x, 0),
1315 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1319 /* Find out if we can drop through to the next block. */
1320 insn = next_nonnote_insn (insn);
1321 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1322 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1323 else if (i + 1 < n_basic_blocks)
1325 rtx tmp = BLOCK_HEAD (i + 1);
1326 if (GET_CODE (tmp) == NOTE)
1327 tmp = next_nonnote_insn (tmp);
1328 if (force_fallthru || insn == tmp)
1329 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1334 sbitmap_vector_free (edge_cache);
1337 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1338 about the edge that is accumulated between calls. */
1341 make_edge (edge_cache, src, dst, flags)
1342 sbitmap *edge_cache;
1343 basic_block src, dst;
1349 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1350 many edges to them, and we didn't allocate memory for it. */
1351 use_edge_cache = (edge_cache
1352 && src != ENTRY_BLOCK_PTR
1353 && dst != EXIT_BLOCK_PTR);
1355 /* Make sure we don't add duplicate edges. */
1356 switch (use_edge_cache)
1359 /* Quick test for non-existence of the edge. */
1360 if (! TEST_BIT (edge_cache[src->index], dst->index))
1363 /* The edge exists; early exit if no work to do. */
1369 for (e = src->succ; e; e = e->succ_next)
1378 e = (edge) xcalloc (1, sizeof (*e));
1381 e->succ_next = src->succ;
1382 e->pred_next = dst->pred;
1391 SET_BIT (edge_cache[src->index], dst->index);
1394 /* Create an edge from a basic block to a label. */
1397 make_label_edge (edge_cache, src, label, flags)
1398 sbitmap *edge_cache;
1403 if (GET_CODE (label) != CODE_LABEL)
1406 /* If the label was never emitted, this insn is junk, but avoid a
1407 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1408 as a result of a syntax error and a diagnostic has already been
1409 printed. */
1411 if (INSN_UID (label) == 0)
1414 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1417 /* Create the exception-handling edges generated by INSN. */
1420 make_eh_edge (edge_cache, src, insn)
1421 sbitmap *edge_cache;
1425 int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1428 handlers = reachable_handlers (insn);
1430 for (i = handlers; i; i = XEXP (i, 1))
1431 make_label_edge (edge_cache, src, XEXP (i, 0),
1432 EDGE_ABNORMAL | EDGE_EH | is_call);
1434 free_INSN_LIST_list (&handlers);
1437 /* Identify critical edges and set the bits appropriately. */
1440 mark_critical_edges ()
1442 int i, n = n_basic_blocks;
1445 /* We begin with the entry block. This is not terribly important now,
1446 but could be if a front end (Fortran) implemented alternate entry
1447 points. */
1448 bb = ENTRY_BLOCK_PTR;
1455 /* (1) Critical edges must have a source with multiple successors. */
1456 if (bb->succ && bb->succ->succ_next)
1458 for (e = bb->succ; e; e = e->succ_next)
1460 /* (2) Critical edges must have a destination with multiple
1461 predecessors. Note that we know there is at least one
1462 predecessor -- the edge we followed to get here. */
1463 if (e->dest->pred->pred_next)
1464 e->flags |= EDGE_CRITICAL;
1466 e->flags &= ~EDGE_CRITICAL;
1471 for (e = bb->succ; e; e = e->succ_next)
1472 e->flags &= ~EDGE_CRITICAL;
1477 bb = BASIC_BLOCK (i);
1481 /* Split a block BB after insn INSN creating a new fallthru edge.
1482 Return the new edge. Note that to keep other parts of the compiler happy,
1483 this function renumbers all the basic blocks so that the new
1484 one has a number one greater than the block being split. */
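/* As a usage sketch, splitting BB after INSN and picking up the newly
   created block from the returned fallthru edge:

	edge e = split_block (bb, insn);
	basic_block new_bb = e->dest;  */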
1487 split_block (bb, insn)
1497 /* There is no point splitting the block after its end. */
1498 if (bb->end == insn)
1501 /* Create the new structures. */
1502 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1503 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1506 memset (new_bb, 0, sizeof (*new_bb));
1508 new_bb->head = NEXT_INSN (insn);
1509 new_bb->end = bb->end;
1512 new_bb->succ = bb->succ;
1513 bb->succ = new_edge;
1514 new_bb->pred = new_edge;
1515 new_bb->count = bb->count;
1516 new_bb->frequency = bb->frequency;
1517 new_bb->loop_depth = bb->loop_depth;
1520 new_edge->dest = new_bb;
1521 new_edge->flags = EDGE_FALLTHRU;
1522 new_edge->probability = REG_BR_PROB_BASE;
1523 new_edge->count = bb->count;
1525 /* Redirect the src of the successor edges of bb to point to new_bb. */
1526 for (e = new_bb->succ; e; e = e->succ_next)
1529 /* Place the new block just after the block being split. */
1530 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1532 /* Some parts of the compiler expect blocks to be numbered in
1533 sequential order, so insert the new block immediately after the
1534 block being split. */
1536 for (i = n_basic_blocks - 1; i > j + 1; --i)
1538 basic_block tmp = BASIC_BLOCK (i - 1);
1539 BASIC_BLOCK (i) = tmp;
1543 BASIC_BLOCK (i) = new_bb;
1546 if (GET_CODE (new_bb->head) == CODE_LABEL)
1548 /* Create the basic block note. */
1549 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
1551 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1555 /* Create the basic block note. */
1556 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1558 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1559 new_bb->head = bb_note;
1562 update_bb_for_insn (new_bb);
1564 if (bb->global_live_at_start)
1566 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1567 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1568 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1570 /* We now have to calculate which registers are live at the end
1571 of the split basic block and at the start of the new basic
1572 block. Start with those registers that are known to be live
1573 at the end of the original basic block and get
1574 propagate_block to determine which registers are live. */
1575 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1576 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1577 COPY_REG_SET (bb->global_live_at_end,
1578 new_bb->global_live_at_start);
1584 /* Return the label at the head of basic block BLOCK. Create one if it doesn't exist. */
1589 if (GET_CODE (block->head) != CODE_LABEL)
1590 block->head = emit_label_before (gen_label_rtx (), block->head);
1594 /* Return true if the block has no effect and only forwards control flow to
1595 its single destination. */
1597 forwarder_block_p (bb)
1600 rtx insn = bb->head;
1601 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
1602 || !bb->succ || bb->succ->succ_next)
1605 while (insn != bb->end)
1607 if (active_insn_p (insn))
1609 insn = NEXT_INSN (insn);
1611 return (!active_insn_p (insn)
1612 || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
1615 /* Return nonzero if we can reach TARGET from SRC by falling through. */
1617 can_fallthru (src, target)
1618 basic_block src, target;
1620 rtx insn = src->end;
1621 rtx insn2 = target->head;
1623 if (!active_insn_p (insn2))
1624 insn2 = next_active_insn (insn2);
1625 /* ??? Later we may add code to move jump tables offline. */
1626 return next_active_insn (insn) == insn2;
1629 /* Attempt to perform edge redirection by replacing a possibly complex jump
1630 instruction with an unconditional jump, or by removing the jump completely.
1631 This is possible only if all edges now point to the same block.
1633 The parameters and return values are equivalent to redirect_edge_and_branch.
1636 try_redirect_by_replacing_jump (e, target)
1640 basic_block src = e->src;
1641 rtx insn = src->end;
1647 /* Verify that all targets will be TARGET. */
1648 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
1649 if (tmp->dest != target && tmp != e)
1651 if (tmp || GET_CODE (insn) != JUMP_INSN)
1654 /* Avoid removing a branch with side effects. */
1655 set = single_set (insn);
1656 if (!set || side_effects_p (set))
1659 /* See if we can create the fallthru edge. */
1660 if (can_fallthru (src, target))
1662 src->end = PREV_INSN (insn);
1664 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
1665 flow_delete_insn (insn);
1669 /* If this is already a simple jump, redirect it. */
1670 else if (simplejump_p (insn))
1672 if (e->dest == target)
1675 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
1676 INSN_UID (insn), e->dest->index, target->index);
1677 redirect_jump (insn, block_label (target), 0);
1679 /* Or replace a possibly complicated jump insn with a simple jump insn. */
1682 rtx target_label = block_label (target);
1684 src->end = PREV_INSN (insn);
1685 src->end = emit_jump_insn_after (gen_jump (target_label), src->end);
1686 JUMP_LABEL (src->end) = target_label;
1687 LABEL_NUSES (target_label)++;
1689 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
1690 INSN_UID (insn), INSN_UID (src->end));
1691 flow_delete_insn (insn);
1695 /* Keep only one edge out and set proper flags. */
1696 while (src->succ->succ_next)
1697 remove_edge (src->succ);
1700 e->flags = EDGE_FALLTHRU;
1703 e->probability = REG_BR_PROB_BASE;
1704 e->count = src->count;
1706 /* Fixup barriers. */
1707 barrier = next_nonnote_insn (insn);
1708 if (fallthru && GET_CODE (barrier) == BARRIER)
1709 flow_delete_insn (barrier);
1710 else if (!fallthru && GET_CODE (barrier) != BARRIER)
1711 emit_barrier_after (insn);
1713 /* In case we've zapped a conditional jump, we need to kill the cc0
1714 setter too, if available. */
1717 if (GET_CODE (insn) == JUMP_INSN)
1718 insn = prev_nonnote_insn (insn);
1719 if (sets_cc0_p (insn))
1721 if (insn == src->end)
1722 src->end = PREV_INSN (insn);
1723 flow_delete_insn (insn);
1727 if (e->dest != target)
1728 redirect_edge_succ (e, target);
1732 /* Attempt to change code to redirect edge E to TARGET. Don't do that
1733 at the expense of adding new instructions or reordering basic blocks.
1736 The function may also be called with the edge destination already
1737 equal to TARGET; it should then try the simplifications and do
1738 nothing if none apply.
1740 Return true if the transformation succeeded. We still return false
1741 if E already pointed to TARGET and we did not manage to simplify the
1742 instruction stream. */
1744 redirect_edge_and_branch (e, target)
1749 rtx old_label = e->dest->head;
1750 basic_block src = e->src;
1751 rtx insn = src->end;
1753 if (try_redirect_by_replacing_jump (e, target))
1755 /* Do this fast path late, as we want the above code to simplify for
1756 cases where we are called on the single edge leaving a basic block
1757 containing a nontrivial jump. */
1758 else if (e->dest == target)
1761 /* We can only redirect non-fallthru edges of a jump insn. */
1762 if (e->flags & EDGE_FALLTHRU)
1764 if (GET_CODE (insn) != JUMP_INSN)
1767 /* Recognize a tablejump and adjust all matching cases. */
1768 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1769 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1770 && GET_CODE (tmp) == JUMP_INSN
1771 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1772 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1776 rtx new_label = block_label (target);
1778 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1779 vec = XVEC (PATTERN (tmp), 0);
1781 vec = XVEC (PATTERN (tmp), 1);
1783 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1784 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1786 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1787 --LABEL_NUSES (old_label);
1788 ++LABEL_NUSES (new_label);
1791 /* Handle casesi dispatch insns. */
1792 if ((tmp = single_set (insn)) != NULL
1793 && SET_DEST (tmp) == pc_rtx
1794 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1795 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1796 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1798 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1800 --LABEL_NUSES (old_label);
1801 ++LABEL_NUSES (new_label);
1806 /* ??? We could play games with moving the named labels from one
1807 basic block to the other in case only one computed_jump is used. */
1809 if (computed_jump_p (insn))
1812 /* A return instruction can't be redirected. */
1813 if (returnjump_p (insn))
1816 /* If the insn doesn't go where we think, we're confused. */
1817 if (JUMP_LABEL (insn) != old_label)
1819 redirect_jump (insn, block_label (target), 0);
1823 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
1824 e->src->index, e->dest->index, target->index);
1825 if (e->dest != target)
1828 /* Check whether the edge is already present. */
1829 for (s = src->succ; s; s=s->succ_next)
1830 if (s->dest == target)
1834 s->flags |= e->flags;
1835 s->probability += e->probability;
1836 s->count += e->count;
1840 redirect_edge_succ (e, target);
1845 /* Split a (typically critical) edge. Return the new block.
1846 Abort on abnormal edges.
1848 ??? The code generally expects to be called on critical edges.
1849 The case of a block ending in an unconditional jump to a
1850 block with multiple predecessors is not handled optimally. */
1853 split_edge (edge_in)
1856 basic_block old_pred, bb, old_succ;
1861 /* Abnormal edges cannot be split. */
1862 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1865 old_pred = edge_in->src;
1866 old_succ = edge_in->dest;
1868 /* Create the new structures. */
1869 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1870 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1873 memset (bb, 0, sizeof (*bb));
1875 /* ??? This info is likely going to be out of date very soon. */
1876 if (old_succ->global_live_at_start)
1878 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1879 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1880 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1881 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1885 bb->succ = edge_out;
1886 bb->count = edge_in->count;
1887 bb->frequency = (edge_in->probability * edge_in->src->frequency
1888 / REG_BR_PROB_BASE);
1890 edge_in->flags &= ~EDGE_CRITICAL;
1892 edge_out->pred_next = old_succ->pred;
1893 edge_out->succ_next = NULL;
1895 edge_out->dest = old_succ;
1896 edge_out->flags = EDGE_FALLTHRU;
1897 edge_out->probability = REG_BR_PROB_BASE;
1898 edge_out->count = edge_in->count;
1900 old_succ->pred = edge_out;
1902 /* Tricky case -- if there existed a fallthru into the successor
1903 (and we're not it) we must add a new unconditional jump around
1904 the new block we're actually interested in.
1906 Further, if that edge is critical, this means a second new basic
1907 block must be created to hold it. In order to simplify correct
1908 insn placement, do this before we touch the existing basic block
1909 ordering for the block we were really wanting. */
1910 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1913 for (e = edge_out->pred_next; e; e = e->pred_next)
1914 if (e->flags & EDGE_FALLTHRU)
1919 basic_block jump_block;
1922 if ((e->flags & EDGE_CRITICAL) == 0
1923 && e->src != ENTRY_BLOCK_PTR)
1925 /* Non-critical -- we can simply add a jump to the end
1926 of the existing predecessor. */
1927 jump_block = e->src;
1931 /* We need a new block to hold the jump. The simplest
1932 way to do the bulk of the work here is to recursively
1933 call split_edge. */
1934 jump_block = split_edge (e);
1935 e = jump_block->succ;
1938 /* Now add the jump insn ... */
1939 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1941 jump_block->end = pos;
1942 if (basic_block_for_insn)
1943 set_block_for_insn (pos, jump_block);
1944 emit_barrier_after (pos);
1946 /* ... let jump know that label is in use, ... */
1947 JUMP_LABEL (pos) = old_succ->head;
1948 ++LABEL_NUSES (old_succ->head);
1950 /* ... and clear fallthru on the outgoing edge. */
1951 e->flags &= ~EDGE_FALLTHRU;
1953 /* Continue splitting the interesting edge. */
1957 /* Place the new block just in front of the successor. */
1958 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1959 if (old_succ == EXIT_BLOCK_PTR)
1960 j = n_basic_blocks - 1;
1962 j = old_succ->index;
1963 for (i = n_basic_blocks - 1; i > j; --i)
1965 basic_block tmp = BASIC_BLOCK (i - 1);
1966 BASIC_BLOCK (i) = tmp;
1969 BASIC_BLOCK (i) = bb;
1972 /* Create the basic block note.
1974 Where we place the note can have a noticeable impact on the generated
1975 code. Consider this cfg:
1985 If we need to insert an insn on the edge from block 0 to block 1,
1986 we want to ensure the instructions we insert are outside of any
1987 loop notes that physically sit between block 0 and block 1. Otherwise
1988 we confuse the loop optimizer into thinking the loop is a phony one. */
1989 if (old_succ != EXIT_BLOCK_PTR
1990 && PREV_INSN (old_succ->head)
1991 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1992 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1993 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1994 PREV_INSN (old_succ->head));
1995 else if (old_succ != EXIT_BLOCK_PTR)
1996 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1998 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1999 NOTE_BASIC_BLOCK (bb_note) = bb;
2000 bb->head = bb->end = bb_note;
2002 /* For non-fallthru edges, we must adjust the predecessor's
2003 jump instruction to target our new block. */
2004 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2006 if (!redirect_edge_and_branch (edge_in, bb))
2010 redirect_edge_succ (edge_in, bb);
2015 /* Queue instructions for insertion on an edge between two basic blocks.
2016 The new instructions and basic blocks (if any) will not appear in the
2017 CFG until commit_edge_insertions is called. */
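/* As a usage sketch (DEST and SRC are placeholder rtx; gen_move_insn
   builds the move pattern):

	insert_insn_on_edge (gen_move_insn (dest, src), e);
	...
	commit_edge_insertions ();  */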
2020 insert_insn_on_edge (pattern, e)
2024 /* We cannot insert instructions on an abnormal critical edge.
2025 It will be easier to find the culprit if we die now. */
2026 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2027 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2030 if (e->insns == NULL_RTX)
2033 push_to_sequence (e->insns);
2035 emit_insn (pattern);
2037 e->insns = get_insns ();
2041 /* Update the CFG for the instructions queued on edge E. */
2044 commit_one_edge_insertion (e)
2047 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2050 /* Pull the insns off the edge now since the edge might go away. */
2052 e->insns = NULL_RTX;
2054 /* Figure out where to put these things. If the destination has
2055 one predecessor, insert there. Except for the exit block. */
2056 if (e->dest->pred->pred_next == NULL
2057 && e->dest != EXIT_BLOCK_PTR)
2061 /* Get the location correct wrt a code label, and "nice" wrt
2062 a basic block note, and before everything else. */
2064 if (GET_CODE (tmp) == CODE_LABEL)
2065 tmp = NEXT_INSN (tmp);
2066 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2067 tmp = NEXT_INSN (tmp);
2068 if (tmp == bb->head)
2071 after = PREV_INSN (tmp);
2074 /* If the source has one successor and the edge is not abnormal,
2075 insert there. Except for the entry block. */
2076 else if ((e->flags & EDGE_ABNORMAL) == 0
2077 && e->src->succ->succ_next == NULL
2078 && e->src != ENTRY_BLOCK_PTR)
2081 /* It is possible to have a non-simple jump here. Consider a target
2082 where some forms of unconditional jumps clobber a register. This
2083 happens on the fr30 for example.
2085 We know this block has a single successor, so we can just emit
2086 the queued insns before the jump. */
2087 if (GET_CODE (bb->end) == JUMP_INSN)
2093 /* We'd better be fallthru, or we've lost track of what's what. */
2094 if ((e->flags & EDGE_FALLTHRU) == 0)
2101 /* Otherwise we must split the edge. */
2104 bb = split_edge (e);
2108 /* Now that we've found the spot, do the insertion. */
2110 /* Set the new block number for these insns, if structure is allocated. */
2111 if (basic_block_for_insn)
2114 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2115 set_block_for_insn (i, bb);
2120 emit_insns_before (insns, before);
2121 if (before == bb->head)
2124 last = prev_nonnote_insn (before);
2128 last = emit_insns_after (insns, after);
2129 if (after == bb->end)
2133 if (returnjump_p (last))
2135 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2136 This is not currently a problem because this only happens
2137 for the (single) epilogue, which already has a fallthru edge
2141 if (e->dest != EXIT_BLOCK_PTR
2142 || e->succ_next != NULL
2143 || (e->flags & EDGE_FALLTHRU) == 0)
2145 e->flags &= ~EDGE_FALLTHRU;
2147 emit_barrier_after (last);
2151 flow_delete_insn (before);
2153 else if (GET_CODE (last) == JUMP_INSN)
2155 find_sub_basic_blocks (bb);
2158 /* Update the CFG for all queued instructions. */
2161 commit_edge_insertions ()
2166 #ifdef ENABLE_CHECKING
2167 verify_flow_info ();
2171 bb = ENTRY_BLOCK_PTR;
2176 for (e = bb->succ; e; e = next)
2178 next = e->succ_next;
2180 commit_one_edge_insertion (e);
2183 if (++i >= n_basic_blocks)
2185 bb = BASIC_BLOCK (i);
2189 /* Add fake edges to the function exit for any non-constant calls in
2190 the bitmap of blocks specified by BLOCKS or to the whole CFG if
2191 BLOCKS is zero. Return the number of blocks that were split. */
2194 flow_call_edges_add (blocks)
2198 int blocks_split = 0;
2202 /* Map bb indices into basic block pointers since split_block
2203 will renumber the basic blocks. */
2205 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2209 for (i = 0; i < n_basic_blocks; i++)
2210 bbs[bb_num++] = BASIC_BLOCK (i);
2214 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2216 bbs[bb_num++] = BASIC_BLOCK (i);
2221 /* Now add fake edges to the function exit for any non-constant
2222 calls, since there is no way that we can determine if they will return or not. */
2225 for (i = 0; i < bb_num; i++)
2227 basic_block bb = bbs[i];
2231 for (insn = bb->end; ; insn = prev_insn)
2233 prev_insn = PREV_INSN (insn);
2234 if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
2238 /* Note that the following may create a new basic block
2239 and renumber the existing basic blocks. */
2240 e = split_block (bb, insn);
2244 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2246 if (insn == bb->head)
2252 verify_flow_info ();
2255 return blocks_split;
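/* Usage sketch: an instrumentation pass that must see every potential
   function exit can cover the whole CFG by passing a null bitmap:

       int n_split = flow_call_edges_add (NULL);

   A non-null sbitmap restricts the fake-edge insertion to the blocks
   whose bits are set.  */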
2258 /* Find unreachable blocks. An unreachable block will have NULL in
2259 block->aux; a non-NULL value indicates that the block is reachable. */
2262 find_unreachable_blocks ()
2266 basic_block *tos, *worklist;
2269 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2271 /* Use basic_block->aux as a marker. Clear them all. */
2273 for (i = 0; i < n; ++i)
2274 BASIC_BLOCK (i)->aux = NULL;
2276 /* Add our starting points to the worklist. Almost always there will
2277 be only one. It isn't inconceivable that we might one day directly
2278 support Fortran alternate entry points. */
2280 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2284 /* Mark the block with a handy non-null value. */
2288 /* Iterate: find everything reachable from what we've already seen. */
2290 while (tos != worklist)
2292 basic_block b = *--tos;
2294 for (e = b->succ; e; e = e->succ_next)
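/* A minimal sketch of the whole scan, assuming block->aux doubles as
   the visited mark:

       while (tos != worklist)
         {
           basic_block b = *--tos;
           edge e;
           for (e = b->succ; e; e = e->succ_next)
             if (e->dest->aux == NULL)
               {
                 e->dest->aux = e;
                 *tos++ = e->dest;
               }
         }
*/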
2305 /* Delete all unreachable basic blocks. */
2307 delete_unreachable_blocks ()
2311 find_unreachable_blocks ();
2313 /* Delete all unreachable basic blocks. Count down so that we
2314 don't interfere with the block renumbering that happens in
2315 flow_delete_block. */
2317 for (i = n_basic_blocks - 1; i >= 0; --i)
2319 basic_block b = BASIC_BLOCK (i);
2322 /* This block was found. Tidy up the mark. */
2325 flow_delete_block (b);
2328 tidy_fallthru_edges ();
2331 /* Return true if NOTE is not one of the ones that must be kept paired,
2332 so that we may simply delete them. */
2335 can_delete_note_p (note)
2338 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2339 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2342 /* Unlink a chain of insns between START and FINISH, leaving notes
2343 that must be paired. */
2346 flow_delete_insn_chain (start, finish)
2349 /* Unchain the insns one by one. It would be quicker to delete all
2350 of these with a single unchaining, rather than one at a time, but
2351 we need to keep the NOTEs. */
2357 next = NEXT_INSN (start);
2358 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2360 else if (GET_CODE (start) == CODE_LABEL
2361 && ! can_delete_label_p (start))
2363 const char *name = LABEL_NAME (start);
2364 PUT_CODE (start, NOTE);
2365 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2366 NOTE_SOURCE_FILE (start) = name;
2369 next = flow_delete_insn (start);
2371 if (start == finish)
2377 /* Delete the insns in a (non-live) block. We physically delete every
2378 non-deleted-note insn, and update the flow graph appropriately.
2380 Return nonzero if we deleted an exception handler. */
2382 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2383 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2386 flow_delete_block (b)
2389 int deleted_handler = 0;
2392 /* If the head of this block is a CODE_LABEL, then it might be the
2393 label for an exception handler which can't be reached.
2395 We need to remove the label from the exception_handler_label list
2396 and remove the associated NOTE_INSN_EH_REGION_BEG and
2397 NOTE_INSN_EH_REGION_END notes. */
2401 never_reached_warning (insn);
2403 if (GET_CODE (insn) == CODE_LABEL)
2404 maybe_remove_eh_handler (insn);
2406 /* Include any jump table following the basic block. */
2408 if (GET_CODE (end) == JUMP_INSN
2409 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2410 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2411 && GET_CODE (tmp) == JUMP_INSN
2412 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2413 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2416 /* Include any barrier that may follow the basic block. */
2417 tmp = next_nonnote_insn (end);
2418 if (tmp && GET_CODE (tmp) == BARRIER)
2421 /* Selectively delete the entire chain. */
2422 flow_delete_insn_chain (insn, end);
2424 /* Remove the edges into and out of this block. Note that there may
2425 indeed be edges in, if we are removing an unreachable loop. */
2429 for (e = b->pred; e; e = next)
2431 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2434 next = e->pred_next;
2438 for (e = b->succ; e; e = next)
2440 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2443 next = e->succ_next;
2452 /* Remove the basic block from the array, and compact behind it. */
2455 return deleted_handler;
2458 /* Remove block B from the basic block array and compact behind it. */
2464 int i, n = n_basic_blocks;
2466 for (i = b->index; i + 1 < n; ++i)
2468 basic_block x = BASIC_BLOCK (i + 1);
2469 BASIC_BLOCK (i) = x;
2473 basic_block_info->num_elements--;
2477 /* Delete INSN by patching it out. Return the next insn. */
2480 flow_delete_insn (insn)
2483 rtx prev = PREV_INSN (insn);
2484 rtx next = NEXT_INSN (insn);
2487 PREV_INSN (insn) = NULL_RTX;
2488 NEXT_INSN (insn) = NULL_RTX;
2489 INSN_DELETED_P (insn) = 1;
2492 NEXT_INSN (prev) = next;
2494 PREV_INSN (next) = prev;
2496 set_last_insn (prev);
2498 if (GET_CODE (insn) == CODE_LABEL)
2499 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2501 /* If deleting a jump, decrement the use count of the label. Deleting
2502 the label itself should happen in the normal course of block merging. */
2503 if (GET_CODE (insn) == JUMP_INSN
2504 && JUMP_LABEL (insn)
2505 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2506 LABEL_NUSES (JUMP_LABEL (insn))--;
2508 /* Also if deleting an insn that references a label. */
2509 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2510 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2511 LABEL_NUSES (XEXP (note, 0))--;
2513 if (GET_CODE (insn) == JUMP_INSN
2514 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2515 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2517 rtx pat = PATTERN (insn);
2518 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2519 int len = XVECLEN (pat, diff_vec_p);
2522 for (i = 0; i < len; i++)
2523 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2529 /* True if a given label can be deleted. */
2532 can_delete_label_p (label)
2537 if (LABEL_PRESERVE_P (label))
2540 for (x = forced_labels; x; x = XEXP (x, 1))
2541 if (label == XEXP (x, 0))
2543 for (x = label_value_list; x; x = XEXP (x, 1))
2544 if (label == XEXP (x, 0))
2546 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2547 if (label == XEXP (x, 0))
2550 /* User-declared labels must be preserved. */
2551 if (LABEL_NAME (label) != 0)
2558 tail_recursion_label_p (label)
2563 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2564 if (label == XEXP (x, 0))
2570 /* Blocks A and B are to be merged into a single block A. The insns
2571 are already contiguous, hence `nomove'. */
2574 merge_blocks_nomove (a, b)
2578 rtx b_head, b_end, a_end;
2579 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2582 /* If there was a CODE_LABEL beginning B, delete it. */
2585 if (GET_CODE (b_head) == CODE_LABEL)
2587 /* Detect basic blocks with nothing but a label. This can happen
2588 in particular at the end of a function. */
2589 if (b_head == b_end)
2591 del_first = del_last = b_head;
2592 b_head = NEXT_INSN (b_head);
2595 /* Delete the basic block note. */
2596 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2598 if (b_head == b_end)
2603 b_head = NEXT_INSN (b_head);
2606 /* If there was a jump out of A, delete it. */
2608 if (GET_CODE (a_end) == JUMP_INSN)
2612 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2613 if (GET_CODE (prev) != NOTE
2614 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2621 /* If this was a conditional jump, we need to also delete
2622 the insn that set cc0. */
2623 if (prev && sets_cc0_p (prev))
2626 prev = prev_nonnote_insn (prev);
2635 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2636 del_first = NEXT_INSN (a_end);
2638 /* Delete everything marked above as well as crap that might be
2639 hanging out between the two blocks. */
2640 flow_delete_insn_chain (del_first, del_last);
2642 /* Normally there should only be one successor of A and that is B, but
2643 partway through the merge of blocks for conditional_execution we'll
2644 be merging a TEST block with THEN and ELSE successors. Free the
2645 whole lot of them and hope the caller knows what they're doing. */
2647 remove_edge (a->succ);
2649 /* Adjust the edges out of B for the new owner. */
2650 for (e = b->succ; e; e = e->succ_next)
2654 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2655 b->pred = b->succ = NULL;
2657 /* Reassociate the insns of B with A. */
2660 if (basic_block_for_insn)
2662 BLOCK_FOR_INSN (b_head) = a;
2663 while (b_head != b_end)
2665 b_head = NEXT_INSN (b_head);
2666 BLOCK_FOR_INSN (b_head) = a;
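/* Illustrative picture, with placeholder labels and insns:

       A:  insn1                        A:  insn1
           jump L1           ==>            insn2
       B: L1:
           insn2

   The jump out of A (if any), B's label, and B's basic block note are
   deleted, and B's insns are reassociated with A.  */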
2676 /* Blocks A and B are to be merged into a single block. A has no incoming
2677 fallthru edge, so it can be moved before B without adding or modifying
2678 any jumps (aside from the jump from A to B). */
2681 merge_blocks_move_predecessor_nojumps (a, b)
2684 rtx start, end, barrier;
2690 barrier = next_nonnote_insn (end);
2691 if (GET_CODE (barrier) != BARRIER)
2693 flow_delete_insn (barrier);
2695 /* Move block and loop notes out of the chain so that we do not
2696 disturb their order.
2698 ??? A better solution would be to squeeze out all the non-nested notes
2699 and adjust the block trees appropriately. Even better would be to have
2700 a tighter connection between block trees and rtl so that this is not necessary. */
2702 start = squeeze_notes (start, end);
2704 /* Scramble the insn chain. */
2705 if (end != PREV_INSN (b->head))
2706 reorder_insns (start, end, PREV_INSN (b->head));
2710 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2711 a->index, b->index);
2714 /* Swap the records for the two blocks around. Although we are deleting B,
2715 A is now where B was and we want to compact the BB array from where A used to be. */
2717 BASIC_BLOCK (a->index) = b;
2718 BASIC_BLOCK (b->index) = a;
2720 a->index = b->index;
2723 /* Now blocks A and B are contiguous. Merge them. */
2724 merge_blocks_nomove (a, b);
2729 /* Blocks A and B are to be merged into a single block. B has no outgoing
2730 fallthru edge, so it can be moved after A without adding or modifying
2731 any jumps (aside from the jump from A to B). */
2734 merge_blocks_move_successor_nojumps (a, b)
2737 rtx start, end, barrier;
2741 barrier = NEXT_INSN (end);
2743 /* Recognize a jump table following block B. */
2744 if (GET_CODE (barrier) == CODE_LABEL
2745 && NEXT_INSN (barrier)
2746 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2747 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2748 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2750 end = NEXT_INSN (barrier);
2751 barrier = NEXT_INSN (end);
2754 /* There had better have been a barrier there. Delete it. */
2755 if (GET_CODE (barrier) != BARRIER)
2757 flow_delete_insn (barrier);
2759 /* Move block and loop notes out of the chain so that we do not
2760 disturb their order.
2762 ??? A better solution would be to squeeze out all the non-nested notes
2763 and adjust the block trees appropriately. Even better would be to have
2764 a tighter connection between block trees and rtl so that this is not necessary. */
2766 start = squeeze_notes (start, end);
2768 /* Scramble the insn chain. */
2769 reorder_insns (start, end, a->end);
2771 /* Now blocks A and B are contiguous. Merge them. */
2772 merge_blocks_nomove (a, b);
2776 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2777 b->index, a->index);
2783 /* Attempt to merge basic blocks that are potentially non-adjacent.
2784 Return true iff the attempt succeeded. */
2787 merge_blocks (e, b, c)
2791 /* If C has a tail recursion label, do not merge. There is no
2792 edge recorded from the call_placeholder back to this label, as
2793 that would make optimize_sibling_and_tail_recursive_calls more
2794 complex for no gain. */
2795 if (GET_CODE (c->head) == CODE_LABEL
2796 && tail_recursion_label_p (c->head))
2799 /* If B has a fallthru edge to C, no need to move anything. */
2800 if (e->flags & EDGE_FALLTHRU)
2802 merge_blocks_nomove (b, c);
2806 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2807 b->index, c->index);
2815 int c_has_outgoing_fallthru;
2816 int b_has_incoming_fallthru;
2818 /* We must be careful not to munge the nesting of exception regions,
2819 lexical blocks, and loop notes.
2821 The first is taken care of by requiring that the active eh
2822 region at the end of one block always matches the active eh
2823 region at the beginning of the next block.
2825 The latter two are taken care of by squeezing out all the notes. */
2827 /* ??? A throw/catch edge (or any abnormal edge) should rarely be
2828 executed, and we may want to treat blocks which have two out
2829 edges, one normal, one abnormal, as only having one edge for
2830 block merging purposes. */
2832 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2833 if (tmp_edge->flags & EDGE_FALLTHRU)
2835 c_has_outgoing_fallthru = (tmp_edge != NULL);
2837 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2838 if (tmp_edge->flags & EDGE_FALLTHRU)
2840 b_has_incoming_fallthru = (tmp_edge != NULL);
2842 /* If B does not have an incoming fallthru, then it can be moved
2843 immediately before C without introducing or modifying jumps.
2844 C cannot be the first block, so we do not have to worry about
2845 accessing a non-existent block. */
2846 if (! b_has_incoming_fallthru)
2847 return merge_blocks_move_predecessor_nojumps (b, c);
2849 /* Otherwise, we're going to try to move C after B. If C does
2850 not have an outgoing fallthru, then it can be moved
2851 immediately after B without introducing or modifying jumps. */
2852 if (! c_has_outgoing_fallthru)
2853 return merge_blocks_move_successor_nojumps (b, c);
2855 /* Otherwise, we'll need to insert an extra jump, and possibly
2856 a new block to contain it. */
2857 /* ??? Not implemented yet. */
2863 /* Simplify a conditional jump around a jump.
2864 Return nonzero if the optimization matched. */
2867 try_simplify_condjump (src)
2870 basic_block final_block, next_block;
2871 rtx insn = src->end;
2872 edge branch, fallthru;
2874 if (!any_condjump_p (insn))
2877 fallthru = FALLTHRU_EDGE (src);
2879 /* The following block must be a simple forwarder block with a single
2880 entry and must not be the last block in the stream. */
2881 next_block = fallthru->dest;
2882 if (!forwarder_block_p (next_block)
2883 || next_block->pred->pred_next
2884 || next_block->index == n_basic_blocks - 1)
2887 /* The branch must target the block just after the forwarder. */
2888 final_block = BASIC_BLOCK (next_block->index + 1);
2890 branch = BRANCH_EDGE (src);
2892 if (branch->dest != final_block)
2895 /* Keep jump.c from being overactive in removing unreachable insns. */
2896 LABEL_NUSES (JUMP_LABEL (insn))++;
2897 if (!invert_jump (insn, block_label (next_block->succ->dest), 1))
2899 LABEL_NUSES (JUMP_LABEL (insn))--;
2903 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
2904 INSN_UID (insn), INSN_UID (next_block->end));
2906 redirect_edge_succ (branch, final_block);
2907 redirect_edge_succ (fallthru, next_block->succ->dest);
2909 branch->flags |= EDGE_FALLTHRU;
2910 fallthru->flags &= ~EDGE_FALLTHRU;
2912 flow_delete_block (next_block);
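/* Illustrative before/after for this simplification, with placeholder
   labels:

       before:                          after:
           if (cond) goto L1;               if (!cond) goto L2;
           goto L2;                     L1: ...
       L1: ...

   The forwarder block holding "goto L2" loses its only predecessor
   and is deleted; the inverted branch falls through to L1.  */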
2916 /* Attempt to forward edges leaving basic block B.
2917 Return nonzero if successful. */
2920 try_forward_edges (b)
2925 for (e = b->succ; e; e = e->succ_next)
2927 basic_block target = e->dest, first = e->dest;
2930 /* Look for the real destination of the jump.
2931 Guard against cycling forever through an infinite empty loop
2932 by counting up to n_basic_blocks. */
2933 while (forwarder_block_p (target)
2934 && target->succ->dest != EXIT_BLOCK_PTR
2935 && counter < n_basic_blocks)
2937 /* Bypass trivial infinite loops. */
2938 if (target == target->succ->dest)
2939 counter = n_basic_blocks;
2940 target = target->succ->dest, counter++;
2943 if (target != first && counter < n_basic_blocks
2944 && redirect_edge_and_branch (e, target))
2946 while (first != target)
2948 first->count -= e->count;
2949 first->succ->count -= e->count;
2950 first->frequency -= ((e->probability * b->frequency
2951 + REG_BR_PROB_BASE / 2)
2952 / REG_BR_PROB_BASE);
2953 first = first->succ->dest;
2955 /* We've possibly removed the edge. */
2959 else if (rtl_dump_file && counter == n_basic_blocks)
2960 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n", target->index);
2961 else if (rtl_dump_file && first != target)
2962 fprintf (rtl_dump_file,
2963 "Forwarding edge %i->%i to %i failed.\n", b->index,
2964 e->dest->index, target->index);
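/* Illustrative effect: given a chain B -> F1 -> F2 -> T in which F1
   and F2 are empty forwarder blocks, the edge out of B is redirected
   straight to T, and the counts and frequencies of the bypassed
   forwarders are decreased by the amounts attributed to the
   redirected edge.  */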
2969 /* Do simple CFG optimizations - basic block merging, simplifying of jump instructions, etc.
2972 Return nonzero if some optimization matched. */
2978 bool changed_overall = 0;
2981 /* Attempt to merge blocks as made possible by edge removal. If a block
2982 has only one successor, and the successor has only one predecessor,
2983 they may be combined. */
2988 for (i = 0; i < n_basic_blocks;)
2990 basic_block c, b = BASIC_BLOCK (i);
2992 int changed_here = 0;
2994 /* Delete a trivially dead basic block. */
2995 if (b->pred == NULL)
2997 c = BASIC_BLOCK (i - 1);
2999 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
3000 flow_delete_block (b);
3004 /* A fallthru forwarder block can be deleted. */
3005 if (b->pred->pred_next == NULL
3006 && forwarder_block_p (b)
3007 && (b->pred->flags & EDGE_FALLTHRU)
3008 && (b->succ->flags & EDGE_FALLTHRU))
3011 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
3013 c = BASIC_BLOCK (i ? i - 1 : i + 1);
3014 redirect_edge_succ (b->pred, b->succ->dest);
3015 flow_delete_block (b);
3020 /* A loop, because chains of blocks might be combinable. */
3021 while ((s = b->succ) != NULL
3022 && s->succ_next == NULL
3023 && (s->flags & EDGE_EH) == 0
3024 && (c = s->dest) != EXIT_BLOCK_PTR
3025 && c->pred->pred_next == NULL
3026 /* If the jump insn has side effects, we can't kill the edge. */
3027 && (GET_CODE (b->end) != JUMP_INSN
3028 || onlyjump_p (b->end)) && merge_blocks (s, b, c))
3031 if (try_simplify_condjump (b))
3034 /* If a basic block has a single outgoing edge but ends with a
3035 non-trivial jump instruction, we can replace that jump with an
3036 unconditional jump or delete it completely. Use the logic of
3037 redirect_edge_and_branch to do the dirty work for us.
3039 We match cases such as conditional jumps to the next block or to dispatch tables. */
3043 && b->succ->succ_next == NULL
3044 && GET_CODE (b->end) == JUMP_INSN
3045 && b->succ->dest != EXIT_BLOCK_PTR
3046 && redirect_edge_and_branch (b->succ, b->succ->dest))
3049 if (try_forward_edges (b))
3052 /* Don't get confused by the index shift caused by deleting blocks. */
3059 changed_overall |= changed;
3063 #ifdef ENABLE_CHECKING
3065 verify_flow_info ();
3067 return changed_overall;
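/* Usage sketch; the pairing below is an assumption about a typical
   caller, not a protocol required here.  A pass that has just removed
   edges might do

       if (cleanup_cfg ())
         update_life_info (NULL, UPDATE_LIFE_GLOBAL, PROP_DEATH_NOTES);

   so that blocks made mergeable by the edge removal are merged and
   the liveness data is refreshed afterwards.  */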
3070 /* The given edge should potentially be a fallthru edge. If that is in
3071 fact true, delete the jump and barriers that are in the way. */
3074 tidy_fallthru_edge (e, b, c)
3080 /* ??? In a late-running flow pass, other folks may have deleted basic
3081 blocks by nopping out blocks, leaving multiple BARRIERs between here
3082 and the target label. They ought to be chastised and fixed.
3084 We can also wind up with a sequence of undeletable labels between
3085 one block and the next.
3087 So search through a sequence of barriers, labels, and notes for
3088 the head of block C and assert that we really do fall through. */
3090 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
3093 /* Remove what will soon cease being the jump insn from the source block.
3094 If block B consisted only of this single jump, turn it into a deleted note. */
3097 if (GET_CODE (q) == JUMP_INSN
3099 && (any_uncondjump_p (q)
3100 || (b->succ == e && e->succ_next == NULL)))
3103 /* If this was a conditional jump, we need to also delete
3104 the insn that set cc0. */
3105 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
3112 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
3113 NOTE_SOURCE_FILE (q) = 0;
3119 /* We don't want a block to end on a line-number note since that has
3120 the potential of changing the code between -g and not -g. */
3121 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
3128 /* Selectively unlink the sequence. */
3129 if (q != PREV_INSN (c->head))
3130 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
3132 e->flags |= EDGE_FALLTHRU;
3135 /* Fix up edges that now fall through, or rather should now fall through
3136 but previously required a jump around now deleted blocks. Simplify
3137 the search by only examining blocks numerically adjacent, since this
3138 is how find_basic_blocks created them. */
3141 tidy_fallthru_edges ()
3145 for (i = 1; i < n_basic_blocks; ++i)
3147 basic_block b = BASIC_BLOCK (i - 1);
3148 basic_block c = BASIC_BLOCK (i);
3151 /* We care about simple conditional or unconditional jumps with a single successor.
3154 If we had a conditional branch to the next instruction when
3155 find_basic_blocks was called, then there will only be one
3156 out edge for the block which ended with the conditional
3157 branch (since we do not create duplicate edges).
3159 Furthermore, the edge will be marked as a fallthru because we
3160 merge the flags for the duplicate edges. So we do not want to
3161 check that the edge is not a FALLTHRU edge. */
3162 if ((s = b->succ) != NULL
3163 && ! (s->flags & EDGE_COMPLEX)
3164 && s->succ_next == NULL
3166 /* If the jump insn has side effects, we can't tidy the edge. */
3167 && (GET_CODE (b->end) != JUMP_INSN
3168 || onlyjump_p (b->end)))
3169 tidy_fallthru_edge (s, b, c);
3173 /* Perform data flow analysis.
3174 F is the first insn of the function; FLAGS is a set of PROP_* flags
3175 to be used in accumulating flow info. */
3178 life_analysis (f, file, flags)
3183 #ifdef ELIMINABLE_REGS
3185 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
3188 /* Record which registers will be eliminated. We use this in mark_used_regs. */
3191 CLEAR_HARD_REG_SET (elim_reg_set);
3193 #ifdef ELIMINABLE_REGS
3194 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3195 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3197 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3201 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
3203 /* The post-reload life analysis has (on a global basis) the same
3204 registers live as were computed by reload itself; otherwise the
3205 elimination offsets and such may be incorrect.
3207 Reload will mark some registers as live even though they do not appear in the rtl.
3210 We don't want to create new auto-incs after reload, since they
3211 are unlikely to be useful and can cause problems with shared stack slots. */
3213 if (reload_completed)
3214 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
3216 /* We want alias analysis information for local dead store elimination. */
3217 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
3218 init_alias_analysis ();
3220 /* Always remove no-op moves. Do this before other processing so
3221 that we don't have to keep re-scanning them. */
3222 delete_noop_moves (f);
3224 /* Some targets can emit simpler epilogues if they know that sp was
3225 not ever modified during the function. After reload, of course,
3226 we've already emitted the epilogue so there's no sense searching. */
3227 if (! reload_completed)
3228 notice_stack_pointer_modification (f);
3230 /* Allocate and zero out data structures that will record the
3231 data from lifetime analysis. */
3232 allocate_reg_life_data ();
3233 allocate_bb_life_data ();
3235 /* Find the set of registers live on function exit. */
3236 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
3238 /* "Update" life info from zero. It'd be nice to begin the
3239 relaxation with just the exit and noreturn blocks, but that set
3240 is not immediately handy. */
3242 if (flags & PROP_REG_INFO)
3243 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3244 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
3247 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
3248 end_alias_analysis ();
3251 dump_flow_info (file);
3253 free_basic_block_vars (1);
3255 #ifdef ENABLE_CHECKING
3259 /* Search for any REG_LABEL notes which reference deleted labels. */
3260 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3262 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3264 if (inote && GET_CODE (inote) == NOTE_INSN_DELETED_LABEL)
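/* Usage sketch, assuming a toplev-style caller:

       life_analysis (get_insns (), rtl_dump_file, PROP_FINAL);

   where PROP_FINAL is the conventional bundle of PROP_* flags for a
   full analysis.  */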
3271 /* A subroutine of verify_wide_reg, called through for_each_rtx.
3272 Search for REGNO. If found, abort if it is not wider than word_mode. */
3275 verify_wide_reg_1 (px, pregno)
3280 unsigned int regno = *(int *) pregno;
3282 if (GET_CODE (x) == REG && REGNO (x) == regno)
3284 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
3291 /* A subroutine of verify_local_live_at_start. Search through insns
3292 between HEAD and END looking for register REGNO. */
3295 verify_wide_reg (regno, head, end)
3302 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
3306 head = NEXT_INSN (head);
3309 /* We didn't find the register at all. Something's way screwy. */
3311 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
3312 print_rtl_and_abort ();
3315 /* A subroutine of update_life_info. Verify that there are no untoward
3316 changes in live_at_start during a local update. */
3319 verify_local_live_at_start (new_live_at_start, bb)
3320 regset new_live_at_start;
3323 if (reload_completed)
3325 /* After reload, there are no pseudos, nor subregs of multi-word
3326 registers. The regsets should exactly match. */
3327 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
3331 fprintf (rtl_dump_file,
3332 "live_at_start mismatch in bb %d, aborting\n",
3334 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
3335 debug_bitmap_file (rtl_dump_file, new_live_at_start);
3337 print_rtl_and_abort ();
3344 /* Find the set of changed registers. */
3345 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
3347 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
3349 /* No registers should die. */
3350 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
3353 fprintf (rtl_dump_file,
3354 "Register %d died unexpectedly in block %d\n", i,
3356 print_rtl_and_abort ();
3359 /* Verify that the now-live register is wider than word_mode. */
3360 verify_wide_reg (i, bb->head, bb->end);
3365 /* Updates life information starting with the basic blocks set in BLOCKS.
3366 If BLOCKS is null, consider it to be the universal set.
3368 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
3369 we are only expecting local modifications to basic blocks. If we find
3370 extra registers live at the beginning of a block, then we either killed
3371 useful data, or we have a broken split that wants data not provided.
3372 If we find registers removed from live_at_start, that means we have
3373 a broken peephole that is killing a register it shouldn't.
3375 ??? This is not true in one situation -- when a pre-reload splitter
3376 generates subregs of a multi-word pseudo, current life analysis will
3377 lose the kill. So we _can_ have a pseudo go live. How irritating.
3379 Including PROP_REG_INFO does not properly refresh regs_ever_live
3380 unless the caller resets it to zero. */
3383 update_life_info (blocks, extent, prop_flags)
3385 enum update_life_extent extent;
3389 regset_head tmp_head;
3392 tmp = INITIALIZE_REG_SET (tmp_head);
3394 /* For a global update, we go through the relaxation process again. */
3395 if (extent != UPDATE_LIFE_LOCAL)
3397 calculate_global_regs_live (blocks, blocks,
3398 prop_flags & PROP_SCAN_DEAD_CODE);
3400 /* If asked, remove notes from the blocks we'll update. */
3401 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
3402 count_or_remove_death_notes (blocks, 1);
3407 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
3409 basic_block bb = BASIC_BLOCK (i);
3411 COPY_REG_SET (tmp, bb->global_live_at_end);
3412 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3414 if (extent == UPDATE_LIFE_LOCAL)
3415 verify_local_live_at_start (tmp, bb);
3420 for (i = n_basic_blocks - 1; i >= 0; --i)
3422 basic_block bb = BASIC_BLOCK (i);
3424 COPY_REG_SET (tmp, bb->global_live_at_end);
3425 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3427 if (extent == UPDATE_LIFE_LOCAL)
3428 verify_local_live_at_start (tmp, bb);
3434 if (prop_flags & PROP_REG_INFO)
3436 /* The only pseudos that are live at the beginning of the function
3437 are those that were not set anywhere in the function. local-alloc
3438 doesn't know how to handle these correctly, so mark them as not
3439 local to any one basic block. */
3440 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
3441 FIRST_PSEUDO_REGISTER, i,
3442 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3444 /* We have a problem with any pseudoreg that lives across the setjmp.
3445 ANSI says that if a user variable does not change in value between
3446 the setjmp and the longjmp, then the longjmp preserves it. This
3447 includes longjmp from a place where the pseudo appears dead.
3448 (In principle, the value still exists if it is in scope.)
3449 If the pseudo goes in a hard reg, some other value may occupy
3450 that hard reg where this pseudo is dead, thus clobbering the pseudo.
3451 Conclusion: such a pseudo must not go in a hard reg. */
3452 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
3453 FIRST_PSEUDO_REGISTER, i,
3455 if (regno_reg_rtx[i] != 0)
3457 REG_LIVE_LENGTH (i) = -1;
3458 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
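/* Usage sketch: after a local transformation such as splitting the
   insns of a few blocks, a caller might refresh only those blocks:

       update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);

   where BLOCKS is an sbitmap with a bit set for each touched basic
   block index.  */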
3464 /* Free the variables allocated by find_basic_blocks.
3466 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
3469 free_basic_block_vars (keep_head_end_p)
3470 int keep_head_end_p;
3472 if (basic_block_for_insn)
3474 VARRAY_FREE (basic_block_for_insn);
3475 basic_block_for_insn = NULL;
3478 if (! keep_head_end_p)
3480 if (basic_block_info)
3483 VARRAY_FREE (basic_block_info);
3487 ENTRY_BLOCK_PTR->aux = NULL;
3488 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
3489 EXIT_BLOCK_PTR->aux = NULL;
3490 EXIT_BLOCK_PTR->global_live_at_start = NULL;
3494 /* Return nonzero if an insn consists only of SETs, each of which only sets a value to itself. */
3501 rtx pat = PATTERN (insn);
3503 /* Insns carrying these notes are useful later on. */
3504 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
3507 if (GET_CODE (pat) == SET && set_noop_p (pat))
3510 if (GET_CODE (pat) == PARALLEL)
3513 /* If nothing but SETs of registers to themselves,
3514 this insn can also be deleted. */
3515 for (i = 0; i < XVECLEN (pat, 0); i++)
3517 rtx tem = XVECEXP (pat, 0, i);
3519 if (GET_CODE (tem) == USE
3520 || GET_CODE (tem) == CLOBBER)
3523 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
3532 /* Delete any insns that copy a register to itself. */
3535 delete_noop_moves (f)
3539 for (insn = f; insn; insn = NEXT_INSN (insn))
3541 if (GET_CODE (insn) == INSN && noop_move_p (insn))
3543 PUT_CODE (insn, NOTE);
3544 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3545 NOTE_SOURCE_FILE (insn) = 0;
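/* Example, in illustrative RTL with placeholder uids: an insn such as

       (insn 42 41 43 (set (reg:SI 65) (reg:SI 65)) -1 (nil) (nil))

   satisfies noop_move_p, so delete_noop_moves rewrites it in place as
   a NOTE_INSN_DELETED note rather than unlinking it from the chain.  */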
3550 /* Determine if the stack pointer is constant over the life of the function.
3551 Only useful before prologues have been emitted. */
3554 notice_stack_pointer_modification_1 (x, pat, data)
3556 rtx pat ATTRIBUTE_UNUSED;
3557 void *data ATTRIBUTE_UNUSED;
3559 if (x == stack_pointer_rtx
3560 /* The stack pointer is only modified indirectly as the result
3561 of a push until later in flow. See the comments in rtl.texi
3562 regarding Embedded Side-Effects on Addresses. */
3563 || (GET_CODE (x) == MEM
3564 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
3565 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
3566 current_function_sp_is_unchanging = 0;
3570 notice_stack_pointer_modification (f)
3575 /* Assume that the stack pointer is unchanging if alloca hasn't been used. */
3577 current_function_sp_is_unchanging = !current_function_calls_alloca;
3578 if (! current_function_sp_is_unchanging)
3581 for (insn = f; insn; insn = NEXT_INSN (insn))
3585 /* Check if insn modifies the stack pointer. */
3586 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
3588 if (! current_function_sp_is_unchanging)
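/* Example, in illustrative RTL: a push such as

       (set (mem:SI (pre_dec:SI (reg/f:SI sp))) (reg:SI 0))

   modifies the stack pointer only through the side effect embedded in
   the address, which is why the helper above also looks inside MEM
   addresses of rtx class 'a' (autoincrement).  */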
3594 /* Mark a register in SET. Hard registers in large modes get all
3595 of their component registers set as well. */
3598 mark_reg (reg, xset)
3602 regset set = (regset) xset;
3603 int regno = REGNO (reg);
3605 if (GET_MODE (reg) == BLKmode)
3608 SET_REGNO_REG_SET (set, regno);
3609 if (regno < FIRST_PSEUDO_REGISTER)
3611 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));