1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
/* NOTE(review): this dump embeds the original file's line numbers and many
   lines are missing (the numbering skips; e.g. the matching #endif lines for
   the #ifndef blocks below are absent).  Code is kept exactly as found.  */
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
132 #include "function.h"
140 #include "splay-tree.h"
/* Allocation hooks for the GNU obstack machinery used by flow_obstack.  */
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
145 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
146 the stack pointer does not matter. The value is tested only in
147 functions that have frame pointers.
148 No definition is equivalent to always zero. */
149 #ifndef EXIT_IGNORE_STACK
150 #define EXIT_IGNORE_STACK 0
/* Fallback definitions for target hooks: default to "target does not
   provide this pattern / property" when the backend leaves them undefined.  */
153 #ifndef HAVE_epilogue
154 #define HAVE_epilogue 0
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
159 #ifndef HAVE_sibcall_epilogue
160 #define HAVE_sibcall_epilogue 0
164 #define LOCAL_REGNO(REGNO) 0
166 #ifndef EPILOGUE_USES
167 #define EPILOGUE_USES(REGNO) 0
170 #ifdef HAVE_conditional_execution
171 #ifndef REVERSE_CONDEXEC_PREDICATES_P
/* Default test that two cond-exec predicates are exact opposites.  */
172 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
/* File-scope state for the flow pass.  NOTE(review): several declarations
   are missing from this dump (interior line numbers skip) -- e.g. the
   initializers of entry_exit_blocks are only partially present.  */
176 /* The obstack on which the flow graph components are allocated. */
178 struct obstack flow_obstack;
179 static char *flow_firstobj;
181 /* Number of basic blocks in the current function. */
185 /* Number of edges in the current function. */
189 /* The basic block array. */
191 varray_type basic_block_info;
193 /* The special entry and exit blocks. */
/* Aggregate initializer below is truncated in this dump; element [0] is the
   ENTRY block and element [1] the EXIT block, per the index fields.  */
195 struct basic_block_def entry_exit_blocks[2]
200 NULL, /* local_set */
201 NULL, /* cond_local_set */
202 NULL, /* global_live_at_start */
203 NULL, /* global_live_at_end */
205 ENTRY_BLOCK, /* index */
215 NULL, /* local_set */
216 NULL, /* cond_local_set */
217 NULL, /* global_live_at_start */
218 NULL, /* global_live_at_end */
220 EXIT_BLOCK, /* index */
227 /* Nonzero if the second flow pass has completed. */
230 /* Maximum register number used in this function, plus one. */
234 /* Indexed by n, giving various register information */
236 varray_type reg_n_info;
238 /* Size of a regset for the current function,
239 in (1) bytes and (2) elements. */
244 /* Regset of regs live when calls to `setjmp'-like functions happen. */
245 /* ??? Does this exist only for the setjmp-clobbered warning message? */
247 regset regs_live_at_setjmp;
249 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
250 that have to go in the same hard reg.
251 The first two regs in the list are a pair, and the next two
252 are another pair, etc. */
255 /* Callback that determines if it's ok for a function to have no
256 noreturn attribute. */
257 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
259 /* Set of registers that may be eliminable. These are handled specially
260 in updating regs_ever_live. */
262 static HARD_REG_SET elim_reg_set;
264 /* The basic block structure for every insn, indexed by uid. */
266 varray_type basic_block_for_insn;
268 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
269 /* ??? Should probably be using LABEL_NUSES instead. It would take a
270 bit of surgery to be able to use or co-opt the routines in jump. */
272 static rtx label_value_list;
273 static rtx tail_recursion_label_list;
/* Internal data structures of the flow pass.  NOTE(review): the struct
   bodies below are incomplete in this dump -- member declarations and the
   closing braces were dropped (embedded line numbers skip).  Only the
   surviving comments and fields are shown; code kept as-is.  */
275 /* Holds information for tracking conditional register life information. */
276 struct reg_cond_life_info
278 /* A boolean expression of conditions under which a register is dead. */
280 /* Conditions under which a register is dead at the basic block end. */
283 /* A boolean expression of conditions under which a register has been
287 /* ??? Could store mask of bytes that are dead, so that we could finally
288 track lifetimes of multi-word registers accessed via subregs. */
291 /* For use in communicating between propagate_block and its subroutines.
292 Holds all information needed to compute life and def-use information. */
294 struct propagate_block_info
296 /* The basic block we're considering. */
299 /* Bit N is set if register N is conditionally or unconditionally live. */
302 /* Bit N is set if register N is set this insn. */
305 /* Element N is the next insn that uses (hard or pseudo) register N
306 within the current basic block; or zero, if there is no such insn. */
309 /* Contains a list of all the MEMs we are tracking for dead store
313 /* If non-null, record the set of registers set unconditionally in the
317 /* If non-null, record the set of registers set conditionally in the
319 regset cond_local_set;
321 #ifdef HAVE_conditional_execution
322 /* Indexed by register number, holds a reg_cond_life_info for each
323 register that is not unconditionally live or dead. */
324 splay_tree reg_cond_dead;
326 /* Bit N is set if register N is in an expression in reg_cond_dead. */
330 /* The length of mem_set_list. */
331 int mem_set_list_len;
333 /* Non-zero if the value of CC0 is live. */
336 /* Flags controling the set of information propagate_block collects. */
340 /* Maximum length of pbi->mem_set_list before we start dropping
341 new elements on the floor. */
342 #define MAX_MEM_SET_LIST_LEN 100
344 /* Store the data structures necessary for depth-first search. */
345 struct depth_first_search_dsS {
346 /* stack for backtracking during the algorithm */
349 /* number of edges in the stack. That is, positions 0, ..., sp-1
353 /* record of basic blocks already seen by depth-first search */
354 sbitmap visited_blocks;
356 typedef struct depth_first_search_dsS *depth_first_search_ds;
358 /* Have print_rtl_and_abort give the same information that fancy_abort
/* Wrapper that passes the call site's file/line/function to the abort
   routine, mirroring fancy_abort's report format.  */
360 #define print_rtl_and_abort() \
361 print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
363 /* Forward declarations */
364 static int count_basic_blocks PARAMS ((rtx));
365 static void find_basic_blocks_1 PARAMS ((rtx));
366 static rtx find_label_refs PARAMS ((rtx, rtx));
367 static void make_edges PARAMS ((rtx));
368 static void make_label_edge PARAMS ((sbitmap *, basic_block,
370 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
372 static void commit_one_edge_insertion PARAMS ((edge));
374 static void delete_unreachable_blocks PARAMS ((void));
375 static int can_delete_note_p PARAMS ((rtx));
376 static void expunge_block PARAMS ((basic_block));
377 static int can_delete_label_p PARAMS ((rtx));
378 static int tail_recursion_label_p PARAMS ((rtx));
379 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
381 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
383 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
384 static void try_merge_blocks PARAMS ((void));
385 static void tidy_fallthru_edges PARAMS ((void));
386 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
387 static void verify_wide_reg PARAMS ((int, rtx, rtx));
388 static void verify_local_live_at_start PARAMS ((regset, basic_block));
389 static int noop_move_p PARAMS ((rtx));
390 static void delete_noop_moves PARAMS ((rtx));
391 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
392 static void notice_stack_pointer_modification PARAMS ((rtx));
393 static void mark_reg PARAMS ((rtx, void *));
394 static void mark_regs_live_at_end PARAMS ((regset));
395 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
396 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
397 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
398 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
399 static int insn_dead_p PARAMS ((struct propagate_block_info *,
401 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
403 static void mark_set_regs PARAMS ((struct propagate_block_info *,
405 static void mark_set_1 PARAMS ((struct propagate_block_info *,
406 enum rtx_code, rtx, rtx,
408 #ifdef HAVE_conditional_execution
409 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
411 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
412 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
413 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
415 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
416 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
417 static rtx not_reg_cond PARAMS ((rtx));
418 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
421 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
422 rtx, rtx, rtx, rtx, rtx));
423 static void find_auto_inc PARAMS ((struct propagate_block_info *,
425 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
427 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
429 static void mark_used_reg PARAMS ((struct propagate_block_info *,
431 static void mark_used_regs PARAMS ((struct propagate_block_info *,
433 void dump_flow_info PARAMS ((FILE *));
434 void debug_flow_info PARAMS ((void));
435 static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
439 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
441 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
443 static void remove_fake_successors PARAMS ((basic_block));
444 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
446 static void flow_edge_list_print PARAMS ((const char *, const edge *,
448 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
450 static int flow_loop_nested_p PARAMS ((struct loop *,
452 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
454 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
455 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
456 static int flow_depth_first_order_compute PARAMS ((int *, int *));
457 static void flow_dfs_compute_reverse_init
458 PARAMS ((depth_first_search_ds));
459 static void flow_dfs_compute_reverse_add_bb
460 PARAMS ((depth_first_search_ds, basic_block));
461 static basic_block flow_dfs_compute_reverse_execute
462 PARAMS ((depth_first_search_ds));
463 static void flow_dfs_compute_reverse_finish
464 PARAMS ((depth_first_search_ds));
465 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
466 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
468 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
469 static void flow_loops_tree_build PARAMS ((struct loops *));
470 static int flow_loop_level_compute PARAMS ((struct loop *, int));
471 static int flow_loops_level_compute PARAMS ((struct loops *));
472 static void allocate_bb_life_data PARAMS ((void));
473 static void find_sub_basic_blocks PARAMS ((basic_block));
474 static int redirect_edge_and_branch PARAMS ((edge, basic_block));
475 static rtx block_label PARAMS ((basic_block));
/* Entry point of CFG construction: partition the insn chain into basic
   blocks, record them in basic_block_info, and build the edge lists.
   NOTE(review): this dump omits interior lines (return type, local
   declarations, braces); code kept exactly as found.  */
477 /* Find basic blocks of the current function.
478 F is the first insn of the function and NREGS the number of register
482 find_basic_blocks (f, nregs, file)
484 int nregs ATTRIBUTE_UNUSED;
485 FILE *file ATTRIBUTE_UNUSED;
489 /* Flush out existing data. */
490 if (basic_block_info != NULL)
496 /* Clear bb->aux on all extant basic blocks. We'll use this as a
497 tag for reuse during create_basic_block, just in case some pass
498 copies around basic block notes improperly. */
499 for (i = 0; i < n_basic_blocks; ++i)
500 BASIC_BLOCK (i)->aux = NULL;
502 VARRAY_FREE (basic_block_info);
505 n_basic_blocks = count_basic_blocks (f);
507 /* Size the basic block table. The actual structures will be allocated
508 by find_basic_blocks_1, since we want to keep the structure pointers
509 stable across calls to find_basic_blocks. */
510 /* ??? This whole issue would be much simpler if we called find_basic_blocks
511 exactly once, and thereafter we don't have a single long chain of
512 instructions at all until close to the end of compilation when we
513 actually lay them out. */
515 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
517 find_basic_blocks_1 (f);
519 /* Record the block to which an insn belongs. */
520 /* ??? This should be done another way, by which (perhaps) a label is
521 tagged directly with the basic block that it starts. It is used for
522 more than that currently, but IMO that is the only valid use. */
524 max_uid = get_max_uid ();
526 /* Leave space for insns life_analysis makes in some cases for auto-inc.
527 These cases are rare, so we don't need too much space. */
528 max_uid += max_uid / 10;
531 compute_bb_for_insn (max_uid);
533 /* Discover the edges of our cfg. */
534 make_edges (label_value_list);
536 /* Do very simple cleanup now, for the benefit of code that runs between
537 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
538 tidy_fallthru_edges ();
540 mark_critical_edges ();
/* Presumably a verify_flow_info call followed here under ENABLE_CHECKING
   -- the guarded code is missing from this dump; confirm against the
   original file.  */
542 #ifdef ENABLE_CHECKING
/* Emit the -Wmissing-noreturn / noreturn-returns / -Wreturn-type warnings
   that depend on CFG reachability of the exit block.  Must run after the
   CFG is built (uses EXIT_BLOCK_PTR->pred).  NOTE(review): return type,
   braces and some locals are missing from this dump; code kept as-is.  */
548 check_function_return_warnings ()
/* A function never reaching EXIT, not already marked volatile (noreturn),
   is a candidate for the `noreturn' attribute -- unless the front end's
   lang_missing_noreturn_ok_p callback says it is fine without one.  */
550 if (warn_missing_noreturn
551 && !TREE_THIS_VOLATILE (cfun->decl)
552 && EXIT_BLOCK_PTR->pred == NULL
553 && (lang_missing_noreturn_ok_p
554 && !lang_missing_noreturn_ok_p (cfun->decl)))
555 warning ("function might be possible candidate for attribute `noreturn'");
557 /* If we have a path to EXIT, then we do return. */
558 if (TREE_THIS_VOLATILE (cfun->decl)
559 && EXIT_BLOCK_PTR->pred != NULL)
560 warning ("`noreturn' function does return");
562 /* If the clobber_return_insn appears in some basic block, then we
563 do reach the end without returning a value. */
564 else if (warn_return_type
565 && cfun->x_clobber_return_insn != NULL
566 && EXIT_BLOCK_PTR->pred != NULL)
568 int max_uid = get_max_uid ();
570 /* If clobber_return_insn was excised by jump1, then renumber_insns
571 can make max_uid smaller than the number still recorded in our rtx.
572 That's fine, since this is a quick way of verifying that the insn
573 is no longer in the chain. */
574 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
576 /* Recompute insn->block mapping, since the initial mapping is
577 set before we delete unreachable blocks. */
578 compute_bb_for_insn (max_uid);
580 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
581 warning ("control reaches end of non-void function");
/* Walk the insn chain once and count how many basic blocks it will be
   divided into: a block starts at a CODE_LABEL, or at any real insn that
   follows a jump, a barrier, or an abnormal-edge-creating insn.
   NOTE(review): interior lines (return type, braces, count increment,
   return statement) are missing from this dump; code kept as-is.  */
586 /* Count the basic blocks of the function. */
589 count_basic_blocks (f)
593 register RTX_CODE prev_code;
594 register int count = 0;
595 int saw_abnormal_edge = 0;
/* Seed with JUMP_INSN so the first real insn always opens block 0.  */
597 prev_code = JUMP_INSN;
598 for (insn = f; insn; insn = NEXT_INSN (insn))
600 enum rtx_code code = GET_CODE (insn);
602 if (code == CODE_LABEL
603 || (GET_RTX_CLASS (code) == 'i'
604 && (prev_code == JUMP_INSN
605 || prev_code == BARRIER
606 || saw_abnormal_edge)))
608 saw_abnormal_edge = 0;
612 /* Record whether this insn created an edge. */
613 if (code == CALL_INSN)
617 /* If there is a nonlocal goto label and the specified
618 region number isn't -1, we have an edge. */
619 if (nonlocal_goto_handler_labels
620 && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
621 || INTVAL (XEXP (note, 0)) >= 0))
622 saw_abnormal_edge = 1;
624 else if (can_throw_internal (insn))
625 saw_abnormal_edge = 1;
627 else if (flag_non_call_exceptions
629 && can_throw_internal (insn))
630 saw_abnormal_edge = 1;
636 /* The rest of the compiler works a bit smoother when we don't have to
637 check for the edge case of do-nothing functions with no basic blocks. */
/* Emit a (use (const_int 0)) so an empty function still gets one block.  */
640 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
/* Accumulate onto LVL (an EXPR_LIST) the labels referenced by non-jump
   insns in the chain starting at F, skipping tablejump-setup labels and
   label loads feeding an immediately following jump.  Returns the updated
   list.  NOTE(review): return type, braces and the empty-statement arms of
   the else-if chain are missing from this dump; code kept as-is.  */
647 /* Scan a list of insns for labels referred to other than by jumps.
648 This is used to scan the alternatives of a call placeholder. */
650 find_label_refs (f, lvl)
656 for (insn = f; insn; insn = NEXT_INSN (insn))
657 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
661 /* Make a list of all labels referred to other than by jumps
662 (which just don't have the REG_LABEL notes).
664 Make a special exception for labels followed by an ADDR*VEC,
665 as this would be a part of the tablejump setup code.
667 Make a special exception to registers loaded with label
668 values just before jump insns that use them. */
670 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
671 if (REG_NOTE_KIND (note) == REG_LABEL)
673 rtx lab = XEXP (note, 0), next;
/* Label immediately followed by a jump table: tablejump setup, skip.  */
675 if ((next = next_nonnote_insn (lab)) != NULL
676 && GET_CODE (next) == JUMP_INSN
677 && (GET_CODE (PATTERN (next)) == ADDR_VEC
678 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
680 else if (GET_CODE (lab) == NOTE)
/* Label load consumed by the very next jump: skip as well.  */
682 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
683 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
686 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
/* Re-scan a single basic block whose contents were edited to contain new
   control flow, splitting it into multiple blocks and rewiring edges.
   NOTE(review): this dump drops many interior lines (switch/case labels,
   braces, loop heads), so the control structure shown here is partial;
   code kept exactly as found.  */
693 /* Assume that someone emitted code with control flow instructions to the
694 basic block. Update the data structure. */
696 find_sub_basic_blocks (bb)
699 rtx first_insn = bb->head, insn;
/* Stash the original successor edges; they are re-attached to the last
   of the new blocks below.  */
701 edge succ_list = bb->succ;
702 rtx jump_insn = NULL_RTX;
706 basic_block first_bb = bb, last_bb;
709 if (GET_CODE (first_insn) == LABEL_REF)
710 first_insn = NEXT_INSN (first_insn);
711 first_insn = NEXT_INSN (first_insn);
715 /* Scan insn chain and try to find new basic block boundaries. */
718 enum rtx_code code = GET_CODE (insn);
722 /* We need some special care for those expressions. */
723 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
724 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
733 /* On code label, split current basic block. */
735 falltru = split_block (bb, PREV_INSN (insn));
740 remove_edge (falltru);
744 if (LABEL_ALTERNATE_NAME (insn))
745 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
748 /* In case we've previously split insn on the JUMP_INSN, move the
749 block header to proper place. */
752 falltru = split_block (bb, PREV_INSN (insn));
762 insn = NEXT_INSN (insn);
764 /* Last basic block must end in the original BB end. */
768 /* Wire in the original edges for last basic block. */
771 bb->succ = succ_list;
773 succ_list->src = bb, succ_list = succ_list->succ_next;
776 bb->succ = succ_list;
778 /* Now re-scan and wire in all edges. This expect simple (conditional)
779 jumps at the end of each new basic blocks. */
781 for (i = first_bb->index; i < last_bb->index; i++)
783 bb = BASIC_BLOCK (i);
784 if (GET_CODE (bb->end) == JUMP_INSN)
786 mark_jump_label (PATTERN (bb->end), bb->end, 0, 0);
787 make_label_edge (NULL, bb, JUMP_LABEL (bb->end), 0);
789 insn = NEXT_INSN (insn);
/* Worker for find_basic_blocks: walk the whole insn chain, closing out a
   block (via create_basic_block) at each label, jump, barrier, or
   edge-creating call, and collect label_value_list /
   tail_recursion_label_list as side outputs.  NOTE(review): this dump
   omits many interior lines -- the switch statement heads, case labels,
   braces, the new_bb_inclusive/new_bb_exclusive label definitions and the
   head/end bookkeeping between them are mostly missing.  Code kept
   exactly as found.  */
793 /* Find all basic blocks of the function whose first insn is F.
795 Collect and return a list of labels whose addresses are taken. This
796 will be used in make_edges for use with computed gotos. */
799 find_basic_blocks_1 (f)
802 register rtx insn, next;
804 rtx bb_note = NULL_RTX;
810 /* We process the instructions in a slightly different way than we did
811 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
812 closed out the previous block, so that it gets attached at the proper
813 place. Since this form should be equivalent to the previous,
814 count_basic_blocks continues to use the old form as a check. */
816 for (insn = f; insn; insn = next)
818 enum rtx_code code = GET_CODE (insn);
820 next = NEXT_INSN (insn);
826 int kind = NOTE_LINE_NUMBER (insn);
828 /* Look for basic block notes with which to keep the
829 basic_block_info pointers stable. Unthread the note now;
830 we'll put it back at the right place in create_basic_block.
831 Or not at all if we've already found a note in this block. */
832 if (kind == NOTE_INSN_BASIC_BLOCK)
834 if (bb_note == NULL_RTX)
837 next = flow_delete_insn (insn);
843 /* A basic block starts at a label. If we've closed one off due
844 to a barrier or some such, no need to do it again. */
845 if (head != NULL_RTX)
847 /* While we now have edge lists with which other portions of
848 the compiler might determine a call ending a basic block
849 does not imply an abnormal edge, it will be a bit before
850 everything can be updated. So continue to emit a noop at
851 the end of such a block. */
852 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
854 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
855 end = emit_insn_after (nop, end);
858 create_basic_block (i++, head, end, bb_note);
866 /* A basic block ends at a jump. */
867 if (head == NULL_RTX)
871 /* ??? Make a special check for table jumps. The way this
872 happens is truly and amazingly gross. We are about to
873 create a basic block that contains just a code label and
874 an addr*vec jump insn. Worse, an addr_diff_vec creates
875 its own natural loop.
877 Prevent this bit of brain damage, pasting things together
878 correctly in make_edges.
880 The correct solution involves emitting the table directly
881 on the tablejump instruction as a note, or JUMP_LABEL. */
883 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
884 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
892 goto new_bb_inclusive;
895 /* A basic block ends at a barrier. It may be that an unconditional
896 jump already closed the basic block -- no need to do it again. */
897 if (head == NULL_RTX)
900 /* While we now have edge lists with which other portions of the
901 compiler might determine a call ending a basic block does not
902 imply an abnormal edge, it will be a bit before everything can
903 be updated. So continue to emit a noop at the end of such a
905 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
907 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
908 end = emit_insn_after (nop, end);
910 goto new_bb_exclusive;
914 /* Record whether this call created an edge. */
915 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
916 int region = (note ? INTVAL (XEXP (note, 0)) : 0);
918 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
920 /* Scan each of the alternatives for label refs. */
921 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
922 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
923 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
924 /* Record its tail recursion label, if any. */
925 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
926 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
929 /* A basic block ends at a call that can either throw or
930 do a non-local goto. */
931 if ((nonlocal_goto_handler_labels && region >= 0)
932 || can_throw_internal (insn))
935 if (head == NULL_RTX)
940 create_basic_block (i++, head, end, bb_note);
941 head = end = NULL_RTX;
949 /* Non-call exceptions generate new blocks just like calls. */
950 if (flag_non_call_exceptions && can_throw_internal (insn))
951 goto new_bb_inclusive;
953 if (head == NULL_RTX)
962 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
966 /* Make a list of all labels referred to other than by jumps.
968 Make a special exception for labels followed by an ADDR*VEC,
969 as this would be a part of the tablejump setup code.
971 Make a special exception to registers loaded with label
972 values just before jump insns that use them. */
/* This loop duplicates find_label_refs; presumably kept inline here to
   avoid a second pass -- confirm against the original file.  */
974 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
975 if (REG_NOTE_KIND (note) == REG_LABEL)
977 rtx lab = XEXP (note, 0), next;
979 if ((next = next_nonnote_insn (lab)) != NULL
980 && GET_CODE (next) == JUMP_INSN
981 && (GET_CODE (PATTERN (next)) == ADDR_VEC
982 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
984 else if (GET_CODE (lab) == NOTE)
986 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
987 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
990 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
/* Close out any block still open at the end of the chain.  */
995 if (head != NULL_RTX)
996 create_basic_block (i++, head, end, bb_note);
998 flow_delete_insn (bb_note);
/* Sanity check: block count must match count_basic_blocks' prediction
   (the abort that presumably followed is missing from this dump).  */
1000 if (i != n_basic_blocks)
1003 label_value_list = lvl;
1004 tail_recursion_label_list = trll;
/* NOTE(review): the function header for this body is missing from the
   dump; presumably this is cleanup_cfg -- confirm against the original
   file.  It deletes unreachable blocks, merges block pairs, re-marks
   critical edges, and frees the label lists built during CFG creation.  */
1007 /* Tidy the CFG by deleting unreachable code and whatnot. */
1012 delete_unreachable_blocks ();
1013 try_merge_blocks ();
1014 mark_critical_edges ();
1016 /* Kill the data we won't maintain. */
1017 free_EXPR_LIST_list (&label_value_list);
1018 free_EXPR_LIST_list (&tail_recursion_label_list);
/* Build basic block INDEX spanning insns HEAD..END, reusing the
   NOTE_INSN_BASIC_BLOCK (and its attached struct) from BB_NOTE when one
   was salvaged by find_basic_blocks_1; otherwise allocate a fresh struct
   on flow_obstack and emit a new note.  NOTE(review): several interior
   lines (the full reuse condition, head/end assignments, aux tagging) are
   missing from this dump; code kept as-is.  */
1021 /* Create a new basic block consisting of the instructions between
1022 HEAD and END inclusive. Reuses the note and basic block struct
1023 in BB_NOTE, if any. */
1026 create_basic_block (index, head, end, bb_note)
1028 rtx head, end, bb_note;
1033 && ! RTX_INTEGRATED_P (bb_note)
1034 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
1037 /* If we found an existing note, thread it back onto the chain. */
/* The note must sit after the block's label (if any), hence the choice
   of insertion point below.  */
1041 if (GET_CODE (head) == CODE_LABEL
1045 after = PREV_INSN (head);
1049 if (after != bb_note && NEXT_INSN (after) != bb_note)
1050 reorder_insns (bb_note, bb_note, after);
1054 /* Otherwise we must create a note and a basic block structure.
1055 Since we allow basic block structs in rtl, give the struct
1056 the same lifetime by allocating it off the function obstack
1057 rather than using malloc. */
1059 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1060 memset (bb, 0, sizeof (*bb));
1062 if (GET_CODE (head) == CODE_LABEL
1063 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
1066 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
1069 NOTE_BASIC_BLOCK (bb_note) = bb;
1072 /* Always include the bb note in the block. */
1073 if (NEXT_INSN (end) == bb_note)
1079 BASIC_BLOCK (index) = bb;
1081 /* Tag the block so that we know it has been used when considering
1082 other basic block notes. */
/* Rebuild the uid -> basic_block map (basic_block_for_insn varray), sized
   to MAX uids.  Frees any previous map first.  NOTE(review): the inner
   loop's head/termination lines are missing from this dump; code kept
   as-is.  */
1086 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1087 indexed by INSN_UID. MAX is the size of the array. */
1090 compute_bb_for_insn (max)
1095 if (basic_block_for_insn)
1096 VARRAY_FREE (basic_block_for_insn);
1097 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1099 for (i = 0; i < n_basic_blocks; ++i)
1101 basic_block bb = BASIC_BLOCK (i);
1108 int uid = INSN_UID (insn);
1110 VARRAY_BB (basic_block_for_insn, uid) = bb;
1113 insn = NEXT_INSN (insn);
/* NOTE(review): the function header is missing from this dump; presumably
   this is clear_edges -- confirm against the original file.  It walks
   every block's successor list (and the entry block's), freeing edge
   structures, then clears the entry/exit endpoints.  The free calls
   themselves are among the dropped lines.  */
1118 /* Free the memory associated with the edge structures. */
1126 for (i = 0; i < n_basic_blocks; ++i)
1128 basic_block bb = BASIC_BLOCK (i);
1130 for (e = bb->succ; e; e = n)
1140 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1146 ENTRY_BLOCK_PTR->succ = 0;
1147 EXIT_BLOCK_PTR->pred = 0;
1152 /* Identify the edges between basic blocks.
1154 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
1155 that are otherwise unreachable may be reachable with a non-local goto.
1157 BB_EH_END is an array indexed by basic block number in which we record
1158 the list of exception regions active at the end of the basic block. */
1161 make_edges (label_value_list)
1162 rtx label_value_list;
1165 sbitmap *edge_cache = NULL;
1167 /* Assume no computed jump; revise as we create edges. */
1168 current_function_has_computed_jump = 0;
1170 /* Heavy use of computed goto in machine-generated code can lead to
1171 nearly fully-connected CFGs. In that case we spend a significant
1172 amount of time searching the edge lists for duplicates. */
1173 if (forced_labels || label_value_list)
1175 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1176 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1179 /* By nature of the way these get numbered, block 0 is always the entry. */
1180 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1182 for (i = 0; i < n_basic_blocks; ++i)
1184 basic_block bb = BASIC_BLOCK (i);
1187 int force_fallthru = 0;
/* An alternate-entry label makes this block reachable straight from
   the function entry.  */
1189 if (GET_CODE (bb->head) == CODE_LABEL
1190 && LABEL_ALTERNATE_NAME (bb->head))
1191 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
1193 /* Examine the last instruction of the block, and discover the
1194 ways we can leave the block. */
1197 code = GET_CODE (insn);
1200 if (code == JUMP_INSN)
1204 /* Recognize exception handling placeholders. */
1205 if (GET_CODE (PATTERN (insn)) == RESX)
1206 make_eh_edge (edge_cache, bb, insn);
1208 /* Recognize a non-local goto as a branch outside the
1209 current function. */
1210 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1213 /* ??? Recognize a tablejump and do the right thing. */
1214 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1215 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1216 && GET_CODE (tmp) == JUMP_INSN
1217 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1218 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
/* ADDR_DIFF_VEC keeps its labels in operand 1, ADDR_VEC in
   operand 0; add an edge to every case label in the table.  */
1223 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1224 vec = XVEC (PATTERN (tmp), 0);
1226 vec = XVEC (PATTERN (tmp), 1);
1228 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1229 make_label_edge (edge_cache, bb,
1230 XEXP (RTVEC_ELT (vec, j), 0), 0);
1232 /* Some targets (eg, ARM) emit a conditional jump that also
1233 contains the out-of-range target. Scan for these and
1234 add an edge if necessary. */
1235 if ((tmp = single_set (insn)) != NULL
1236 && SET_DEST (tmp) == pc_rtx
1237 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1238 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1239 make_label_edge (edge_cache, bb,
1240 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1242 #ifdef CASE_DROPS_THROUGH
1243 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1244 us naturally detecting fallthru into the next block. */
1249 /* If this is a computed jump, then mark it as reaching
1250 everything on the label_value_list and forced_labels list. */
1251 else if (computed_jump_p (insn))
1253 current_function_has_computed_jump = 1;
1255 for (x = label_value_list; x; x = XEXP (x, 1))
1256 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1258 for (x = forced_labels; x; x = XEXP (x, 1))
1259 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1262 /* Returns create an exit out. */
1263 else if (returnjump_p (insn))
1264 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1266 /* Otherwise, we have a plain conditional or unconditional jump. */
1269 if (! JUMP_LABEL (insn))
1271 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1275 /* If this is a sibling call insn, then this is in effect a
1276 combined call and return, and so we need an edge to the
1277 exit block. No need to worry about EH edges, since we
1278 wouldn't have created the sibling call in the first place. */
1280 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1281 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1282 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1284 /* If this is a CALL_INSN, then mark it as reaching the active EH
1285 handler for this CALL_INSN. If we're handling non-call
1286 exceptions then any insn can reach any of the active handlers.
1288 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1290 else if (code == CALL_INSN || flag_non_call_exceptions)
1292 /* Add any appropriate EH edges. */
1293 make_eh_edge (edge_cache, bb, insn);
1295 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1297 /* ??? This could be made smarter: in some cases it's possible
1298 to tell that certain calls will not do a nonlocal goto.
1300 For example, if the nested functions that do the nonlocal
1301 gotos do not have their addresses taken, then only calls to
1302 those functions or to other nested functions that use them
1303 could possibly do nonlocal gotos. */
1304 /* We do know that a REG_EH_REGION note with a value less
1305 than 0 is guaranteed not to perform a non-local goto. */
1306 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1307 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1308 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1309 make_label_edge (edge_cache, bb, XEXP (x, 0),
1310 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1314 /* Find out if we can drop through to the next block. */
1315 insn = next_nonnote_insn (insn);
/* No following insn (or a forced fallthru in the last block) falls
   through to the function exit.  */
1316 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1317 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1318 else if (i + 1 < n_basic_blocks)
1320 rtx tmp = BLOCK_HEAD (i + 1);
1321 if (GET_CODE (tmp) == NOTE)
1322 tmp = next_nonnote_insn (tmp);
1323 if (force_fallthru || insn == tmp)
1324 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1329 sbitmap_vector_free (edge_cache);
1332 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1333 about the edge that is accumulated between calls. */
1336 make_edge (edge_cache, src, dst, flags)
1337 sbitmap *edge_cache;
1338 basic_block src, dst;
1344 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1345 many edges to them, and we didn't allocate memory for it. */
1346 use_edge_cache = (edge_cache
1347 && src != ENTRY_BLOCK_PTR
1348 && dst != EXIT_BLOCK_PTR);
1350 /* Make sure we don't add duplicate edges. */
/* NOTE(review): switch over a boolean -- case 1 uses the bitmap cache,
   the fall-back case (elided here) walks src->succ linearly.  */
1351 switch (use_edge_cache)
1354 /* Quick test for non-existance of the edge. */
1355 if (! TEST_BIT (edge_cache[src->index], dst->index))
1358 /* The edge exists; early exit if no work to do. */
1364 for (e = src->succ; e; e = e->succ_next)
/* Allocate the new edge and link it at the head of both the source's
   successor list and the destination's predecessor list.  */
1373 e = (edge) xcalloc (1, sizeof (*e));
1376 e->succ_next = src->succ;
1377 e->pred_next = dst->pred;
/* Record the new edge in the cache so later duplicates are cheap.  */
1386 SET_BIT (edge_cache[src->index], dst->index);
1389 /* Create an edge from a basic block to a label. */
1392 make_label_edge (edge_cache, src, label, flags)
1393 sbitmap *edge_cache;
/* Only CODE_LABELs can be edge targets; anything else is ignored.  */
1398 if (GET_CODE (label) != CODE_LABEL)
1401 /* If the label was never emitted, this insn is junk, but avoid a
1402 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1403 as a result of a syntax error and a diagnostic has already been
1406 if (INSN_UID (label) == 0)
/* Delegate to make_edge, resolving the label to its containing block.  */
1409 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1412 /* Create the edges generated by INSN in REGION. */
1415 make_eh_edge (edge_cache, src, insn)
1416 sbitmap *edge_cache;
/* Tag edges from calls with EDGE_ABNORMAL_CALL in addition to EH flags.  */
1420 int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1423 handlers = reachable_handlers (insn);
/* One abnormal EH edge per handler label reachable from INSN.  */
1425 for (i = handlers; i; i = XEXP (i, 1))
1426 make_label_edge (edge_cache, src, XEXP (i, 0),
1427 EDGE_ABNORMAL | EDGE_EH | is_call);
1429 free_INSN_LIST_list (&handlers);
1432 /* Identify critical edges and set the bits appropriately. */
1435 mark_critical_edges ()
1437 int i, n = n_basic_blocks;
1440 /* We begin with the entry block. This is not terribly important now,
1441 but could be if a front end (Fortran) implemented alternate entry
1443 bb = ENTRY_BLOCK_PTR;
1450 /* (1) Critical edges must have a source with multiple successors. */
1451 if (bb->succ && bb->succ->succ_next)
1453 for (e = bb->succ; e; e = e->succ_next)
1455 /* (2) Critical edges must have a destination with multiple
1456 predecessors. Note that we know there is at least one
1457 predecessor -- the edge we followed to get here. */
1458 if (e->dest->pred->pred_next)
1459 e->flags |= EDGE_CRITICAL;
1461 e->flags &= ~EDGE_CRITICAL;
/* Single-successor case: none of the outgoing edges can be critical.  */
1466 for (e = bb->succ; e; e = e->succ_next)
1467 e->flags &= ~EDGE_CRITICAL;
/* Advance to the next block in index order (loop scaffolding elided).  */
1472 bb = BASIC_BLOCK (i);
1476 /* Split a block BB after insn INSN creating a new fallthru edge.
1477 Return the new edge. Note that to keep other parts of the compiler happy,
1478 this function renumbers all the basic blocks so that the new
1479 one has a number one greater than the block split. */
1482 split_block (bb, insn)
1492 /* There is no point splitting the block after its end. */
1493 if (bb->end == insn)
1496 /* Create the new structures. */
1497 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1498 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1501 memset (new_bb, 0, sizeof (*new_bb));
/* The new block owns everything after INSN; BB keeps the prefix.  */
1503 new_bb->head = NEXT_INSN (insn);
1504 new_bb->end = bb->end;
/* BB's old successors move to the new block; a single fallthru edge
   (new_edge) now connects BB to NEW_BB.  */
1507 new_bb->succ = bb->succ;
1508 bb->succ = new_edge;
1509 new_bb->pred = new_edge;
1510 new_bb->count = bb->count;
1511 new_bb->frequency = bb->frequency;
1512 new_bb->loop_depth = bb->loop_depth;
1515 new_edge->dest = new_bb;
1516 new_edge->flags = EDGE_FALLTHRU;
1517 new_edge->probability = REG_BR_PROB_BASE;
1518 new_edge->count = bb->count;
1520 /* Redirect the src of the successor edges of bb to point to new_bb. */
1521 for (e = new_bb->succ; e; e = e->succ_next)
1524 /* Place the new block just after the block being split. */
1525 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1527 /* Some parts of the compiler expect blocks to be number in
1528 sequential order so insert the new block immediately after the
1529 block being split.. */
/* Shift every later block up one slot to open a hole at j+1.  */
1531 for (i = n_basic_blocks - 1; i > j + 1; --i)
1533 basic_block tmp = BASIC_BLOCK (i - 1);
1534 BASIC_BLOCK (i) = tmp;
1538 BASIC_BLOCK (i) = new_bb;
/* A basic-block note must exist; emit it after a leading label,
   otherwise before the first insn (and make it the new head).  */
1541 if (GET_CODE (new_bb->head) == CODE_LABEL)
1543 /* Create the basic block note. */
1544 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
1546 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1550 /* Create the basic block note. */
1551 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1553 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1554 new_bb->head = bb_note;
1557 update_bb_for_insn (new_bb);
1559 if (bb->global_live_at_start)
1561 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1562 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1563 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1565 /* We now have to calculate which registers are live at the end
1566 of the split basic block and at the start of the new basic
1567 block. Start with those registers that are known to be live
1568 at the end of the original basic block and get
1569 propagate_block to determine which registers are live. */
1570 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1571 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1572 COPY_REG_SET (bb->global_live_at_end,
1573 new_bb->global_live_at_start);
1579 /* Return label in the head of basic block. Create one if it doesn't exist. */
/* NOTE(review): function header elided; presumably block_label (block).  */
1584 if (GET_CODE (block->head) != CODE_LABEL)
1585 block->head = emit_label_before (gen_label_rtx (), block->head);
1589 /* Attempt to change code to redirect edge E to TARGET.
1590 Don't do that on expense of adding new instructions or reordering
1593 redirect_edge_and_branch (e, target)
1597 rtx insn = e->src->end;
1599 rtx old_label = e->dest->head;
/* Fallthru edges are not handled by this routine.  */
1600 if (e->flags & EDGE_FALLTHRU)
1603 if (GET_CODE (insn) != JUMP_INSN)
1606 /* Recognize a tablejump and adjust all matching cases. */
1607 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1608 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1609 && GET_CODE (tmp) == JUMP_INSN
1610 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1611 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1615 rtx new_label = block_label (target);
1617 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1618 vec = XVEC (PATTERN (tmp), 0);
1620 vec = XVEC (PATTERN (tmp), 1);
/* Rewrite every table entry that referenced the old label, keeping
   the label use counts in sync.  */
1622 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1623 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1625 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1626 --LABEL_NUSES (old_label);
1627 ++LABEL_NUSES (new_label);
1630 /* Handle casesi dispatch insns */
1631 if ((tmp = single_set (insn)) != NULL
1632 && SET_DEST (tmp) == pc_rtx
1633 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1634 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1635 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1637 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1639 --LABEL_NUSES (old_label);
1640 ++LABEL_NUSES (new_label);
1645 /* ?? We may play the games with moving the named labels from
1646 one basic block to the other in case only one computed_jump is
1648 if (computed_jump_p (insn))
1651 /* A return instruction can't be redirected. */
1652 if (returnjump_p (insn))
1655 /* If the insn doesn't go where we think, we're confused. */
1656 if (JUMP_LABEL (insn) != old_label)
/* Simple jump: retarget the jump insn, then the CFG edge.  */
1658 redirect_jump (insn, block_label (target), 0);
1661 redirect_edge_succ (e, target);
1666 /* Split a (typically critical) edge. Return the new block.
1667 Abort on abnormal edges.
1669 ??? The code generally expects to be called on critical edges.
1670 The case of a block ending in an unconditional jump to a
1671 block with multiple predecessors is not handled optimally. */
1674 split_edge (edge_in)
1677 basic_block old_pred, bb, old_succ;
1682 /* Abnormal edges cannot be split. */
1683 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1686 old_pred = edge_in->src;
1687 old_succ = edge_in->dest;
1689 /* Create the new structures. */
1690 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1691 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1694 memset (bb, 0, sizeof (*bb));
1696 /* ??? This info is likely going to be out of date very soon. */
1697 if (old_succ->global_live_at_start)
1699 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1700 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1701 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1702 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
/* Wire the new block between the edge's endpoints: BB inherits the
   edge's count, and its frequency is derived from the edge probability.  */
1706 bb->succ = edge_out;
1707 bb->count = edge_in->count;
1708 bb->frequency = (edge_in->probability * edge_in->src->frequency
1709 / REG_BR_PROB_BASE);
/* The incoming half of the split edge can no longer be critical.  */
1711 edge_in->flags &= ~EDGE_CRITICAL;
1713 edge_out->pred_next = old_succ->pred;
1714 edge_out->succ_next = NULL;
1716 edge_out->dest = old_succ;
1717 edge_out->flags = EDGE_FALLTHRU;
1718 edge_out->probability = REG_BR_PROB_BASE;
1719 edge_out->count = edge_in->count;
1721 old_succ->pred = edge_out;
1723 /* Tricky case -- if there existed a fallthru into the successor
1724 (and we're not it) we must add a new unconditional jump around
1725 the new block we're actually interested in.
1727 Further, if that edge is critical, this means a second new basic
1728 block must be created to hold it. In order to simplify correct
1729 insn placement, do this before we touch the existing basic block
1730 ordering for the block we were really wanting. */
1731 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1734 for (e = edge_out->pred_next; e; e = e->pred_next)
1735 if (e->flags & EDGE_FALLTHRU)
1740 basic_block jump_block;
1743 if ((e->flags & EDGE_CRITICAL) == 0
1744 && e->src != ENTRY_BLOCK_PTR)
1746 /* Non critical -- we can simply add a jump to the end
1747 of the existing predecessor. */
1748 jump_block = e->src;
1752 /* We need a new block to hold the jump. The simplest
1753 way to do the bulk of the work here is to recursively
1755 jump_block = split_edge (e);
1756 e = jump_block->succ;
1759 /* Now add the jump insn ... */
1760 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1762 jump_block->end = pos;
1763 if (basic_block_for_insn)
1764 set_block_for_insn (pos, jump_block);
1765 emit_barrier_after (pos);
1767 /* ... let jump know that label is in use, ... */
1768 JUMP_LABEL (pos) = old_succ->head;
1769 ++LABEL_NUSES (old_succ->head);
1771 /* ... and clear fallthru on the outgoing edge. */
1772 e->flags &= ~EDGE_FALLTHRU;
1774 /* Continue splitting the interesting edge. */
1778 /* Place the new block just in front of the successor. */
1779 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1780 if (old_succ == EXIT_BLOCK_PTR)
1781 j = n_basic_blocks - 1;
1783 j = old_succ->index;
/* Shift later blocks up one slot to open a hole at index j.  */
1784 for (i = n_basic_blocks - 1; i > j; --i)
1786 basic_block tmp = BASIC_BLOCK (i - 1);
1787 BASIC_BLOCK (i) = tmp;
1790 BASIC_BLOCK (i) = bb;
1793 /* Create the basic block note.
1795 Where we place the note can have a noticable impact on the generated
1796 code. Consider this cfg:
1806 If we need to insert an insn on the edge from block 0 to block 1,
1807 we want to ensure the instructions we insert are outside of any
1808 loop notes that physically sit between block 0 and block 1. Otherwise
1809 we confuse the loop optimizer into thinking the loop is a phony. */
1810 if (old_succ != EXIT_BLOCK_PTR
1811 && PREV_INSN (old_succ->head)
1812 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1813 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG
1814 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1815 PREV_INSN (old_succ->head));
1816 else if (old_succ != EXIT_BLOCK_PTR)
1817 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1819 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1820 NOTE_BASIC_BLOCK (bb_note) = bb;
1821 bb->head = bb->end = bb_note;
1823 /* For non-fallthry edges, we must adjust the predecessor's
1824 jump instruction to target our new block. */
1825 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1827 if (!redirect_edge_and_branch (edge_in, bb))
1831 redirect_edge_succ (edge_in, bb);
1836 /* Queue instructions for insertion on an edge between two basic blocks.
1837 The new instructions and basic blocks (if any) will not appear in the
1838 CFG until commit_edge_insertions is called. */
1841 insert_insn_on_edge (pattern, e)
1845 /* We cannot insert instructions on an abnormal critical edge.
1846 It will be easier to find the culprit if we die now. */
1847 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1848 == (EDGE_ABNORMAL|EDGE_CRITICAL))
/* Append PATTERN to the edge's pending-insn sequence: start a fresh
   sequence if none is queued, otherwise continue the existing one.  */
1851 if (e->insns == NULL_RTX)
1854 push_to_sequence (e->insns);
1856 emit_insn (pattern);
1858 e->insns = get_insns ();
1862 /* Update the CFG for the instructions queued on edge E. */
1865 commit_one_edge_insertion (e)
1868 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1871 /* Pull the insns off the edge now since the edge might go away. */
1873 e->insns = NULL_RTX;
1875 /* Figure out where to put these things. If the destination has
1876 one predecessor, insert there. Except for the exit block. */
1877 if (e->dest->pred->pred_next == NULL
1878 && e->dest != EXIT_BLOCK_PTR)
1882 /* Get the location correct wrt a code label, and "nice" wrt
1883 a basic block note, and before everything else. */
1885 if (GET_CODE (tmp) == CODE_LABEL)
1886 tmp = NEXT_INSN (tmp);
1887 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1888 tmp = NEXT_INSN (tmp);
1889 if (tmp == bb->head)
1892 after = PREV_INSN (tmp);
1895 /* If the source has one successor and the edge is not abnormal,
1896 insert there. Except for the entry block. */
1897 else if ((e->flags & EDGE_ABNORMAL) == 0
1898 && e->src->succ->succ_next == NULL
1899 && e->src != ENTRY_BLOCK_PTR)
1902 /* It is possible to have a non-simple jump here. Consider a target
1903 where some forms of unconditional jumps clobber a register. This
1904 happens on the fr30 for example.
1906 We know this block has a single successor, so we can just emit
1907 the queued insns before the jump. */
1908 if (GET_CODE (bb->end) == JUMP_INSN)
1914 /* We'd better be fallthru, or we've lost track of what's what. */
1915 if ((e->flags & EDGE_FALLTHRU) == 0)
1922 /* Otherwise we must split the edge. */
1925 bb = split_edge (e);
1929 /* Now that we've found the spot, do the insertion. */
1931 /* Set the new block number for these insns, if structure is allocated. */
1932 if (basic_block_for_insn)
1935 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1936 set_block_for_insn (i, bb);
/* Emit before BEFORE or after AFTER, whichever was chosen above,
   and keep the block's head/end pointers consistent.  */
1941 emit_insns_before (insns, before);
1942 if (before == bb->head)
1945 last = prev_nonnote_insn (before);
1949 last = emit_insns_after (insns, after);
1950 if (after == bb->end)
1954 if (returnjump_p (last))
1956 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1957 This is not currently a problem because this only happens
1958 for the (single) epilogue, which already has a fallthru edge
1962 if (e->dest != EXIT_BLOCK_PTR
1963 || e->succ_next != NULL
1964 || (e->flags & EDGE_FALLTHRU) == 0)
1966 e->flags &= ~EDGE_FALLTHRU;
1968 emit_barrier_after (last);
1972 flow_delete_insn (before);
1974 else if (GET_CODE (last) == JUMP_INSN)
/* A jump was emitted mid-block: re-derive sub-basic-block structure.  */
1976 find_sub_basic_blocks (bb);
1979 /* Update the CFG for all queued instructions. */
1982 commit_edge_insertions ()
1987 #ifdef ENABLE_CHECKING
1988 verify_flow_info ();
/* Walk every block (starting at ENTRY) and commit the insns queued
   on each of its outgoing edges.  NEXT is cached because committing
   may delete or replace the edge.  */
1992 bb = ENTRY_BLOCK_PTR;
1997 for (e = bb->succ; e; e = next)
1999 next = e->succ_next;
2001 commit_one_edge_insertion (e);
2004 if (++i >= n_basic_blocks)
2006 bb = BASIC_BLOCK (i);
2010 /* Add fake edges to the function exit for any non constant calls in
2011 the bitmap of blocks specified by BLOCKS or to the whole CFG if
2012 BLOCKS is zero. Return the nuber of blocks that were split. */
2015 flow_call_edges_add (blocks)
2019 int blocks_split = 0;
2023 /* Map bb indicies into basic block pointers since split_block
2024 will renumber the basic blocks. */
2026 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
/* BLOCKS == 0 means process every block; otherwise only set bits.  */
2030 for (i = 0; i < n_basic_blocks; i++)
2031 bbs[bb_num++] = BASIC_BLOCK (i);
2035 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2037 bbs[bb_num++] = BASIC_BLOCK (i);
2042 /* Now add fake edges to the function exit for any non constant
2043 calls since there is no way that we can determine if they will
2046 for (i = 0; i < bb_num; i++)
2048 basic_block bb = bbs[i];
/* Scan the block's insns back-to-front so splitting below does not
   disturb the part not yet visited.  */
2052 for (insn = bb->end; ; insn = prev_insn)
2054 prev_insn = PREV_INSN (insn);
2055 if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
2059 /* Note that the following may create a new basic block
2060 and renumber the existing basic blocks. */
2061 e = split_block (bb, insn);
2065 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2067 if (insn == bb->head)
2073 verify_flow_info ();
2076 return blocks_split;
2079 /* Find unreachable blocks. An unreachable block will have NULL in
2080 block->aux, a non-NULL value indicates the block is reachable. */
2083 find_unreachable_blocks ()
2087 basic_block *tos, *worklist;
/* Worst case the worklist holds every block once.  */
2090 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2092 /* Use basic_block->aux as a marker. Clear them all. */
2094 for (i = 0; i < n; ++i)
2095 BASIC_BLOCK (i)->aux = NULL;
2097 /* Add our starting points to the worklist. Almost always there will
2098 be only one. It isn't inconcievable that we might one day directly
2099 support Fortran alternate entry points. */
2101 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2105 /* Mark the block with a handy non-null value. */
2109 /* Iterate: find everything reachable from what we've already seen. */
2111 while (tos != worklist)
2113 basic_block b = *--tos;
2115 for (e = b->succ; e; e = e->succ_next)
2126 /* Delete all unreachable basic blocks. */
2128 delete_unreachable_blocks ()
/* First mark reachability via block->aux (NULL == unreachable).  */
2132 find_unreachable_blocks ();
2134 /* Delete all unreachable basic blocks. Count down so that we
2135 don't interfere with the block renumbering that happens in
2136 flow_delete_block. */
2138 for (i = n_basic_blocks - 1; i >= 0; --i)
2140 basic_block b = BASIC_BLOCK (i);
2143 /* This block was found. Tidy up the mark. */
2146 flow_delete_block (b);
/* Deleting blocks may have produced trivial fallthru edges; clean up.  */
2149 tidy_fallthru_edges ();
2152 /* Return true if NOTE is not one of the ones that must be kept paired,
2153 so that we may simply delete them. */
2156 can_delete_note_p (note)
/* Only DELETED and BASIC_BLOCK notes are freely deletable.  */
2159 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2160 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2163 /* Unlink a chain of insns between START and FINISH, leaving notes
2164 that must be paired. */
2167 flow_delete_insn_chain (start, finish)
2170 /* Unchain the insns one by one. It would be quicker to delete all
2171 of these with a single unchaining, rather than one at a time, but
2172 we need to keep the NOTE's. */
2178 next = NEXT_INSN (start);
/* Keep paired notes; undeletable labels are demoted to
   NOTE_INSN_DELETED_LABEL rather than removed.  */
2179 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2181 else if (GET_CODE (start) == CODE_LABEL
2182 && ! can_delete_label_p (start))
2184 const char *name = LABEL_NAME (start);
2185 PUT_CODE (start, NOTE);
2186 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2187 NOTE_SOURCE_FILE (start) = name;
2190 next = flow_delete_insn (start);
2192 if (start == finish)
2198 /* Delete the insns in a (non-live) block. We physically delete every
2199 non-deleted-note insn, and update the flow graph appropriately.
2201 Return nonzero if we deleted an exception handler. */
2203 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2204 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2207 flow_delete_block (b)
2210 int deleted_handler = 0;
2213 /* If the head of this block is a CODE_LABEL, then it might be the
2214 label for an exception handler which can't be reached.
2216 We need to remove the label from the exception_handler_label list
2217 and remove the associated NOTE_INSN_EH_REGION_BEG and
2218 NOTE_INSN_EH_REGION_END notes. */
2222 never_reached_warning (insn);
2224 if (GET_CODE (insn) == CODE_LABEL)
2225 maybe_remove_eh_handler (insn);
2227 /* Include any jump table following the basic block. */
2229 if (GET_CODE (end) == JUMP_INSN
2230 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2231 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2232 && GET_CODE (tmp) == JUMP_INSN
2233 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2234 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2237 /* Include any barrier that may follow the basic block. */
2238 tmp = next_nonnote_insn (end);
2239 if (tmp && GET_CODE (tmp) == BARRIER)
2242 /* Selectively delete the entire chain. */
2243 flow_delete_insn_chain (insn, end);
2245 /* Remove the edges into and out of this block. Note that there may
2246 indeed be edges in, if we are removing an unreachable loop. */
/* For each edge, unlink it from the other endpoint's list before
   freeing; Q walks the list to find the link to splice out.  */
2250 for (e = b->pred; e; e = next)
2252 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2255 next = e->pred_next;
2259 for (e = b->succ; e; e = next)
2261 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2264 next = e->succ_next;
2273 /* Remove the basic block from the array, and compact behind it. */
2276 return deleted_handler;
2279 /* Remove block B from the basic block array and compact behind it. */
/* NOTE(review): function header elided; presumably expunge_block (b).
   Shifts all later blocks down one index -- re-indexing elided here.  */
2285 int i, n = n_basic_blocks;
2287 for (i = b->index; i + 1 < n; ++i)
2289 basic_block x = BASIC_BLOCK (i + 1);
2290 BASIC_BLOCK (i) = x;
2294 basic_block_info->num_elements--;
2298 /* Delete INSN by patching it out. Return the next insn. */
2301 flow_delete_insn (insn)
2304 rtx prev = PREV_INSN (insn);
2305 rtx next = NEXT_INSN (insn);
/* Unlink INSN from the doubly-linked insn chain and mark it deleted.  */
2308 PREV_INSN (insn) = NULL_RTX;
2309 NEXT_INSN (insn) = NULL_RTX;
2310 INSN_DELETED_P (insn) = 1;
2313 NEXT_INSN (prev) = next;
2315 PREV_INSN (next) = prev;
/* INSN was the last insn: move the last-insn pointer back.  */
2317 set_last_insn (prev);
2319 if (GET_CODE (insn) == CODE_LABEL)
2320 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2322 /* If deleting a jump, decrement the use count of the label. Deleting
2323 the label itself should happen in the normal course of block merging. */
2324 if (GET_CODE (insn) == JUMP_INSN
2325 && JUMP_LABEL (insn)
2326 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2327 LABEL_NUSES (JUMP_LABEL (insn))--;
2329 /* Also if deleting an insn that references a label. */
2330 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2331 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2332 LABEL_NUSES (XEXP (note, 0))--;
2337 /* True if a given label can be deleted. */
2340 can_delete_label_p (label)
/* A label must be kept if it is explicitly preserved, appears on the
   forced/label-value/EH-handler lists, or is user-declared (named).  */
2345 if (LABEL_PRESERVE_P (label))
2348 for (x = forced_labels; x; x = XEXP (x, 1))
2349 if (label == XEXP (x, 0))
2351 for (x = label_value_list; x; x = XEXP (x, 1))
2352 if (label == XEXP (x, 0))
2354 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2355 if (label == XEXP (x, 0))
2358 /* User declared labels must be preserved. */
2359 if (LABEL_NAME (label) != 0)
/* True if LABEL appears on the tail-recursion label list.  */
2366 tail_recursion_label_p (label)
2371 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2372 if (label == XEXP (x, 0))
2378 /* Blocks A and B are to be merged into a single block A. The insns
2379 are already contiguous, hence `nomove'. */
2382 merge_blocks_nomove (a, b)
2386 rtx b_head, b_end, a_end;
2387 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2390 /* If there was a CODE_LABEL beginning B, delete it. */
2393 if (GET_CODE (b_head) == CODE_LABEL
2395 /* Detect basic blocks with nothing but a label. This can happen
2396 in particular at the end of a function. */
2397 if (b_head == b_end)
2399 del_first = del_last = b_head;
2400 b_head = NEXT_INSN (b_head);
2403 /* Delete the basic block note. */
2404 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2406 if (b_head == b_end)
2411 b_head = NEXT_INSN (b_head);
2414 /* If there was a jump out of A, delete it. */
2416 if (GET_CODE (a_end) == JUMP_INSN)
/* Walk back over notes to find where the deletion range starts.  */
2420 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2421 if (GET_CODE (prev) != NOTE
2422 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2429 /* If this was a conditional jump, we need to also delete
2430 the insn that set cc0. */
2431 if (prev && sets_cc0_p (prev))
2434 prev = prev_nonnote_insn (prev);
2443 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2444 del_first = NEXT_INSN (a_end);
2446 /* Delete everything marked above as well as crap that might be
2447 hanging out between the two blocks. */
2448 flow_delete_insn_chain (del_first, del_last);
2450 /* Normally there should only be one successor of A and that is B, but
2451 partway though the merge of blocks for conditional_execution we'll
2452 be merging a TEST block with THEN and ELSE successors. Free the
2453 whole lot of them and hope the caller knows what they're doing. */
2455 remove_edge (a->succ);
2457 /* Adjust the edges out of B for the new owner. */
2458 for (e = b->succ; e; e = e->succ_next)
2462 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2463 b->pred = b->succ = NULL;
2465 /* Reassociate the insns of B with A. */
2468 if (basic_block_for_insn)
2470 BLOCK_FOR_INSN (b_head) = a;
2471 while (b_head != b_end)
2473 b_head = NEXT_INSN (b_head);
2474 BLOCK_FOR_INSN (b_head) = a;
2484 /* Blocks A and B are to be merged into a single block. A has no incoming
2485 fallthru edge, so it can be moved before B without adding or modifying
2486 any jumps (aside from the jump from A to B). */
2489 merge_blocks_move_predecessor_nojumps (a, b)
2492 rtx start, end, barrier;
/* A must be followed by a barrier; drop it before moving the insns.  */
2498 barrier = next_nonnote_insn (end);
2499 if (GET_CODE (barrier) != BARRIER)
2501 flow_delete_insn (barrier);
2503 /* Move block and loop notes out of the chain so that we do not
2504 disturb their order.
2506 ??? A better solution would be to squeeze out all the non-nested notes
2507 and adjust the block trees appropriately. Even better would be to have
2508 a tighter connection between block trees and rtl so that this is not
2510 start = squeeze_notes (start, end);
2512 /* Scramble the insn chain. */
2513 if (end != PREV_INSN (b->head))
2514 reorder_insns (start, end, PREV_INSN (b->head));
2518 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2519 a->index, b->index);
2522 /* Swap the records for the two blocks around. Although we are deleting B,
2523 A is now where B was and we want to compact the BB array from where
2525 BASIC_BLOCK (a->index) = b;
2526 BASIC_BLOCK (b->index) = a;
2528 a->index = b->index;
2531 /* Now blocks A and B are contiguous. Merge them. */
2532 merge_blocks_nomove (a, b);
2537 /* Blocks A and B are to be merged into a single block. B has no outgoing
2538 fallthru edge, so it can be moved after A without adding or modifying
2539 any jumps (aside from the jump from A to B). */
2542 merge_blocks_move_successor_nojumps (a, b)
2545 rtx start, end, barrier;
2549 barrier = NEXT_INSN (end);
2551 /* Recognize a jump table following block B. */
/* Extend END past the table's label+ADDR_VEC so they move together.  */
2552 if (GET_CODE (barrier) == CODE_LABEL
2553 && NEXT_INSN (barrier)
2554 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2555 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2556 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2558 end = NEXT_INSN (barrier);
2559 barrier = NEXT_INSN (end);
2562 /* There had better have been a barrier there. Delete it. */
2563 if (GET_CODE (barrier) != BARRIER)
2565 flow_delete_insn (barrier);
2567 /* Move block and loop notes out of the chain so that we do not
2568 disturb their order.
2570 ??? A better solution would be to squeeze out all the non-nested notes
2571 and adjust the block trees appropriately. Even better would be to have
2572 a tighter connection between block trees and rtl so that this is not
2574 start = squeeze_notes (start, end);
2576 /* Scramble the insn chain. */
2577 reorder_insns (start, end, a->end);
2579 /* Now blocks A and B are contiguous. Merge them. */
2580 merge_blocks_nomove (a, b);
2584 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2585 b->index, a->index);
2591 /* Attempt to merge basic blocks that are potentially non-adjacent.
2592 Return true iff the attempt succeeded.
   E is the edge from B to C; B and C are the candidate blocks.  */
2595 merge_blocks (e, b, c)
2599 /* If C has a tail recursion label, do not merge. There is no
2600 edge recorded from the call_placeholder back to this label, as
2601 that would make optimize_sibling_and_tail_recursive_calls more
2602 complex for no gain. */
2603 if (GET_CODE (c->head) == CODE_LABEL
2604 && tail_recursion_label_p (c->head))
2607 /* If B has a fallthru edge to C, no need to move anything. */
2608 if (e->flags & EDGE_FALLTHRU)
2610 merge_blocks_nomove (b, c);
/* Debug dump -- presumably guarded by `if (rtl_dump_file)' on an elided
   line; TODO confirm.  */
2614 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2615 b->index, c->index);
2623 int c_has_outgoing_fallthru;
2624 int b_has_incoming_fallthru;
2626 /* We must make sure to not munge nesting of exception regions,
2627 lexical blocks, and loop notes.
2629 The first is taken care of by requiring that the active eh
2630 region at the end of one block always matches the active eh
2631 region at the beginning of the next block.
2633 The latter two are taken care of by squeezing out all the notes. */
2635 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2636 executed and we may want to treat blocks which have two out
2637 edges, one normal, one abnormal as only having one edge for
2638 block merging purposes. */
/* Scan C's successors for a fallthru edge; loop exits (via an elided
   break) with TMP_EDGE non-null iff one was found.  */
2640 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2641 if (tmp_edge->flags & EDGE_FALLTHRU)
2643 c_has_outgoing_fallthru = (tmp_edge != NULL);
/* Likewise scan B's predecessors for an incoming fallthru edge.  */
2645 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2646 if (tmp_edge->flags & EDGE_FALLTHRU)
2648 b_has_incoming_fallthru = (tmp_edge != NULL);
2650 /* If B does not have an incoming fallthru, then it can be moved
2651 immediately before C without introducing or modifying jumps.
2652 C cannot be the first block, so we do not have to worry about
2653 accessing a non-existent block. */
2654 if (! b_has_incoming_fallthru)
2655 return merge_blocks_move_predecessor_nojumps (b, c);
2657 /* Otherwise, we're going to try to move C after B. If C does
2658 not have an outgoing fallthru, then it can be moved
2659 immediately after B without introducing or modifying jumps. */
2660 if (! c_has_outgoing_fallthru)
2661 return merge_blocks_move_successor_nojumps (b, c);
2663 /* Otherwise, we'll need to insert an extra jump, and possibly
2664 a new block to contain it. */
2665 /* ??? Not implemented yet. */
2671 /* Top level driver for merge_blocks.
   NOTE(review): the function signature itself is elided from this
   extraction; the body below iterates over all basic blocks merging
   single-successor/single-predecessor chains.  */
2678 /* Attempt to merge blocks as made possible by edge removal. If a block
2679 has only one successor, and the successor has only one predecessor,
2680 they may be combined. */
2682 for (i = 0; i < n_basic_blocks;)
2684 basic_block c, b = BASIC_BLOCK (i);
2687 /* A loop because chains of blocks might be combinable.  Each
   successful merge may expose a further merge opportunity for B.  */
2688 while ((s = b->succ) != NULL
2689 && s->succ_next == NULL
2690 && (s->flags & EDGE_EH) == 0
2691 && (c = s->dest) != EXIT_BLOCK_PTR
2692 && c->pred->pred_next == NULL
2693 /* If the jump insn has side effects, we can't kill the edge. */
2694 && (GET_CODE (b->end) != JUMP_INSN
2695 || onlyjump_p (b->end))
2696 && merge_blocks (s, b, c))
2699 /* Don't get confused by the index shift caused by deleting blocks. */
2704 /* The given edge should potentially be a fallthru edge. If that is in
2705 fact true, delete the jump and barriers that are in the way.
   E is the edge from block B to block C.  */
2708 tidy_fallthru_edge (e, b, c)
2714 /* ??? In a late-running flow pass, other folks may have deleted basic
2715 blocks by nopping out blocks, leaving multiple BARRIERs between here
2716 and the target label. They ought to be chastised and fixed.
2718 We can also wind up with a sequence of undeletable labels between
2719 one block and the next.
2721 So search through a sequence of barriers, labels, and notes for
2722 the head of block C and assert that we really do fall through. */
2724 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2727 /* Remove what will soon cease being the jump insn from the source block.
2728 If block B consisted only of this single jump, turn it into a deleted
   note.  (Q is presumably b->end, set on an elided line -- TODO confirm.)  */
2731 if (GET_CODE (q) == JUMP_INSN
2733 && (any_uncondjump_p (q)
2734 || (b->succ == e && e->succ_next == NULL)))
2737 /* If this was a conditional jump, we need to also delete
2738 the insn that set cc0. */
2739 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
/* Turn the jump into a deleted note rather than unlinking it, so the
   insn stream positions stay valid.  */
2746 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2747 NOTE_SOURCE_FILE (q) = 0;
2753 /* We don't want a block to end on a line-number note since that has
2754 the potential of changing the code between -g and not -g. */
2755 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
2762 /* Selectively unlink the jump/barrier sequence between B and C.  */
2763 if (q != PREV_INSN (c->head))
2764 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head))
2766 e->flags |= EDGE_FALLTHRU;
2769 /* Fix up edges that now fall through, or rather should now fall through
2770 but previously required a jump around now deleted blocks. Simplify
2771 the search by only examining blocks numerically adjacent, since this
2772 is how find_basic_blocks created them. */
2775 tidy_fallthru_edges ()
/* Walk each pair of numerically adjacent blocks (B = i-1, C = i).  */
2779 for (i = 1; i < n_basic_blocks; ++i)
2781 basic_block b = BASIC_BLOCK (i - 1);
2782 basic_block c = BASIC_BLOCK (i);
2785 /* We care about simple conditional or unconditional jumps with
   a single successor.
2788 If we had a conditional branch to the next instruction when
2789 find_basic_blocks was called, then there will only be one
2790 out edge for the block which ended with the conditional
2791 branch (since we do not create duplicate edges).
2793 Furthermore, the edge will be marked as a fallthru because we
2794 merge the flags for the duplicate edges. So we do not want to
2795 check that the edge is not a FALLTHRU edge. */
2796 if ((s = b->succ) != NULL
2797 && ! (s->flags & EDGE_COMPLEX)
2798 && s->succ_next == NULL
2800 /* If the jump insn has side effects, we can't tidy the edge. */
2801 && (GET_CODE (b->end) != JUMP_INSN
2802 || onlyjump_p (b->end)))
2803 tidy_fallthru_edge (s, b, c);
2807 /* Perform data flow analysis.
2808 F is the first insn of the function; FLAGS is a set of PROP_* flags
2809 to be used in accumulating flow info.
   FILE is the dump file, or NULL.  */
2812 life_analysis (f, file, flags)
2817 #ifdef ELIMINABLE_REGS
2819 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2822 /* Record which registers will be eliminated. We use this in
   mark_used_regs (per the full source -- the rest of this comment
   is elided here).  */
2825 CLEAR_HARD_REG_SET (elim_reg_set);
2827 #ifdef ELIMINABLE_REGS
2828 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2829 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2831 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2835 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
2837 /* The post-reload life analysis must have (on a global basis) the
2838 same registers live as was computed by reload itself; otherwise
2839 elimination offsets and such may be incorrect.
2841 Reload will mark some registers as live even though they do not
   appear live elsewhere (comment text elided in this extraction).
2844 We don't want to create new auto-incs after reload, since they
2845 are unlikely to be useful and can cause problems with shared
   RTL.  */
2847 if (reload_completed)
2848 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2850 /* We want alias analysis information for local dead store elimination. */
2851 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2852 init_alias_analysis ();
2854 /* Always remove no-op moves. Do this before other processing so
2855 that we don't have to keep re-scanning them. */
2856 delete_noop_moves (f);
2858 /* Some targets can emit simpler epilogues if they know that sp was
2859 not ever modified during the function. After reload, of course,
2860 we've already emitted the epilogue so there's no sense searching. */
2861 if (! reload_completed)
2862 notice_stack_pointer_modification (f);
2864 /* Allocate and zero out data structures that will record the
2865 data from lifetime analysis. */
2866 allocate_reg_life_data ();
2867 allocate_bb_life_data ();
2869 /* Find the set of registers live on function exit. */
2870 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2872 /* "Update" life info from zero. It'd be nice to begin the
2873 relaxation with just the exit and noreturn blocks, but that set
2874 is not immediately handy. */
2876 if (flags & PROP_REG_INFO)
2877 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2878 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2881 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2882 end_alias_analysis ();
/* Dump only if requested -- presumably guarded by `if (file)' on an
   elided line; TODO confirm.  */
2885 dump_flow_info (file);
2887 free_basic_block_vars (1);
2889 #ifdef ENABLE_CHECKING
2893 /* Search for any REG_LABEL notes which reference deleted labels. */
2894 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2896 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
/* NOTE(review): comparing GET_CODE (an rtx code) against
   NOTE_INSN_DELETED_LABEL (a note line number) looks suspicious; the
   intended check is presumably on the referenced label, e.g.
   NOTE_LINE_NUMBER of XEXP (inote, 0) -- TODO confirm against the
   full source before relying on this.  */
2898 if (inote && GET_CODE (inote) == NOTE_INSN_DELETED_LABEL)
2905 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2906 Search for REGNO. If found, abort if it is not wider than word_mode.
   PX points at the sub-rtx being visited; PREGNO points at the regno
   being searched for.  */
2909 verify_wide_reg_1 (px, pregno)
2914 unsigned int regno = *(int *) pregno;
/* X is presumably *px, set on an elided line -- TODO confirm.  */
2916 if (GET_CODE (x) == REG && REGNO (x) == regno)
/* A reg no wider than a word should not have lost its kill; abort
   (the abort itself is on an elided line).  */
2918 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2925 /* A subroutine of verify_local_live_at_start. Search through insns
2926 between HEAD and END looking for register REGNO.  Aborts if the
   register is never found.  */
2929 verify_wide_reg (regno, head, end)
/* Walk the insns; for_each_rtx returns nonzero when verify_wide_reg_1
   finds (and accepts) the register.
   NOTE(review): the "®no" below is mojibake for "&regno" -- the
   extraction mangled the ampersand; kept verbatim here.  */
2936 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, ®no))
2940 head = NEXT_INSN (head);
2943 /* We didn't find the register at all. Something's way screwy. */
/* Presumably guarded by `if (rtl_dump_file)' on an elided line.  */
2945 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
2946 print_rtl_and_abort ();
2949 /* A subroutine of update_life_info. Verify that there are no untoward
2950 changes in live_at_start during a local update.
   NEW_LIVE_AT_START is the recomputed set for block BB.  */
2953 verify_local_live_at_start (new_live_at_start, bb)
2954 regset new_live_at_start;
2957 if (reload_completed)
2959 /* After reload, there are no pseudos, nor subregs of multi-word
2960 registers. The regsets should exactly match. */
2961 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
/* Dump the mismatching sets before aborting -- presumably inside an
   `if (rtl_dump_file)' guard on an elided line.  */
2965 fprintf (rtl_dump_file,
2966 "live_at_start mismatch in bb %d, aborting\n",
2968 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
2969 debug_bitmap_file (rtl_dump_file, new_live_at_start);
2971 print_rtl_and_abort ();
2978 /* Find the set of changed registers. */
2979 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2981 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2983 /* No registers should die. */
2984 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2987 fprintf (rtl_dump_file,
2988 "Register %d died unexpectedly in block %d\n", i,
2990 print_rtl_and_abort ();
2993 /* Verify that the now-live register is wider than word_mode. */
2994 verify_wide_reg (i, bb->head, bb->end);
2999 /* Updates life information starting with the basic blocks set in BLOCKS.
3000 If BLOCKS is null, consider it to be the universal set.
3002 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
3003 we are only expecting local modifications to basic blocks. If we find
3004 extra registers live at the beginning of a block, then we either killed
3005 useful data, or we have a broken split that wants data not provided.
3006 If we find registers removed from live_at_start, that means we have
3007 a broken peephole that is killing a register it shouldn't.
3009 ??? This is not true in one situation -- when a pre-reload splitter
3010 generates subregs of a multi-word pseudo, current life analysis will
3011 lose the kill. So we _can_ have a pseudo go live. How irritating.
3013 Including PROP_REG_INFO does not properly refresh regs_ever_live
3014 unless the caller resets it to zero. */
3017 update_life_info (blocks, extent, prop_flags)
3019 enum update_life_extent extent;
3023 regset_head tmp_head;
3026 tmp = INITIALIZE_REG_SET (tmp_head);
3028 /* For a global update, we go through the relaxation process again. */
3029 if (extent != UPDATE_LIFE_LOCAL)
3031 calculate_global_regs_live (blocks, blocks,
3032 prop_flags & PROP_SCAN_DEAD_CODE);
3034 /* If asked, remove notes from the blocks we'll update. */
3035 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
3036 count_or_remove_death_notes (blocks, 1);
/* With a BLOCKS mask: locally repropagate just the requested blocks.  */
3041 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
3043 basic_block bb = BASIC_BLOCK (i);
3045 COPY_REG_SET (tmp, bb->global_live_at_end);
3046 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3048 if (extent == UPDATE_LIFE_LOCAL)
3049 verify_local_live_at_start (tmp, bb);
/* No mask: repropagate every block (the `else' branch; its keyword is
   on an elided line).  */
3054 for (i = n_basic_blocks - 1; i >= 0; --i)
3056 basic_block bb = BASIC_BLOCK (i);
3058 COPY_REG_SET (tmp, bb->global_live_at_end);
3059 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3061 if (extent == UPDATE_LIFE_LOCAL)
3062 verify_local_live_at_start (tmp, bb);
3068 if (prop_flags & PROP_REG_INFO)
3070 /* The only pseudos that are live at the beginning of the function
3071 are those that were not set anywhere in the function. local-alloc
3072 doesn't know how to handle these correctly, so mark them as not
3073 local to any one basic block. */
3074 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
3075 FIRST_PSEUDO_REGISTER, i,
3076 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3078 /* We have a problem with any pseudoreg that lives across the setjmp.
3079 ANSI says that if a user variable does not change in value between
3080 the setjmp and the longjmp, then the longjmp preserves it. This
3081 includes longjmp from a place where the pseudo appears dead.
3082 (In principle, the value still exists if it is in scope.)
3083 If the pseudo goes in a hard reg, some other value may occupy
3084 that hard reg where this pseudo is dead, thus clobbering the pseudo.
3085 Conclusion: such a pseudo must not go in a hard reg. */
3086 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
3087 FIRST_PSEUDO_REGISTER, i,
3089 if (regno_reg_rtx[i] != 0)
3091 REG_LIVE_LENGTH (i) = -1;
3092 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3098 /* Free the variables allocated by find_basic_blocks.
3100 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
3103 free_basic_block_vars (keep_head_end_p)
3104 int keep_head_end_p;
/* Release the insn -> basic block mapping, if it was built.  */
3106 if (basic_block_for_insn)
3108 VARRAY_FREE (basic_block_for_insn);
3109 basic_block_for_insn = NULL;
3112 if (! keep_head_end_p)
3114 if (basic_block_info)
3117 VARRAY_FREE (basic_block_info);
/* Clear the per-block scratch/liveness pointers on the synthetic
   entry and exit blocks as well.  */
3121 ENTRY_BLOCK_PTR->aux = NULL;
3122 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
3123 EXIT_BLOCK_PTR->aux = NULL;
3124 EXIT_BLOCK_PTR->global_live_at_start = NULL;
3128 /* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself (a no-op move).
   NOTE(review): the function signature (noop_move_p, per the call in
   delete_noop_moves below) is on elided lines.  */
3135 rtx pat = PATTERN (insn);
3137 /* Insns carrying these notes are useful later on. */
3138 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
3141 if (GET_CODE (pat) == SET && set_noop_p (pat))
3144 if (GET_CODE (pat) == PARALLEL)
3147 /* If nothing but SETs of registers to themselves,
3148 this insn can also be deleted. */
3149 for (i = 0; i < XVECLEN (pat, 0); i++)
3151 rtx tem = XVECEXP (pat, 0, i);
/* USEs and CLOBBERs in the PARALLEL do not disqualify the insn.  */
3153 if (GET_CODE (tem) == USE
3154 || GET_CODE (tem) == CLOBBER)
3157 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
3166 /* Delete any insns that copy a register to itself.
   F is the first insn of the function.  */
3169 delete_noop_moves (f)
3173 for (insn = f; insn; insn = NEXT_INSN (insn))
3175 if (GET_CODE (insn) == INSN && noop_move_p (insn))
/* Turn the insn into a deleted note in place rather than unlinking it,
   so surrounding insn-chain pointers stay valid.  */
3177 PUT_CODE (insn, NOTE);
3178 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3179 NOTE_SOURCE_FILE (insn) = 0;
3184 /* Determine if the stack pointer is constant over the life of the function.
3185 Only useful before prologues have been emitted.
   note_stores callback: X is the location stored to, PAT/DATA unused.  */
3188 notice_stack_pointer_modification_1 (x, pat, data)
3190 rtx pat ATTRIBUTE_UNUSED;
3191 void *data ATTRIBUTE_UNUSED;
3193 if (x == stack_pointer_rtx
3194 /* The stack pointer is only modified indirectly as the result
3195 of a push until later in flow. See the comments in rtl.texi
3196 regarding Embedded Side-Effects on Addresses. */
3197 || (GET_CODE (x) == MEM
3198 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
3199 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
3200 current_function_sp_is_unchanging = 0;
/* Scan the whole function (first insn F) and clear
   current_function_sp_is_unchanging if any insn stores to the stack
   pointer.  */
3204 notice_stack_pointer_modification (f)
3209 /* Assume that the stack pointer is unchanging if alloca hasn't
   been used (comment tail elided).  */
3211 current_function_sp_is_unchanging = !current_function_calls_alloca;
3212 if (! current_function_sp_is_unchanging)
3215 for (insn = f; insn; insn = NEXT_INSN (insn))
3219 /* Check if insn modifies the stack pointer. */
3220 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
/* Stop scanning as soon as a modification is found.  */
3222 if (! current_function_sp_is_unchanging)
3228 /* Mark a register in SET. Hard registers in large modes get all
3229 of their component registers set as well.
   REG is the register rtx; XSET is the regset passed as void *.  */
3232 mark_reg (reg, xset)
3236 regset set = (regset) xset;
3237 int regno = REGNO (reg);
/* BLKmode has no meaningful register count; reject it (the abort is on
   an elided line).  */
3239 if (GET_MODE (reg) == BLKmode)
3242 SET_REGNO_REG_SET (set, regno);
3243 if (regno < FIRST_PSEUDO_REGISTER)
3245 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
/* Mark each component hard register (loop decrement elided).  */
3247 SET_REGNO_REG_SET (set, regno + n);
3251 /* Mark those regs which are needed at the end of the function as live
3252 at the end of the last basic block.
   SET is the regset to populate (normally the exit block's
   global_live_at_start).  */
3255 mark_regs_live_at_end (set)
3260 /* If exiting needs the right stack value, consider the stack pointer
3261 live at the end of the function. */
3262 if ((HAVE_epilogue && reload_completed)
3263 || ! EXIT_IGNORE_STACK
3264 || (! FRAME_POINTER_REQUIRED
3265 && ! current_function_calls_alloca
3266 && flag_omit_frame_pointer)
3267 || current_function_sp_is_unchanging)
3269 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3272 /* Mark the frame pointer if needed at the end of the function. If
3273 we end up eliminating it, it will be removed from the live list
3274 of each basic block by reload. */
3276 if (! reload_completed || frame_pointer_needed)
3278 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3279 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3280 /* If they are different, also mark the hard frame pointer as live. */
3281 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3282 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3286 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3287 /* Many architectures have a GP register even without flag_pic.
3288 Assume the pic register is not in use, or will be handled by
3289 other means, if it is not fixed. */
3290 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3291 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3292 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3295 /* Mark all global registers, and all registers used by the epilogue
3296 as being live at the end of the function since they may be
3297 referenced by our caller. */
3298 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3299 if (global_regs[i] || EPILOGUE_USES (i))
3300 SET_REGNO_REG_SET (set, i);
3302 if (HAVE_epilogue && reload_completed)
3304 /* Mark all call-saved registers that we actually used. */
3305 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3306 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
3307 SET_REGNO_REG_SET (set, i);
3310 #ifdef EH_RETURN_DATA_REGNO
3311 /* Mark the registers that will contain data for the handler. */
3312 if (reload_completed && current_function_calls_eh_return)
/* Loop header over handler-data slots is elided; INVALID_REGNUM
   terminates the EH_RETURN_DATA_REGNO sequence.  */
3315 unsigned regno = EH_RETURN_DATA_REGNO(i);
3316 if (regno == INVALID_REGNUM)
3318 SET_REGNO_REG_SET (set, regno);
3321 #ifdef EH_RETURN_STACKADJ_RTX
3322 if ((! HAVE_epilogue || ! reload_completed)
3323 && current_function_calls_eh_return)
3325 rtx tmp = EH_RETURN_STACKADJ_RTX;
3326 if (tmp && REG_P (tmp))
3327 mark_reg (tmp, set);
3330 #ifdef EH_RETURN_HANDLER_RTX
3331 if ((! HAVE_epilogue || ! reload_completed)
3332 && current_function_calls_eh_return)
3334 rtx tmp = EH_RETURN_HANDLER_RTX;
3335 if (tmp && REG_P (tmp))
3336 mark_reg (tmp, set);
3340 /* Mark function return value. */
3341 diddle_return_value (mark_reg, set);
3344 /* Callback function for for_each_successor_phi. DATA is a regset.
3345 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3346 INSN, in the regset. */
3349 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3350 rtx insn ATTRIBUTE_UNUSED;
3351 int dest_regno ATTRIBUTE_UNUSED;
3355 regset live = (regset) data;
3356 SET_REGNO_REG_SET (live, src_regno);
3360 /* Propagate global life info around the graph of basic blocks. Begin
3361 considering blocks with their corresponding bit set in BLOCKS_IN.
3362 If BLOCKS_IN is null, consider it the universal set.
3364 BLOCKS_OUT is set for every block that was changed.
   FLAGS are the PROP_* flags, notably PROP_SCAN_DEAD_CODE.  */
3367 calculate_global_regs_live (blocks_in, blocks_out, flags)
3368 sbitmap blocks_in, blocks_out;
3371 basic_block *queue, *qhead, *qtail, *qend;
3372 regset tmp, new_live_at_end, call_used;
3373 regset_head tmp_head, call_used_head;
3374 regset_head new_live_at_end_head;
3377 tmp = INITIALIZE_REG_SET (tmp_head);
3378 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3379 call_used = INITIALIZE_REG_SET (call_used_head);
3381 /* Inconveniently, this is only readily available in hard reg set form. */
3382 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
3383 if (call_used_regs[i])
3384 SET_REGNO_REG_SET (call_used, i);
3386 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3387 because the `head == tail' style test for an empty queue doesn't
3388 work with a full queue. */
3389 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3391 qhead = qend = queue + n_basic_blocks + 2;
3393 /* Queue the blocks set in the initial mask. Do this in reverse block
3394 number order so that we are more likely for the first round to do
3395 useful work. We use AUX non-null to flag that the block is queued. */
3398 /* Clear out the garbage that might be hanging out in bb->aux. */
3399 for (i = n_basic_blocks - 1; i >= 0; --i)
3400 BASIC_BLOCK (i)->aux = NULL;
3402 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3404 basic_block bb = BASIC_BLOCK (i);
/* No mask given: queue every block (this is the `else' arm; its
   keyword is on an elided line).  */
3411 for (i = 0; i < n_basic_blocks; ++i)
3413 basic_block bb = BASIC_BLOCK (i);
3420 sbitmap_zero (blocks_out);
3422 /* We work through the queue until there are no more blocks. What
3423 is live at the end of this block is precisely the union of what
3424 is live at the beginning of all its successors. So, we set its
3425 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
3426 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
3427 this block by walking through the instructions in this block in
3428 reverse order and updating as we go. If that changed
3429 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
3430 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
3432 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
3433 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
3434 must either be live at the end of the block, or used within the
3435 block. In the latter case, it will certainly never disappear
3436 from GLOBAL_LIVE_AT_START. In the former case, the register
3437 could go away only if it disappeared from GLOBAL_LIVE_AT_START
3438 for one of the successor blocks. By induction, that cannot
   occur.  */
3440 while (qhead != qtail)
3442 int rescan, changed;
/* Dequeue the next block (pointer juggling elided); BB and edge E are
   declared on elided lines.  */
3451 /* Begin by propagating live_at_start from the successor blocks. */
3452 CLEAR_REG_SET (new_live_at_end);
3453 for (e = bb->succ; e; e = e->succ_next)
3455 basic_block sb = e->dest;
3457 /* Call-clobbered registers die across exception and call edges. */
3458 /* ??? Abnormal call edges ignored for the moment, as this gets
3459 confused by sibling call edges, which crashes reg-stack. */
3460 if (e->flags & EDGE_EH)
3462 bitmap_operation (tmp, sb->global_live_at_start,
3463 call_used, BITMAP_AND_COMPL);
3464 IOR_REG_SET (new_live_at_end, tmp);
/* Non-EH edge: take the successor's live_at_start wholesale.  */
3467 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3470 /* The all-important stack pointer must always be live. */
3471 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3473 /* Before reload, there are a few registers that must be forced
3474 live everywhere -- which might not already be the case for
3475 blocks within infinite loops. */
3476 if (! reload_completed)
3478 /* Any reference to any pseudo before reload is a potential
3479 reference of the frame pointer. */
3480 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
3482 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3483 /* Pseudos with argument area equivalences may require
3484 reloading via the argument pointer. */
3485 if (fixed_regs[ARG_POINTER_REGNUM])
3486 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
3489 /* Any constant, or pseudo with constant equivalences, may
3490 require reloading from memory using the pic register. */
3491 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3492 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3493 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
3496 /* Regs used in phi nodes are not included in
3497 global_live_at_start, since they are live only along a
3498 particular edge. Set those regs that are live because of a
3499 phi node alternative corresponding to this particular block. */
3501 for_each_successor_phi (bb, &set_phi_alternative_reg,
/* (second argument, new_live_at_end, is on an elided line)  */
3504 if (bb == ENTRY_BLOCK_PTR)
3506 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
/* (`continue' for the entry block is on an elided line)  */
3510 /* On our first pass through this block, we'll go ahead and continue.
3511 Recognize first pass by local_set NULL. On subsequent passes, we
3512 get to skip out early if live_at_end wouldn't have changed. */
3514 if (bb->local_set == NULL)
3516 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3517 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
/* (rescan = 1 on the first pass -- elided)  */
3522 /* If any bits were removed from live_at_end, we'll have to
3523 rescan the block. This wouldn't be necessary if we had
3524 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3525 local_live is really dependent on live_at_end. */
3526 CLEAR_REG_SET (tmp);
3527 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3528 new_live_at_end, BITMAP_AND_COMPL);
3532 /* If any of the registers in the new live_at_end set are
3533 conditionally set in this basic block, we must rescan.
3534 This is because conditional lifetimes at the end of the
3535 block do not just take the live_at_end set into account,
3536 but also the liveness at the start of each successor
3537 block. We can miss changes in those sets if we only
3538 compare the new live_at_end against the previous one. */
3539 CLEAR_REG_SET (tmp);
3540 rescan = bitmap_operation (tmp, new_live_at_end,
3541 bb->cond_local_set, BITMAP_AND);
3546 /* Find the set of changed bits. Take this opportunity
3547 to notice that this set is empty and early out. */
3548 CLEAR_REG_SET (tmp);
3549 changed = bitmap_operation (tmp, bb->global_live_at_end,
3550 new_live_at_end, BITMAP_XOR);
3554 /* If any of the changed bits overlap with local_set,
3555 we'll have to rescan the block. Detect overlap by
3556 the AND with ~local_set turning off bits. */
3557 rescan = bitmap_operation (tmp, tmp, bb->local_set,
/* (BITMAP_AND_COMPL argument is on an elided line)  */
3562 /* Let our caller know that BB changed enough to require its
3563 death notes updated. */
3565 SET_BIT (blocks_out, bb->index);
3569 /* Add to live_at_start the set of all registers in
3570 new_live_at_end that aren't in the old live_at_end. */
3572 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3574 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3576 changed = bitmap_operation (bb->global_live_at_start,
3577 bb->global_live_at_start,
/* (tmp / BITMAP_IOR arguments are on elided lines)  */
3584 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3586 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3587 into live_at_start. */
3588 propagate_block (bb, new_live_at_end, bb->local_set,
3589 bb->cond_local_set, flags);
3591 /* If live_at_start didn't change, no need to go farther. */
3592 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3595 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3598 /* Queue all predecessors of BB so that we may re-examine
3599 their live_at_end. */
3600 for (e = bb->pred; e; e = e->pred_next)
3602 basic_block pb = e->src;
3603 if (pb->aux == NULL)
/* (enqueue PB, set pb->aux -- elided)  */
3614 FREE_REG_SET (new_live_at_end);
3615 FREE_REG_SET (call_used);
/* Free the per-block scratch sets, either just for changed blocks or
   for all blocks when no output mask was given.  */
3619 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3621 basic_block bb = BASIC_BLOCK (i);
3622 FREE_REG_SET (bb->local_set);
3623 FREE_REG_SET (bb->cond_local_set);
3628 for (i = n_basic_blocks - 1; i >= 0; --i)
3630 basic_block bb = BASIC_BLOCK (i);
3631 FREE_REG_SET (bb->local_set);
3632 FREE_REG_SET (bb->cond_local_set);
3639 /* Subroutines of life analysis. */
3641 /* Allocate the permanent data structures that represent the results
3642 of life analysis. Not static since used also for stupid life analysis.
   Allocates the per-block global_live_at_{start,end} regsets, plus the
   sets for the synthetic entry/exit blocks and regs_live_at_setjmp.  */
3645 allocate_bb_life_data ()
3649 for (i = 0; i < n_basic_blocks; i++)
3651 basic_block bb = BASIC_BLOCK (i);
3653 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3654 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3657 ENTRY_BLOCK_PTR->global_live_at_end
3658 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3659 EXIT_BLOCK_PTR->global_live_at_start
3660 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3662 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
/* Allocate (or grow) the per-register data used by life analysis and
   reset the counters that propagate_block will accumulate.  */
3666 allocate_reg_life_data ()
3670 max_regno = max_reg_num ();
3672 /* Recalculate the register space, in case it has grown. Old style
3673 vector oriented regsets would set regset_{size,bytes} here also. */
3674 allocate_reg_info (max_regno, FALSE, FALSE);
3676 /* Reset all the data we'll collect in propagate_block and its
   subroutines.  */
3678 for (i = 0; i < max_regno; i++)
3682 REG_N_DEATHS (i) = 0;
3683 REG_N_CALLS_CROSSED (i) = 0;
3684 REG_LIVE_LENGTH (i) = 0;
3685 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3689 /* Delete dead instructions for propagate_block.
   BB is the block containing INSN; BB->end is fixed up if INSN was the
   block's last insn.  */
3692 propagate_block_delete_insn (bb, insn)
3696 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3698 /* If the insn referred to a label, and that label was attached to
3699 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3700 pretty much mandatory to delete it, because the ADDR_VEC may be
3701 referencing labels that no longer exist.
3703 INSN may reference a deleted label, particularly when a jump
3704 table has been optimized into a direct jump. There's no
3705 real good way to fix up the reference to the deleted label
3706 when the label is deleted, so we just allow it here.
3708 After dead code elimination is complete, we do search for
3709 any REG_LABEL notes which reference deleted labels as a
   sanity check (comment tail elided).  */
/* NOTE(review): INOTE is the EXPR_LIST note returned by find_reg_note,
   so comparing its GET_CODE against CODE_LABEL looks wrong -- the
   label is XEXP (inote, 0), as the next line assumes.  TODO confirm
   against the full source; kept verbatim here.  */
3712 if (inote && GET_CODE (inote) == CODE_LABEL)
3714 rtx label = XEXP (inote, 0);
3717 /* The label may be forced if it has been put in the constant
3718 pool. If that is the only use we must discard the table
3719 jump following it, but not the label itself. */
3720 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
3721 && (next = next_nonnote_insn (label)) != NULL
3722 && GET_CODE (next) == JUMP_INSN
3723 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3724 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3726 rtx pat = PATTERN (next);
3727 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3728 int len = XVECLEN (pat, diff_vec_p);
/* Drop the use count of every label the jump table references
   before deleting the table itself.  */
3731 for (i = 0; i < len; i++)
3732 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3734 flow_delete_insn (next);
/* Keep BB's end pointer valid when deleting the block's last insn.  */
3738 if (bb->end == insn)
3739 bb->end = PREV_INSN (insn);
3740 flow_delete_insn (insn);
3743 /* Delete dead libcalls for propagate_block. Return the insn
3744 before the libcall.
   INSN is the libcall's final insn; NOTE is its REG_RETVAL note, whose
   operand is the REG_LIBCALL first insn of the sequence.  */
3747 propagate_block_delete_libcall (bb, insn, note)
3751 rtx first = XEXP (note, 0);
3752 rtx before = PREV_INSN (first);
/* Keep BB's end pointer valid when the libcall ends the block.  */
3754 if (insn == bb->end)
3757 flow_delete_insn_chain (first, insn);
/* NOTE(review): numbered listing with gaps -- declarations, braces and
   several statements between the visible lines are missing (e.g. 3762-3763,
   3766-3767, 3772-3774, 3804-3805, 3813-3814, 3819-3822, etc.).
   The code below must be read against the complete flow.c.  */
3761 /* Update the life-status of regs for one insn. Return the previous insn. */
3764 propagate_one_insn (pbi, insn)
3765 struct propagate_block_info *pbi;
/* Scans one insn backwards: detects dead insns/libcalls, records sets,
   handles call clobbers/uses, then marks used regs live.  */
3768 rtx prev = PREV_INSN (insn);
3769 int flags = pbi->flags;
3770 int insn_is_dead = 0;
3771 int libcall_is_dead = 0;
3775 if (! INSN_P (insn))
3778 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3779 if (flags & PROP_SCAN_DEAD_CODE)
3781 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
3782 libcall_is_dead = (insn_is_dead && note != 0
3783 && libcall_dead_p (pbi, note, insn));
3786 /* If an instruction consists of just dead store(s) on final pass,
3788 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3790 /* If we're trying to delete a prologue or epilogue instruction
3791 that isn't flagged as possibly being dead, something is wrong.
3792 But if we are keeping the stack pointer depressed, we might well
3793 be deleting insns that are used to compute the amount to update
3794 it by, so they are fine. */
3795 if (reload_completed
3796 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
3797 && (TYPE_RETURNS_STACK_DEPRESSED
3798 (TREE_TYPE (current_function_decl))))
3799 && (((HAVE_epilogue || HAVE_prologue)
3800 && prologue_epilogue_contains (insn))
3801 || (HAVE_sibcall_epilogue
3802 && sibcall_epilogue_contains (insn)))
3803 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
3806 /* Record sets. Do this even for dead instructions, since they
3807 would have killed the values if they hadn't been deleted. */
3808 mark_set_regs (pbi, PATTERN (insn), insn);
3810 /* CC0 is now known to be dead. Either this insn used it,
3811 in which case it doesn't anymore, or clobbered it,
3812 so the next insn can't use it. */
3815 if (libcall_is_dead)
3816 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3818 propagate_block_delete_insn (pbi->bb, insn);
3823 /* See if this is an increment or decrement that can be merged into
3824 a following memory address. */
3827 register rtx x = single_set (insn);
3829 /* Does this instruction increment or decrement a register? */
3830 if ((flags & PROP_AUTOINC)
3832 && GET_CODE (SET_DEST (x)) == REG
3833 && (GET_CODE (SET_SRC (x)) == PLUS
3834 || GET_CODE (SET_SRC (x)) == MINUS)
3835 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3836 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3837 /* Ok, look for a following memory ref we can combine with.
3838 If one is found, change the memory ref to a PRE_INC
3839 or PRE_DEC, cancel this insn, and return 1.
3840 Return 0 if nothing has been done. */
3841 && try_pre_increment_1 (pbi, insn))
3844 #endif /* AUTO_INC_DEC */
3846 CLEAR_REG_SET (pbi->new_set);
3848 /* If this is not the final pass, and this insn is copying the value of
3849 a library call and it's dead, don't scan the insns that perform the
3850 library call, so that the call's arguments are not marked live. */
3851 if (libcall_is_dead)
3853 /* Record the death of the dest reg. */
3854 mark_set_regs (pbi, PATTERN (insn), insn);
/* Skip backwards over the whole libcall sequence at once.  */
3856 insn = XEXP (note, 0);
3857 return PREV_INSN (insn);
3859 else if (GET_CODE (PATTERN (insn)) == SET
3860 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3861 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3862 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3863 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3864 /* We have an insn to pop a constant amount off the stack.
3865 (Such insns use PLUS regardless of the direction of the stack,
3866 and any insn to adjust the stack by a constant is always a pop.)
3867 These insns, if not dead stores, have no effect on life. */
3871 /* Any regs live at the time of a call instruction must not go
3872 in a register clobbered by calls. Find all regs now live and
3873 record this for them. */
3875 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3876 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3877 { REG_N_CALLS_CROSSED (i)++; });
3879 /* Record sets. Do this even for dead instructions, since they
3880 would have killed the values if they hadn't been deleted. */
3881 mark_set_regs (pbi, PATTERN (insn), insn);
3883 if (GET_CODE (insn) == CALL_INSN)
3889 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3890 cond = COND_EXEC_TEST (PATTERN (insn));
3892 /* Non-constant calls clobber memory. */
3893 if (! CONST_CALL_P (insn))
3895 free_EXPR_LIST_list (&pbi->mem_set_list);
3896 pbi->mem_set_list_len = 0;
3899 /* There may be extra registers to be clobbered. */
3900 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3902 note = XEXP (note, 1))
3903 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3904 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3905 cond, insn, pbi->flags);
3907 /* Calls change all call-used and global registers. */
3908 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3909 if (call_used_regs[i] && ! global_regs[i]
3912 /* We do not want REG_UNUSED notes for these registers. */
3913 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3915 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3919 /* If an insn doesn't use CC0, it becomes dead since we assume
3920 that every insn clobbers it. So show it dead here;
3921 mark_used_regs will set it live if it is referenced. */
3926 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3928 /* Sometimes we may have inserted something before INSN (such as a move)
3929 when we make an auto-inc. So ensure we will scan those insns. */
3931 prev = PREV_INSN (insn);
3934 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3940 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3941 cond = COND_EXEC_TEST (PATTERN (insn));
3943 /* Calls use their arguments. */
3944 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3946 note = XEXP (note, 1))
3947 if (GET_CODE (XEXP (note, 0)) == USE)
3948 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3951 /* The stack ptr is used (honorarily) by a CALL insn. */
3952 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM)...
3954 /* Calls may also reference any of the global registers,
3955 so they are made live. */
3956 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3958 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3963 /* On final pass, update counts of how many insns in which each reg
3965 if (flags & PROP_REG_INFO)
3966 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3967 { REG_LIVE_LENGTH (i)++; });
/* NOTE(review): numbered listing with missing lines throughout (parameter
   declarations at 3978/3980, braces, else-arms, the EXECUTE_IF_SET_IN_REG_SET
   argument lines 4071-4072, the "no successors" condition head at 4099, etc.).
   Confirm against the complete flow.c before modifying.  */
3972 /* Initialize a propagate_block_info struct for public consumption.
3973 Note that the structure itself is opaque to this file, but that
3974 the user can use the regsets provided here. */
3976 struct propagate_block_info *
3977 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
3979 regset live, local_set, cond_local_set;
3982 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
3985 pbi->reg_live = live;
3986 pbi->mem_set_list = NULL_RTX;
3987 pbi->mem_set_list_len = 0;
3988 pbi->local_set = local_set;
3989 pbi->cond_local_set = cond_local_set;
/* reg_next_use is only needed when building LOG_LINKS or doing auto-inc.  */
3993 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3994 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3996 pbi->reg_next_use = NULL;
3998 pbi->new_set = BITMAP_XMALLOC ();
4000 #ifdef HAVE_conditional_execution
4001 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
4002 free_reg_cond_life_info);
4003 pbi->reg_cond_reg = BITMAP_XMALLOC ();
4005 /* If this block ends in a conditional branch, for each register live
4006 from one side of the branch and not the other, record the register
4007 as conditionally dead. */
4008 if (GET_CODE (bb->end) == JUMP_INSN
4009 && any_condjump_p (bb->end))
4011 regset_head diff_head;
4012 regset diff = INITIALIZE_REG_SET (diff_head);
4013 basic_block bb_true, bb_false;
4014 rtx cond_true, cond_false, set_src;
4017 /* Identify the successor blocks. */
4018 bb_true = bb->succ->dest;
4019 if (bb->succ->succ_next != NULL)
4021 bb_false = bb->succ->succ_next->dest;
/* The fallthru edge leads to the false block; swap if needed
   (the swap statements are missing from this listing).  */
4023 if (bb->succ->flags & EDGE_FALLTHRU)
4025 basic_block t = bb_false;
4029 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
4034 /* This can happen with a conditional jump to the next insn. */
4035 if (JUMP_LABEL (bb->end) != bb_true->head)
4038 /* Simplest way to do nothing. */
4042 /* Extract the condition from the branch. */
4043 set_src = SET_SRC (pc_set (bb->end));
4044 cond_true = XEXP (set_src, 0);
4045 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
4046 GET_MODE (cond_true), XEXP (cond_true, 0),
4047 XEXP (cond_true, 1));
4048 if (GET_CODE (XEXP (set_src, 1)) == PC)
4051 cond_false = cond_true;
4055 /* Compute which register lead different lives in the successors. */
4056 if (bitmap_operation (diff, bb_true->global_live_at_start,
4057 bb_false->global_live_at_start, BITMAP_XOR))
4059 rtx reg = XEXP (cond_true, 0);
4061 if (GET_CODE (reg) == SUBREG)
4062 reg = SUBREG_REG (reg);
4064 if (GET_CODE (reg) != REG)
4067 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
4069 /* For each such register, mark it conditionally dead. */
4070 EXECUTE_IF_SET_IN_REG_SET
4073 struct reg_cond_life_info *rcli;
4076 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
4078 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
4082 rcli->condition = cond;
4083 rcli->stores = const0_rtx;
4084 rcli->orig_condition = cond;
4086 splay_tree_insert (pbi->reg_cond_dead, i,
4087 (splay_tree_value) rcli);
4091 FREE_REG_SET (diff);
4095 /* If this block has no successors, any stores to the frame that aren't
4096 used later in the block are dead. So make a pass over the block
4097 recording any such that are made and show them dead at the end. We do
4098 a very conservative and simple job here. */
4100 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
4101 && (TYPE_RETURNS_STACK_DEPRESSED
4102 (TREE_TYPE (current_function_decl))))
4103 && (flags & PROP_SCAN_DEAD_CODE)
4104 && (bb->succ == NULL
4105 || (bb->succ->succ_next == NULL
4106 && bb->succ->dest == EXIT_BLOCK_PTR
4107 && ! current_function_calls_eh_return)))
4110 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
4111 if (GET_CODE (insn) == INSN
4112 && (set = single_set (insn))
4113 && GET_CODE (SET_DEST (set)) == MEM)
4115 rtx mem = SET_DEST (set);
4116 rtx canon_mem = canon_rtx (mem);
4118 /* This optimization is performed by faking a store to the
4119 memory at the end of the block. This doesn't work for
4120 unchanging memories because multiple stores to unchanging
4121 memory is illegal and alias analysis doesn't consider it. */
4122 if (RTX_UNCHANGING_P (canon_mem))
4125 if (XEXP (canon_mem, 0) == frame_pointer_rtx
4126 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
4127 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
4128 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
4131 /* Store a copy of mem, otherwise the address may be scrogged
4132 by find_auto_inc. This matters because insn_dead_p uses
4133 an rtx_equal_p check to determine if two addresses are
4134 the same. This works before find_auto_inc, but fails
4135 after find_auto_inc, causing discrepencies between the
4136 set of live registers calculated during the
4137 calculate_global_regs_live phase and what actually exists
4138 after flow completes, leading to aborts. */
4139 if (flags & PROP_AUTOINC)
4140 mem = shallow_copy_rtx (mem);
4142 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
4143 if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
/* NOTE(review): numbered listing with dropped lines (return type, braces,
   the #endif for HAVE_conditional_execution, and the final free of PBI
   itself are missing here).  Frees everything init_propagate_block_info
   allocated: mem_set_list, new_set bitmap, conditional-execution trees,
   and the reg_next_use array.  */
4152 /* Release a propagate_block_info struct. */
4155 free_propagate_block_info (pbi)
4156 struct propagate_block_info *pbi;
4158 free_EXPR_LIST_list (&pbi->mem_set_list);
4160 BITMAP_XFREE (pbi->new_set);
4162 #ifdef HAVE_conditional_execution
4163 splay_tree_delete (pbi->reg_cond_dead);
4164 BITMAP_XFREE (pbi->reg_cond_reg);
4167 if (pbi->reg_next_use)
4168 free (pbi->reg_next_use);
/* NOTE(review): numbered listing with gaps (return type, some parameter
   declarations at 4191-4193/4195, local declarations, braces and the loop
   body's closing lines are missing).  Drives propagate_one_insn backwards
   over the block.  */
4173 /* Compute the registers live at the beginning of a basic block BB from
4174 those live at the end.
4176 When called, REG_LIVE contains those live at the end. On return, it
4177 contains those live at the beginning.
4179 LOCAL_SET, if non-null, will be set with all registers killed
4180 unconditionally by this basic block.
4181 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
4182 killed conditionally by this basic block. If there is any unconditional
4183 set of a register, then the corresponding bit will be set in LOCAL_SET
4184 and cleared in COND_LOCAL_SET.
4185 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
4186 case, the resulting set will be equal to the union of the two sets that
4187 would otherwise be computed. */
4190 propagate_block (bb, live, local_set, cond_local_set, flags)
4194 regset cond_local_set;
4197 struct propagate_block_info *pbi;
4200 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
4202 if (flags & PROP_REG_INFO)
4206 /* Process the regs live at the end of the block.
4207 Mark them as not local to any one basic block. */
4208 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
4209 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4212 /* Scan the block an insn at a time from end to beginning. */
4214 for (insn = bb->end;; insn = prev)
4216 /* If this is a call to `setjmp' et al, warn if any
4217 non-volatile datum is live. */
4218 if ((flags & PROP_REG_INFO)
4219 && GET_CODE (insn) == NOTE
4220 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
4221 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
4223 prev = propagate_one_insn (pbi, insn);
/* Loop terminates when the scan reaches the block head
   (the break statement is missing from this listing).  */
4225 if (insn == bb->head)
4229 free_propagate_block_info (pbi);
/* NOTE(review): numbered listing with gaps (parameter declarations at
   4244-4245, braces, several "return 0;"/"return 1;" lines and the bodies
   of many guard conditions are missing).  Read against complete flow.c.  */
4232 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
4233 (SET expressions whose destinations are registers dead after the insn).
4234 NEEDED is the regset that says which regs are alive after the insn.
4236 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
4238 If X is the entire body of an insn, NOTES contains the reg notes
4239 pertaining to the insn. */
4242 insn_dead_p (pbi, x, call_ok, notes)
4243 struct propagate_block_info *pbi;
4246 rtx notes ATTRIBUTE_UNUSED;
4248 enum rtx_code code = GET_CODE (x);
4251 /* If flow is invoked after reload, we must take existing AUTO_INC
4252 expresions into account. */
4253 if (reload_completed)
4255 for (; notes; notes = XEXP (notes, 1))
4257 if (REG_NOTE_KIND (notes) == REG_INC)
4259 int regno = REGNO (XEXP (notes, 0));
4261 /* Don't delete insns to set global regs. */
4262 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4263 || REGNO_REG_SET_P (pbi->reg_live, regno))
4270 /* If setting something that's a reg or part of one,
4271 see if that register's altered value will be live. */
4275 rtx r = SET_DEST (x);
4278 if (GET_CODE (r) == CC0)
4279 return ! pbi->cc0_live;
4282 /* A SET that is a subroutine call cannot be dead. */
4283 if (GET_CODE (SET_SRC (x)) == CALL)
4289 /* Don't eliminate loads from volatile memory or volatile asms. */
4290 else if (volatile_refs_p (SET_SRC (x)))
4293 if (GET_CODE (r) == MEM)
4297 if (MEM_VOLATILE_P (r))
4300 /* Walk the set of memory locations we are currently tracking
4301 and see if one is an identical match to this memory location.
4302 If so, this memory write is dead (remember, we're walking
4303 backwards from the end of the block to the start). Since
4304 rtx_equal_p does not check the alias set or flags, we also
4305 must have the potential for them to conflict (anti_dependence). */
4306 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
4307 if (anti_dependence (r, XEXP (temp, 0)))
4309 rtx mem = XEXP (temp, 0);
4311 if (rtx_equal_p (mem, r))
4314 /* Check if memory reference matches an auto increment. Only
4315 post increment/decrement or modify are valid. */
4316 if (GET_MODE (mem) == GET_MODE (r)
4317 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
4318 || GET_CODE (XEXP (mem, 0)) == POST_INC
4319 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
4320 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
4321 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
/* Strip wrappers to reach the underlying REG, if any.  */
4328 while (GET_CODE (r) == SUBREG
4329 || GET_CODE (r) == STRICT_LOW_PART
4330 || GET_CODE (r) == ZERO_EXTRACT)
4333 if (GET_CODE (r) == REG)
4335 int regno = REGNO (r);
4338 if (REGNO_REG_SET_P (pbi->reg_live, regno))
4341 /* If this is a hard register, verify that subsequent
4342 words are not needed. */
4343 if (regno < FIRST_PSEUDO_REGISTER)
4345 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
4348 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
4352 /* Don't delete insns to set global regs. */
4353 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4356 /* Make sure insns to set the stack pointer aren't deleted. */
4357 if (regno == STACK_POINTER_REGNUM)
4360 /* ??? These bits might be redundant with the force live bits
4361 in calculate_global_regs_live. We would delete from
4362 sequential sets; whether this actually affects real code
4363 for anything but the stack pointer I don't know. */
4364 /* Make sure insns to set the frame pointer aren't deleted. */
4365 if (regno == FRAME_POINTER_REGNUM
4366 && (! reload_completed || frame_pointer_needed))
4368 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4369 if (regno == HARD_FRAME_POINTER_REGNUM
4370 && (! reload_completed || frame_pointer_needed))
4374 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4375 /* Make sure insns to set arg pointer are never deleted
4376 (if the arg pointer isn't fixed, there will be a USE
4377 for it, so we can treat it normally). */
4378 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4382 /* Otherwise, the set is dead. */
4388 /* If performing several activities, insn is dead if each activity
4389 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4390 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4392 else if (code == PARALLEL)
4394 int i = XVECLEN (x, 0);
4396 for (i--; i >= 0; i--)
4397 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4398 && GET_CODE (XVECEXP (x, 0, i)) != USE
4399 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4405 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4406 is not necessarily true for hard registers. */
4407 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4408 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4409 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4412 /* We do not check other CLOBBER or USE here. An insn consisting of just
4413 a CLOBBER or just a USE should not be deleted. */
/* NOTE(review): numbered listing with gaps (return type, parameter
   declarations 4435-4436, braces, local declarations for CALL_PAT/I,
   and several "return 1;" lines are missing).  */
4417 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4418 return 1 if the entire library call is dead.
4419 This is true if INSN copies a register (hard or pseudo)
4420 and if the hard return reg of the call insn is dead.
4421 (The caller should have tested the destination of the SET inside
4422 INSN already for death.)
4424 If this insn doesn't just copy a register, then we don't
4425 have an ordinary libcall. In that case, cse could not have
4426 managed to substitute the source for the dest later on,
4427 so we can assume the libcall is dead.
4429 PBI is the block info giving pseudoregs live before this insn.
4430 NOTE is the REG_RETVAL note of the insn. */
4433 libcall_dead_p (pbi, note, insn)
4434 struct propagate_block_info *pbi;
4438 rtx x = single_set (insn);
4442 register rtx r = SET_SRC (x);
4443 if (GET_CODE (r) == REG)
/* NOTE's operand is the first insn of the libcall sequence.  */
4445 rtx call = XEXP (note, 0);
4449 /* Find the call insn. */
4450 while (call != insn && GET_CODE (call) != CALL_INSN)
4451 call = NEXT_INSN (call);
4453 /* If there is none, do nothing special,
4454 since ordinary death handling can understand these insns. */
4458 /* See if the hard reg holding the value is dead.
4459 If this is a PARALLEL, find the call within it. */
4460 call_pat = PATTERN (call);
4461 if (GET_CODE (call_pat) == PARALLEL)
4463 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4464 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4465 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4468 /* This may be a library call that is returning a value
4469 via invisible pointer. Do nothing special, since
4470 ordinary death handling can understand these insns. */
4474 call_pat = XVECEXP (call_pat, 0, i);
/* CALL_OK == 1: the contained CALL does not make the insn needed.  */
4477 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
/* NOTE(review): numbered listing -- return type, the REGNO parameter
   declaration, braces and the early "return 0;" (lines 4487-4491,
   4497-4498, 4500) are missing here.  */
4483 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4484 live at function entry. Don't count global register variables, variables
4485 in registers that can be used for function arg passing, or variables in
4486 fixed hard registers. */
4489 regno_uninitialized (regno)
4492 if (n_basic_blocks == 0
4493 || (regno < FIRST_PSEUDO_REGISTER
4494 && (global_regs[regno]
4495 || fixed_regs[regno]
4496 || FUNCTION_ARG_REGNO_P (regno))))
/* Live on entry to the first block == used before set.  */
4499 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
/* NOTE(review): numbered listing -- return type, parameter declaration,
   braces and the early "return 0;" (lines 4505-4509, 4511-4512) are
   missing here.  */
4502 /* 1 if register REGNO was alive at a place where `setjmp' was called
4503 and was set more than once or is an argument.
4504 Such regs may be clobbered by `longjmp'. */
4507 regno_clobbered_at_setjmp (regno)
4510 if (n_basic_blocks == 0)
/* Set more than once, or live on function entry (i.e. an argument),
   AND live across a setjmp call.  */
4513 return ((REG_N_SETS (regno) > 1
4514 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4515 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
/* NOTE(review): numbered listing with gaps -- return type, the INSN
   parameter declaration, braces, the inner while-loop head, the NEXT
   declaration and the else/prev-advance lines are missing here.  */
4518 /* INSN references memory, possibly using autoincrement addressing modes.
4519 Find any entries on the mem_set_list that need to be invalidated due
4520 to an address change. */
4523 invalidate_mems_from_autoinc (pbi, insn)
4524 struct propagate_block_info *pbi;
4527 rtx note = REG_NOTES (insn);
4528 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4530 if (REG_NOTE_KIND (note) == REG_INC)
/* Walk the tracked-store list, unlinking any entry whose MEM mentions
   the auto-incremented register.  */
4532 rtx temp = pbi->mem_set_list;
4533 rtx prev = NULL_RTX;
4538 next = XEXP (temp, 1);
4539 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4541 /* Splice temp out of list. */
4543 XEXP (prev, 1) = next;
4545 pbi->mem_set_list = next;
4546 free_EXPR_LIST_node (temp);
4547 pbi->mem_set_list_len--;
/* NOTE(review): numbered listing with gaps -- return type, the EXP
   parameter declaration, braces, the while-loop head, the NEXT declaration
   and the else/prev-advance lines are missing here.  */
4557 /* EXP is either a MEM or a REG. Remove any dependant entries
4558 from pbi->mem_set_list. */
4561 invalidate_mems_from_set (pbi, exp)
4562 struct propagate_block_info *pbi;
4565 rtx temp = pbi->mem_set_list;
4566 rtx prev = NULL_RTX;
4571 next = XEXP (temp, 1);
/* A stored MEM kills tracked stores it may alias (output dependence);
   a set REG kills tracked stores whose address mentions that register.  */
4572 if ((GET_CODE (exp) == MEM
4573 && output_dependence (XEXP (temp, 0), exp))
4574 || (GET_CODE (exp) == REG
4575 && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
4577 /* Splice this entry out of the list. */
4579 XEXP (prev, 1) = next;
4581 pbi->mem_set_list = next;
4582 free_EXPR_LIST_node (temp);
4583 pbi->mem_set_list_len--;
/* NOTE(review): numbered listing with gaps -- return type, parameter
   declarations, braces, several case labels/breaks and the "retry" label
   structure are missing here.  Dispatches each SET/CLOBBER/COND_EXEC
   (and PARALLEL thereof) in X to mark_set_1.  */
4591 /* Process the registers that are set within X. Their bits are set to
4592 1 in the regset DEAD, because they are dead prior to this insn.
4594 If INSN is nonzero, it is the insn being processed.
4596 FLAGS is the set of operations to perform. */
4599 mark_set_regs (pbi, x, insn)
4600 struct propagate_block_info *pbi;
4603 rtx cond = NULL_RTX;
/* REG_INC notes count as sets of the incremented register.  */
4608 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4610 if (REG_NOTE_KIND (link) == REG_INC)
4611 mark_set_1 (pbi, SET, XEXP (link, 0),
4612 (GET_CODE (x) == COND_EXEC
4613 ? COND_EXEC_TEST (x) : NULL_RTX),
4617 switch (code = GET_CODE (x))
4621 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4625 cond = COND_EXEC_TEST (x);
4626 x = COND_EXEC_CODE (x);
4632 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4634 rtx sub = XVECEXP (x, 0, i);
4635 switch (code = GET_CODE (sub))
4638 if (cond != NULL_RTX)
4641 cond = COND_EXEC_TEST (sub);
4642 sub = COND_EXEC_CODE (sub);
4643 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4649 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
/* NOTE(review): numbered listing with many missing lines (parameter/local
   declarations, braces, case labels, do-loop bodies, condition heads such
   as 4964 and the statement targets of several if's).  Do not edit without
   the complete flow.c; the structure below is fragmentary.  */
4664 /* Process a single set, which appears in INSN. REG (which may not
4665 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
4666 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
4667 If the set is conditional (because it appear in a COND_EXEC), COND
4668 will be the condition. */
4671 mark_set_1 (pbi, code, reg, cond, insn, flags)
4672 struct propagate_block_info *pbi;
4674 rtx reg, cond, insn;
4677 int regno_first = -1, regno_last = -1;
4678 unsigned long not_dead = 0;
4681 /* Modifying just one hardware register of a multi-reg value or just a
4682 byte field of a register does not mean the value from before this insn
4683 is now dead. Of course, if it was dead after it's unused now. */
4685 switch (GET_CODE (reg))
4688 /* Some targets place small structures in registers for return values of
4689 functions. We have to detect this case specially here to get correct
4690 flow information. */
4691 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4692 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
4693 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
4699 case STRICT_LOW_PART:
4700 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4702 reg = XEXP (reg, 0);
4703 while (GET_CODE (reg) == SUBREG
4704 || GET_CODE (reg) == ZERO_EXTRACT
4705 || GET_CODE (reg) == SIGN_EXTRACT
4706 || GET_CODE (reg) == STRICT_LOW_PART);
4707 if (GET_CODE (reg) == MEM)
/* A partial set: the old value survives, so the reg is "not dead"
   if it was live before.  */
4709 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4713 regno_last = regno_first = REGNO (reg);
4714 if (regno_first < FIRST_PSEUDO_REGISTER)
4715 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4719 if (GET_CODE (SUBREG_REG (reg)) == REG)
4721 enum machine_mode outer_mode = GET_MODE (reg);
4722 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4724 /* Identify the range of registers affected. This is moderately
4725 tricky for hard registers. See alter_subreg. */
4727 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4728 if (regno_first < FIRST_PSEUDO_REGISTER)
4730 regno_first += subreg_regno_offset (regno_first, inner_mode,
4733 regno_last = (regno_first
4734 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4736 /* Since we've just adjusted the register number ranges, make
4737 sure REG matches. Otherwise some_was_live will be clear
4738 when it shouldn't have been, and we'll create incorrect
4739 REG_UNUSED notes. */
4740 reg = gen_rtx_REG (outer_mode, regno_first);
4744 /* If the number of words in the subreg is less than the number
4745 of words in the full register, we have a well-defined partial
4746 set. Otherwise the high bits are undefined.
4748 This is only really applicable to pseudos, since we just took
4749 care of multi-word hard registers. */
4750 if (((GET_MODE_SIZE (outer_mode)
4751 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4752 < ((GET_MODE_SIZE (inner_mode)
4753 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4754 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
4757 reg = SUBREG_REG (reg);
4761 reg = SUBREG_REG (reg);
4768 /* If this set is a MEM, then it kills any aliased writes.
4769 If this set is a REG, then it kills any MEMs which use the reg. */
4770 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4772 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4773 invalidate_mems_from_set (pbi, reg);
4775 /* If the memory reference had embedded side effects (autoincrement
4776 address modes. Then we may need to kill some entries on the
4778 if (insn && GET_CODE (reg) == MEM)
4779 invalidate_mems_from_autoinc (pbi, insn);
4781 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
4782 && GET_CODE (reg) == MEM && ! side_effects_p (reg)
4783 /* ??? With more effort we could track conditional memory life. */
4785 /* We do not know the size of a BLKmode store, so we do not track
4786 them for redundant store elimination. */
4787 && GET_MODE (reg) != BLKmode
4788 /* There are no REG_INC notes for SP, so we can't assume we'll see
4789 everything that invalidates it. To be safe, don't eliminate any
4790 stores though SP; none of them should be redundant anyway. */
4791 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4794 /* Store a copy of mem, otherwise the address may be
4795 scrogged by find_auto_inc. */
4796 if (flags & PROP_AUTOINC)
4797 reg = shallow_copy_rtx (reg);
4799 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4800 pbi->mem_set_list_len++;
/* Frame/arg-pointer sets are excluded: those registers must stay live.  */
4804 if (GET_CODE (reg) == REG
4805 && ! (regno_first == FRAME_POINTER_REGNUM
4806 && (! reload_completed || frame_pointer_needed))
4807 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4808 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4809 && (! reload_completed || frame_pointer_needed))
4811 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4812 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4816 int some_was_live = 0, some_was_dead = 0;
4818 for (i = regno_first; i <= regno_last; ++i)
4820 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4823 /* Order of the set operation matters here since both
4824 sets may be the same. */
4825 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
4826 if (cond != NULL_RTX
4827 && ! REGNO_REG_SET_P (pbi->local_set, i))
4828 SET_REGNO_REG_SET (pbi->cond_local_set, i);
4830 SET_REGNO_REG_SET (pbi->local_set, i);
4832 if (code != CLOBBER)
4833 SET_REGNO_REG_SET (pbi->new_set, i);
4835 some_was_live |= needed_regno;
4836 some_was_dead |= ! needed_regno;
4839 #ifdef HAVE_conditional_execution
4840 /* Consider conditional death in deciding that the register needs
4842 if (some_was_live && ! not_dead
4843 /* The stack pointer is never dead. Well, not strictly true,
4844 but it's very difficult to tell from here. Hopefully
4845 combine_stack_adjustments will fix up the most egregious
4847 && regno_first != STACK_POINTER_REGNUM)
4849 for (i = regno_first; i <= regno_last; ++i)
4850 if (! mark_regno_cond_dead (pbi, i, cond))
4851 not_dead |= ((unsigned long) 1) << (i - regno_first);
4855 /* Additional data to record if this is the final pass. */
4856 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4857 | PROP_DEATH_NOTES | PROP_AUTOINC))
4860 register int blocknum = pbi->bb->index;
4863 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4865 y = pbi->reg_next_use[regno_first];
4867 /* The next use is no longer next, since a store intervenes. */
4868 for (i = regno_first; i <= regno_last; ++i)
4869 pbi->reg_next_use[i] = 0;
4872 if (flags & PROP_REG_INFO)
4874 for (i = regno_first; i <= regno_last; ++i)
4876 /* Count (weighted) references, stores, etc. This counts a
4877 register twice if it is modified, but that is correct. */
4878 REG_N_SETS (i) += 1;
4879 REG_N_REFS (i) += 1;
4880 REG_FREQ (i) += (optimize_size || !pbi->bb->frequency
4881 ? 1 : pbi->bb->frequency);
4883 /* The insns where a reg is live are normally counted
4884 elsewhere, but we want the count to include the insn
4885 where the reg is set, and the normal counting mechanism
4886 would not count it. */
4887 REG_LIVE_LENGTH (i) += 1;
4890 /* If this is a hard reg, record this function uses the reg. */
4891 if (regno_first < FIRST_PSEUDO_REGISTER)
4893 for (i = regno_first; i <= regno_last; i++)
4894 regs_ever_live[i] = 1;
4898 /* Keep track of which basic blocks each reg appears in. */
4899 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4900 REG_BASIC_BLOCK (regno_first) = blocknum;
4901 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4902 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4906 if (! some_was_dead)
4908 if (flags & PROP_LOG_LINKS)
4910 /* Make a logical link from the next following insn
4911 that uses this register, back to this insn.
4912 The following insns have already been processed.
4914 We don't build a LOG_LINK for hard registers containing
4915 in ASM_OPERANDs. If these registers get replaced,
4916 we might wind up changing the semantics of the insn,
4917 even if reload can make what appear to be valid
4918 assignments later. */
4919 if (y && (BLOCK_NUM (y) == blocknum)
4920 && (regno_first >= FIRST_PSEUDO_REGISTER
4921 || asm_noperands (PATTERN (y)) < 0))
4922 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4927 else if (! some_was_live)
4929 if (flags & PROP_REG_INFO)
4930 REG_N_DEATHS (regno_first) += 1;
4932 if (flags & PROP_DEATH_NOTES)
4934 /* Note that dead stores have already been deleted
4935 when possible. If we get here, we have found a
4936 dead store that cannot be eliminated (because the
4937 same insn does something useful). Indicate this
4938 by marking the reg being set as dying here. */
4940 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4945 if (flags & PROP_DEATH_NOTES)
4947 /* This is a case where we have a multi-word hard register
4948 and some, but not all, of the words of the register are
4949 needed in subsequent insns. Write REG_UNUSED notes
4950 for those parts that were not needed. This case should
4953 for (i = regno_first; i <= regno_last; ++i)
4954 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4956 = alloc_EXPR_LIST (REG_UNUSED,
4957 gen_rtx_REG (reg_raw_mode[i], i),
4963 /* Mark the register as being dead. */
4965 /* The stack pointer is never dead. Well, not strictly true,
4966 but it's very difficult to tell from here. Hopefully
4967 combine_stack_adjustments will fix up the most egregious
4969 && regno_first != STACK_POINTER_REGNUM)
4971 for (i = regno_first; i <= regno_last; ++i)
4972 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
4973 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4976 else if (GET_CODE (reg) == REG)
4978 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4979 pbi->reg_next_use[regno_first] = 0;
4982 /* If this is the last pass and this is a SCRATCH, show it will be dying
4983 here and count it. */
4984 else if (GET_CODE (reg) == SCRATCH)
4986 if (flags & PROP_DEATH_NOTES)
4988 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4992 #ifdef HAVE_conditional_execution
4993 /* Mark REGNO conditionally dead.
4994 Return true if the register is now unconditionally dead. */
/* PBI carries the block-local liveness state; COND is the COND_EXEC
   condition under which the store occurs, or NULL_RTX for an
   unconditional store.  */
4997 mark_regno_cond_dead (pbi, regno, cond)
4998 struct propagate_block_info *pbi;
5002 /* If this is a store to a predicate register, the value of the
5003 predicate is changing, we don't know that the predicate as seen
5004 before is the same as that seen after. Flush all dependent
5005 conditions from reg_cond_dead. This will make all such
5006 conditionally live registers unconditionally live. */
5007 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
5008 flush_reg_cond_reg (pbi, regno);
5010 /* If this is an unconditional store, remove any conditional
5011 life that may have existed. */
5012 if (cond == NULL_RTX)
5013 splay_tree_remove (pbi->reg_cond_dead, regno);
5016 splay_tree_node node;
5017 struct reg_cond_life_info *rcli;
5020 /* Otherwise this is a conditional set. Record that fact.
5021 It may have been conditionally used, or there may be a
5022 subsequent set with a complementary condition. */
5024 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5027 /* The register was unconditionally live previously.
5028 Record the current condition as the condition under
5029 which it is dead. */
5030 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5031 rcli->condition = cond;
5032 rcli->stores = cond;
5033 rcli->orig_condition = const0_rtx;
5034 splay_tree_insert (pbi->reg_cond_dead, regno,
5035 (splay_tree_value) rcli);
5037 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5039 /* Not unconditionally dead. */
5044 /* The register was conditionally live previously.
5045 Add the new condition to the old. */
5046 rcli = (struct reg_cond_life_info *) node->value;
5047 ncond = rcli->condition;
5048 ncond = ior_reg_cond (ncond, cond, 1);
5049 if (rcli->stores == const0_rtx)
5050 rcli->stores = cond;
5051 else if (rcli->stores != const1_rtx)
5052 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
5054 /* If the register is now unconditionally dead, remove the entry
5055 in the splay_tree. A register is unconditionally dead if the
5056 dead condition ncond is true. A register is also unconditionally
5057 dead if the sum of all conditional stores is an unconditional
5058 store (stores is true), and the dead condition is identically the
5059 same as the original dead condition initialized at the end of
5060 the block. This is a pointer compare, not an rtx_equal_p
5062 if (ncond == const1_rtx
5063 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
5064 splay_tree_remove (pbi->reg_cond_dead, regno);
5067 rcli->condition = ncond;
5069 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5071 /* Not unconditionally dead. */
5080 /* Called from splay_tree_delete for pbi->reg_cond_dead; releases the
     reg_cond_life_info payload stored as the node's value.  */
5083 free_reg_cond_life_info (value)
5084 splay_tree_value value;
5086 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
5090 /* Helper function for flush_reg_cond_reg. */
/* DATA points to a pair of ints: [0] is the regno being flushed,
   [1] is the key of the last node spliced out, used to skip nodes
   already handled in the in-order traversal.  */
5093 flush_reg_cond_reg_1 (node, data)
5094 splay_tree_node node;
5097 struct reg_cond_life_info *rcli;
5098 int *xdata = (int *) data;
5099 unsigned int regno = xdata[0];
5101 /* Don't need to search if last flushed value was farther on in
5102 the in-order traversal. */
5103 if (xdata[1] >= (int) node->key)
5106 /* Splice out portions of the expression that refer to regno. */
5107 rcli = (struct reg_cond_life_info *) node->value;
5108 rcli->condition = elim_reg_cond (rcli->condition, regno);
5109 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
5110 rcli->stores = elim_reg_cond (rcli->stores, regno);
5112 /* If the entire condition is now false, signal the node to be removed. */
5113 if (rcli->condition == const0_rtx)
5115 xdata[1] = node->key;
5118 else if (rcli->condition == const1_rtx)
5124 /* Flush all (sub) expressions referring to REGNO from PBI->reg_cond_dead. */
5127 flush_reg_cond_reg (pbi, regno)
5128 struct propagate_block_info *pbi;
5135 while (splay_tree_foreach (pbi->reg_cond_dead,
5136 flush_reg_cond_reg_1, pair) == -1)
5137 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
5139 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
5142 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
5143 For ior/and, the ADD flag determines whether we want to add the new
5144 condition X to the old one unconditionally. If it is zero, we will
5145 only return a new expression if X allows us to simplify part of
5146 OLD, otherwise we return OLD unchanged to the caller.
5147 If ADD is nonzero, we will return a new condition in all cases. The
5148 toplevel caller of one of these functions should always pass 1 for
5152 ior_reg_cond (old, x, add)
5158 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
5160 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5161 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
5162 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5164 if (GET_CODE (x) == GET_CODE (old)
5165 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5169 return gen_rtx_IOR (0, old, x);
/* OLD is a compound condition; distribute X through its
   IOR/AND/NOT structure, simplifying constant subresults.  */
5172 switch (GET_CODE (old))
5175 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5176 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5177 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5179 if (op0 == const0_rtx)
5181 if (op1 == const0_rtx)
5183 if (op0 == const1_rtx || op1 == const1_rtx)
5185 if (op0 == XEXP (old, 0))
5186 op0 = gen_rtx_IOR (0, op0, x);
5188 op1 = gen_rtx_IOR (0, op1, x);
5189 return gen_rtx_IOR (0, op0, op1);
5193 return gen_rtx_IOR (0, old, x);
5196 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5197 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5198 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5200 if (op0 == const1_rtx)
5202 if (op1 == const1_rtx)
5204 if (op0 == const0_rtx || op1 == const0_rtx)
5206 if (op0 == XEXP (old, 0))
5207 op0 = gen_rtx_IOR (0, op0, x);
5209 op1 = gen_rtx_IOR (0, op1, x);
5210 return gen_rtx_AND (0, op0, op1);
5214 return gen_rtx_IOR (0, old, x);
5217 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5218 if (op0 != XEXP (old, 0))
5219 return not_reg_cond (op0);
5222 return gen_rtx_IOR (0, old, x);
5233 enum rtx_code x_code;
5235 if (x == const0_rtx)
5237 else if (x == const1_rtx)
5239 x_code = GET_CODE (x);
/* A comparison of a REG against zero can be negated by simply
   reversing the comparison code.  */
5242 if (GET_RTX_CLASS (x_code) == '<'
5243 && GET_CODE (XEXP (x, 0)) == REG)
5245 if (XEXP (x, 1) != const0_rtx)
5248 return gen_rtx_fmt_ee (reverse_condition (x_code),
5249 VOIDmode, XEXP (x, 0), const0_rtx);
5251 return gen_rtx_NOT (0, x);
5255 and_reg_cond (old, x, add)
5261 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
/* NOTE(review): the matching test in ior_reg_cond uses
   REVERSE_CONDEXEC_PREDICATES_P; confirm that plain
   reverse_condition is intended here.  */
5263 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5264 && GET_CODE (x) == reverse_condition (GET_CODE (old))
5265 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5267 if (GET_CODE (x) == GET_CODE (old)
5268 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5272 return gen_rtx_AND (0, old, x);
5275 switch (GET_CODE (old))
5278 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5279 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5280 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5282 if (op0 == const0_rtx)
5284 if (op1 == const0_rtx)
5286 if (op0 == const1_rtx || op1 == const1_rtx)
5288 if (op0 == XEXP (old, 0))
5289 op0 = gen_rtx_AND (0, op0, x);
5291 op1 = gen_rtx_AND (0, op1, x);
5292 return gen_rtx_IOR (0, op0, op1);
5296 return gen_rtx_AND (0, old, x);
5299 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5300 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5301 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5303 if (op0 == const1_rtx)
5305 if (op1 == const1_rtx)
5307 if (op0 == const0_rtx || op1 == const0_rtx)
5309 if (op0 == XEXP (old, 0))
5310 op0 = gen_rtx_AND (0, op0, x);
5312 op1 = gen_rtx_AND (0, op1, x);
5313 return gen_rtx_AND (0, op0, op1);
5318 /* If X is identical to one of the existing terms of the AND,
5319 then just return what we already have. */
5320 /* ??? There really should be some sort of recursive check here in
5321 case there are nested ANDs. */
5322 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
5323 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
5324 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
5325 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
5328 return gen_rtx_AND (0, old, x);
5331 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5332 if (op0 != XEXP (old, 0))
5333 return not_reg_cond (op0);
5336 return gen_rtx_AND (0, old, x);
5343 /* Given a condition X, remove references to reg REGNO and return the
5344 new condition. The removal will be done so that all conditions
5345 involving REGNO are considered to evaluate to false. This function
5346 is used when the value of REGNO changes. */
5349 elim_reg_cond (x, regno)
5355 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
5357 if (REGNO (XEXP (x, 0)) == regno)
/* Recurse over the AND/IOR/NOT structure, folding away constant
   subresults as they appear.  */
5362 switch (GET_CODE (x))
5365 op0 = elim_reg_cond (XEXP (x, 0), regno);
5366 op1 = elim_reg_cond (XEXP (x, 1), regno);
5367 if (op0 == const0_rtx || op1 == const0_rtx)
5369 if (op0 == const1_rtx)
5371 if (op1 == const1_rtx)
5373 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5375 return gen_rtx_AND (0, op0, op1);
5378 op0 = elim_reg_cond (XEXP (x, 0), regno);
5379 op1 = elim_reg_cond (XEXP (x, 1), regno);
5380 if (op0 == const1_rtx || op1 == const1_rtx)
5382 if (op0 == const0_rtx)
5384 if (op1 == const0_rtx)
5386 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5388 return gen_rtx_IOR (0, op0, op1);
5391 op0 = elim_reg_cond (XEXP (x, 0), regno);
5392 if (op0 == const0_rtx)
5394 if (op0 == const1_rtx)
5396 if (op0 != XEXP (x, 0))
5397 return not_reg_cond (op0);
5404 #endif /* HAVE_conditional_execution */
5408 /* Try to substitute the auto-inc expression INC as the address inside
5409 MEM which occurs in INSN. Currently, the address of MEM is an expression
5410 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
5411 that has a single set whose source is a PLUS of INCR_REG and something
5415 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
5416 struct propagate_block_info *pbi;
5417 rtx inc, insn, mem, incr, incr_reg;
5419 int regno = REGNO (incr_reg);
5420 rtx set = single_set (incr);
5421 rtx q = SET_DEST (set);
5422 rtx y = SET_SRC (set);
5423 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
5425 /* Make sure this reg appears only once in this insn. */
5426 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
/* Simple case: INCR_REG dies at (or is set by) INCR, so the MEM
   address in INSN can be rewritten in place.  */
5429 if (dead_or_set_p (incr, incr_reg)
5430 /* Mustn't autoinc an eliminable register. */
5431 && (regno >= FIRST_PSEUDO_REGISTER
5432 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
5434 /* This is the simple case. Try to make the auto-inc. If
5435 we can't, we are done. Otherwise, we will do any
5436 needed updates below. */
5437 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
5440 else if (GET_CODE (q) == REG
5441 /* PREV_INSN used here to check the semi-open interval
5443 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
5444 /* We must also check for sets of q as q may be
5445 a call clobbered hard register and there may
5446 be a call between PREV_INSN (insn) and incr. */
5447 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
5449 /* We have *p followed sometime later by q = p+size.
5450 Both p and q must be live afterward,
5451 and q is not used between INSN and its assignment.
5452 Change it to q = p, ...*q..., q = q+size.
5453 Then fall into the usual case. */
5457 emit_move_insn (q, incr_reg);
5458 insns = get_insns ();
5461 if (basic_block_for_insn)
5462 for (temp = insns; temp; temp = NEXT_INSN (temp))
5463 set_block_for_insn (temp, pbi->bb);
5465 /* If we can't make the auto-inc, or can't make the
5466 replacement into Y, exit. There's no point in making
5467 the change below if we can't do the auto-inc and doing
5468 so is not correct in the pre-inc case. */
5471 validate_change (insn, &XEXP (mem, 0), inc, 1);
5472 validate_change (incr, &XEXP (y, opnum), q, 1);
5473 if (! apply_change_group ())
5476 /* We now know we'll be doing this change, so emit the
5477 new insn(s) and do the updates. */
5478 emit_insns_before (insns, insn);
5480 if (pbi->bb->head == insn)
5481 pbi->bb->head = insns;
5483 /* INCR will become a NOTE and INSN won't contain a
5484 use of INCR_REG. If a use of INCR_REG was just placed in
5485 the insn before INSN, make that the next use.
5486 Otherwise, invalidate it. */
5487 if (GET_CODE (PREV_INSN (insn)) == INSN
5488 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
5489 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
5490 pbi->reg_next_use[regno] = PREV_INSN (insn);
5492 pbi->reg_next_use[regno] = 0;
5497 /* REGNO is now used in INCR which is below INSN, but
5498 it previously wasn't live here. If we don't mark
5499 it as live, we'll put a REG_DEAD note for it
5500 on this insn, which is incorrect. */
5501 SET_REGNO_REG_SET (pbi->reg_live, regno);
5503 /* If there are any calls between INSN and INCR, show
5504 that REGNO now crosses them. */
5505 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
5506 if (GET_CODE (temp) == CALL_INSN)
5507 REG_N_CALLS_CROSSED (regno)++;
5512 /* If we haven't returned, it means we were able to make the
5513 auto-inc, so update the status. First, record that this insn
5514 has an implicit side effect. */
5516 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
5518 /* Modify the old increment-insn to simply copy
5519 the already-incremented value of our register. */
5520 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
5523 /* If that makes it a no-op (copying the register into itself) delete
5524 it so it won't appear to be a "use" and a "set" of this
5526 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
5528 /* If the original source was dead, it's dead now. */
5531 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
5533 remove_note (incr, note);
5534 if (XEXP (note, 0) != incr_reg)
5535 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
5538 PUT_CODE (incr, NOTE);
5539 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
5540 NOTE_SOURCE_FILE (incr) = 0;
5543 if (regno >= FIRST_PSEUDO_REGISTER)
5545 /* Count an extra reference to the reg. When a reg is
5546 incremented, spilling it is worse, so we want to make
5547 that less likely. */
5548 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
5549 ? 1 : pbi->bb->frequency);
5551 /* Count the increment as a setting of the register,
5552 even though it isn't a SET in rtl. */
5553 REG_N_SETS (regno)++;
5557 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
5561 find_auto_inc (pbi, x, insn)
5562 struct propagate_block_info *pbi;
5566 rtx addr = XEXP (x, 0);
5567 HOST_WIDE_INT offset = 0;
5568 rtx set, y, incr, inc_val;
5570 int size = GET_MODE_SIZE (GET_MODE (x));
5572 if (GET_CODE (insn) == JUMP_INSN)
5575 /* Here we detect use of an index register which might be good for
5576 postincrement, postdecrement, preincrement, or predecrement. */
5578 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5579 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5581 if (GET_CODE (addr) != REG)
5584 regno = REGNO (addr);
5586 /* Is the next use an increment that might make auto-increment? */
5587 incr = pbi->reg_next_use[regno];
5588 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5590 set = single_set (incr);
5591 if (set == 0 || GET_CODE (set) != SET)
5595 if (GET_CODE (y) != PLUS)
5598 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5599 inc_val = XEXP (y, 1);
5600 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5601 inc_val = XEXP (y, 0);
/* Constant increment: choose among the POST/PRE INC/DEC and
   POST_MODIFY forms by comparing the increment against the access
   size and the address offset.  */
5605 if (GET_CODE (inc_val) == CONST_INT)
5607 if (HAVE_POST_INCREMENT
5608 && (INTVAL (inc_val) == size && offset == 0))
5609 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5611 else if (HAVE_POST_DECREMENT
5612 && (INTVAL (inc_val) == -size && offset == 0))
5613 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5615 else if (HAVE_PRE_INCREMENT
5616 && (INTVAL (inc_val) == size && offset == size))
5617 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5619 else if (HAVE_PRE_DECREMENT
5620 && (INTVAL (inc_val) == -size && offset == -size))
5621 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5623 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5624 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5625 gen_rtx_PLUS (Pmode,
5628 insn, x, incr, addr);
5630 else if (GET_CODE (inc_val) == REG
5631 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5635 if (HAVE_POST_MODIFY_REG && offset == 0)
5636 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5637 gen_rtx_PLUS (Pmode,
5640 insn, x, incr, addr);
5644 #endif /* AUTO_INC_DEC */
/* Record a use of register REG (a REG rtx) in INSN: update next-use
   information, per-register statistics (REG_FREQ, REG_N_REFS,
   REG_BASIC_BLOCK, regs_ever_live), REG_DEAD notes and the live set.
   COND, on targets with conditional execution, is the COND_EXEC
   condition under which the use occurs.  */
5647 mark_used_reg (pbi, reg, cond, insn)
5648 struct propagate_block_info *pbi;
5650 rtx cond ATTRIBUTE_UNUSED;
5653 unsigned int regno_first, regno_last, i;
5654 int some_was_live, some_was_dead, some_not_set;
5656 regno_last = regno_first = REGNO (reg);
5657 if (regno_first < FIRST_PSEUDO_REGISTER)
5658 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
5660 /* Find out if any of this register is live after this instruction. */
5661 some_was_live = some_was_dead = 0;
5662 for (i = regno_first; i <= regno_last; ++i)
5664 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
5665 some_was_live |= needed_regno;
5666 some_was_dead |= ! needed_regno;
5669 /* Find out if any of the register was set this insn. */
5671 for (i = regno_first; i <= regno_last; ++i)
5672 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
5674 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5676 /* Record where each reg is used, so when the reg is set we know
5677 the next insn that uses it. */
5678 pbi->reg_next_use[regno_first] = insn;
5681 if (pbi->flags & PROP_REG_INFO)
5683 if (regno_first < FIRST_PSEUDO_REGISTER)
5685 /* If this is a register we are going to try to eliminate,
5686 don't mark it live here. If we are successful in
5687 eliminating it, it need not be live unless it is used for
5688 pseudos, in which case it will have been set live when it
5689 was allocated to the pseudos. If the register will not
5690 be eliminated, reload will set it live at that point.
5692 Otherwise, record that this function uses this register. */
5693 /* ??? The PPC backend tries to "eliminate" on the pic
5694 register to itself. This should be fixed. In the mean
5695 time, hack around it. */
5697 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
5698 && (regno_first == FRAME_POINTER_REGNUM
5699 || regno_first == ARG_POINTER_REGNUM)))
5700 for (i = regno_first; i <= regno_last; ++i)
5701 regs_ever_live[i] = 1;
5705 /* Keep track of which basic block each reg appears in. */
5707 register int blocknum = pbi->bb->index;
5708 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
5709 REG_BASIC_BLOCK (regno_first) = blocknum;
5710 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
5711 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
5713 /* Count (weighted) number of uses of each reg. */
5714 REG_FREQ (regno_first)
5715 += (optimize_size || !pbi->bb->frequency ? 1 : pbi->bb->frequency);
5716 REG_N_REFS (regno_first)++;
5720 /* Record and count the insns in which a reg dies. If it is used in
5721 this insn and was dead below the insn then it dies in this insn.
5722 If it was set in this insn, we do not make a REG_DEAD note;
5723 likewise if we already made such a note. */
5724 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5728 /* Check for the case where the register dying partially
5729 overlaps the register set by this insn. */
5730 if (regno_first != regno_last)
5731 for (i = regno_first; i <= regno_last; ++i)
5732 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
5734 /* If none of the words in X is needed, make a REG_DEAD note.
5735 Otherwise, we must make partial REG_DEAD notes. */
5736 if (! some_was_live)
5738 if ((pbi->flags & PROP_DEATH_NOTES)
5739 && ! find_regno_note (insn, REG_DEAD, regno_first))
5741 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5743 if (pbi->flags & PROP_REG_INFO)
5744 REG_N_DEATHS (regno_first)++;
5748 /* Don't make a REG_DEAD note for a part of a register
5749 that is set in the insn. */
5750 for (i = regno_first; i <= regno_last; ++i)
5751 if (! REGNO_REG_SET_P (pbi->reg_live, i)
5752 && ! dead_or_set_regno_p (insn, i))
5754 = alloc_EXPR_LIST (REG_DEAD,
5755 gen_rtx_REG (reg_raw_mode[i], i),
5760 /* Mark the register as being live. */
5761 for (i = regno_first; i <= regno_last; ++i)
5763 SET_REGNO_REG_SET (pbi->reg_live, i);
5765 #ifdef HAVE_conditional_execution
5766 /* If this is a conditional use, record that fact. If it is later
5767 conditionally set, we'll know to kill the register. */
5768 if (cond != NULL_RTX)
5770 splay_tree_node node;
5771 struct reg_cond_life_info *rcli;
5776 node = splay_tree_lookup (pbi->reg_cond_dead, i);
5779 /* The register was unconditionally live previously.
5780 No need to do anything. */
5784 /* The register was conditionally live previously.
5785 Subtract the new life cond from the old death cond. */
5786 rcli = (struct reg_cond_life_info *) node->value;
5787 ncond = rcli->condition;
5788 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
5790 /* If the register is now unconditionally live,
5791 remove the entry in the splay_tree. */
5792 if (ncond == const0_rtx)
5793 splay_tree_remove (pbi->reg_cond_dead, i);
5796 rcli->condition = ncond;
5797 SET_REGNO_REG_SET (pbi->reg_cond_reg,
5798 REGNO (XEXP (cond, 0)));
5804 /* The register was not previously live at all. Record
5805 the condition under which it is still dead. */
5806 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5807 rcli->condition = not_reg_cond (cond);
5808 rcli->stores = const0_rtx;
5809 rcli->orig_condition = const0_rtx;
5810 splay_tree_insert (pbi->reg_cond_dead, i,
5811 (splay_tree_value) rcli);
5813 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5816 else if (some_was_live)
5818 /* The register may have been conditionally live previously, but
5819 is now unconditionally live. Remove it from the conditionally
5820 dead list, so that a conditional set won't cause us to think
5822 splay_tree_remove (pbi->reg_cond_dead, i);
5828 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
5829 This is done assuming the registers needed from X are those that
5830 have 1-bits in PBI->REG_LIVE.
5832 INSN is the containing instruction. If INSN is dead, this function
5836 mark_used_regs (pbi, x, cond, insn)
5837 struct propagate_block_info *pbi;
5840 register RTX_CODE code;
5842 int flags = pbi->flags;
5845 code = GET_CODE (x);
5865 /* If we are clobbering a MEM, mark any registers inside the address
5867 if (GET_CODE (XEXP (x, 0)) == MEM)
5868 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5872 /* Don't bother watching stores to mems if this is not the
5873 final pass. We'll not be deleting dead stores this round. */
5874 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5876 /* Invalidate the data for the last MEM stored, but only if MEM is
5877 something that can be stored into. */
5878 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5879 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5880 /* Needn't clear the memory set list. */
5884 rtx temp = pbi->mem_set_list;
5885 rtx prev = NULL_RTX;
5890 next = XEXP (temp, 1);
5891 if (anti_dependence (XEXP (temp, 0), x))
5893 /* Splice temp out of the list. */
5895 XEXP (prev, 1) = next;
5897 pbi->mem_set_list = next;
5898 free_EXPR_LIST_node (temp);
5899 pbi->mem_set_list_len--;
5907 /* If the memory reference had embedded side effects (autoincrement
5908 address modes. Then we may need to kill some entries on the
5911 invalidate_mems_from_autoinc (pbi, insn);
5915 if (flags & PROP_AUTOINC)
5916 find_auto_inc (pbi, x, insn);
5921 #ifdef CLASS_CANNOT_CHANGE_MODE
5922 if (GET_CODE (SUBREG_REG (x)) == REG
5923 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5924 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5925 GET_MODE (SUBREG_REG (x))))
5926 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5929 /* While we're here, optimize this case.  (NOTE(review): presumably
   X is narrowed to SUBREG_REG (X) before the REG test below —
   confirm against full source.)  */
5931 if (GET_CODE (x) != REG)
5936 /* See a register other than being set => mark it as needed. */
5937 mark_used_reg (pbi, x, cond, insn);
5942 register rtx testreg = SET_DEST (x);
5945 /* If storing into MEM, don't show it as being used. But do
5946 show the address as being used. */
5947 if (GET_CODE (testreg) == MEM)
5950 if (flags & PROP_AUTOINC)
5951 find_auto_inc (pbi, testreg, insn);
5953 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5954 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5958 /* Storing in STRICT_LOW_PART is like storing in a reg
5959 in that this SET might be dead, so ignore it in TESTREG.
5960 but in some other ways it is like using the reg.
5962 Storing in a SUBREG or a bit field is like storing the entire
5963 register in that if the register's value is not used
5964 then this SET is not needed. */
5965 while (GET_CODE (testreg) == STRICT_LOW_PART
5966 || GET_CODE (testreg) == ZERO_EXTRACT
5967 || GET_CODE (testreg) == SIGN_EXTRACT
5968 || GET_CODE (testreg) == SUBREG)
5970 #ifdef CLASS_CANNOT_CHANGE_MODE
5971 if (GET_CODE (testreg) == SUBREG
5972 && GET_CODE (SUBREG_REG (testreg)) == REG
5973 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5974 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5975 GET_MODE (testreg)))
5976 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5979 /* Modifying a single register in an alternate mode
5980 does not use any of the old value. But these other
5981 ways of storing in a register do use the old value. */
5982 if (GET_CODE (testreg) == SUBREG
5983 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5988 testreg = XEXP (testreg, 0);
5991 /* If this is a store into a register or group of registers,
5992 recursively scan the value being stored. */
5994 if ((GET_CODE (testreg) == PARALLEL
5995 && GET_MODE (testreg) == BLKmode)
5996 || (GET_CODE (testreg) == REG
5997 && (regno = REGNO (testreg),
5998 ! (regno == FRAME_POINTER_REGNUM
5999 && (! reload_completed || frame_pointer_needed)))
6000 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
6001 && ! (regno == HARD_FRAME_POINTER_REGNUM
6002 && (! reload_completed || frame_pointer_needed))
6004 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
6005 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
6010 mark_used_regs (pbi, SET_DEST (x), cond, insn);
6011 mark_used_regs (pbi, SET_SRC (x), cond, insn);
6018 case UNSPEC_VOLATILE:
6022 /* Traditional and volatile asm instructions must be considered to use
6023 and clobber all hard registers, all pseudo-registers and all of
6024 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
6026 Consider for instance a volatile asm that changes the fpu rounding
6027 mode. An insn should not be moved across this even if it only uses
6028 pseudo-regs because it might give an incorrectly rounded result.
6030 ?!? Unfortunately, marking all hard registers as live causes massive
6031 problems for the register allocator and marking all pseudos as live
6032 creates mountains of uninitialized variable warnings.
6034 So for now, just clear the memory set list and mark any regs
6035 we can find in ASM_OPERANDS as used. */
6036 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
6038 free_EXPR_LIST_list (&pbi->mem_set_list);
6039 pbi->mem_set_list_len = 0;
6042 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
6043 We can not just fall through here since then we would be confused
6044 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
6045 traditional asms unlike their normal usage. */
6046 if (code == ASM_OPERANDS)
6050 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
6051 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
6057 if (cond != NULL_RTX)
6060 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
6062 cond = COND_EXEC_TEST (x);
6063 x = COND_EXEC_CODE (x);
6067 /* We _do_not_ want to scan operands of phi nodes. Operands of
6068 a phi function are evaluated only when control reaches this
6069 block along a particular edge. Therefore, regs that appear
6070 as arguments to phi should not be added to the global live at
6078 /* Recursively scan the operands of this expression. */
6081 register const char *fmt = GET_RTX_FORMAT (code);
6084 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6088 /* Tail recursive case: save a function call level. */
6094 mark_used_regs (pbi, XEXP (x, i), cond, insn);
6096 else if (fmt[i] == 'E')
6099 for (j = 0; j < XVECLEN (x, i); j++)
6100 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
/* Try to convert INSN, which adds a constant to a register, into a
   pre-increment (or pre-decrement) on that register's next use in
   the same basic block, and delete INSN if that succeeds.  */
6109 try_pre_increment_1 (pbi, insn)
6110 struct propagate_block_info *pbi;
6113 /* Find the next use of this reg. If in same basic block,
6114 make it do pre-increment or pre-decrement if appropriate. */
6115 rtx x = single_set (insn);
6116 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
6117 * INTVAL (XEXP (SET_SRC (x), 1)));
6118 int regno = REGNO (SET_DEST (x));
6119 rtx y = pbi->reg_next_use[regno];
6121 && SET_DEST (x) != stack_pointer_rtx
6122 && BLOCK_NUM (y) == BLOCK_NUM (insn)
6123 /* Don't do this if the reg dies, or gets set in y; a standard addressing
6124 mode would be better. */
6125 && ! dead_or_set_p (y, SET_DEST (x))
6126 && try_pre_increment (y, SET_DEST (x), amount))
6128 /* We have found a suitable auto-increment and already changed
6129 insn Y to do it. So flush this increment instruction. */
6130 propagate_block_delete_insn (pbi->bb, insn);
6132 /* Count a reference to this reg for the increment insn we are
6133 deleting. When a reg is incremented, spilling it is worse,
6134 so we want to make that less likely. */
6135 if (regno >= FIRST_PSEUDO_REGISTER)
6137 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
6138 ? 1 : pbi->bb->frequency);
6139 REG_N_SETS (regno)++;
6142 /* Flush any remembered memories depending on the value of
6143 the incremented register. */
6144 invalidate_mems_from_set (pbi, SET_DEST (x));
6151 /* Try to change INSN so that it does pre-increment or pre-decrement
6152 addressing on register REG in order to add AMOUNT to REG.
6153 AMOUNT is negative for pre-decrement.
6154 Returns 1 if the change could be made.
6155 This checks all about the validity of the result of modifying INSN. */
6158 try_pre_increment (insn, reg, amount)
6160 HOST_WIDE_INT amount;
6164 /* Nonzero if we can try to make a pre-increment or pre-decrement.
6165 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
6167 /* Nonzero if we can try to make a post-increment or post-decrement.
6168 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
6169 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
6170 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
6173 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
6176 /* From the sign of increment, see which possibilities are conceivable
6177 on this target machine. */
6178 if (HAVE_PRE_INCREMENT && amount > 0)
6180 if (HAVE_POST_INCREMENT && amount > 0)
6183 if (HAVE_PRE_DECREMENT && amount < 0)
6185 if (HAVE_POST_DECREMENT && amount < 0)
6188 if (! (pre_ok || post_ok))
6191 /* It is not safe to add a side effect to a jump insn
6192 because if the incremented register is spilled and must be reloaded
6193 there would be no way to store the incremented value back in memory. */
6195 if (GET_CODE (insn) == JUMP_INSN)
/* find_use_as_address returns (rtx) 1 when REG is used other than as
   a plain memory address (or more than once); treat that like no
   usable address.  */
6200 use = find_use_as_address (PATTERN (insn), reg, 0);
6201 if (post_ok && (use == 0 || use == (rtx) 1))
6203 use = find_use_as_address (PATTERN (insn), reg, -amount);
6207 if (use == 0 || use == (rtx) 1)
6210 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
6213 /* See if this combination of instruction and addressing mode exists. */
6214 if (! validate_change (insn, &XEXP (use, 0),
6215 gen_rtx_fmt_e (amount > 0
6216 ? (do_post ? POST_INC : PRE_INC)
6217 : (do_post ? POST_DEC : PRE_DEC),
6221 /* Record that this insn now has an implicit side effect on X. */
6222 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
6226 #endif /* AUTO_INC_DEC */
6228 /* Find the place in the rtx X where REG is used as a memory address.
6229 Return the MEM rtx that so uses it.
6230 If PLUSCONST is nonzero, search instead for a memory address equivalent to
6231 (plus REG (const_int PLUSCONST)).
6233 If such an address does not appear, return 0.
6234 If REG appears more than once, or is used other than in such an address,
/* NOTE(review): the rest of the header comment is elided here; the code
   below returns (rtx) 1 as the "used unsuitably" sentinel.  */
6238 find_use_as_address (x, reg, plusconst)
6241 HOST_WIDE_INT plusconst;
6243 enum rtx_code code = GET_CODE (x);
6244 const char *fmt = GET_RTX_FORMAT (code);
6246 register rtx value = 0;
/* Direct hit: (mem REG) with no offset wanted.  */
6249 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
/* Offset hit: (mem (plus REG (const_int PLUSCONST))).  */
6252 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
6253 && XEXP (XEXP (x, 0), 0) == reg
6254 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6255 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
6258 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
6260 /* If REG occurs inside a MEM used in a bit-field reference,
6261 that is unacceptable. */
6262 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
6263 return (rtx) (HOST_WIDE_INT) 1;
6267 return (rtx) (HOST_WIDE_INT) 1;
/* Recurse over the operands, combining results; any second use or
   unsuitable use propagates the (rtx) 1 sentinel.  */
6269 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6273 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
6277 return (rtx) (HOST_WIDE_INT) 1;
6279 else if (fmt[i] == 'E')
6282 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6284 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
6288 return (rtx) (HOST_WIDE_INT) 1;
6296 /* Write information about registers and basic blocks into FILE.
6297 This is part of making a debugging dump. */
/* Print register set R to OUTF.  Hard registers are also printed with
   their names; "(nil)" is printed for a null set (guard elided here). */
6300 dump_regset (r, outf)
6307 fputs (" (nil)", outf);
6311 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
6313 fprintf (outf, " %d", i);
6314 if (i < FIRST_PSEUDO_REGISTER)
6315 fprintf (outf, " [%s]",
6320 /* Print a human-readable representation of R on the standard error
6321 stream. This function is designed to be used from within the
/* debug_regset: convenience wrapper for use from a debugger — dumps the
   register set to stderr followed by a newline (definition line elided). */
6328 dump_regset (r, stderr);
6329 putc ('\n', stderr);
/* Dump per-register usage statistics and the per-basic-block CFG
   (predecessors, successors, live-in/live-out sets) to FILE.  */
6333 dump_flow_info (file)
6337 static const char * const reg_class_names[] = REG_CLASS_NAMES;
/* First part: one line of statistics per pseudo register.  */
6339 fprintf (file, "%d registers.\n", max_regno);
6340 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
6343 enum reg_class class, altclass;
6344 fprintf (file, "\nRegister %d used %d times across %d insns",
6345 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
6346 if (REG_BASIC_BLOCK (i) >= 0)
6347 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
6349 fprintf (file, "; set %d time%s", REG_N_SETS (i),
6350 (REG_N_SETS (i) == 1) ? "" : "s");
6351 if (REG_USERVAR_P (regno_reg_rtx[i]))
6352 fprintf (file, "; user var");
6353 if (REG_N_DEATHS (i) != 1)
6354 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
6355 if (REG_N_CALLS_CROSSED (i) == 1)
6356 fprintf (file, "; crosses 1 call");
6357 else if (REG_N_CALLS_CROSSED (i))
6358 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
6359 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
6360 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
/* Report the allocator's preferred/alternate classes when non-default.  */
6361 class = reg_preferred_class (i);
6362 altclass = reg_alternate_class (i);
6363 if (class != GENERAL_REGS || altclass != ALL_REGS)
6365 if (altclass == ALL_REGS || class == ALL_REGS)
6366 fprintf (file, "; pref %s", reg_class_names[(int) class]);
6367 else if (altclass == NO_REGS)
6368 fprintf (file, "; %s or none", reg_class_names[(int) class]);
6370 fprintf (file, "; pref %s, else %s",
6371 reg_class_names[(int) class],
6372 reg_class_names[(int) altclass]);
6374 if (REG_POINTER (regno_reg_rtx[i]))
6375 fprintf (file, "; pointer");
6376 fprintf (file, ".\n");
/* Second part: the basic blocks with their edges and live regsets.  */
6379 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
6380 for (i = 0; i < n_basic_blocks; i++)
6382 register basic_block bb = BASIC_BLOCK (i);
6385 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
6386 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
6387 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
6388 fprintf (file, ", freq %i.\n", bb->frequency);
6390 fprintf (file, "Predecessors: ");
6391 for (e = bb->pred; e; e = e->pred_next)
6392 dump_edge_info (file, e, 0);
6394 fprintf (file, "\nSuccessors: ");
6395 for (e = bb->succ; e; e = e->succ_next)
6396 dump_edge_info (file, e, 1);
6398 fprintf (file, "\nRegisters live at start:");
6399 dump_regset (bb->global_live_at_start, file);
6401 fprintf (file, "\nRegisters live at end:");
6402 dump_regset (bb->global_live_at_end, file);
/* debug_flow_info: debugger convenience — dump flow info to stderr
   (definition line elided in this listing).  */
6413 dump_flow_info (stderr);
/* Print one edge E to FILE.  DO_SUCC nonzero means print the destination
   side, zero means the source side.  Also prints probability, count and
   symbolic flag names where present.  */
6417 dump_edge_info (file, e, do_succ)
6422 basic_block side = (do_succ ? e->dest : e->src);
6424 if (side == ENTRY_BLOCK_PTR)
6425 fputs (" ENTRY", file);
6426 else if (side == EXIT_BLOCK_PTR)
6427 fputs (" EXIT", file);
6429 fprintf (file, " %d", side->index);
6432 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
6436 fprintf (file, " count:");
6437 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
/* Names for the edge flag bits, in bit order.  */
6442 static const char * const bitnames[] = {
6443 "fallthru", "crit", "ab", "abcall", "eh", "fake"
6446 int i, flags = e->flags;
6450 for (i = 0; flags; i++)
6451 if (flags & (1 << i))
/* Unknown bits beyond the name table are printed numerically.  */
6457 if (i < (int) ARRAY_SIZE (bitnames))
6458 fputs (bitnames[i], file);
6460 fprintf (file, "%d", i);
6467 /* Print out one basic block with live information at start and end. */
6478 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
6479 bb->index, bb->loop_depth, bb->count);
6480 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
6483 fputs (";; Predecessors: ", outf);
6484 for (e = bb->pred; e; e = e->pred_next)
6485 dump_edge_info (outf, e, 0);
6488 fputs (";; Registers live at start:", outf);
6489 dump_regset (bb->global_live_at_start, outf);
/* Print every insn in the block, head through end inclusive.  */
6492 for (insn = bb->head, last = NEXT_INSN (bb->end);
6494 insn = NEXT_INSN (insn))
6495 print_rtl_single (outf, insn);
6497 fputs (";; Registers live at end:", outf);
6498 dump_regset (bb->global_live_at_end, outf);
6501 fputs (";; Successors: ", outf);
6502 for (e = bb->succ; e; e = e->succ_next)
6503 dump_edge_info (outf, e, 1);
/* debug_bb / debug_bb_n: debugger conveniences — dump a block (by pointer
   or by index N) to stderr (definition lines elided in this listing).  */
6511 dump_bb (bb, stderr);
6518 dump_bb (BASIC_BLOCK (n), stderr);
6521 /* Like print_rtl, but also print out live information for the start of each
6525 print_rtl_with_bb (outf, rtx_first)
6529 register rtx tmp_rtx;
6532 fprintf (outf, "(nil)\n");
/* Map each insn UID to the block it starts, the block it ends, and
   whether it lies inside one (or, defensively, multiple) blocks.  */
6536 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
6537 int max_uid = get_max_uid ();
6538 basic_block *start = (basic_block *)
6539 xcalloc (max_uid, sizeof (basic_block));
6540 basic_block *end = (basic_block *)
6541 xcalloc (max_uid, sizeof (basic_block));
6542 enum bb_state *in_bb_p = (enum bb_state *)
6543 xcalloc (max_uid, sizeof (enum bb_state));
6545 for (i = n_basic_blocks - 1; i >= 0; i--)
6547 basic_block bb = BASIC_BLOCK (i);
6550 start[INSN_UID (bb->head)] = bb;
6551 end[INSN_UID (bb->end)] = bb;
6552 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6554 enum bb_state state = IN_MULTIPLE_BB;
6555 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
6557 in_bb_p[INSN_UID (x)] = state;
/* Walk the whole insn chain, annotating block starts/ends and flagging
   insns that are outside (or in more than one) basic block.  */
6564 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
6569 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
6571 fprintf (outf, ";; Start of basic block %d, registers live:",
6573 dump_regset (bb->global_live_at_start, outf);
6577 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
6578 && GET_CODE (tmp_rtx) != NOTE
6579 && GET_CODE (tmp_rtx) != BARRIER)
6580 fprintf (outf, ";; Insn is not within a basic block\n");
6581 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
6582 fprintf (outf, ";; Insn is in multiple basic blocks\n");
6584 did_output = print_rtl_single (outf, tmp_rtx);
6586 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6588 fprintf (outf, ";; End of basic block %d, registers live:\n",
6590 dump_regset (bb->global_live_at_end, outf);
/* Epilogue delay-slot insns are not on the main chain; print them too.  */
6603 if (current_function_epilogue_delay_list != 0)
6605 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6606 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6607 tmp_rtx = XEXP (tmp_rtx, 1))
6608 print_rtl_single (outf, XEXP (tmp_rtx, 0));
6612 /* Dump the rtl into the current debugging dump file, then abort. */
6615 print_rtl_and_abort_fcn (file, line, function)
6618 const char *function;
6622 print_rtl_with_bb (rtl_dump_file, get_insns ());
/* Close the dump before aborting so the output is flushed to disk.  */
6623 fclose (rtl_dump_file);
6626 fancy_abort (file, line, function);
6629 /* Recompute register set/reference counts immediately prior to register
6632 This avoids problems with set/reference counts changing to/from values
6633 which have special meanings to the register allocators.
6635 Additionally, the reference counts are the primary component used by the
6636 register allocators to prioritize pseudos for allocation to hard regs.
6637 More accurate reference counts generally lead to better register allocation.
6639 F is the first insn to be scanned.
6641 LOOP_STEP denotes how much loop_depth should be incremented per
6642 loop nesting level in order to increase the ref count more for
6643 references in a loop.
6645 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6646 possibly other information which is used by the register allocators. */
/* NOTE(review): both parameters are unused now — the work is delegated to
   update_life_info with PROP_REG_INFO.  */
6649 recompute_reg_usage (f, loop_step)
6650 rtx f ATTRIBUTE_UNUSED;
6651 int loop_step ATTRIBUTE_UNUSED;
6653 allocate_reg_life_data ();
6654 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6657 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6658 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6659 of the number of registers that died. */
6662 count_or_remove_death_notes (blocks, kill)
/* Walk each selected block's insns.  */
6668 for (i = n_basic_blocks - 1; i >= 0; --i)
6673 if (blocks && ! TEST_BIT (blocks, i))
6676 bb = BASIC_BLOCK (i);
6678 for (insn = bb->head;; insn = NEXT_INSN (insn))
/* PPREV tracks the link slot so note deletion can splice in place.
   (Fixed mojibake: "&REG_NOTES" had been corrupted to a registered-sign
   character, which does not compile.)  */
6682 rtx *pprev = &REG_NOTES (insn);
6687 switch (REG_NOTE_KIND (link))
6690 if (GET_CODE (XEXP (link, 0)) == REG)
6692 rtx reg = XEXP (link, 0);
/* Hard registers may span several word-sized regs; count them all.  */
6695 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6698 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
/* When KILL, unlink and free the note, keeping PPREV valid.  */
6706 rtx next = XEXP (link, 1);
6707 free_EXPR_LIST_node (link);
6708 *pprev = link = next;
6714 pprev = &XEXP (link, 1);
6721 if (insn == bb->end)
6730 /* Update insns block within BB. */
6733 update_bb_for_insn (bb)
/* No insn->block map allocated yet: nothing to update.  */
6738 if (! basic_block_for_insn)
6741 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6743 set_block_for_insn (insn, bb);
6745 if (insn == bb->end)
6751 /* Record INSN's block as BB. */
6754 set_block_for_insn (insn, bb)
6758 size_t uid = INSN_UID (insn);
/* Grow the uid->block varray on demand.  */
6759 if (uid >= basic_block_for_insn->num_elements)
6763 /* Add one-eighth the size so we don't keep calling xrealloc. */
6764 new_size = uid + (uid + 7) / 8;
6766 VARRAY_GROW (basic_block_for_insn, new_size);
6768 VARRAY_BB (basic_block_for_insn, uid) = bb;
6771 /* When a new insn has been inserted into an existing block, it will
6772 sometimes emit more than a single insn. This routine will set the
6773 block number for the specified insn, and look backwards in the insn
6774 chain to see if there are any other uninitialized insns immediately
6775 previous to this one, and set the block number for them too. */
6778 set_block_for_new_insns (insn, bb)
6782 set_block_for_insn (insn, bb);
6784 /* Scan the previous instructions setting the block number until we find
6785 an instruction that has the block number set, or we find a note
6787 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
6789 if (GET_CODE (insn) == NOTE)
/* A UID past the varray's end means the insn was never registered.  */
6791 if (INSN_UID (insn) >= basic_block_for_insn->num_elements
6792 || BLOCK_FOR_INSN (insn) == 0)
6793 set_block_for_insn (insn, bb);
6799 /* Verify the CFG consistency. This function check some CFG invariants and
6800 aborts when something is wrong. Hope that this function will help to
6801 convert many optimization passes to keep the CFG consistent.
6803 Currently it does following checks:
6805 - test head/end pointers
6806 - overlapping of basic blocks
6807 - edge list correctness
6808 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6809 - tails of basic blocks (ensure that boundary is necessary)
6810 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6811 and NOTE_INSN_BASIC_BLOCK
6812 - check that all insns are in the basic blocks
6813 (except the switch handling code, barriers and notes)
6814 - check that all returns are followed by barriers
6816 In future it can be extended check a lot of other stuff as well
6817 (reachability of basic blocks, life information, etc. etc.). */
6822 const int max_uid = get_max_uid ();
6823 const rtx rtx_first = get_insns ();
6824 rtx last_head = get_last_insn ();
6825 basic_block *bb_info;
6827 int i, last_bb_num_seen, num_bb_notes, err = 0;
6829 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
/* Pass 1: walk blocks from last to first, verifying head/end membership
   in the insn chain and filling in the per-uid block map.  */
6831 for (i = n_basic_blocks - 1; i >= 0; i--)
6833 basic_block bb = BASIC_BLOCK (i);
6834 rtx head = bb->head;
6837 /* Verify the end of the basic block is in the INSN chain. */
6838 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
6843 error ("End insn %d for block %d not found in the insn stream.",
6844 INSN_UID (end), bb->index);
6848 /* Work backwards from the end to the head of the basic block
6849 to verify the head is in the RTL chain. */
6850 for (; x != NULL_RTX; x = PREV_INSN (x))
6852 /* While walking over the insn chain, verify insns appear
6853 in only one basic block and initialize the BB_INFO array
6854 used by other passes. */
6855 if (bb_info[INSN_UID (x)] != NULL)
6857 error ("Insn %d is in multiple basic blocks (%d and %d)",
6858 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6861 bb_info[INSN_UID (x)] = bb;
6868 error ("Head insn %d for block %d not found in the insn stream.",
6869 INSN_UID (head), bb->index);
6876 /* Now check the basic blocks (boundaries etc.) */
6877 for (i = n_basic_blocks - 1; i >= 0; i--)
6879 basic_block bb = BASIC_BLOCK (i);
6880 /* Check correctness of edge lists */
6889 "verify_flow_info: Basic block %d succ edge is corrupted\n",
6891 fprintf (stderr, "Predecessor: ");
6892 dump_edge_info (stderr, e, 0);
6893 fprintf (stderr, "\nSuccessor: ");
6894 dump_edge_info (stderr, e, 1);
/* Every successor edge must appear on its destination's pred list.  */
6898 if (e->dest != EXIT_BLOCK_PTR)
6900 edge e2 = e->dest->pred;
6901 while (e2 && e2 != e)
6905 error ("Basic block %i edge lists are corrupted", bb->index);
6917 error ("Basic block %d pred edge is corrupted", bb->index);
6918 fputs ("Predecessor: ", stderr);
6919 dump_edge_info (stderr, e, 0);
6920 fputs ("\nSuccessor: ", stderr);
6921 dump_edge_info (stderr, e, 1);
6922 fputc ('\n', stderr);
/* Symmetrically, every pred edge must be on its source's succ list.  */
6925 if (e->src != ENTRY_BLOCK_PTR)
6927 edge e2 = e->src->succ;
6928 while (e2 && e2 != e)
6932 error ("Basic block %i edge lists are corrupted", bb->index);
6939 /* OK pointers are correct. Now check the header of basic
6940 block. It ought to contain optional CODE_LABEL followed
6941 by NOTE_BASIC_BLOCK. */
6943 if (GET_CODE (x) == CODE_LABEL)
6947 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6953 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
6955 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6962 /* Do checks for empty blocks here */
/* A block-start note in the middle of a block means boundaries drifted.  */
6969 if (NOTE_INSN_BASIC_BLOCK_P (x))
6971 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6972 INSN_UID (x), bb->index);
/* Control-flow insns may only appear at a block boundary.  */
6979 if (GET_CODE (x) == JUMP_INSN
6980 || GET_CODE (x) == CODE_LABEL
6981 || GET_CODE (x) == BARRIER)
6983 error ("In basic block %d:", bb->index);
6984 fatal_insn ("Flow control insn inside a basic block", x);
/* Pass 2: scan the full insn chain for numbering and stray insns.  */
6992 last_bb_num_seen = -1;
6997 if (NOTE_INSN_BASIC_BLOCK_P (x))
6999 basic_block bb = NOTE_BASIC_BLOCK (x);
7001 if (bb->index != last_bb_num_seen + 1)
7002 /* Basic blocks not numbered consecutively. */
7005 last_bb_num_seen = bb->index;
7008 if (!bb_info[INSN_UID (x)])
7010 switch (GET_CODE (x))
7017 /* An addr_vec is placed outside any basic block. */
7019 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
7020 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
7021 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
7026 /* But in any case, non-deletable labels can appear anywhere. */
7030 fatal_insn ("Insn outside basic block", x);
/* An unconditional return must be followed by a barrier.  */
7035 && GET_CODE (x) == JUMP_INSN
7036 && returnjump_p (x) && ! condjump_p (x)
7037 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
7038 fatal_insn ("Return not followed by barrier", x);
7043 if (num_bb_notes != n_basic_blocks)
7045 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
7046 num_bb_notes, n_basic_blocks);
7055 /* Functions to access an edge list with a vector representation.
7056 Enough data is kept such that given an index number, the
7057 pred and succ that edge represents can be determined, or
7058 given a pred and a succ, its index number can be returned.
7059 This allows algorithms which consume a lot of memory to
7060 represent the normally full matrix of edge (pred,succ) with a
7061 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
7062 wasted space in the client code due to sparse flow graphs. */
7064 /* This function initializes the edge list. Basically the entire
7065 flowgraph is processed, and all edges are assigned a number,
7066 and the data structure is filled in. */
/* NOTE(review): the definition line is elided here — this is the edge-list
   constructor (create_edge_list in flow.c).  */
7071 struct edge_list *elist;
/* +2 accounts for the ENTRY and EXIT pseudo-blocks.  */
7077 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
7081 /* Determine the number of edges in the flow graph by counting successor
7082 edges on each basic block. */
7083 for (x = 0; x < n_basic_blocks; x++)
7085 basic_block bb = BASIC_BLOCK (x);
7087 for (e = bb->succ; e; e = e->succ_next)
7090 /* Don't forget successors of the entry block. */
7091 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7094 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
7095 elist->num_blocks = block_count;
7096 elist->num_edges = num_edges;
7097 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
7101 /* Follow successors of the entry block, and register these edges. */
7102 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7104 elist->index_to_edge[num_edges] = e;
7108 for (x = 0; x < n_basic_blocks; x++)
7110 basic_block bb = BASIC_BLOCK (x);
7112 /* Follow all successors of blocks, and register these edges. */
7113 for (e = bb->succ; e; e = e->succ_next)
7115 elist->index_to_edge[num_edges] = e;
7122 /* This function frees memory associated with an edge list. */
7125 free_edge_list (elist)
7126 struct edge_list *elist;
7130 free (elist->index_to_edge);
7135 /* This function provides debug output showing an edge list. */
7138 print_edge_list (f, elist)
7140 struct edge_list *elist;
7143 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
7144 elist->num_blocks - 2, elist->num_edges);
/* One line per edge: "index - edge(pred,succ)"; entry/exit are named.  */
7146 for (x = 0; x < elist->num_edges; x++)
7148 fprintf (f, " %-4d - edge(", x);
7149 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
7150 fprintf (f, "entry,");
7152 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
7154 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
7155 fprintf (f, "exit)\n");
7157 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
7161 /* This function provides an internal consistency check of an edge list,
7162 verifying that all edges are present, and that there are no
7166 verify_edge_list (f, elist)
7168 struct edge_list *elist;
7170 int x, pred, succ, index;
/* Forward check: every CFG edge must be findable in the list with the
   correct pred/succ recorded at its index.  */
7173 for (x = 0; x < n_basic_blocks; x++)
7175 basic_block bb = BASIC_BLOCK (x);
7177 for (e = bb->succ; e; e = e->succ_next)
7179 pred = e->src->index;
7180 succ = e->dest->index;
7181 index = EDGE_INDEX (elist, e->src, e->dest);
7182 if (index == EDGE_INDEX_NO_EDGE)
7184 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7187 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7188 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7189 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7190 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7191 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7192 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
/* Same check for the entry block's successor edges.  */
7195 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7197 pred = e->src->index;
7198 succ = e->dest->index;
7199 index = EDGE_INDEX (elist, e->src, e->dest);
7200 if (index == EDGE_INDEX_NO_EDGE)
7202 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7205 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7206 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7207 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7208 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7209 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7210 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7212 /* We've verified that all the edges are in the list, now let's make sure
7213 there are no spurious edges in the list. */
/* Reverse check: for every (pred,succ) pair, the list must claim an edge
   exactly when the CFG has one.  O(n^2) pairs — debug-only code.  */
7215 for (pred = 0; pred < n_basic_blocks; pred++)
7216 for (succ = 0; succ < n_basic_blocks; succ++)
7218 basic_block p = BASIC_BLOCK (pred);
7219 basic_block s = BASIC_BLOCK (succ);
7223 for (e = p->succ; e; e = e->succ_next)
7229 for (e = s->pred; e; e = e->pred_next)
7235 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7236 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7237 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
7239 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7240 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7241 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
7242 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7243 BASIC_BLOCK (succ)));
/* Repeat the reverse check for entry->block pairs.  */
7245 for (succ = 0; succ < n_basic_blocks; succ++)
7247 basic_block p = ENTRY_BLOCK_PTR;
7248 basic_block s = BASIC_BLOCK (succ);
7252 for (e = p->succ; e; e = e->succ_next)
7258 for (e = s->pred; e; e = e->pred_next)
7264 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7265 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7266 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
7268 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7269 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7270 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
7271 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
7272 BASIC_BLOCK (succ)));
/* And for block->exit pairs.  */
7274 for (pred = 0; pred < n_basic_blocks; pred++)
7276 basic_block p = BASIC_BLOCK (pred);
7277 basic_block s = EXIT_BLOCK_PTR;
7281 for (e = p->succ; e; e = e->succ_next)
7287 for (e = s->pred; e; e = e->pred_next)
7293 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7294 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7295 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
7297 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7298 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7299 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
7300 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7305 /* This routine will determine what, if any, edge there is between
7306 a specified predecessor and successor. */
7309 find_edge_index (edge_list, pred, succ)
7310 struct edge_list *edge_list;
7311 basic_block pred, succ;
/* Linear scan over the index->edge vector; returns the matching index,
   or EDGE_INDEX_NO_EDGE when no such edge exists.  */
7314 for (x = 0; x < NUM_EDGES (edge_list); x++)
7316 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
7317 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
7320 return (EDGE_INDEX_NO_EDGE);
7323 /* This function will remove an edge from the flow graph. */
7329 edge last_pred = NULL;
7330 edge last_succ = NULL;
7332 basic_block src, dest;
/* Unlink E from its source's singly-linked succ list ...  */
7335 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
7341 last_succ->succ_next = e->succ_next;
7343 src->succ = e->succ_next;
/* ... and from its destination's pred list.  */
7345 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
7351 last_pred->pred_next = e->pred_next;
7353 dest->pred = e->pred_next;
7359 /* This routine will remove any fake successor edges for a basic block.
7360 When the edge is removed, it is also removed from whatever predecessor
7364 remove_fake_successors (bb)
/* Iterate carefully: the current edge may be removed from the list.  */
7368 for (e = bb->succ; e;)
7372 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
7377 /* This routine will remove all fake edges from the flow graph. If
7378 we remove all fake successors, it will automatically remove all
7379 fake predecessors. */
7382 remove_fake_edges ()
7386 for (x = 0; x < n_basic_blocks; x++)
7387 remove_fake_successors (BASIC_BLOCK (x));
7389 /* We've handled all successors except the entry block's. */
7390 remove_fake_successors (ENTRY_BLOCK_PTR);
7393 /* This function will add a fake edge between any block which has no
7394 successors, and the exit block. Some data flow equations require these
7398 add_noreturn_fake_exit_edges ()
7402 for (x = 0; x < n_basic_blocks; x++)
7403 if (BASIC_BLOCK (x)->succ == NULL)
7404 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
7407 /* This function adds a fake edge between any infinite loops to the
7408 exit block. Some optimizations require a path from each node to
7411 See also Morgan, Figure 3.10, pp. 82-83.
7413 The current implementation is ugly, not attempting to minimize the
7414 number of inserted fake edges. To reduce the number of fake edges
7415 to insert, add fake edges from _innermost_ loops containing only
7416 nodes not reachable from the exit block. */
7419 connect_infinite_loops_to_exit ()
7421 basic_block unvisited_block;
7423 /* Perform depth-first search in the reverse graph to find nodes
7424 reachable from the exit block. */
7425 struct depth_first_search_dsS dfs_ds;
7427 flow_dfs_compute_reverse_init (&dfs_ds);
7428 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7430 /* Repeatedly add fake edges, updating the unreachable nodes. */
/* Loop terminates when every block can reach EXIT in the reverse DFS.  */
7433 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7434 if (!unvisited_block)
7436 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7437 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7440 flow_dfs_compute_reverse_finish (&dfs_ds);
7445 /* Redirect an edge's successor from one block to another. */
7448 redirect_edge_succ (e, new_succ)
7450 basic_block new_succ;
7454 /* Disconnect the edge from the old successor block. */
/* PE walks the pred list by address so the unlink is a single store.  */
7455 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
7457 *pe = (*pe)->pred_next;
7459 /* Reconnect the edge to the new successor block. */
7460 e->pred_next = new_succ->pred;
7465 /* Redirect an edge's predecessor from one block to another. */
7468 redirect_edge_pred (e, new_pred)
7470 basic_block new_pred;
7474 /* Disconnect the edge from the old predecessor block. */
/* Mirror of redirect_edge_succ, operating on the succ list instead.  */
7475 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
7477 *pe = (*pe)->succ_next;
7479 /* Reconnect the edge to the new predecessor block. */
7480 e->succ_next = new_pred->succ;
7485 /* Dump the list of basic blocks in the bitmap NODES. */
7488 flow_nodes_print (str, nodes, file)
7490 const sbitmap nodes;
/* Output format: "STR { n1 n2 ... }".  */
7498 fprintf (file, "%s { ", str);
7499 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7500 fputs ("}\n", file);
7504 /* Dump the list of edges in the array EDGE_LIST. */
7507 flow_edge_list_print (str, edge_list, num_edges, file)
7509 const edge *edge_list;
/* Output format: "STR { s1->d1 s2->d2 ... }".  */
7518 fprintf (file, "%s { ", str);
7519 for (i = 0; i < num_edges; i++)
7520 fprintf (file, "%d->%d ", edge_list[i]->src->index,
7521 edge_list[i]->dest->index);
7522 fputs ("}\n", file);
7526 /* Dump loop related CFG information. */
7529 flow_loops_cfg_dump (loops, file)
7530 const struct loops *loops;
/* Nothing useful to print without loops, a stream, or dominator info.  */
7535 if (! loops->num || ! file || ! loops->cfg.dom)
7538 for (i = 0; i < n_basic_blocks; i++)
7542 fprintf (file, ";; %d succs { ", i);
7543 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7544 fprintf (file, "%d ", succ->dest->index);
7545 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7548 /* Dump the DFS node order. */
7549 if (loops->cfg.dfs_order)
7551 fputs (";; DFS order: ", file);
7552 for (i = 0; i < n_basic_blocks; i++)
7553 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7556 /* Dump the reverse completion node order. */
7557 if (loops->cfg.rc_order)
7559 fputs (";; RC order: ", file);
7560 for (i = 0; i < n_basic_blocks; i++)
7561 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7566 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7569 flow_loop_nested_p (outer, loop)
7573 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7577 /* Dump the loop information specified by LOOP to the stream FILE
7578 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7580 flow_loop_dump (loop, file, loop_dump_aux, verbose)
7581 const struct loop *loop;
7583 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7586 if (! loop || ! loop->header)
7589 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
7590 loop->num, INSN_UID (loop->first->head),
7591 INSN_UID (loop->last->end),
7592 loop->shared ? " shared" : "",
7593 loop->invalid ? " invalid" : "");
/* -1 marks a loop without a pre-header block.  */
7594 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
7595 loop->header->index, loop->latch->index,
7596 loop->pre_header ? loop->pre_header->index : -1,
7597 loop->first->index, loop->last->index);
7598 fprintf (file, ";; depth %d, level %d, outer %ld\n",
7599 loop->depth, loop->level,
7600 (long) (loop->outer ? loop->outer->num : -1));
7602 if (loop->pre_header_edges)
7603 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
7604 loop->num_pre_header_edges, file);
7605 flow_edge_list_print (";; entry edges", loop->entry_edges,
7606 loop->num_entries, file);
7607 fprintf (file, ";; %d", loop->num_nodes);
7608 flow_nodes_print (" nodes", loop->nodes, file);
7609 flow_edge_list_print (";; exit edges", loop->exit_edges,
7610 loop->num_exits, file);
7611 if (loop->exits_doms)
7612 flow_nodes_print (";; exit doms", loop->exits_doms, file);
/* Let the caller append pass-specific information.  */
7614 loop_dump_aux (loop, file, verbose);
7618 /* Dump the loop information specified by LOOPS to the stream FILE,
7619 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7621 flow_loops_dump (loops, file, loop_dump_aux, verbose)
7622 const struct loops *loops;
7624 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7630 num_loops = loops->num;
7631 if (! num_loops || ! file)
7634 fprintf (file, ";; %d loops found, %d levels\n",
7635 num_loops, loops->levels);
7637 for (i = 0; i < num_loops; i++)
7639 struct loop *loop = &loops->array[i];
7641 flow_loop_dump (loop, file, loop_dump_aux, verbose);
/* Report pairs of loops that share a header: they are either nested
   or disjoint, decided by a subset test on the smaller one.  */
7647 for (j = 0; j < i; j++)
7649 struct loop *oloop = &loops->array[j];
7651 if (loop->header == oloop->header)
7656 smaller = loop->num_nodes < oloop->num_nodes;
7658 /* If the union of LOOP and OLOOP is different than
7659 the larger of LOOP and OLOOP then LOOP and OLOOP
7660 must be disjoint. */
7661 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7662 smaller ? oloop : loop);
7664 ";; loop header %d shared by loops %d, %d %s\n",
7665 loop->header->index, i, j,
7666 disjoint ? "disjoint" : "nested");
7673 flow_loops_cfg_dump (loops, file);
7677 /* Free all the memory allocated for LOOPS. */
7680 flow_loops_free (loops)
7681 struct loops *loops;
7690 /* Free the loop descriptors. */
7691 for (i = 0; i < loops->num; i++)
7693 struct loop *loop = &loops->array[i];
7695 if (loop->pre_header_edges)
7696 free (loop->pre_header_edges)
7698 sbitmap_free (loop->nodes);
7699 if (loop->entry_edges)
7700 free (loop->entry_edges);
7701 if (loop->exit_edges)
7702 free (loop->exit_edges);
7703 if (loop->exits_doms)
7704 sbitmap_free (loop->exits_doms);
7706 free (loops->array);
7707 loops->array = NULL;
/* Release the shared CFG auxiliary data as well.  */
7710 sbitmap_vector_free (loops->cfg.dom);
7711 if (loops->cfg.dfs_order)
7712 free (loops->cfg.dfs_order);
7714 if (loops->shared_headers)
7715 sbitmap_free (loops->shared_headers);
7720 /* Find the entry edges into the loop with header HEADER and nodes
7721 NODES and store in ENTRY_EDGES array.  Return the number of entry
7722 edges from the loop.  Two passes over the header's predecessor list:
     the first counts entry edges, the second records them.  */
7725 flow_loop_entry_edges_find (header, nodes, entry_edges)
7727 const sbitmap nodes;
7733 *entry_edges = NULL;
/* First pass: count edges whose source lies outside the loop.  */
7736 for (e = header->pred; e; e = e->pred_next)
7738 basic_block src = e->src;
7740 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
/* NOTE(review): `sizeof (edge *)` — presumably harmless since `edge`
   itself appears to be a pointer typedef here, but `sizeof (edge)`
   would be the conventional spelling; confirm against the headers.  */
7747 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
/* Second pass: store each entry edge in the freshly allocated array.  */
7750 for (e = header->pred; e; e = e->pred_next)
7752 basic_block src = e->src;
7754 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7755 (*entry_edges)[num_entries++] = e;
7762 /* Find the exit edges from the loop using the bitmap of loop nodes
7763 NODES and store in EXIT_EDGES array.  Return the number of
7764 exit edges from the loop.  As with the entry-edge finder, this makes
     one pass to count and a second pass to record.  */
7767 flow_loop_exit_edges_find (nodes, exit_edges)
7768 const sbitmap nodes;
7777 /* Check all nodes within the loop to see if there are any
7778 successors not in the loop.  Note that a node may have several
7779 exiting edges; e.g. a node with one jumping edge and one fallthru
7780 edge, of which only one may leave the loop.  */
7782 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7783 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7785 basic_block dest = e->dest;
7787 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
/* NOTE(review): `sizeof (edge *)` rather than `sizeof (edge)` —
   presumably benign because `edge` is a pointer typedef; verify.  */
7795 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
7797 /* Store all exiting edges into an array.  */
7799 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7800 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7802 basic_block dest = e->dest;
7804 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7805 (*exit_edges)[num_exits++] = e;
7813 /* Find the nodes contained within the loop with header HEADER and
7814 latch LATCH and store in NODES.  Return the number of nodes within
     the loop.  Works backwards from the latch over predecessor edges,
     marking every block reachable without passing through the header.
     Also increments loop_depth on each block found.  */
7818 flow_loop_nodes_find (header, latch, nodes)
7827 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7830 /* Start with only the loop header in the set of loop nodes.  */
7831 sbitmap_zero (nodes);
7832 SET_BIT (nodes, header->index);
7834 header->loop_depth++;
7836 /* Push the loop latch on to the stack.  */
7837 if (! TEST_BIT (nodes, latch->index))
7839 SET_BIT (nodes, latch->index);
7840 latch->loop_depth++;
7842 stack[sp++] = latch;
/* Backwards walk: pop a node and examine all its predecessors.  */
7851 for (e = node->pred; e; e = e->pred_next)
7853 basic_block ancestor = e->src;
7855 /* If each ancestor not marked as part of loop, add to set of
7856 loop nodes and push on to stack.  */
7857 if (ancestor != ENTRY_BLOCK_PTR
7858 && ! TEST_BIT (nodes, ancestor->index))
7860 SET_BIT (nodes, ancestor->index);
7861 ancestor->loop_depth++;
7863 stack[sp++] = ancestor;
7871 /* Compute the depth first search order and store in the array
7872 DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
7873 RC_ORDER is non-zero, return the reverse completion number for each
7874 node.  Returns the number of nodes visited.  A depth first search
7875 tries to get as far away from the starting point as quickly as
     possible.  Implemented iteratively with an explicit edge stack.  */
7879 flow_depth_first_order_compute (dfs_order, rc_order)
/* Reverse-completion numbers are assigned from the top down.  */
7886 int rcnum = n_basic_blocks - 1;
7889 /* Allocate stack for back-tracking up CFG.  */
7890 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7893 /* Allocate bitmap to track nodes that have been visited.  */
7894 visited = sbitmap_alloc (n_basic_blocks);
7896 /* None of the nodes in the CFG have been visited yet.  */
7897 sbitmap_zero (visited);
7899 /* Push the first edge on to the stack.  */
7900 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7908 /* Look at the edge on the top of the stack.  */
7913 /* Check if the edge destination has been visited yet.  */
7914 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7916 /* Mark that we have visited the destination.  */
7917 SET_BIT (visited, dest->index);
7920 dfs_order[dfsnum++] = dest->index;
7924 /* Since the DEST node has been visited for the first
7925 time, check its successors.  */
7926 stack[sp++] = dest->succ;
7930 /* There are no successors for the DEST node so assign
7931 its reverse completion number.  */
7933 rc_order[rcnum--] = dest->index;
/* Backtracking: when SRC has no further successors, it is complete.  */
7938 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7940 /* There are no more successors for the SRC node
7941 so assign its reverse completion number.  */
7943 rc_order[rcnum--] = src->index;
/* Advance to the next sibling edge of the one just processed.  */
7947 stack[sp - 1] = e->succ_next;
7954 sbitmap_free (visited);
7956 /* The number of nodes visited should not be greater than
     n_basic_blocks; more would indicate a corrupted CFG.  */
7958 if (dfsnum > n_basic_blocks)
7961 /* There are some nodes left in the CFG that are unreachable.  */
7962 if (dfsnum < n_basic_blocks)
7967 /* Compute the depth first search order on the _reverse_ graph and
7968 store in the array DFS_ORDER, marking the nodes visited in VISITED.
7969 Returns the number of nodes visited.
7971 The computation is split into three pieces:
7973 flow_dfs_compute_reverse_init () creates the necessary data
7976 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7977 structures. The block will start the search.
7979 flow_dfs_compute_reverse_execute () continues (or starts) the
7980 search using the block on the top of the stack, stopping when the
7983 flow_dfs_compute_reverse_finish () destroys the necessary data
7986 Thus, the user will probably call ..._init(), call ..._add_bb() to
7987 add a beginning basic block to the stack, call ..._execute(),
7988 possibly add another bb to the stack and again call ..._execute(),
7989 ..., and finally call _finish(). */
7991 /* Initialize the data structures used for depth-first search on the
7992 reverse graph.  If INITIALIZE_STACK is nonzero, the exit block is
7993 added to the basic block stack.  DATA is the current depth-first
7994 search context.  If INITIALIZE_STACK is non-zero, there is an
7995 element on the stack.  */
7998 flow_dfs_compute_reverse_init (data)
7999 depth_first_search_ds data;
8001 /* Allocate stack for back-tracking up CFG.  Sizes here are offset by
     (INVALID_BLOCK + 1) so the fake entry/exit block indices, which are
     negative, map onto valid bitmap/stack positions.  */
8003 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
8004 * sizeof (basic_block));
8007 /* Allocate bitmap to track nodes that have been visited.  */
8008 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
8010 /* None of the nodes in the CFG have been visited yet.  */
8011 sbitmap_zero (data->visited_blocks);
8016 /* Add the specified basic block to the top of the dfs data
8017 structures.  When the search continues, it will start at the
     block just pushed.  */
8021 flow_dfs_compute_reverse_add_bb (data, bb)
8022 depth_first_search_ds data;
8025 data->stack[data->sp++] = bb;
8029 /* Continue the depth-first search through the reverse graph starting
8030 with the block at the stack's top and ending when the stack is
8031 empty.  Visited nodes are marked.  Returns an unvisited basic
8032 block, or NULL if there is none available.  */
8035 flow_dfs_compute_reverse_execute (data)
8036 depth_first_search_ds data;
8042 while (data->sp > 0)
8044 bb = data->stack[--data->sp];
8046 /* Mark that we have visited this node.  The (INVALID_BLOCK + 1)
     offset shifts the negative fake-block indices into bitmap range.  */
8047 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
8049 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
8051 /* Perform depth-first search on adjacent vertices.  The reverse
     graph is walked by following predecessor edges.  */
8052 for (e = bb->pred; e; e = e->pred_next)
8053 flow_dfs_compute_reverse_add_bb (data, e->src);
8057 /* Determine if there are unvisited basic blocks.  */
8058 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
8059 if (!TEST_BIT (data->visited_blocks, i))
8060 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
8064 /* Destroy the data structures needed for depth-first search on the
     reverse graph, releasing the visited bitmap (and, presumably, the
     stack on an elided line — confirm against the full source).  */
8068 flow_dfs_compute_reverse_finish (data)
8069 depth_first_search_ds data;
8072 sbitmap_free (data->visited_blocks);
8077 /* Find the root node of the loop pre-header extended basic block and
8078 the edges along the trace from the root node to the loop header.
     Records the edges in loop->pre_header_edges and their count in
     loop->num_pre_header_edges.  Only applies when the loop has exactly
     one entry edge.  */
8081 flow_loop_pre_header_scan (loop)
8087 loop->num_pre_header_edges = 0;
/* A pre-header trace only makes sense for a single-entry loop.  */
8089 if (loop->num_entries != 1)
8092 ebb = loop->entry_edges[0]->src;
8094 if (ebb != ENTRY_BLOCK_PTR)
8098 /* Count number of edges along trace from loop header to
8099 root of pre-header extended basic block.  Usually this is
8100 only one or two edges.  */
/* Walk up single-predecessor blocks until a join point or the
   function entry is reached.  */
8102 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
8104 ebb = ebb->pred->src;
8108 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
8109 loop->num_pre_header_edges = num;
8111 /* Store edges in order that they are followed.  The source
8112 of the first edge is the root node of the pre-header extended
8113 basic block and the destination of the last edge is
     the loop header.  */
8115 for (e = loop->entry_edges[0]; num; e = e->src->pred)
8117 loop->pre_header_edges[--num] = e;
8123 /* Return the block for the pre-header of the loop with header
8124 HEADER where DOM specifies the dominator information.  Return NULL if
8125 there is no pre-header.  */
8128 flow_loop_pre_header_find (header, dom)
8132 basic_block pre_header;
8135 /* If block p is a predecessor of the header and is the only block
8136 that the header does not dominate, then it is the pre-header.  */
8138 for (e = header->pred; e; e = e->pred_next)
8140 basic_block node = e->src;
/* A predecessor outside the loop: not dominated by the header.  */
8142 if (node != ENTRY_BLOCK_PTR
8143 && ! TEST_BIT (dom[node->index], header->index))
/* First candidate found becomes the tentative pre-header.  */
8145 if (pre_header == NULL)
8149 /* There are multiple edges into the header from outside
8150 the loop so there is no pre-header block.  */
8159 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
8160 previously added.  The insertion algorithm assumes that the loops
8161 are added in the order found by a depth first search of the CFG.  */
8164 flow_loop_tree_node_add (prevloop, loop)
8165 struct loop *prevloop;
/* Case 1: LOOP nests directly inside the previously added loop.  */
8169 if (flow_loop_nested_p (prevloop, loop))
8171 prevloop->inner = loop;
8172 loop->outer = prevloop;
/* Case 2: climb outward until an enclosing loop of LOOP is found;
   LOOP then becomes a sibling at that level.  */
8176 while (prevloop->outer)
8178 if (flow_loop_nested_p (prevloop->outer, loop))
8180 prevloop->next = loop;
8181 loop->outer = prevloop->outer;
8184 prevloop = prevloop->outer;
/* Case 3: no enclosing loop — LOOP is a new outermost sibling.  */
8187 prevloop->next = loop;
8191 /* Build the loop hierarchy tree for LOOPS.  Loops are linked via
     their outer/inner/next fields; relies on the array being in
     depth-first discovery order.  */
8194 flow_loops_tree_build (loops)
8195 struct loops *loops;
8200 num_loops = loops->num;
8204 /* Root the loop hierarchy tree with the first loop found.
8205 Since we used a depth first search this should be the
     outermost loop.  */
8207 loops->tree = &loops->array[0];
8208 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
8210 /* Add the remaining loops to the tree.  */
8211 for (i = 1; i < num_loops; i++)
8212 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
8215 /* Helper function to compute loop nesting depth and enclosed loop level
8216 for the natural loop specified by LOOP at the loop depth DEPTH.
8217 Returns the loop level.  Recurses over the loop tree.  */
8220 flow_loop_level_compute (loop, depth)
8230 /* Traverse loop tree assigning depth and computing level as the
8231 maximum level of all the inner loops of this loop.  The loop
8232 level is equivalent to the height of the loop in the loop tree
8233 and corresponds to the number of enclosed loop levels (including
     itself).  */
8235 for (inner = loop->inner; inner; inner = inner->next)
8239 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
/* Record the computed height and depth on the loop descriptor.  */
8244 loop->level = level;
8245 loop->depth = depth;
8249 /* Compute the loop nesting depth and enclosed loop level for the loop
8250 hierarchy tree specified by LOOPS.  Return the maximum enclosed loop
     level.  */
8254 flow_loops_level_compute (loops)
8255 struct loops *loops;
8261 /* Traverse all the outer level loops.  Outermost loops start at
     depth 1; inner loops are handled by the recursive helper.  */
8262 for (loop = loops->tree; loop; loop = loop->next)
8264 level = flow_loop_level_compute (loop, 1);
8272 /* Scan a single natural loop specified by LOOP collecting information
8273 about it specified by FLAGS (LOOP_ENTRY_EDGES, LOOP_EXIT_EDGES,
     LOOP_EXITS_DOMS, LOOP_PRE_HEADER).  */
8276 flow_loop_scan (loops, loop, flags)
8277 struct loops *loops;
8281 /* Determine prerequisites.  Exit-dominator computation needs the
     exit edges to have been found first.  */
8282 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
8283 flags |= LOOP_EXIT_EDGES;
8285 if (flags & LOOP_ENTRY_EDGES)
8287 /* Find edges which enter the loop header.
8288 Note that the entry edges should only
8289 enter the header of a natural loop.  */
8291 = flow_loop_entry_edges_find (loop->header,
8293 &loop->entry_edges);
8296 if (flags & LOOP_EXIT_EDGES)
8298 /* Find edges which exit the loop.  */
8300 = flow_loop_exit_edges_find (loop->nodes,
8304 if (flags & LOOP_EXITS_DOMS)
8308 /* Determine which loop nodes dominate all the exits:
     start with the full node set and intersect with the dominators
     of each exit edge's source block.  */
8310 loop->exits_doms = sbitmap_alloc (n_basic_blocks)
8311 sbitmap_copy (loop->exits_doms, loop->nodes);
8312 for (j = 0; j < loop->num_exits; j++)
8313 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
8314 loops->cfg.dom[loop->exit_edges[j]->src->index]);
8316 /* The header of a natural loop must dominate
     all exits; anything else indicates a malformed loop.  */
8318 if (! TEST_BIT (loop->exits_doms, loop->header->index))
8322 if (flags & LOOP_PRE_HEADER)
8324 /* Look to see if the loop has a pre-header node.  */
8326 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
8328 /* Find the blocks within the extended basic block of
8329 the loop pre-header.  */
8330 flow_loop_pre_header_scan (loop);
8336 /* Find all the natural loops in the function and save in LOOPS structure
8337 and recalculate loop_depth information in basic block structures.
8338 FLAGS controls which loop information is collected.
8339 Return the number of natural loops found.

     Algorithm: (1) compute dominators; (2) count back edges to size the
     loop array; (3) compute a reverse-completion DFS order so outer
     loops are discovered before inner ones; (4) record a loop for each
     back edge and scan it per FLAGS; (5) mark loops sharing a header;
     (6) build the loop tree and compute nesting levels.  */
8342 flow_loops_find (loops, flags)
8343 struct loops *loops;
8355 /* This function cannot be repeatedly called with different
8356 flags to build up the loop information.  The loop tree
8357 must always be built if this function is called.  */
8358 if (! (flags & LOOP_TREE))
8361 memset (loops, 0, sizeof (*loops));
8363 /* Taking care of this degenerate case makes the rest of
8364 this code simpler.  */
8365 if (n_basic_blocks == 0)
8371 /* Compute the dominators.  */
8372 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
8373 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
8375 /* Count the number of loop edges (back edges).  This should be the
8376 same as the number of natural loops.  */
8379 for (b = 0; b < n_basic_blocks; b++)
8383 header = BASIC_BLOCK (b);
/* Reset loop_depth; flow_loop_nodes_find re-increments it later.  */
8384 header->loop_depth = 0;
8386 for (e = header->pred; e; e = e->pred_next)
8388 basic_block latch = e->src;
8390 /* Look for back edges where a predecessor is dominated
8391 by this block.  A natural loop has a single entry
8392 node (header) that dominates all the nodes in the
8393 loop.  It also has single back edge to the header
8394 from a latch node.  Note that multiple natural loops
8395 may share the same header.  */
8396 if (b != header->index)
8399 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
8406 /* Compute depth first search order of the CFG so that outer
8407 natural loops will be found before inner natural loops.  */
8408 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8409 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8410 flow_depth_first_order_compute (dfs_order, rc_order);
8412 /* Save CFG derived information to avoid recomputing it.  */
8413 loops->cfg.dom = dom;
8414 loops->cfg.dfs_order = dfs_order;
8415 loops->cfg.rc_order = rc_order;
8417 /* Allocate loop structures.  */
8419 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
/* HEADERS tracks blocks already seen as a loop header, so a second
   sighting flags the header as shared.  */
8421 headers = sbitmap_alloc (n_basic_blocks);
8422 sbitmap_zero (headers);
8424 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
8425 sbitmap_zero (loops->shared_headers);
8427 /* Find and record information about all the natural loops
     in the CFG.  */
8430 for (b = 0; b < n_basic_blocks; b++)
8434 /* Search the nodes of the CFG in reverse completion order
8435 so that we can find outer loops first.  */
8436 header = BASIC_BLOCK (rc_order[b]);
8438 /* Look for all the possible latch blocks for this header.  */
8439 for (e = header->pred; e; e = e->pred_next)
8441 basic_block latch = e->src;
8443 /* Look for back edges where a predecessor is dominated
8444 by this block.  A natural loop has a single entry
8445 node (header) that dominates all the nodes in the
8446 loop.  It also has single back edge to the header
8447 from a latch node.  Note that multiple natural loops
8448 may share the same header.  */
8449 if (latch != ENTRY_BLOCK_PTR
8450 && TEST_BIT (dom[latch->index], header->index))
8454 loop = loops->array + num_loops;
8456 loop->header = header;
8457 loop->latch = latch;
8458 loop->num = num_loops;
/* Second pass over the recorded loops: find members, mark shared
   headers, and collect the per-loop info requested by FLAGS.  */
8465 for (i = 0; i < num_loops; i++)
8467 struct loop *loop = &loops->array[i];
8469 /* Keep track of blocks that are loop headers so
8470 that we can tell which loops should be merged.  */
8471 if (TEST_BIT (headers, loop->header->index))
8472 SET_BIT (loops->shared_headers, loop->header->index);
8473 SET_BIT (headers, loop->header->index);
8475 /* Find nodes contained within the loop.  */
8476 loop->nodes = sbitmap_alloc (n_basic_blocks);
8478 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
8480 /* Compute first and last blocks within the loop.
8481 These are often the same as the loop header and
8482 loop latch respectively, but this is not always
     the case.  */
8485 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
8487 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
8489 flow_loop_scan (loops, loop, flags);
8492 /* Natural loops with shared headers may either be disjoint or
8493 nested.  Disjoint loops with shared headers cannot be inner
8494 loops and should be merged.  For now just mark loops that share
     headers.  */
8496 for (i = 0; i < num_loops; i++)
8497 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
8498 loops->array[i].shared = 1;
8500 sbitmap_free (headers);
8503 loops->num = num_loops;
8505 /* Build the loop hierarchy tree.  */
8506 flow_loops_tree_build (loops);
8508 /* Assign the loop nesting depth and enclosed loop level for each
     loop in the hierarchy tree.  */
8510 loops->levels = flow_loops_level_compute (loops);
8516 /* Update the information regarding the loops in the CFG
8517 specified by LOOPS.  Returns the number of loops found, as
     flow_loops_find does.  */
8519 flow_loops_update (loops, flags)
8520 struct loops *loops;
8523 /* One day we may want to update the current loop data.  For now
8524 throw away the old stuff and rebuild what we need.  */
8526 flow_loops_free (loops);
8528 return flow_loops_find (loops, flags);
8532 /* Return non-zero if edge E enters header of LOOP from outside of LOOP.  */
8535 flow_loop_outside_edge_p (loop, e)
8536 const struct loop *loop;
/* Only edges targeting the loop header can qualify.  */
8539 if (e->dest != loop->header)
/* Outside means: from the function entry, or from a block not in
   the loop's node set.  */
8541 return (e->src == ENTRY_BLOCK_PTR)
8542 || ! TEST_BIT (loop->nodes, e->src->index);
8545 /* Clear LOG_LINKS fields of insns in a chain.
8546 Also clear the global_live_at_{start,end} fields of the basic block
     structures (including the fake entry and exit blocks).  */
8550 clear_log_links (insns)
/* Walk the insn chain; the LOG_LINKS clearing itself is on an elided
   line — presumably inside this loop.  */
8556 for (i = insns; i; i = NEXT_INSN (i))
8560 for (b = 0; b < n_basic_blocks; b++)
8562 basic_block bb = BASIC_BLOCK (b);
8564 bb->global_live_at_start = NULL;
8565 bb->global_live_at_end = NULL;
/* The fake entry/exit blocks carry liveness too; clear it as well.  */
8568 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
8569 EXIT_BLOCK_PTR->global_live_at_start = NULL;
8572 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
8573 correspond to the hard registers, if any, set in that map.  This
8574 could be done far more efficiently by having all sorts of special-cases
8575 with moving single words, but probably isn't worth the trouble.  */
8578 reg_set_to_hard_reg_set (to, from)
/* Iterate over set bits in FROM; bits at or above FIRST_PSEUDO_REGISTER
   are pseudos, not hard registers — presumably the elided line after the
   test bails out of the walk (hard regs occupy the low indices).  */
8584 EXECUTE_IF_SET_IN_BITMAP
8587 if (i >= FIRST_PSEUDO_REGISTER)
8589 SET_HARD_REG_BIT (*to, i);
8593 /* Called once at initialization time. */
8598 static int initialized;
8602 gcc_obstack_init (&flow_obstack);
8603 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
8608 obstack_free (&flow_obstack, flow_firstobj);
8609 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);