1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
132 #include "function.h"
136 #include "insn-flags.h"
141 #include "splay-tree.h"
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
165 #define LOCAL_REGNO(REGNO) 0
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
171 /* The obstack on which the flow graph components are allocated. */
173 struct obstack flow_obstack;
174 static char *flow_firstobj;
176 /* Number of basic blocks in the current function. */
180 /* Number of edges in the current function. */
184 /* The basic block array. */
186 varray_type basic_block_info;
188 /* The special entry and exit blocks. */
190 struct basic_block_def entry_exit_blocks[2]
195 NULL, /* local_set */
196 NULL, /* global_live_at_start */
197 NULL, /* global_live_at_end */
199 ENTRY_BLOCK, /* index */
201 -1, -1, /* eh_beg, eh_end */
209 NULL, /* local_set */
210 NULL, /* global_live_at_start */
211 NULL, /* global_live_at_end */
213 EXIT_BLOCK, /* index */
215 -1, -1, /* eh_beg, eh_end */
220 /* Nonzero if the second flow pass has completed. */
223 /* Maximum register number used in this function, plus one. */
227 /* Indexed by n, giving various register information */
229 varray_type reg_n_info;
231 /* Size of a regset for the current function,
232 in (1) bytes and (2) elements. */
237 /* Regset of regs live when calls to `setjmp'-like functions happen. */
238 /* ??? Does this exist only for the setjmp-clobbered warning message? */
240 regset regs_live_at_setjmp;
242 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
243 that have to go in the same hard reg.
244 The first two regs in the list are a pair, and the next two
245 are another pair, etc. */
248 /* Set of registers that may be eliminable. These are handled specially
249 in updating regs_ever_live. */
251 static HARD_REG_SET elim_reg_set;
253 /* The basic block structure for every insn, indexed by uid. */
255 varray_type basic_block_for_insn;
257 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
258 /* ??? Should probably be using LABEL_NUSES instead. It would take a
259 bit of surgery to be able to use or co-opt the routines in jump. */
261 static rtx label_value_list;
262 static rtx tail_recursion_label_list;
264 /* Holds information for tracking conditional register life information. */
265 struct reg_cond_life_info
267 /* An EXPR_LIST of conditions under which a register is dead. */
270 /* ??? Could store mask of bytes that are dead, so that we could finally
271 track lifetimes of multi-word registers accessed via subregs. */
274 /* For use in communicating between propagate_block and its subroutines.
275 Holds all information needed to compute life and def-use information. */
277 struct propagate_block_info
279 /* The basic block we're considering. */
282 /* Bit N is set if register N is conditionally or unconditionally live. */
285 /* Bit N is set if register N is set this insn. */
288 /* Element N is the next insn that uses (hard or pseudo) register N
289 within the current basic block; or zero, if there is no such insn. */
292 /* Contains a list of all the MEMs we are tracking for dead store
296 /* If non-null, record the set of registers set in the basic block. */
299 #ifdef HAVE_conditional_execution
300 /* Indexed by register number, holds a reg_cond_life_info for each
301 register that is not unconditionally live or dead. */
302 splay_tree reg_cond_dead;
304 /* Bit N is set if register N is in an expression in reg_cond_dead. */
308 /* Non-zero if the value of CC0 is live. */
311 /* Flags controlling the set of information propagate_block collects. */
315 /* Store the data structures necessary for depth-first search. */
316 struct depth_first_search_dsS {
317 /* stack for backtracking during the algorithm */
320 /* number of edges in the stack. That is, positions 0, ..., sp-1
324 /* record of basic blocks already seen by depth-first search */
325 sbitmap visited_blocks;
327 typedef struct depth_first_search_dsS *depth_first_search_ds;
329 /* Forward declarations */
330 static int count_basic_blocks PARAMS ((rtx));
331 static void find_basic_blocks_1 PARAMS ((rtx));
332 static rtx find_label_refs PARAMS ((rtx, rtx));
333 static void clear_edges PARAMS ((void));
334 static void make_edges PARAMS ((rtx));
335 static void make_label_edge PARAMS ((sbitmap *, basic_block,
337 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
338 basic_block, rtx, int));
339 static void mark_critical_edges PARAMS ((void));
340 static void move_stray_eh_region_notes PARAMS ((void));
341 static void record_active_eh_regions PARAMS ((rtx));
343 static void commit_one_edge_insertion PARAMS ((edge));
345 static void delete_unreachable_blocks PARAMS ((void));
346 static void delete_eh_regions PARAMS ((void));
347 static int can_delete_note_p PARAMS ((rtx));
348 static void expunge_block PARAMS ((basic_block));
349 static int can_delete_label_p PARAMS ((rtx));
350 static int tail_recursion_label_p PARAMS ((rtx));
351 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
353 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
355 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
356 static void try_merge_blocks PARAMS ((void));
357 static void tidy_fallthru_edges PARAMS ((void));
358 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
359 static void verify_wide_reg PARAMS ((int, rtx, rtx));
360 static void verify_local_live_at_start PARAMS ((regset, basic_block));
361 static int set_noop_p PARAMS ((rtx));
362 static int noop_move_p PARAMS ((rtx));
363 static void delete_noop_moves PARAMS ((rtx));
364 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
365 static void notice_stack_pointer_modification PARAMS ((rtx));
366 static void mark_reg PARAMS ((rtx, void *));
367 static void mark_regs_live_at_end PARAMS ((regset));
368 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
369 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
370 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
371 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
372 static int insn_dead_p PARAMS ((struct propagate_block_info *,
374 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
376 static void mark_set_regs PARAMS ((struct propagate_block_info *,
378 static void mark_set_1 PARAMS ((struct propagate_block_info *,
379 enum rtx_code, rtx, rtx,
381 #ifdef HAVE_conditional_execution
382 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
384 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
385 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
386 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
388 static rtx ior_reg_cond PARAMS ((rtx, rtx));
389 static rtx not_reg_cond PARAMS ((rtx));
390 static rtx nand_reg_cond PARAMS ((rtx, rtx));
393 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
394 rtx, rtx, rtx, rtx, rtx));
395 static void find_auto_inc PARAMS ((struct propagate_block_info *,
397 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
399 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
401 static void mark_used_reg PARAMS ((struct propagate_block_info *,
403 static void mark_used_regs PARAMS ((struct propagate_block_info *,
405 void dump_flow_info PARAMS ((FILE *));
406 void debug_flow_info PARAMS ((void));
407 static void dump_edge_info PARAMS ((FILE *, edge, int));
409 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
411 static void remove_fake_successors PARAMS ((basic_block));
412 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
414 static void flow_edge_list_print PARAMS ((const char *, const edge *,
416 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
418 static int flow_loop_nested_p PARAMS ((struct loop *,
420 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
422 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
423 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
424 static int flow_depth_first_order_compute PARAMS ((int *, int *));
425 static void flow_dfs_compute_reverse_init
426 PARAMS ((depth_first_search_ds));
427 static void flow_dfs_compute_reverse_add_bb
428 PARAMS ((depth_first_search_ds, basic_block));
429 static basic_block flow_dfs_compute_reverse_execute
430 PARAMS ((depth_first_search_ds));
431 static void flow_dfs_compute_reverse_finish
432 PARAMS ((depth_first_search_ds));
433 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
434 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
436 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
437 static void flow_loops_tree_build PARAMS ((struct loops *));
438 static int flow_loop_level_compute PARAMS ((struct loop *, int));
439 static int flow_loops_level_compute PARAMS ((struct loops *));
440 static void allocate_bb_life_data PARAMS ((void));
/* NOTE(review): this listing is a truncated excerpt of flow.c -- the
   embedded original line numbers jump (443 -> 447, 455 -> 461, ...),
   so declarations, braces and whole statements are missing throughout
   this function.  Comments describe only what the visible lines show.  */
442 /* Find basic blocks of the current function.
443 F is the first insn of the function and NREGS the number of register
/* (The remainder of the parameter description, including FILE, is
   missing from this listing.)  */
447 find_basic_blocks (f, nregs, file)
449 int nregs ATTRIBUTE_UNUSED;
450 FILE *file ATTRIBUTE_UNUSED;
454 /* Flush out existing data. */
455 if (basic_block_info != NULL)
461 /* Clear bb->aux on all extant basic blocks. We'll use this as a
462 tag for reuse during create_basic_block, just in case some pass
463 copies around basic block notes improperly. */
464 for (i = 0; i < n_basic_blocks; ++i)
465 BASIC_BLOCK (i)->aux = NULL;
467 VARRAY_FREE (basic_block_info);
/* Step 1: count blocks so the table below can be sized up front.  */
470 n_basic_blocks = count_basic_blocks (f);
472 /* Size the basic block table. The actual structures will be allocated
473 by find_basic_blocks_1, since we want to keep the structure pointers
474 stable across calls to find_basic_blocks. */
475 /* ??? This whole issue would be much simpler if we called find_basic_blocks
476 exactly once, and thereafter we don't have a single long chain of
477 instructions at all until close to the end of compilation when we
478 actually lay them out. */
480 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
/* Step 2: partition the insn chain into blocks.  */
482 find_basic_blocks_1 (f);
484 /* Record the block to which an insn belongs. */
485 /* ??? This should be done another way, by which (perhaps) a label is
486 tagged directly with the basic block that it starts. It is used for
487 more than that currently, but IMO that is the only valid use. */
489 max_uid = get_max_uid ();
491 /* Leave space for insns life_analysis makes in some cases for auto-inc.
492 These cases are rare, so we don't need too much space. */
493 max_uid += max_uid / 10;
496 compute_bb_for_insn (max_uid);
498 /* Discover the edges of our cfg. */
499 record_active_eh_regions (f);
500 make_edges (label_value_list);
502 /* Do very simple cleanup now, for the benefit of code that runs between
503 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
504 tidy_fallthru_edges ();
506 mark_critical_edges ();
508 #ifdef ENABLE_CHECKING
/* Emit CFG-based warnings about how the current function returns:
   - suggest `noreturn' when the exit block has no predecessors,
   - warn when a function declared `noreturn' (TREE_THIS_VOLATILE on
     its decl) can in fact reach the exit block,
   - for -Wreturn-type, warn when the clobber_return_insn is still
     present in some live basic block, i.e. control can fall off the
     end of a non-void function.
   NOTE(review): truncated listing -- the return type, braces and some
   statements of this function are missing here.  */
514 check_function_return_warnings ()
516 if (warn_missing_noreturn
517 && !TREE_THIS_VOLATILE (cfun->decl)
518 && EXIT_BLOCK_PTR->pred == NULL)
519 warning ("function might be possible candidate for attribute `noreturn'");
521 /* If we have a path to EXIT, then we do return. */
522 if (TREE_THIS_VOLATILE (cfun->decl)
523 && EXIT_BLOCK_PTR->pred != NULL)
524 warning ("`noreturn' function does return");
526 /* If the clobber_return_insn appears in some basic block, then we
527 do reach the end without returning a value. */
528 else if (warn_return_type
529 && cfun->x_clobber_return_insn != NULL
530 && EXIT_BLOCK_PTR->pred != NULL)
532 int max_uid = get_max_uid ();
534 /* If clobber_return_insn was excised by jump1, then renumber_insns
535 can make max_uid smaller than the number still recorded in our rtx.
536 That's fine, since this is a quick way of verifying that the insn
537 is no longer in the chain. */
538 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
540 /* Recompute insn->block mapping, since the initial mapping is
541 set before we delete unreachable blocks. */
542 compute_bb_for_insn (max_uid);
544 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
545 warning ("control reaches end of non-void function");
550 /* Count the basic blocks of the function. */
/* A new block begins at each CODE_LABEL, and at any real insn ('i'
   rtx class) that follows a JUMP_INSN, a BARRIER, or a CALL_INSN that
   created an abnormal edge (active EH region, REG_EH_RETHROW note, or
   a possible nonlocal goto).  F is the first insn of the function.
   NOTE(review): truncated listing -- the declarations of `insn' and
   `eh_region', several braces, the count increment and the final
   return are missing from the visible lines.  */
553 count_basic_blocks (f)
557 register RTX_CODE prev_code;
558 register int count = 0;
560 int call_had_abnormal_edge = 0;
/* Seed prev_code so that the very first real insn starts a block.  */
562 prev_code = JUMP_INSN;
563 for (insn = f; insn; insn = NEXT_INSN (insn))
565 register RTX_CODE code = GET_CODE (insn);
567 if (code == CODE_LABEL
568 || (GET_RTX_CLASS (code) == 'i'
569 && (prev_code == JUMP_INSN
570 || prev_code == BARRIER
571 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
574 /* Record whether this call created an edge. */
575 if (code == CALL_INSN)
577 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
/* No REG_EH_REGION note is treated as region 1 (may throw).  */
578 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
580 call_had_abnormal_edge = 0;
582 /* If there is an EH region or rethrow, we have an edge. */
583 if ((eh_region && region > 0)
584 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
585 call_had_abnormal_edge = 1;
586 else if (nonlocal_goto_handler_labels && region >= 0)
587 /* If there is a nonlocal goto label and the specified
588 region number isn't -1, we have an edge. (0 means
589 no throw, but might have a nonlocal goto). */
590 call_had_abnormal_edge = 1;
/* Track EH region nesting depth via the region notes.  */
595 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
597 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
601 /* The rest of the compiler works a bit smoother when we don't have to
602 check for the edge case of do-nothing functions with no basic blocks. */
605 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
612 /* Scan a list of insns for labels referred to other than by jumps.
613 This is used to scan the alternatives of a call placeholder. */
/* F is the first insn of the list to scan; LVL is the EXPR_LIST of
   labels accumulated so far.  Presumably returns LVL extended with the
   labels found -- the `return' statement itself is missing from this
   truncated listing (only the list-building is visible).  */
615 find_label_refs (f, lvl)
621 for (insn = f; insn; insn = NEXT_INSN (insn))
626 /* Make a list of all labels referred to other than by jumps
627 (which just don't have the REG_LABEL notes).
629 Make a special exception for labels followed by an ADDR*VEC,
630 as this would be a part of the tablejump setup code.
632 Make a special exception for the eh_return_stub_label, which
633 we know isn't part of any otherwise visible control flow. */
635 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
636 if (REG_NOTE_KIND (note) == REG_LABEL)
638 rtx lab = XEXP (note, 0), next;
640 if (lab == eh_return_stub_label)
642 else if ((next = next_nonnote_insn (lab)) != NULL
643 && GET_CODE (next) == JUMP_INSN
644 && (GET_CODE (PATTERN (next)) == ADDR_VEC
645 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
647 else if (GET_CODE (lab) == NOTE)
/* None of the exceptions applied: record the label.  */
650 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
657 /* Find all basic blocks of the function whose first insn is F.
659 Collect and return a list of labels whose addresses are taken. This
660 will be used in make_edges for use with computed gotos. */
/* NOTE(review): truncated listing -- the switch statement over insn
   codes, many case labels, braces, and the declarations of `head',
   `end', `i', `lvl' and `trll' are missing from the visible lines.
   The structure below (label starts a block; jump/barrier/abnormal
   call ends one) matches the visible fragments only.  */
663 find_basic_blocks_1 (f)
666 register rtx insn, next;
668 rtx bb_note = NULL_RTX;
669 rtx eh_list = NULL_RTX;
675 /* We process the instructions in a slightly different way than we did
676 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
677 closed out the previous block, so that it gets attached at the proper
678 place. Since this form should be equivalent to the previous,
679 count_basic_blocks continues to use the old form as a check. */
681 for (insn = f; insn; insn = next)
683 enum rtx_code code = GET_CODE (insn);
685 next = NEXT_INSN (insn);
691 int kind = NOTE_LINE_NUMBER (insn);
693 /* Keep a LIFO list of the currently active exception notes. */
694 if (kind == NOTE_INSN_EH_REGION_BEG)
695 eh_list = alloc_INSN_LIST (insn, eh_list);
696 else if (kind == NOTE_INSN_EH_REGION_END)
700 eh_list = XEXP (eh_list, 1);
701 free_INSN_LIST_node (t);
704 /* Look for basic block notes with which to keep the
705 basic_block_info pointers stable. Unthread the note now;
706 we'll put it back at the right place in create_basic_block.
707 Or not at all if we've already found a note in this block. */
708 else if (kind == NOTE_INSN_BASIC_BLOCK)
710 if (bb_note == NULL_RTX)
713 next = flow_delete_insn (insn);
719 /* A basic block starts at a label. If we've closed one off due
720 to a barrier or some such, no need to do it again. */
721 if (head != NULL_RTX)
723 /* While we now have edge lists with which other portions of
724 the compiler might determine a call ending a basic block
725 does not imply an abnormal edge, it will be a bit before
726 everything can be updated. So continue to emit a noop at
727 the end of such a block. */
728 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
730 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
731 end = emit_insn_after (nop, end);
734 create_basic_block (i++, head, end, bb_note);
742 /* A basic block ends at a jump. */
743 if (head == NULL_RTX)
747 /* ??? Make a special check for table jumps. The way this
748 happens is truly and amazingly gross. We are about to
749 create a basic block that contains just a code label and
750 an addr*vec jump insn. Worse, an addr_diff_vec creates
751 its own natural loop.
753 Prevent this bit of brain damage, pasting things together
754 correctly in make_edges.
756 The correct solution involves emitting the table directly
757 on the tablejump instruction as a note, or JUMP_LABEL. */
759 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
760 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
768 goto new_bb_inclusive;
771 /* A basic block ends at a barrier. It may be that an unconditional
772 jump already closed the basic block -- no need to do it again. */
773 if (head == NULL_RTX)
776 /* While we now have edge lists with which other portions of the
777 compiler might determine a call ending a basic block does not
778 imply an abnormal edge, it will be a bit before everything can
779 be updated. So continue to emit a noop at the end of such a
781 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
783 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
784 end = emit_insn_after (nop, end);
786 goto new_bb_exclusive;
790 /* Record whether this call created an edge. */
791 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
792 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
793 int call_has_abnormal_edge = 0;
795 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
797 /* Scan each of the alternatives for label refs. */
798 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
799 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
800 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
801 /* Record its tail recursion label, if any. */
802 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
803 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
806 /* If there is an EH region or rethrow, we have an edge. */
807 if ((eh_list && region > 0)
808 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
809 call_has_abnormal_edge = 1;
810 else if (nonlocal_goto_handler_labels && region >= 0)
811 /* If there is a nonlocal goto label and the specified
812 region number isn't -1, we have an edge. (0 means
813 no throw, but might have a nonlocal goto). */
814 call_has_abnormal_edge = 1;
816 /* A basic block ends at a call that can either throw or
817 do a non-local goto. */
818 if (call_has_abnormal_edge)
821 if (head == NULL_RTX)
826 create_basic_block (i++, head, end, bb_note);
827 head = end = NULL_RTX;
835 if (GET_RTX_CLASS (code) == 'i')
837 if (head == NULL_RTX)
844 if (GET_RTX_CLASS (code) == 'i')
848 /* Make a list of all labels referred to other than by jumps
849 (which just don't have the REG_LABEL notes).
851 Make a special exception for labels followed by an ADDR*VEC,
852 as this would be a part of the tablejump setup code.
854 Make a special exception for the eh_return_stub_label, which
855 we know isn't part of any otherwise visible control flow. */
857 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
858 if (REG_NOTE_KIND (note) == REG_LABEL)
860 rtx lab = XEXP (note, 0), next;
862 if (lab == eh_return_stub_label)
864 else if ((next = next_nonnote_insn (lab)) != NULL
865 && GET_CODE (next) == JUMP_INSN
866 && (GET_CODE (PATTERN (next)) == ADDR_VEC
867 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
869 else if (GET_CODE (lab) == NOTE)
872 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
/* Close out any block still open at the end of the insn chain, and
   sanity-check the count against count_basic_blocks.  */
877 if (head != NULL_RTX)
878 create_basic_block (i++, head, end, bb_note);
880 flow_delete_insn (bb_note);
882 if (i != n_basic_blocks)
/* Publish the collected label and tail-recursion-label lists for
   make_edges.  */
885 label_value_list = lvl;
886 tail_recursion_label_list = trll;
/* NOTE(review): the enclosing function header is missing from this
   truncated listing; the comment below and the visible calls suggest
   this is the CFG cleanup routine run after edge discovery -- confirm
   against the full source.  */
889 /* Tidy the CFG by deleting unreachable code and whatnot. */
895 delete_unreachable_blocks ();
896 move_stray_eh_region_notes ();
897 record_active_eh_regions (f);
899 mark_critical_edges ();
901 /* Kill the data we won't maintain. */
902 free_EXPR_LIST_list (&label_value_list);
903 free_EXPR_LIST_list (&tail_recursion_label_list);
906 /* Create a new basic block consisting of the instructions between
907 HEAD and END inclusive. Reuses the note and basic block struct
908 in BB_NOTE, if any. */
/* INDEX is the slot in basic_block_info to fill.
   NOTE(review): truncated listing -- the declarations of `bb' and
   `after', several braces and the if/else structure around the
   reuse-vs-allocate paths are partly missing from the visible lines.  */
911 create_basic_block (index, head, end, bb_note)
913 rtx head, end, bb_note;
/* Reuse path: an existing, non-integrated NOTE_INSN_BASIC_BLOCK with
   an attached basic_block struct keeps pointers stable across calls.  */
918 && ! RTX_INTEGRATED_P (bb_note)
919 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
922 /* If we found an existing note, thread it back onto the chain. */
926 if (GET_CODE (head) == CODE_LABEL
930 after = PREV_INSN (head);
934 if (after != bb_note && NEXT_INSN (after) != bb_note)
935 reorder_insns (bb_note, bb_note, after);
939 /* Otherwise we must create a note and a basic block structure.
940 Since we allow basic block structs in rtl, give the struct
941 the same lifetime by allocating it off the function obstack
942 rather than using malloc. */
944 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
945 memset (bb, 0, sizeof (*bb));
/* The bb note goes after a leading CODE_LABEL, otherwise before HEAD.  */
947 if (GET_CODE (head) == CODE_LABEL)
948 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
951 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
954 NOTE_BASIC_BLOCK (bb_note) = bb;
957 /* Always include the bb note in the block. */
958 if (NEXT_INSN (end) == bb_note)
964 BASIC_BLOCK (index) = bb;
966 /* Tag the block so that we know it has been used when considering
967 other basic block notes. */
971 /* Records the basic block struct in BB_FOR_INSN, for every instruction
972 indexed by INSN_UID. MAX is the size of the array. */
/* NOTE(review): truncated listing -- the function's braces, the
   declaration of `i'/`insn', and the inner loop-termination logic
   (walking from bb->head to bb->end) are partly missing here.  */
975 compute_bb_for_insn (max)
/* Rebuild the map from scratch: free any previous varray first.  */
980 if (basic_block_for_insn)
981 VARRAY_FREE (basic_block_for_insn);
982 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
984 for (i = 0; i < n_basic_blocks; ++i)
986 basic_block bb = BASIC_BLOCK (i);
993 int uid = INSN_UID (insn);
995 VARRAY_BB (basic_block_for_insn, uid) = bb;
998 insn = NEXT_INSN (insn);
/* NOTE(review): the enclosing function header is missing from this
   truncated listing; the comment and visible body indicate this is the
   routine that frees all edge structures and detaches the entry/exit
   blocks -- confirm against the full source.  */
1003 /* Free the memory associated with the edge structures. */
1011 for (i = 0; i < n_basic_blocks; ++i)
1013 basic_block bb = BASIC_BLOCK (i);
/* Walk each successor list, saving the next pointer before freeing.  */
1015 for (e = bb->succ; e; e = n)
1025 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1031 ENTRY_BLOCK_PTR->succ = 0;
1032 EXIT_BLOCK_PTR->pred = 0;
1037 /* Identify the edges between basic blocks.
1039 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
1040 that are otherwise unreachable may be reachable with a non-local goto.
1042 BB_EH_END is an array indexed by basic block number in which we record
1043 the list of exception regions active at the end of the basic block. */
1046 make_edges (label_value_list)
1047 rtx label_value_list;
1050 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
1051 sbitmap *edge_cache = NULL;
1053 /* Assume no computed jump; revise as we create edges. */
1054 current_function_has_computed_jump = 0;
1056 /* Heavy use of computed goto in machine-generated code can lead to
1057 nearly fully-connected CFGs. In that case we spend a significant
1058 amount of time searching the edge lists for duplicates. */
1059 if (forced_labels || label_value_list)
1061 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1062 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1065 /* By nature of the way these get numbered, block 0 is always the entry. */
1066 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1068 for (i = 0; i < n_basic_blocks; ++i)
1070 basic_block bb = BASIC_BLOCK (i);
1073 int force_fallthru = 0;
1075 /* Examine the last instruction of the block, and discover the
1076 ways we can leave the block. */
1079 code = GET_CODE (insn);
1082 if (code == JUMP_INSN)
1086 /* ??? Recognize a tablejump and do the right thing. */
1087 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1088 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1089 && GET_CODE (tmp) == JUMP_INSN
1090 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1091 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1096 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1097 vec = XVEC (PATTERN (tmp), 0);
1099 vec = XVEC (PATTERN (tmp), 1);
1101 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1102 make_label_edge (edge_cache, bb,
1103 XEXP (RTVEC_ELT (vec, j), 0), 0);
1105 /* Some targets (eg, ARM) emit a conditional jump that also
1106 contains the out-of-range target. Scan for these and
1107 add an edge if necessary. */
1108 if ((tmp = single_set (insn)) != NULL
1109 && SET_DEST (tmp) == pc_rtx
1110 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1111 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1112 make_label_edge (edge_cache, bb,
1113 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1115 #ifdef CASE_DROPS_THROUGH
1116 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1117 us naturally detecting fallthru into the next block. */
1122 /* If this is a computed jump, then mark it as reaching
1123 everything on the label_value_list and forced_labels list. */
1124 else if (computed_jump_p (insn))
1126 current_function_has_computed_jump = 1;
1128 for (x = label_value_list; x; x = XEXP (x, 1))
1129 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1131 for (x = forced_labels; x; x = XEXP (x, 1))
1132 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1135 /* Returns create an exit out. */
1136 else if (returnjump_p (insn))
1137 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1139 /* Otherwise, we have a plain conditional or unconditional jump. */
1142 if (! JUMP_LABEL (insn))
1144 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1148 /* If this is a sibling call insn, then this is in effect a
1149 combined call and return, and so we need an edge to the
1150 exit block. No need to worry about EH edges, since we
1151 wouldn't have created the sibling call in the first place. */
1153 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1154 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1155 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1157 /* If this is a CALL_INSN, then mark it as reaching the active EH
1158 handler for this CALL_INSN. If we're handling asynchronous
1159 exceptions then any insn can reach any of the active handlers.
1161 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1163 else if (code == CALL_INSN || asynchronous_exceptions)
1165 /* Add any appropriate EH edges. We do this unconditionally
1166 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1167 on the call, and this needn't be within an EH region. */
1168 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1170 /* If we have asynchronous exceptions, do the same for *all*
1171 exception regions active in the block. */
1172 if (asynchronous_exceptions
1173 && bb->eh_beg != bb->eh_end)
1175 if (bb->eh_beg >= 0)
1176 make_eh_edge (edge_cache, eh_nest_info, bb,
1177 NULL_RTX, bb->eh_beg);
1179 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1180 if (GET_CODE (x) == NOTE
1181 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1182 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1184 int region = NOTE_EH_HANDLER (x);
1185 make_eh_edge (edge_cache, eh_nest_info, bb,
1190 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1192 /* ??? This could be made smarter: in some cases it's possible
1193 to tell that certain calls will not do a nonlocal goto.
1195 For example, if the nested functions that do the nonlocal
1196 gotos do not have their addresses taken, then only calls to
1197 those functions or to other nested functions that use them
1198 could possibly do nonlocal gotos. */
1199 /* We do know that a REG_EH_REGION note with a value less
1200 than 0 is guaranteed not to perform a non-local goto. */
1201 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1202 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1203 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1204 make_label_edge (edge_cache, bb, XEXP (x, 0),
1205 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1209 /* We know something about the structure of the function __throw in
1210 libgcc2.c. It is the only function that ever contains eh_stub
1211 labels. It modifies its return address so that the last block
1212 returns to one of the eh_stub labels within it. So we have to
1213 make additional edges in the flow graph. */
1214 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1215 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1217 /* Find out if we can drop through to the next block. */
1218 insn = next_nonnote_insn (insn);
1219 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1220 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1221 else if (i + 1 < n_basic_blocks)
1223 rtx tmp = BLOCK_HEAD (i + 1);
1224 if (GET_CODE (tmp) == NOTE)
1225 tmp = next_nonnote_insn (tmp);
1226 if (force_fallthru || insn == tmp)
1227 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1231 free_eh_nesting_info (eh_nest_info);
1233 sbitmap_vector_free (edge_cache);
1236 /* Create an edge between two basic blocks.  FLAGS are auxiliary information
1237    about the edge that is accumulated between calls.  */
1240 make_edge (edge_cache, src, dst, flags)
1241      sbitmap *edge_cache;
1242      basic_block src, dst;
1248   /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1249      many edges to them, and we didn't allocate memory for it.  */
1250   use_edge_cache = (edge_cache
1251 		    && src != ENTRY_BLOCK_PTR
1252 		    && dst != EXIT_BLOCK_PTR);
1254   /* Make sure we don't add duplicate edges.  */
   /* If the cache bit is already set (or there is no cache), an edge
      SRC->DST may already exist: scan SRC's successor list for it and,
      presumably, just OR in FLAGS on a hit — TODO confirm, continuation
      elided here.  */
1255   if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1256     for (e = src->succ; e; e = e->succ_next)
   /* No duplicate found: allocate a fresh zeroed edge and push it onto
      both the source's successor list and the destination's predecessor
      list.  */
1263   e = (edge) xcalloc (1, sizeof (*e));
1266   e->succ_next = src->succ;
1267   e->pred_next = dst->pred;
   /* Record the new edge in the cache so later calls can detect it.  */
1276     SET_BIT (edge_cache[src->index], dst->index);
1279 /* Create an edge from a basic block to a label.  */
1282 make_label_edge (edge_cache, src, label, flags)
1283      sbitmap *edge_cache;
   /* Only CODE_LABELs can be edge targets here; anything else is
      rejected (handling of the non-label case is elided here).  */
1288   if (GET_CODE (label) != CODE_LABEL)
1291   /* If the label was never emitted, this insn is junk, but avoid a
1292      crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
1293      as a result of a syntax error and a diagnostic has already been
   /* UID 0 identifies a label that was never emitted; bail out quietly.  */
1296   if (INSN_UID (label) == 0)
   /* Delegate to make_edge, resolving the label to its containing block.  */
1299   make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1302 /* Create the edges generated by INSN in REGION.  */
1305 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1306      sbitmap *edge_cache;
1307      eh_nesting_info *eh_nest_info;
1312   handler_info **handler_list;
   /* Calls get EDGE_ABNORMAL_CALL in addition to the EH flags; INSN may
      be NULL_RTX (e.g. when adding edges for a whole region rather than
      a specific insn).  */
1315   is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
   /* Collect every handler reachable from REGION and add an abnormal EH
      edge from SRC to each handler's label.  */
1316   num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1319       make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1320 		       EDGE_ABNORMAL | EDGE_EH | is_call);
1324 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1325    dangerous if we intend to move basic blocks around.  Move such notes
1326    into the following block.  */
1329 move_stray_eh_region_notes ()
   /* With fewer than two blocks there is no "between blocks" region.  */
1334   if (n_basic_blocks < 2)
   /* Walk adjacent block pairs (b1, b2) from the end of the function
      toward the front; B2 is always the block following B1.  */
1337   b2 = BASIC_BLOCK (n_basic_blocks - 1);
1338   for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1340       rtx insn, next, list = NULL_RTX;
1342       b1 = BASIC_BLOCK (i);
   /* Scan the insns that sit strictly between B1's end and B2's head,
      collecting any EH region begin/end notes found there.  */
1343       for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1345 	  next = NEXT_INSN (insn);
1346 	  if (GET_CODE (insn) == NOTE
1347 	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1348 		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1350 	      /* Unlink from the insn chain.  */
1351 	      NEXT_INSN (PREV_INSN (insn)) = next;
1352 	      PREV_INSN (next) = PREV_INSN (insn);
   /* Thread the note onto the local LIST of strays (using NEXT_INSN as
      the list link).  */
1355 	      NEXT_INSN (insn) = list;
   /* Nothing stray between this pair of blocks; move on.  */
1360       if (list == NULL_RTX)
1363       /* Find where to insert these things.  */
   /* Skip past a leading CODE_LABEL so the notes land inside B2 proper.  */
1365       if (GET_CODE (insn) == CODE_LABEL)
1366 	insn = NEXT_INSN (insn);
   /* Re-insert each collected note after the chosen position.  */
1370 	  next = NEXT_INSN (list);
1371 	  add_insn_after (list, insn);
1377 /* Recompute eh_beg/eh_end for each basic block.  */
1380 record_active_eh_regions (f)
   /* EH_LIST is a stack (INSN_LIST) of currently-open EH region notes;
      its head is the innermost active region at the current insn.  */
1383   rtx insn, eh_list = NULL_RTX;
1385   basic_block bb = BASIC_BLOCK (0);
   /* Single linear scan over the insn chain F, tracking region nesting.  */
1387   for (insn = f; insn; insn = NEXT_INSN (insn))
   /* At a block's first insn, record the innermost active region number,
      or -1 if no region is open.  */
1389       if (bb->head == insn)
1390 	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1392       if (GET_CODE (insn) == NOTE)
1394 	  int kind = NOTE_LINE_NUMBER (insn);
   /* Region begin pushes onto the stack; region end pops it.  */
1395 	  if (kind == NOTE_INSN_EH_REGION_BEG)
1396 	    eh_list = alloc_INSN_LIST (insn, eh_list);
1397 	  else if (kind == NOTE_INSN_EH_REGION_END)
1399 	      rtx t = XEXP (eh_list, 1);
1400 	      free_INSN_LIST_node (eh_list);
   /* At a block's last insn, record the innermost active region and
      advance to the next block (stopping after the last one).  */
1405       if (bb->end == insn)
1407 	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1409 	  if (i == n_basic_blocks)
1411 	  bb = BASIC_BLOCK (i);
1416 /* Identify critical edges and set the bits appropriately.  */
1419 mark_critical_edges ()
1421   int i, n = n_basic_blocks;
1424   /* We begin with the entry block.  This is not terribly important now,
1425      but could be if a front end (Fortran) implemented alternate entry
1427   bb = ENTRY_BLOCK_PTR;
1434       /* (1) Critical edges must have a source with multiple successors.  */
1435       if (bb->succ && bb->succ->succ_next)
1437 	  for (e = bb->succ; e; e = e->succ_next)
1439 	      /* (2) Critical edges must have a destination with multiple
1440 		 predecessors.  Note that we know there is at least one
1441 		 predecessor -- the edge we followed to get here.  */
1442 	      if (e->dest->pred->pred_next)
1443 		e->flags |= EDGE_CRITICAL;
1445 		e->flags &= ~EDGE_CRITICAL;
   /* Source has at most one successor: none of its out-edges can be
      critical, so clear the flag on all of them.  */
1450 	  for (e = bb->succ; e; e = e->succ_next)
1451 	    e->flags &= ~EDGE_CRITICAL;
   /* Advance to the next block in index order (loop control elided).  */
1456       bb = BASIC_BLOCK (i);
1460 /* Split a block BB after insn INSN creating a new fallthru edge.
1461    Return the new edge.  Note that to keep other parts of the compiler happy,
1462    this function renumbers all the basic blocks so that the new
1463    one has a number one greater than the block split.  */
1466 split_block (bb, insn)
1476   /* There is no point splitting the block after its end.  */
1477   if (bb->end == insn)
1480   /* Create the new structures.  */
   /* Basic blocks live on flow_obstack; edges are heap-allocated.  */
1481   new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1482   new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1485   memset (new_bb, 0, sizeof (*new_bb));
   /* NEW_BB takes over the insns after INSN up to the old block end.  */
1487   new_bb->head = NEXT_INSN (insn);
1488   new_bb->end = bb->end;
   /* NEW_BB inherits BB's successors; BB's only successor becomes the
      new fallthru edge into NEW_BB.  */
1491   new_bb->succ = bb->succ;
1492   bb->succ = new_edge;
1493   new_bb->pred = new_edge;
1494   new_bb->count = bb->count;
1495   new_bb->loop_depth = bb->loop_depth;
1498   new_edge->dest = new_bb;
1499   new_edge->flags = EDGE_FALLTHRU;
1500   new_edge->probability = REG_BR_PROB_BASE;
1501   new_edge->count = bb->count;
1503   /* Redirect the src of the successor edges of bb to point to new_bb.  */
1504   for (e = new_bb->succ; e; e = e->succ_next)
1507   /* Place the new block just after the block being split.  */
1508   VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1510   /* Some parts of the compiler expect blocks to be number in
1511      sequential order so insert the new block immediately after the
1512      block being split..  */
   /* Shift every block after the split point up one slot to make room.  */
1514   for (i = n_basic_blocks - 1; i > j + 1; --i)
1516       basic_block tmp = BASIC_BLOCK (i - 1);
1517       BASIC_BLOCK (i) = tmp;
1521   BASIC_BLOCK (i) = new_bb;
1524   /* Create the basic block note.  */
1525   bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1527   NOTE_BASIC_BLOCK (bb_note) = new_bb;
1528   new_bb->head = bb_note;
1530   update_bb_for_insn (new_bb);
   /* If liveness info exists for BB, compute it for the two halves.  */
1532   if (bb->global_live_at_start)
1534       new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1535       new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1536       COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1538       /* We now have to calculate which registers are live at the end
1539 	 of the split basic block and at the start of the new basic
1540 	 block.  Start with those registers that are known to be live
1541 	 at the end of the original basic block and get
1542 	 propagate_block to determine which registers are live.  */
1543       COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1544       propagate_block (new_bb, new_bb->global_live_at_start, NULL, 0);
1545       COPY_REG_SET (bb->global_live_at_end,
1546 		    new_bb->global_live_at_start);
1553 /* Split a (typically critical) edge.  Return the new block.
1554    Abort on abnormal edges.
1556    ??? The code generally expects to be called on critical edges.
1557    The case of a block ending in an unconditional jump to a
1558    block with multiple predecessors is not handled optimally.  */
1561 split_edge (edge_in)
1564   basic_block old_pred, bb, old_succ;
1569   /* Abnormal edges cannot be split.  */
1570   if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1573   old_pred = edge_in->src;
1574   old_succ = edge_in->dest;
1576   /* Remove the existing edge from the destination's pred list.  */
   /* Walk the pred list with a pointer-to-pointer so the unlink is a
      single store, then detach EDGE_IN from the list.  */
1579     for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1581     *pp = edge_in->pred_next;
1582     edge_in->pred_next = NULL;
1585   /* Create the new structures.  */
1586   bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1587   edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1590   memset (bb, 0, sizeof (*bb));
1592   /* ??? This info is likely going to be out of date very soon.  */
   /* Seed the new block's liveness with the successor's start set.  */
1593   if (old_succ->global_live_at_start)
1595       bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1596       bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1597       COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1598       COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
   /* Wire up the new block: its single out-edge EDGE_OUT falls through
      to OLD_SUCC; EDGE_IN now targets BB instead of OLD_SUCC.  */
1603   bb->succ = edge_out;
1604   bb->count = edge_in->count;
   /* EDGE_IN now has a single-predecessor destination; it can no longer
      be critical.  */
1607   edge_in->flags &= ~EDGE_CRITICAL;
1609   edge_out->pred_next = old_succ->pred;
1610   edge_out->succ_next = NULL;
1612   edge_out->dest = old_succ;
1613   edge_out->flags = EDGE_FALLTHRU;
1614   edge_out->probability = REG_BR_PROB_BASE;
1615   edge_out->count = edge_in->count;
1617   old_succ->pred = edge_out;
1619   /* Tricky case -- if there existed a fallthru into the successor
1620      (and we're not it) we must add a new unconditional jump around
1621      the new block we're actually interested in.
1623      Further, if that edge is critical, this means a second new basic
1624      block must be created to hold it.  In order to simplify correct
1625      insn placement, do this before we touch the existing basic block
1626      ordering for the block we were really wanting.  */
1627   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
   /* Look for some other edge that falls through into OLD_SUCC.  */
1630       for (e = edge_out->pred_next; e; e = e->pred_next)
1631 	if (e->flags & EDGE_FALLTHRU)
1636 	  basic_block jump_block;
1639 	  if ((e->flags & EDGE_CRITICAL) == 0
1640 	      && e->src != ENTRY_BLOCK_PTR)
1642 	      /* Non critical -- we can simply add a jump to the end
1643 		 of the existing predecessor.  */
1644 	      jump_block = e->src;
1648 	      /* We need a new block to hold the jump.  The simplest
1649 	         way to do the bulk of the work here is to recursively
1651 	      jump_block = split_edge (e);
1652 	      e = jump_block->succ;
1655 	  /* Now add the jump insn ...  */
1656 	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
1658 	  jump_block->end = pos;
1659 	  if (basic_block_for_insn)
1660 	    set_block_for_insn (pos, jump_block);
1661 	  emit_barrier_after (pos);
1663 	  /* ... let jump know that label is in use, ...  */
1664 	  JUMP_LABEL (pos) = old_succ->head;
1665 	  ++LABEL_NUSES (old_succ->head);
1667 	  /* ... and clear fallthru on the outgoing edge.  */
1668 	  e->flags &= ~EDGE_FALLTHRU;
1670 	  /* Continue splitting the interesting edge.  */
1674   /* Place the new block just in front of the successor.  */
1675   VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1676   if (old_succ == EXIT_BLOCK_PTR)
1677     j = n_basic_blocks - 1;
1679     j = old_succ->index;
   /* Shift blocks up one slot so BB lands at index J (renumbering
      of the shifted blocks is elided here).  */
1680   for (i = n_basic_blocks - 1; i > j; --i)
1682       basic_block tmp = BASIC_BLOCK (i - 1);
1683       BASIC_BLOCK (i) = tmp;
1686   BASIC_BLOCK (i) = bb;
1689   /* Create the basic block note.
1691      Where we place the note can have a noticable impact on the generated
1692      code.  Consider this cfg:
1702      If we need to insert an insn on the edge from block 0 to block 1,
1703      we want to ensure the instructions we insert are outside of any
1704      loop notes that physically sit between block 0 and block 1.  Otherwise
1705      we confuse the loop optimizer into thinking the loop is a phony.  */
1706   if (old_succ != EXIT_BLOCK_PTR
1707       && PREV_INSN (old_succ->head)
1708       && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1709       && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1710     bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1711 				PREV_INSN (old_succ->head));
1712   else if (old_succ != EXIT_BLOCK_PTR)
1713     bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1715     bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1716   NOTE_BASIC_BLOCK (bb_note) = bb;
1717   bb->head = bb->end = bb_note;
1719   /* Not quite simple -- for non-fallthru edges, we must adjust the
1720      predecessor's jump instruction to target our new block.  */
1721   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1723       rtx tmp, insn = old_pred->end;
1724       rtx old_label = old_succ->head;
1725       rtx new_label = gen_label_rtx ();
   /* The predecessor of a non-fallthru edge must end in a jump.  */
1727       if (GET_CODE (insn) != JUMP_INSN)
1730       /* ??? Recognize a tablejump and adjust all matching cases.  */
1731       if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1732 	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1733 	  && GET_CODE (tmp) == JUMP_INSN
1734 	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1735 	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
   /* ADDR_DIFF_VEC keeps its labels in operand 1; ADDR_VEC in 0.  */
1740 	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1741 	    vec = XVEC (PATTERN (tmp), 0);
1743 	    vec = XVEC (PATTERN (tmp), 1);
   /* Retarget every table entry that pointed at the old label.  */
1745 	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1746 	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1748 		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1749 		--LABEL_NUSES (old_label);
1750 		++LABEL_NUSES (new_label);
1753 	  /* Handle casesi dispatch insns */
1754 	  if ((tmp = single_set (insn)) != NULL
1755 	      && SET_DEST (tmp) == pc_rtx
1756 	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1757 	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1758 	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1760 	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1762 	      --LABEL_NUSES (old_label);
1763 	      ++LABEL_NUSES (new_label);
1768 	  /* This would have indicated an abnormal edge.  */
1769 	  if (computed_jump_p (insn))
1772 	  /* A return instruction can't be redirected.  */
1773 	  if (returnjump_p (insn))
1776 	  /* If the insn doesn't go where we think, we're confused.  */
1777 	  if (JUMP_LABEL (insn) != old_label)
1780 	  redirect_jump (insn, new_label, 0);
   /* Emit the new label at the head of BB so the redirected jump has
      something to land on.  */
1783       emit_label_before (new_label, bb_note);
1784       bb->head = new_label;
1790 /* Queue instructions for insertion on an edge between two basic blocks.
1791    The new instructions and basic blocks (if any) will not appear in the
1792    CFG until commit_edge_insertions is called.  */
1795 insert_insn_on_edge (pattern, e)
1799   /* We cannot insert instructions on an abnormal critical edge.
1800      It will be easier to find the culprit if we die now.  */
1801   if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1802       == (EDGE_ABNORMAL|EDGE_CRITICAL))
   /* Append PATTERN to any insns already queued on this edge, using the
      emit machinery's sequence stack.  */
1805   if (e->insns == NULL_RTX)
1808     push_to_sequence (e->insns);
1810   emit_insn (pattern);
   /* Save the (possibly extended) sequence back on the edge.  */
1812   e->insns = get_insns ();
1816 /* Update the CFG for the instructions queued on edge E.  */
1819 commit_one_edge_insertion (e)
1822   rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1825   /* Pull the insns off the edge now since the edge might go away.  */
1827   e->insns = NULL_RTX;
1829   /* Figure out where to put these things.  If the destination has
1830      one predecessor, insert there.  Except for the exit block.  */
1831   if (e->dest->pred->pred_next == NULL
1832       && e->dest != EXIT_BLOCK_PTR)
1836       /* Get the location correct wrt a code label, and "nice" wrt
1837 	 a basic block note, and before everything else.  */
1839       if (GET_CODE (tmp) == CODE_LABEL)
1840 	tmp = NEXT_INSN (tmp);
1841       if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1842 	tmp = NEXT_INSN (tmp);
1843       if (tmp == bb->head)
   /* Otherwise insert after the insn preceding the chosen spot.  */
1846 	after = PREV_INSN (tmp);
1849   /* If the source has one successor and the edge is not abnormal,
1850      insert there.  Except for the entry block.  */
1851   else if ((e->flags & EDGE_ABNORMAL) == 0
1852 	   && e->src->succ->succ_next == NULL
1853 	   && e->src != ENTRY_BLOCK_PTR)
1856       /* It is possible to have a non-simple jump here.  Consider a target
1857 	 where some forms of unconditional jumps clobber a register.  This
1858 	 happens on the fr30 for example.
1860 	 We know this block has a single successor, so we can just emit
1861 	 the queued insns before the jump.  */
1862       if (GET_CODE (bb->end) == JUMP_INSN)
1868 	  /* We'd better be fallthru, or we've lost track of what's what.  */
1869 	  if ((e->flags & EDGE_FALLTHRU) == 0)
1876   /* Otherwise we must split the edge.  */
   /* split_edge gives us a fresh empty block to hold the insns.  */
1879       bb = split_edge (e);
1883   /* Now that we've found the spot, do the insertion.  */
1885   /* Set the new block number for these insns, if structure is allocated.  */
1886   if (basic_block_for_insn)
1889       for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1890 	set_block_for_insn (i, bb);
   /* Emit either before BEFORE or after AFTER, whichever was chosen
      above, and fix up the block's head/end if the insertion point was
      at a block boundary.  */
1895       emit_insns_before (insns, before);
1896       if (before == bb->head)
1899       last = prev_nonnote_insn (before);
1903       last = emit_insns_after (insns, after);
1904       if (after == bb->end)
   /* If the inserted sequence ends in a return, the edge must be the
      fallthru into EXIT; convert it and terminate with a barrier.  */
1908   if (returnjump_p (last))
1910       /* ??? Remove all outgoing edges from BB and add one for EXIT.
1911 	 This is not currently a problem because this only happens
1912 	 for the (single) epilogue, which already has a fallthru edge
1916       if (e->dest != EXIT_BLOCK_PTR
1917 	  || e->succ_next != NULL
1918 	  || (e->flags & EDGE_FALLTHRU) == 0)
1920       e->flags &= ~EDGE_FALLTHRU;
1922       emit_barrier_after (last);
   /* Handling when the insertion displaced the old head (elided).  */
1926 	flow_delete_insn (before);
1928   else if (GET_CODE (last) == JUMP_INSN)
1932 /* Update the CFG for all queued instructions.  */
1935 commit_edge_insertions ()
1940 #ifdef ENABLE_CHECKING
1941   verify_flow_info ();
   /* Start at the entry block and walk every block in index order,
      committing the queued insns on each outgoing edge.  */
1945   bb = ENTRY_BLOCK_PTR;
1950       for (e = bb->succ; e; e = next)
   /* Grab the next edge first: commit_one_edge_insertion may split E
      and change the successor list.  */
1952 	  next = e->succ_next;
1954 	    commit_one_edge_insertion (e);
1957       if (++i >= n_basic_blocks)
1959       bb = BASIC_BLOCK (i);
1963 /* Delete all unreachable basic blocks.  */
1966 delete_unreachable_blocks ()
1968   basic_block *worklist, *tos;
1969   int deleted_handler;
   /* Worklist for a forward reachability DFS/BFS over the CFG; TOS is
      the stack top.  */
1974   tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1976   /* Use basic_block->aux as a marker.  Clear them all.  */
1978   for (i = 0; i < n; ++i)
1979     BASIC_BLOCK (i)->aux = NULL;
1981   /* Add our starting points to the worklist.  Almost always there will
1982      be only one.  It isn't inconcievable that we might one day directly
1983      support Fortran alternate entry points.  */
1985   for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
1989       /* Mark the block with a handy non-null value.  */
1993   /* Iterate: find everything reachable from what we've already seen.  */
1995   while (tos != worklist)
1997       basic_block b = *--tos;
1999       for (e = b->succ; e; e = e->succ_next)
2007   /* Delete all unreachable basic blocks.  Count down so that we don't
2008      interfere with the block renumbering that happens in flow_delete_block.  */
2010   deleted_handler = 0;
2012   for (i = n - 1; i >= 0; --i)
2014       basic_block b = BASIC_BLOCK (i);
2017 	/* This block was found.  Tidy up the mark.  */
   /* flow_delete_block reports whether an EH handler went away.  */
2020 	deleted_handler |= flow_delete_block (b);
2023   tidy_fallthru_edges ();
2025   /* If we deleted an exception handler, we may have EH region begin/end
2026      blocks to remove as well.  */
2027   if (deleted_handler)
2028     delete_eh_regions ();
2033 /* Find EH regions for which there is no longer a handler, and delete them.  */
2036 delete_eh_regions ()
   /* Refresh rethrow bookkeeping before deciding which regions are dead.  */
2040   update_rethrow_references ();
2042   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2043     if (GET_CODE (insn) == NOTE)
2045 	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2046 	    || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
2048 	    int num = NOTE_EH_HANDLER (insn);
2049 	    /* A NULL handler indicates a region is no longer needed,
2050 	       as long as its rethrow label isn't used.  */
2051 	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
   /* Turn the region note into a deleted note in place rather than
      unlinking it from the insn chain.  */
2053 		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2054 		NOTE_SOURCE_FILE (insn) = 0;
2060 /* Return true if NOTE is not one of the ones that must be kept paired,
2061    so that we may simply delete them.  */
2064 can_delete_note_p (note)
   /* Only already-deleted notes and basic-block markers are freely
      deletable; paired notes (e.g. EH region begin/end) are not.  */
2067   return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2068 	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2071 /* Unlink a chain of insns between START and FINISH, leaving notes
2072    that must be paired.  */
2075 flow_delete_insn_chain (start, finish)
2078   /* Unchain the insns one by one.  It would be quicker to delete all
2079      of these with a single unchaining, rather than one at a time, but
2080      we need to keep the NOTE's.  */
2086       next = NEXT_INSN (start);
   /* Paired notes are kept in place; everything else is removed.  */
2087       if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2089       else if (GET_CODE (start) == CODE_LABEL
2090 	       && ! can_delete_label_p (start))
   /* A label we must keep is demoted to a DELETED_LABEL note, saving
      its name in the note's source-file slot.  */
2092 	  const char *name = LABEL_NAME (start);
2093 	  PUT_CODE (start, NOTE);
2094 	  NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2095 	  NOTE_SOURCE_FILE (start) = name;
2098 	next = flow_delete_insn (start);
2100       if (start == finish)
2106 /* Delete the insns in a (non-live) block.  We physically delete every
2107    non-deleted-note insn, and update the flow graph appropriately.
2109    Return nonzero if we deleted an exception handler.  */
2111 /* ??? Preserving all such notes strikes me as wrong.  It would be nice
2112    to post-process the stream to remove empty blocks, loops, ranges, etc.  */
2115 flow_delete_block (b)
2118   int deleted_handler = 0;
2121   /* If the head of this block is a CODE_LABEL, then it might be the
2122      label for an exception handler which can't be reached.
2124      We need to remove the label from the exception_handler_label list
2125      and remove the associated NOTE_INSN_EH_REGION_BEG and
2126      NOTE_INSN_EH_REGION_END notes.  */
   /* Warn about user code that can never execute.  */
2130   never_reached_warning (insn);
2132   if (GET_CODE (insn) == CODE_LABEL)
2134       rtx x, *prev = &exception_handler_labels;
2136       for (x = exception_handler_labels; x; x = XEXP (x, 1))
2138 	  if (XEXP (x, 0) == insn)
2140 	      /* Found a match, splice this label out of the EH label list.  */
2141 	      *prev = XEXP (x, 1);
2142 	      XEXP (x, 1) = NULL_RTX;
2143 	      XEXP (x, 0) = NULL_RTX;
2145 	      /* Remove the handler from all regions */
2146 	      remove_handler (insn);
2147 	      deleted_handler = 1;
   /* No match: advance the splice pointer.  */
2150 	    prev = &XEXP (x, 1);
2154   /* Include any jump table following the basic block.  */
2156   if (GET_CODE (end) == JUMP_INSN
2157       && (tmp = JUMP_LABEL (end)) != NULL_RTX
2158       && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2159       && GET_CODE (tmp) == JUMP_INSN
2160       && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2161 	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2164   /* Include any barrier that may follow the basic block.  */
2165   tmp = next_nonnote_insn (end);
2166   if (tmp && GET_CODE (tmp) == BARRIER)
2169   /* Selectively delete the entire chain.  */
2170   flow_delete_insn_chain (insn, end);
2172   /* Remove the edges into and out of this block.  Note that there may
2173      indeed be edges in, if we are removing an unreachable loop.  */
2177       for (e = b->pred; e; e = next)
   /* Unlink E from its source's successor list before freeing it.  */
2179 	  for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2182 	  next = e->pred_next;
2186       for (e = b->succ; e; e = next)
   /* Unlink E from its destination's predecessor list before freeing.  */
2188 	  for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2191 	  next = e->succ_next;
2200   /* Remove the basic block from the array, and compact behind it.  */
2203   return deleted_handler;
2206 /* Remove block B from the basic block array and compact behind it.  */
2212   int i, n = n_basic_blocks;
   /* Shift every block after B down one slot (renumbering of the
      shifted blocks is elided here) and shrink the array.  */
2214   for (i = b->index; i + 1 < n; ++i)
2216       basic_block x = BASIC_BLOCK (i + 1);
2217       BASIC_BLOCK (i) = x;
2221   basic_block_info->num_elements--;
2225 /* Delete INSN by patching it out.  Return the next insn.  */
2228 flow_delete_insn (insn)
2231   rtx prev = PREV_INSN (insn);
2232   rtx next = NEXT_INSN (insn);
   /* Detach INSN from the doubly-linked insn chain and mark it dead.  */
2235   PREV_INSN (insn) = NULL_RTX;
2236   NEXT_INSN (insn) = NULL_RTX;
2237   INSN_DELETED_P (insn) = 1;
2240     NEXT_INSN (prev) = next;
2242     PREV_INSN (next) = prev;
   /* INSN was the last insn in the function; update the chain tail.  */
2244     set_last_insn (prev);
2246   if (GET_CODE (insn) == CODE_LABEL)
2247     remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2249   /* If deleting a jump, decrement the use count of the label.  Deleting
2250      the label itself should happen in the normal course of block merging.  */
2251   if (GET_CODE (insn) == JUMP_INSN
2252       && JUMP_LABEL (insn)
2253       && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2254     LABEL_NUSES (JUMP_LABEL (insn))--;
2256   /* Also if deleting an insn that references a label.  */
2257   else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2258 	   && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2259     LABEL_NUSES (XEXP (note, 0))--;
2264 /* True if a given label can be deleted.  */
2267 can_delete_label_p (label)
   /* Explicitly preserved labels must stay.  */
2272   if (LABEL_PRESERVE_P (label))
   /* A label referenced from the forced-labels, label-value, or EH
      handler lists is still in use elsewhere.  */
2275   for (x = forced_labels; x; x = XEXP (x, 1))
2276     if (label == XEXP (x, 0))
2278   for (x = label_value_list; x; x = XEXP (x, 1))
2279     if (label == XEXP (x, 0))
2281   for (x = exception_handler_labels; x; x = XEXP (x, 1))
2282     if (label == XEXP (x, 0))
2285   /* User declared labels must be preserved.  */
2286   if (LABEL_NAME (label) != 0)
   /* Return nonzero if LABEL appears on the tail-recursion label list.  */
2293 tail_recursion_label_p (label)
2298   for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2299     if (label == XEXP (x, 0))
2305 /* Blocks A and B are to be merged into a single block A.  The insns
2306    are already contiguous, hence `nomove'.  */
2309 merge_blocks_nomove (a, b)
2313   rtx b_head, b_end, a_end;
2314   rtx del_first = NULL_RTX, del_last = NULL_RTX;
2317   /* If there was a CODE_LABEL beginning B, delete it.  */
2320   if (GET_CODE (b_head) == CODE_LABEL)
2322       /* Detect basic blocks with nothing but a label.  This can happen
2323 	 in particular at the end of a function.  */
2324       if (b_head == b_end)
2326       del_first = del_last = b_head;
2327       b_head = NEXT_INSN (b_head);
2330   /* Delete the basic block note.  */
2331   if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2333       if (b_head == b_end)
   /* Extend the deletion range to cover the note as well.  */
2338       b_head = NEXT_INSN (b_head);
2341   /* If there was a jump out of A, delete it.  */
2343   if (GET_CODE (a_end) == JUMP_INSN)
   /* Back up over notes to find where the jump's deletion range begins
      (stopping at the block's own basic-block note).  */
2347       for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2348 	if (GET_CODE (prev) != NOTE
2349 	    || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2356       /* If this was a conditional jump, we need to also delete
2357 	 the insn that set cc0.  */
2358       if (prev && sets_cc0_p (prev))
2361 	  prev = prev_nonnote_insn (prev);
   /* No jump out of A, but a barrier after it also has to go.  */
2370   else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2371     del_first = NEXT_INSN (a_end);
2373   /* Delete everything marked above as well as crap that might be
2374      hanging out between the two blocks.  */
2375   flow_delete_insn_chain (del_first, del_last);
2377   /* Normally there should only be one successor of A and that is B, but
2378      partway though the merge of blocks for conditional_execution we'll
2379      be merging a TEST block with THEN and ELSE successors.  Free the
2380      whole lot of them and hope the caller knows what they're doing.  */
2382     remove_edge (a->succ);
2384   /* Adjust the edges out of B for the new owner.  */
2385   for (e = b->succ; e; e = e->succ_next)
2389   /* B hasn't quite yet ceased to exist.  Attempt to prevent mishap.  */
2390   b->pred = b->succ = NULL;
2392   /* Reassociate the insns of B with A.  */
2395   if (basic_block_for_insn)
2397       BLOCK_FOR_INSN (b_head) = a;
2398       while (b_head != b_end)
2400 	  b_head = NEXT_INSN (b_head);
2401 	  BLOCK_FOR_INSN (b_head) = a;
2411 /* Blocks A and B are to be merged into a single block.  A has no incoming
2412    fallthru edge, so it can be moved before B without adding or modifying
2413    any jumps (aside from the jump from A to B).  */
2416 merge_blocks_move_predecessor_nojumps (a, b)
2419   rtx start, end, barrier;
   /* A must be followed by a barrier (it does not fall through); delete
      it since after the move no barrier is needed there.  */
2425   barrier = next_nonnote_insn (end);
2426   if (GET_CODE (barrier) != BARRIER)
2428   flow_delete_insn (barrier);
2430   /* Move block and loop notes out of the chain so that we do not
2431      disturb their order.
2433      ??? A better solution would be to squeeze out all the non-nested notes
2434      and adjust the block trees appropriately.   Even better would be to have
2435      a tighter connection between block trees and rtl so that this is not
2437   start = squeeze_notes (start, end);
2439   /* Scramble the insn chain.  */
   /* Physically move A's insns so they sit immediately before B's head.  */
2440   if (end != PREV_INSN (b->head))
2441     reorder_insns (start, end, PREV_INSN (b->head));
2445     fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2446 	     a->index, b->index);
2449   /* Swap the records for the two blocks around.  Although we are deleting B,
2450      A is now where B was and we want to compact the BB array from where
2452   BASIC_BLOCK (a->index) = b;
2453   BASIC_BLOCK (b->index) = a;
2455   a->index = b->index;
2458   /* Now blocks A and B are contiguous.  Merge them.  */
2459   merge_blocks_nomove (a, b);
2464 /* Blocks A and B are to be merged into a single block.  B has no outgoing
2465    fallthru edge, so it can be moved after A without adding or modifying
2466    any jumps (aside from the jump from A to B).  */
2469 merge_blocks_move_successor_nojumps (a, b)
2472   rtx start, end, barrier;
2476   barrier = NEXT_INSN (end);
2478   /* Recognize a jump table following block B.  */
   /* A table jump is a CODE_LABEL followed by an ADDR_VEC/ADDR_DIFF_VEC
      jump insn; move the table along with the block.  */
2479   if (GET_CODE (barrier) == CODE_LABEL
2480       && NEXT_INSN (barrier)
2481       && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2482       && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2483 	  || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2485       end = NEXT_INSN (barrier);
2486       barrier = NEXT_INSN (end);
2489   /* There had better have been a barrier there.  Delete it.  */
2490   if (GET_CODE (barrier) != BARRIER)
2492   flow_delete_insn (barrier);
2494   /* Move block and loop notes out of the chain so that we do not
2495      disturb their order.
2497      ??? A better solution would be to squeeze out all the non-nested notes
2498      and adjust the block trees appropriately.   Even better would be to have
2499      a tighter connection between block trees and rtl so that this is not
2501   start = squeeze_notes (start, end);
2503   /* Scramble the insn chain.  */
   /* Physically move B's insns so they follow A's end.  */
2504   reorder_insns (start, end, a->end);
2506   /* Now blocks A and B are contiguous.  Merge them.  */
2507   merge_blocks_nomove (a, b);
2511     fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2512 	     b->index, a->index);
2518 /* Attempt to merge basic blocks that are potentially non-adjacent.
2519 Return true iff the attempt succeeded. */
/* E is the sole edge from B to C (checked by the caller); the function
   tries, in order: fallthru merge in place, moving B before C, or moving
   C after B.  Lines are elided in this listing (gaps in the numbering). */
2522 merge_blocks (e, b, c)
2526 /* If C has a tail recursion label, do not merge. There is no
2527 edge recorded from the call_placeholder back to this label, as
2528 that would make optimize_sibling_and_tail_recursive_calls more
2529 complex for no gain. */
2530 if (GET_CODE (c->head) == CODE_LABEL
2531 && tail_recursion_label_p (c->head))
2534 /* If B has a fallthru edge to C, no need to move anything. */
2535 if (e->flags & EDGE_FALLTHRU)
2537 merge_blocks_nomove (b, c);
2541 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2542 b->index, c->index);
2551 int c_has_outgoing_fallthru;
2552 int b_has_incoming_fallthru;
2554 /* We must make sure to not munge nesting of exception regions,
2555 lexical blocks, and loop notes.
2557 The first is taken care of by requiring that the active eh
2558 region at the end of one block always matches the active eh
2559 region at the beginning of the next block.
2561 The later two are taken care of by squeezing out all the notes. */
2563 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2564 executed and we may want to treat blocks which have two out
2565 edges, one normal, one abnormal as only having one edge for
2566 block merging purposes. */
/* Scan C's successor and B's predecessor lists for fallthru edges;
   the loop exit leaves TMP_EDGE non-null exactly when one was found. */
2568 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2569 if (tmp_edge->flags & EDGE_FALLTHRU)
2571 c_has_outgoing_fallthru = (tmp_edge != NULL);
2573 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2574 if (tmp_edge->flags & EDGE_FALLTHRU)
2576 b_has_incoming_fallthru = (tmp_edge != NULL);
2578 /* If B does not have an incoming fallthru, and the exception regions
2579 match, then it can be moved immediately before C without introducing
2582 C can not be the first block, so we do not have to worry about
2583 accessing a non-existent block. */
2584 d = BASIC_BLOCK (c->index - 1);
2585 if (! b_has_incoming_fallthru
2586 && d->eh_end == b->eh_beg
2587 && b->eh_end == c->eh_beg)
2588 return merge_blocks_move_predecessor_nojumps (b, c);
2590 /* Otherwise, we're going to try to move C after B. Make sure the
2591 exception regions match.
2593 If B is the last basic block, then we must not try to access the
2594 block structure for block B + 1. Luckily in that case we do not
2595 need to worry about matching exception regions. */
2596 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2597 if (b->eh_end == c->eh_beg
2598 && (d == NULL || c->eh_end == d->eh_beg))
2600 /* If C does not have an outgoing fallthru, then it can be moved
2601 immediately after B without introducing or modifying jumps. */
2602 if (! c_has_outgoing_fallthru)
2603 return merge_blocks_move_successor_nojumps (b, c);
2605 /* Otherwise, we'll need to insert an extra jump, and possibly
2606 a new block to contain it. */
2607 /* ??? Not implemented yet. */
2614 /* Top level driver for merge_blocks. */
/* NOTE(review): the function signature (original lines 2615-2620) is elided
   from this listing; presumably the driver takes no arguments — confirm
   against the full file. */
2621 /* Attempt to merge blocks as made possible by edge removal. If a block
2622 has only one successor, and the successor has only one predecessor,
2623 they may be combined. */
2625 for (i = 0; i < n_basic_blocks;)
2627 basic_block c, b = BASIC_BLOCK (i);
2630 /* A loop because chains of blocks might be combineable. */
/* Keep merging B with its lone successor C while the CFG allows it:
   B has exactly one non-EH successor edge, C is not the exit block,
   C has exactly one predecessor, and B's terminating jump (if any)
   has no side effects beyond the jump itself. */
2631 while ((s = b->succ) != NULL
2632 && s->succ_next == NULL
2633 && (s->flags & EDGE_EH) == 0
2634 && (c = s->dest) != EXIT_BLOCK_PTR
2635 && c->pred->pred_next == NULL
2636 /* If the jump insn has side effects, we can't kill the edge. */
2637 && (GET_CODE (b->end) != JUMP_INSN
2638 || onlyjump_p (b->end))
2639 && merge_blocks (s, b, c))
2642 /* Don't get confused by the index shift caused by deleting blocks. */
2647 /* The given edge should potentially be a fallthru edge. If that is in
2648 fact true, delete the jump and barriers that are in the way. */
/* E is the edge from block B to block C; on success the jump at the end
   of B is deleted (or turned into a NOTE) and E is marked EDGE_FALLTHRU.
   Several lines of this function are elided in this listing. */
2651 tidy_fallthru_edge (e, b, c)
2657 /* ??? In a late-running flow pass, other folks may have deleted basic
2658 blocks by nopping out blocks, leaving multiple BARRIERs between here
2659 and the target label. They ought to be chastized and fixed.
2661 We can also wind up with a sequence of undeletable labels between
2662 one block and the next.
2664 So search through a sequence of barriers, labels, and notes for
2665 the head of block C and assert that we really do fall through. */
2667 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2670 /* Remove what will soon cease being the jump insn from the source block.
2671 If block B consisted only of this single jump, turn it into a deleted
2674 if (GET_CODE (q) == JUMP_INSN
2676 && (any_uncondjump_p (q)
2677 || (b->succ == e && e->succ_next == NULL)))
2680 /* If this was a conditional jump, we need to also delete
2681 the insn that set cc0. */
2682 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
/* Neutralize the jump by rewriting it as a deleted-insn note rather
   than unlinking it, so insn chain walkers stay consistent. */
2689 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2690 NOTE_SOURCE_FILE (q) = 0;
2698 /* Selectively unlink the sequence. */
2699 if (q != PREV_INSN (c->head))
2700 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2702 e->flags |= EDGE_FALLTHRU;
2705 /* Fix up edges that now fall through, or rather should now fall through
2706 but previously required a jump around now deleted blocks. Simplify
2707 the search by only examining blocks numerically adjacent, since this
2708 is how find_basic_blocks created them. */
2711 tidy_fallthru_edges ()
/* Walk each adjacent pair of blocks (B = i-1, C = i) and tidy the sole
   edge from B when it targets C. */
2715 for (i = 1; i < n_basic_blocks; ++i)
2717 basic_block b = BASIC_BLOCK (i - 1);
2718 basic_block c = BASIC_BLOCK (i);
2721 /* We care about simple conditional or unconditional jumps with
2724 If we had a conditional branch to the next instruction when
2725 find_basic_blocks was called, then there will only be one
2726 out edge for the block which ended with the conditional
2727 branch (since we do not create duplicate edges).
2729 Furthermore, the edge will be marked as a fallthru because we
2730 merge the flags for the duplicate edges. So we do not want to
2731 check that the edge is not a FALLTHRU edge. */
2732 if ((s = b->succ) != NULL
2733 && s->succ_next == NULL
2735 /* If the jump insn has side effects, we can't tidy the edge. */
2736 && (GET_CODE (b->end) != JUMP_INSN
2737 || onlyjump_p (b->end)))
2738 tidy_fallthru_edge (s, b, c);
2742 /* Perform data flow analysis.
2743 F is the first insn of the function; FLAGS is a set of PROP_* flags
2744 to be used in accumulating flow info. */
/* FILE, if non-null, receives a dump of the computed flow info.
   Lines are elided in this listing, including parameter declarations
   and several flag adjustments. */
2747 life_analysis (f, file, flags)
2752 #ifdef ELIMINABLE_REGS
2754 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2757 /* Record which registers will be eliminated. We use this in
2760 CLEAR_HARD_REG_SET (elim_reg_set);
2762 #ifdef ELIMINABLE_REGS
2763 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2764 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
/* Without ELIMINABLE_REGS, only the frame pointer can be eliminated. */
2766 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2770 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
2772 /* The post-reload life analysis have (on a global basis) the same
2773 registers live as was computed by reload itself. elimination
2774 Otherwise offsets and such may be incorrect.
2776 Reload will make some registers as live even though they do not
2779 We don't want to create new auto-incs after reload, since they
2780 are unlikely to be useful and can cause problems with shared
2782 if (reload_completed)
2783 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2785 /* We want alias analysis information for local dead store elimination. */
2786 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2787 init_alias_analysis ();
2789 /* Always remove no-op moves. Do this before other processing so
2790 that we don't have to keep re-scanning them. */
2791 delete_noop_moves (f);
2793 /* Some targets can emit simpler epilogues if they know that sp was
2794 not ever modified during the function. After reload, of course,
2795 we've already emitted the epilogue so there's no sense searching. */
2796 if (! reload_completed)
2797 notice_stack_pointer_modification (f);
2799 /* Allocate and zero out data structures that will record the
2800 data from lifetime analysis. */
2801 allocate_reg_life_data ();
2802 allocate_bb_life_data ();
2804 /* Find the set of registers live on function exit. */
2805 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2807 /* "Update" life info from zero. It'd be nice to begin the
2808 relaxation with just the exit and noreturn blocks, but that set
2809 is not immediately handy. */
2811 if (flags & PROP_REG_INFO)
2812 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2813 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2816 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2817 end_alias_analysis ();
2820 dump_flow_info (file);
2822 free_basic_block_vars (1);
2825 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2826 Search for REGNO. If found, abort if it is not wider than word_mode. */
/* PX points at the rtx being visited; PREGNO points at the register
   number being searched for.  Return value and abort path are on
   lines elided from this listing. */
2829 verify_wide_reg_1 (px, pregno)
2834 unsigned int regno = *(int *) pregno;
2836 if (GET_CODE (x) == REG && REGNO (x) == regno)
2838 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2845 /* A subroutine of verify_local_live_at_start. Search through insns
2846 between HEAD and END looking for register REGNO. */
/* Aborts (via code elided in this listing) if the register is never
   found or is not wider than a word. */
2849 verify_wide_reg (regno, head, end)
2856 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2860 head = NEXT_INSN (head);
2863 /* We didn't find the register at all. Something's way screwy. */
2867 /* A subroutine of update_life_info. Verify that there are no untoward
2868 changes in live_at_start during a local update. */
/* NEW_LIVE_AT_START is the recomputed live-at-start regset for block BB;
   it is compared (and XORed, destructively) against the recorded
   bb->global_live_at_start.  Abort paths are elided in this listing. */
2871 verify_local_live_at_start (new_live_at_start, bb)
2872 regset new_live_at_start;
2875 if (reload_completed)
2877 /* After reload, there are no pseudos, nor subregs of multi-word
2878 registers. The regsets should exactly match. */
2879 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2886 /* Find the set of changed registers. */
2887 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2889 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2891 /* No registers should die. */
2892 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2894 /* Verify that the now-live register is wider than word_mode. */
2895 verify_wide_reg (i, bb->head, bb->end);
2900 /* Updates life information starting with the basic blocks set in BLOCKS.
2901 If BLOCKS is null, consider it to be the universal set.
2903 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2904 we are only expecting local modifications to basic blocks. If we find
2905 extra registers live at the beginning of a block, then we either killed
2906 useful data, or we have a broken split that wants data not provided.
2907 If we find registers removed from live_at_start, that means we have
2908 a broken peephole that is killing a register it shouldn't.
2910 ??? This is not true in one situation -- when a pre-reload splitter
2911 generates subregs of a multi-word pseudo, current life analysis will
2912 lose the kill. So we _can_ have a pseudo go live. How irritating.
2914 Including PROP_REG_INFO does not properly refresh regs_ever_live
2915 unless the caller resets it to zero. */
2918 update_life_info (blocks, extent, prop_flags)
2920 enum update_life_extent extent;
2924 regset_head tmp_head;
2927 tmp = INITIALIZE_REG_SET (tmp_head);
2929 /* For a global update, we go through the relaxation process again. */
2930 if (extent != UPDATE_LIFE_LOCAL)
2932 calculate_global_regs_live (blocks, blocks,
2933 prop_flags & PROP_SCAN_DEAD_CODE);
2935 /* If asked, remove notes from the blocks we'll update. */
2936 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2937 count_or_remove_death_notes (blocks, 1);
/* Per-block local pass: reuse TMP as a scratch copy of live_at_end and
   run propagate_block over either the BLOCKS subset or all blocks. */
2942 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2944 basic_block bb = BASIC_BLOCK (i);
2946 COPY_REG_SET (tmp, bb->global_live_at_end);
2947 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2949 if (extent == UPDATE_LIFE_LOCAL)
2950 verify_local_live_at_start (tmp, bb);
2955 for (i = n_basic_blocks - 1; i >= 0; --i)
2957 basic_block bb = BASIC_BLOCK (i);
2959 COPY_REG_SET (tmp, bb->global_live_at_end);
2960 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2962 if (extent == UPDATE_LIFE_LOCAL)
2963 verify_local_live_at_start (tmp, bb);
2969 if (prop_flags & PROP_REG_INFO)
2971 /* The only pseudos that are live at the beginning of the function
2972 are those that were not set anywhere in the function. local-alloc
2973 doesn't know how to handle these correctly, so mark them as not
2974 local to any one basic block. */
2975 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2976 FIRST_PSEUDO_REGISTER, i,
2977 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2979 /* We have a problem with any pseudoreg that lives across the setjmp.
2980 ANSI says that if a user variable does not change in value between
2981 the setjmp and the longjmp, then the longjmp preserves it. This
2982 includes longjmp from a place where the pseudo appears dead.
2983 (In principle, the value still exists if it is in scope.)
2984 If the pseudo goes in a hard reg, some other value may occupy
2985 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2986 Conclusion: such a pseudo must not go in a hard reg. */
2987 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2988 FIRST_PSEUDO_REGISTER, i,
2990 if (regno_reg_rtx[i] != 0)
2992 REG_LIVE_LENGTH (i) = -1;
2993 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2999 /* Free the variables allocated by find_basic_blocks.
3001 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
3004 free_basic_block_vars (keep_head_end_p)
3005 int keep_head_end_p;
/* Release the insn-to-block map unconditionally. */
3007 if (basic_block_for_insn)
3009 VARRAY_FREE (basic_block_for_insn);
3010 basic_block_for_insn = NULL;
3013 if (! keep_head_end_p)
3016 VARRAY_FREE (basic_block_info);
/* Clear the per-function state hung off the ENTRY/EXIT pseudo-blocks. */
3019 ENTRY_BLOCK_PTR->aux = NULL;
3020 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
3021 EXIT_BLOCK_PTR->aux = NULL;
3022 EXIT_BLOCK_PTR->global_live_at_start = NULL;
3026 /* Return nonzero if the destination of SET equals the source. */
/* NOTE(review): the function signature (presumably set_noop_p) is on
   lines elided from this listing. */
3032 rtx src = SET_SRC (set);
3033 rtx dst = SET_DEST (set);
/* A SUBREG-to-SUBREG copy is a no-op only if both refer to the same
   word of the underlying registers. */
3035 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
3037 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
3039 src = SUBREG_REG (src);
3040 dst = SUBREG_REG (dst);
3043 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
3044 && REGNO (src) == REGNO (dst));
3047 /* Return nonzero if an insn consists only of SETs, each of which only sets a
/* NOTE(review): the signature (presumably noop_move_p (insn)) is on lines
   elided from this listing. */
3054 rtx pat = PATTERN (insn);
3056 /* Insns carrying these notes are useful later on. */
3057 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
3060 if (GET_CODE (pat) == SET && set_noop_p (pat))
3063 if (GET_CODE (pat) == PARALLEL)
3066 /* If nothing but SETs of registers to themselves,
3067 this insn can also be deleted. */
3068 for (i = 0; i < XVECLEN (pat, 0); i++)
3070 rtx tem = XVECEXP (pat, 0, i);
/* USE and CLOBBER elements do not disqualify the insn. */
3072 if (GET_CODE (tem) == USE
3073 || GET_CODE (tem) == CLOBBER)
3076 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
3085 /* Delete any insns that copy a register to itself. */
/* F is the first insn of the function.  "Deletion" is done by turning
   the insn into a NOTE_INSN_DELETED note rather than unlinking it. */
3088 delete_noop_moves (f)
3092 for (insn = f; insn; insn = NEXT_INSN (insn))
3094 if (GET_CODE (insn) == INSN && noop_move_p (insn))
3096 PUT_CODE (insn, NOTE);
3097 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3098 NOTE_SOURCE_FILE (insn) = 0;
3103 /* Determine if the stack pointer is constant over the life of the function.
3104 Only useful before prologues have been emitted. */
/* note_stores callback: X is the destination of a store.  Clears the
   global current_function_sp_is_unchanging when the stack pointer is
   written, either directly or through a pre/post inc/dec address. */
3107 notice_stack_pointer_modification_1 (x, pat, data)
3109 rtx pat ATTRIBUTE_UNUSED;
3110 void *data ATTRIBUTE_UNUSED;
3112 if (x == stack_pointer_rtx
3113 /* The stack pointer is only modified indirectly as the result
3114 of a push until later in flow. See the comments in rtl.texi
3115 regarding Embedded Side-Effects on Addresses. */
3116 || (GET_CODE (x) == MEM
3117 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
3118 || GET_CODE (XEXP (x, 0)) == PRE_INC
3119 || GET_CODE (XEXP (x, 0)) == POST_DEC
3120 || GET_CODE (XEXP (x, 0)) == POST_INC)
3121 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
3122 current_function_sp_is_unchanging = 0;
/* Scan the whole insn chain starting at F and decide whether the stack
   pointer ever changes; the answer is left in
   current_function_sp_is_unchanging. */
3126 notice_stack_pointer_modification (f)
3131 /* Assume that the stack pointer is unchanging if alloca hasn't
3133 current_function_sp_is_unchanging = !current_function_calls_alloca;
3134 if (! current_function_sp_is_unchanging)
3137 for (insn = f; insn; insn = NEXT_INSN (insn))
3141 /* Check if insn modifies the stack pointer. */
3142 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
/* Stop scanning as soon as a modification has been found. */
3144 if (! current_function_sp_is_unchanging)
3150 /* Mark a register in SET. Hard registers in large modes get all
3151 of their component registers set as well. */
/* REG is the register rtx; XSET is the regset passed as void *
   (this is used as a diddle_return_value callback). */
3154 mark_reg (reg, xset)
3158 regset set = (regset) xset;
3159 int regno = REGNO (reg);
3161 if (GET_MODE (reg) == BLKmode)
3164 SET_REGNO_REG_SET (set, regno);
/* For a multi-word hard register, also mark every component hard reg. */
3165 if (regno < FIRST_PSEUDO_REGISTER)
3167 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3169 SET_REGNO_REG_SET (set, regno + n);
3173 /* Mark those regs which are needed at the end of the function as live
3174 at the end of the last basic block. */
/* SET is the regset to populate (typically
   EXIT_BLOCK_PTR->global_live_at_start). */
3177 mark_regs_live_at_end (set)
3182 /* If exiting needs the right stack value, consider the stack pointer
3183 live at the end of the function. */
3184 if ((HAVE_epilogue && reload_completed)
3185 || ! EXIT_IGNORE_STACK
3186 || (! FRAME_POINTER_REQUIRED
3187 && ! current_function_calls_alloca
3188 && flag_omit_frame_pointer)
3189 || current_function_sp_is_unchanging)
3191 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3194 /* Mark the frame pointer if needed at the end of the function. If
3195 we end up eliminating it, it will be removed from the live list
3196 of each basic block by reload. */
3198 if (! reload_completed || frame_pointer_needed)
3200 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3201 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3202 /* If they are different, also mark the hard frame pointer as live. */
3203 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3204 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3208 #ifdef PIC_OFFSET_TABLE_REGNUM
3209 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3210 /* Many architectures have a GP register even without flag_pic.
3211 Assume the pic register is not in use, or will be handled by
3212 other means, if it is not fixed. */
3213 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3214 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3218 /* Mark all global registers, and all registers used by the epilogue
3219 as being live at the end of the function since they may be
3220 referenced by our caller. */
3221 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3222 if (global_regs[i] || EPILOGUE_USES (i))
3223 SET_REGNO_REG_SET (set, i);
3225 /* Mark all call-saved registers that we actually used. */
3226 if (HAVE_epilogue && reload_completed)
3228 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3229 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
3230 SET_REGNO_REG_SET (set, i);
3233 /* Mark function return value. */
3234 diddle_return_value (mark_reg, set);
3237 /* Callback function for for_each_successor_phi. DATA is a regset.
3238 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3239 INSN, in the regset. */
3242 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3243 rtx insn ATTRIBUTE_UNUSED;
3244 int dest_regno ATTRIBUTE_UNUSED;
3248 regset live = (regset) data;
3249 SET_REGNO_REG_SET (live, src_regno);
3253 /* Propagate global life info around the graph of basic blocks. Begin
3254 considering blocks with their corresponding bit set in BLOCKS_IN.
3255 If BLOCKS_IN is null, consider it the universal set.
3257 BLOCKS_OUT is set for every block that was changed. */
/* Classic backward iterative dataflow: a worklist of blocks is processed
   until the live sets reach a fixed point.  FLAGS are PROP_* bits
   forwarded to propagate_block.  Lines are elided in this listing. */
3260 calculate_global_regs_live (blocks_in, blocks_out, flags)
3261 sbitmap blocks_in, blocks_out;
3264 basic_block *queue, *qhead, *qtail, *qend;
3265 regset tmp, new_live_at_end;
3266 regset_head tmp_head;
3267 regset_head new_live_at_end_head;
3270 tmp = INITIALIZE_REG_SET (tmp_head);
3271 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3273 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3274 because the `head == tail' style test for an empty queue doesn't
3275 work with a full queue. */
3276 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3278 qhead = qend = queue + n_basic_blocks + 2;
3280 /* Clear out the garbage that might be hanging out in bb->aux. */
3281 for (i = n_basic_blocks - 1; i >= 0; --i)
3282 BASIC_BLOCK (i)->aux = NULL;
3284 /* Queue the blocks set in the initial mask. Do this in reverse block
3285 number order so that we are more likely for the first round to do
3286 useful work. We use AUX non-null to flag that the block is queued. */
3289 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3291 basic_block bb = BASIC_BLOCK (i);
3298 for (i = 0; i < n_basic_blocks; ++i)
3300 basic_block bb = BASIC_BLOCK (i);
3307 sbitmap_zero (blocks_out);
/* Main relaxation loop: pop a block, recompute its live_at_end from its
   successors, and requeue predecessors when live_at_start changes. */
3309 while (qhead != qtail)
3311 int rescan, changed;
3320 /* Begin by propagating live_at_start from the successor blocks. */
3321 CLEAR_REG_SET (new_live_at_end);
3322 for (e = bb->succ; e; e = e->succ_next)
3324 basic_block sb = e->dest;
3325 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3328 /* Force the stack pointer to be live -- which might not already be
3329 the case for blocks within infinite loops. */
3330 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3332 /* Similarly for the frame pointer before reload. Any reference
3333 to any pseudo before reload is a potential reference of the
3335 if (! reload_completed)
3336 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
3338 /* Regs used in phi nodes are not included in
3339 global_live_at_start, since they are live only along a
3340 particular edge. Set those regs that are live because of a
3341 phi node alternative corresponding to this particular block. */
3343 for_each_successor_phi (bb, &set_phi_alternative_reg,
3346 if (bb == ENTRY_BLOCK_PTR)
3348 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3352 /* On our first pass through this block, we'll go ahead and continue.
3353 Recognize first pass by local_set NULL. On subsequent passes, we
3354 get to skip out early if live_at_end wouldn't have changed. */
3356 if (bb->local_set == NULL)
3358 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3363 /* If any bits were removed from live_at_end, we'll have to
3364 rescan the block. This wouldn't be necessary if we had
3365 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3366 local_live is really dependent on live_at_end. */
3367 CLEAR_REG_SET (tmp);
3368 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3369 new_live_at_end, BITMAP_AND_COMPL);
3373 /* Find the set of changed bits. Take this opportunity
3374 to notice that this set is empty and early out. */
3375 CLEAR_REG_SET (tmp);
3376 changed = bitmap_operation (tmp, bb->global_live_at_end,
3377 new_live_at_end, BITMAP_XOR);
3381 /* If any of the changed bits overlap with local_set,
3382 we'll have to rescan the block. Detect overlap by
3383 the AND with ~local_set turning off bits. */
3384 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3389 /* Let our caller know that BB changed enough to require its
3390 death notes updated. */
3392 SET_BIT (blocks_out, bb->index);
3396 /* Add to live_at_start the set of all registers in
3397 new_live_at_end that aren't in the old live_at_end. */
3399 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3401 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3403 changed = bitmap_operation (bb->global_live_at_start,
3404 bb->global_live_at_start,
3411 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3413 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3414 into live_at_start. */
3415 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3417 /* If live_at_start didn't change, no need to go farther. */
3418 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3421 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3424 /* Queue all predecessors of BB so that we may re-examine
3425 their live_at_end. */
3426 for (e = bb->pred; e; e = e->pred_next)
3428 basic_block pb = e->src;
3429 if (pb->aux == NULL)
/* Fixed point reached: release scratch regsets and per-block local_set. */
3440 FREE_REG_SET (new_live_at_end);
3444 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3446 basic_block bb = BASIC_BLOCK (i);
3447 FREE_REG_SET (bb->local_set);
3452 for (i = n_basic_blocks - 1; i >= 0; --i)
3454 basic_block bb = BASIC_BLOCK (i);
3455 FREE_REG_SET (bb->local_set);
3462 /* Subroutines of life analysis. */
3464 /* Allocate the permanent data structures that represent the results
3465 of life analysis. Not static since used also for stupid life analysis. */
3468 allocate_bb_life_data ()
/* One live-at-start and one live-at-end regset per block, all carved
   from flow_obstack. */
3472 for (i = 0; i < n_basic_blocks; i++)
3474 basic_block bb = BASIC_BLOCK (i);
3476 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3477 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
/* The ENTRY/EXIT pseudo-blocks only need the facing half of the pair. */
3480 ENTRY_BLOCK_PTR->global_live_at_end
3481 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3482 EXIT_BLOCK_PTR->global_live_at_start
3483 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3485 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
/* (Re)allocate and reset the per-register statistics gathered by
   propagate_block (deaths, calls crossed, live length, home block). */
3489 allocate_reg_life_data ()
3493 max_regno = max_reg_num ();
3495 /* Recalculate the register space, in case it has grown. Old style
3496 vector oriented regsets would set regset_{size,bytes} here also. */
3497 allocate_reg_info (max_regno, FALSE, FALSE);
3499 /* Reset all the data we'll collect in propagate_block and its
3501 for (i = 0; i < max_regno; i++)
3505 REG_N_DEATHS (i) = 0;
3506 REG_N_CALLS_CROSSED (i) = 0;
3507 REG_LIVE_LENGTH (i) = 0;
3508 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3512 /* Delete dead instructions for propagate_block. */
/* BB is the block containing INSN.  Also removes a jump table whose
   label becomes unreferenced once INSN is gone. */
3515 propagate_block_delete_insn (bb, insn)
3519 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3521 /* If the insn referred to a label, and that label was attached to
3522 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3523 pretty much mandatory to delete it, because the ADDR_VEC may be
3524 referencing labels that no longer exist. */
3528 rtx label = XEXP (inote, 0);
3531 if (LABEL_NUSES (label) == 1
3532 && (next = next_nonnote_insn (label)) != NULL
3533 && GET_CODE (next) == JUMP_INSN
3534 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3535 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3537 rtx pat = PATTERN (next);
3538 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3539 int len = XVECLEN (pat, diff_vec_p);
/* Drop the use count of every label the dying jump table references. */
3542 for (i = 0; i < len; i++)
3543 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3545 flow_delete_insn (next);
/* Keep bb->end valid when the deleted insn was the block terminator. */
3549 if (bb->end == insn)
3550 bb->end = PREV_INSN (insn);
3551 flow_delete_insn (insn);
3554 /* Delete dead libcalls for propagate_block. Return the insn
3555 before the libcall. */
/* NOTE is INSN's REG_RETVAL note; its operand is the first insn of the
   libcall sequence, so FIRST..INSN spans the whole libcall. */
3558 propagate_block_delete_libcall (bb, insn, note)
3562 rtx first = XEXP (note, 0);
3563 rtx before = PREV_INSN (first);
3565 if (insn == bb->end)
3568 flow_delete_insn_chain (first, insn);
3572 /* Update the life-status of regs for one insn. Return the previous insn. */
/* PBI carries the running live set and PROP_* flags for the backward
   scan over the block.  Many lines of this function are elided in this
   listing (gaps in the left-hand numbering). */
3575 propagate_one_insn (pbi, insn)
3576 struct propagate_block_info *pbi;
3579 rtx prev = PREV_INSN (insn);
3580 int flags = pbi->flags;
3581 int insn_is_dead = 0;
3582 int libcall_is_dead = 0;
3586 if (! INSN_P (insn))
3589 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3590 if (flags & PROP_SCAN_DEAD_CODE)
3592 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3594 libcall_is_dead = (insn_is_dead && note != 0
3595 && libcall_dead_p (pbi, note, insn));
3598 /* We almost certainly don't want to delete prologue or epilogue
3599 instructions. Warn about probable compiler losage. */
3602 && (((HAVE_epilogue || HAVE_prologue)
3603 && prologue_epilogue_contains (insn))
3604 || (HAVE_sibcall_epilogue
3605 && sibcall_epilogue_contains (insn)))
3606 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
3608 if (flags & PROP_KILL_DEAD_CODE)
3610 warning ("ICE: would have deleted prologue/epilogue insn");
3611 if (!inhibit_warnings)
/* Refuse to delete prologue/epilogue insns even if they look dead. */
3614 libcall_is_dead = insn_is_dead = 0;
3617 /* If an instruction consists of just dead store(s) on final pass,
3619 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3621 /* Record sets. Do this even for dead instructions, since they
3622 would have killed the values if they hadn't been deleted. */
3623 mark_set_regs (pbi, PATTERN (insn), insn);
3625 /* CC0 is now known to be dead. Either this insn used it,
3626 in which case it doesn't anymore, or clobbered it,
3627 so the next insn can't use it. */
3630 if (libcall_is_dead)
3632 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3633 insn = NEXT_INSN (prev);
3636 propagate_block_delete_insn (pbi->bb, insn);
3641 /* See if this is an increment or decrement that can be merged into
3642 a following memory address. */
3645 register rtx x = single_set (insn);
3647 /* Does this instruction increment or decrement a register? */
3648 if ((flags & PROP_AUTOINC)
3650 && GET_CODE (SET_DEST (x)) == REG
3651 && (GET_CODE (SET_SRC (x)) == PLUS
3652 || GET_CODE (SET_SRC (x)) == MINUS)
3653 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3654 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3655 /* Ok, look for a following memory ref we can combine with.
3656 If one is found, change the memory ref to a PRE_INC
3657 or PRE_DEC, cancel this insn, and return 1.
3658 Return 0 if nothing has been done. */
3659 && try_pre_increment_1 (pbi, insn))
3662 #endif /* AUTO_INC_DEC */
3664 CLEAR_REG_SET (pbi->new_set);
3666 /* If this is not the final pass, and this insn is copying the value of
3667 a library call and it's dead, don't scan the insns that perform the
3668 library call, so that the call's arguments are not marked live. */
3669 if (libcall_is_dead)
3671 /* Record the death of the dest reg. */
3672 mark_set_regs (pbi, PATTERN (insn), insn);
/* Skip the whole libcall sequence by resuming just before its first insn. */
3674 insn = XEXP (note, 0);
3675 return PREV_INSN (insn);
3677 else if (GET_CODE (PATTERN (insn)) == SET
3678 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3679 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3680 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3681 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3682 /* We have an insn to pop a constant amount off the stack.
3683 (Such insns use PLUS regardless of the direction of the stack,
3684 and any insn to adjust the stack by a constant is always a pop.)
3685 These insns, if not dead stores, have no effect on life. */
3689 /* Any regs live at the time of a call instruction must not go
3690 in a register clobbered by calls. Find all regs now live and
3691 record this for them. */
3693 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3694 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3695 { REG_N_CALLS_CROSSED (i)++; });
3697 /* Record sets. Do this even for dead instructions, since they
3698 would have killed the values if they hadn't been deleted. */
3699 mark_set_regs (pbi, PATTERN (insn), insn);
3701 if (GET_CODE (insn) == CALL_INSN)
3707 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3708 cond = COND_EXEC_TEST (PATTERN (insn));
3710 /* Non-constant calls clobber memory. */
3711 if (! CONST_CALL_P (insn))
3712 free_EXPR_LIST_list (&pbi->mem_set_list);
3714 /* There may be extra registers to be clobbered. */
3715 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3717 note = XEXP (note, 1))
3718 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3719 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3720 cond, insn, pbi->flags);
3722 /* Calls change all call-used and global registers. */
3723 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3724 if (call_used_regs[i] && ! global_regs[i]
3727 /* We do not want REG_UNUSED notes for these registers. */
3728 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3730 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3734 /* If an insn doesn't use CC0, it becomes dead since we assume
3735 that every insn clobbers it. So show it dead here;
3736 mark_used_regs will set it live if it is referenced. */
3741 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3743 /* Sometimes we may have inserted something before INSN (such as a move)
3744 when we make an auto-inc. So ensure we will scan those insns. */
3746 prev = PREV_INSN (insn);
3749 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3755 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3756 cond = COND_EXEC_TEST (PATTERN (insn));
3758 /* Calls use their arguments. */
3759 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3761 note = XEXP (note, 1))
3762 if (GET_CODE (XEXP (note, 0)) == USE)
3763 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3766 /* The stack ptr is used (honorarily) by a CALL insn. */
3767 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3769 /* Calls may also reference any of the global registers,
3770 so they are made live. */
3771 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3773 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3778 /* On final pass, update counts of how many insns in which each reg
3780 if (flags & PROP_REG_INFO)
3781 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3782 { REG_LIVE_LENGTH (i)++; });
3787 /* Initialize a propagate_block_info struct for public consumption.
3788 Note that the structure itself is opaque to this file, but that
3789 the user can use the regsets provided here. */
/* Allocate and initialize a propagate_block_info for basic block BB.
   LIVE is the caller-owned regset of registers live at the end of the
   block; LOCAL_SET, if non-null, accumulates registers killed in the
   block; FLAGS selects which PROP_* activities are performed.
   NOTE(review): this is an elided listing -- original lines are missing
   between the numbered statements, so control structure is incomplete.  */
3791 struct propagate_block_info *
3792 init_propagate_block_info (bb, live, local_set, flags)
3798 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
3801 pbi->reg_live = live;
3802 pbi->mem_set_list = NULL_RTX;
3803 pbi->local_set = local_set;
/* Next-use tracking is only needed when building LOG_LINKS or
   performing auto-increment discovery.  */
3807 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3808 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
/* (else arm -- intervening original line elided)  */
3810 pbi->reg_next_use = NULL;
3812 pbi->new_set = BITMAP_XMALLOC ();
3814 #ifdef HAVE_conditional_execution
/* reg_cond_dead maps register numbers to reg_cond_life_info; entries
   are freed via free_reg_cond_life_info when removed.  */
3815 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3816 free_reg_cond_life_info);
3817 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3819 /* If this block ends in a conditional branch, for each register live
3820 from one side of the branch and not the other, record the register
3821 as conditionally dead. */
3822 if ((flags & (PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE))
3823 && GET_CODE (bb->end) == JUMP_INSN
3824 && any_condjump_p (bb->end))
3826 regset_head diff_head;
3827 regset diff = INITIALIZE_REG_SET (diff_head);
3828 basic_block bb_true, bb_false;
3829 rtx cond_true, cond_false, set_src;
3832 /* Identify the successor blocks. */
3833 bb_true = bb->succ->dest;
3834 if (bb->succ->succ_next != NULL)
3836 bb_false = bb->succ->succ_next->dest;
/* Normalize so bb_true is the branch-taken destination and bb_false
   the fall-through; swap if the first edge is the fall-through.  */
3838 if (bb->succ->flags & EDGE_FALLTHRU)
3840 basic_block t = bb_false;
3844 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3849 /* This can happen with a conditional jump to the next insn. */
3850 if (JUMP_LABEL (bb->end) != bb_true->head)
3853 /* Simplest way to do nothing. */
3857 /* Extract the condition from the branch. */
3858 set_src = SET_SRC (pc_set (bb->end));
3859 cond_true = XEXP (set_src, 0);
3860 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3861 GET_MODE (cond_true), XEXP (cond_true, 0),
3862 XEXP (cond_true, 1));
/* If the jump falls through on true (i.e. the branch arm is the PC
   successor), the roles of the two conditions are swapped.  */
3863 if (GET_CODE (XEXP (set_src, 1)) == PC)
3866 cond_false = cond_true;
3870 /* Compute which registers lead different lives in the successors. */
3871 if (bitmap_operation (diff, bb_true->global_live_at_start,
3872 bb_false->global_live_at_start, BITMAP_XOR))
3874 rtx reg = XEXP (cond_true, 0);
3876 if (GET_CODE (reg) == SUBREG)
3877 reg = SUBREG_REG (reg);
3879 if (GET_CODE (reg) != REG)
3882 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
3884 /* For each such register, mark it conditionally dead. */
3885 EXECUTE_IF_SET_IN_REG_SET
3888 struct reg_cond_life_info *rcli;
3891 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
/* Choose the condition under which the register is dead: the branch
   arm in which it is NOT live at start.  */
3893 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3897 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3899 splay_tree_insert (pbi->reg_cond_dead, i,
3900 (splay_tree_value) rcli);
3904 FREE_REG_SET (diff);
3908 /* If this block has no successors, any stores to the frame that aren't
3909 used later in the block are dead. So make a pass over the block
3910 recording any such that are made and show them dead at the end. We do
3911 a very conservative and simple job here. */
3913 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
3914 && (TYPE_RETURNS_STACK_DEPRESSED
3915 (TREE_TYPE (current_function_decl))))
3916 && (flags & PROP_SCAN_DEAD_CODE)
3917 && (bb->succ == NULL
3918 || (bb->succ->succ_next == NULL
3919 && bb->succ->dest == EXIT_BLOCK_PTR)))
/* Scan backwards collecting frame-relative stores; only plain
   (fp) or (fp + const) addresses are recognized.  */
3922 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
3923 if (GET_CODE (insn) == INSN
3924 && GET_CODE (PATTERN (insn)) == SET
3925 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
3927 rtx mem = SET_DEST (PATTERN (insn));
3929 if (XEXP (mem, 0) == frame_pointer_rtx
3930 || (GET_CODE (XEXP (mem, 0)) == PLUS
3931 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
3932 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
3933 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
3940 /* Release a propagate_block_info struct. */
/* Release all resources owned by PBI (memory-set list, bitmaps,
   splay tree, next-use array).  NOTE(review): the final free of PBI
   itself presumably occurs on an elided line -- confirm against the
   full source.  */
3943 free_propagate_block_info (pbi)
3944 struct propagate_block_info *pbi;
3946 free_EXPR_LIST_list (&pbi->mem_set_list);
3948 BITMAP_XFREE (pbi->new_set);
3950 #ifdef HAVE_conditional_execution
/* Deleting the tree invokes free_reg_cond_life_info on each entry.  */
3951 splay_tree_delete (pbi->reg_cond_dead);
3952 BITMAP_XFREE (pbi->reg_cond_reg);
/* reg_next_use is only allocated under PROP_LOG_LINKS | PROP_AUTOINC;
   the NULL guard is redundant with free() semantics but harmless.  */
3955 if (pbi->reg_next_use)
3956 free (pbi->reg_next_use);
3961 /* Compute the registers live at the beginning of a basic block BB from
3962 those live at the end.
3964 When called, REG_LIVE contains those live at the end. On return, it
3965 contains those live at the beginning.
3967 LOCAL_SET, if non-null, will be set with all registers killed by
3968 this basic block. */
/* Driver: initialize a propagate_block_info for BB, then scan the
   block's insns from last to first, updating LIVE via
   propagate_one_insn, and finally release the info structure.  */
3971 propagate_block (bb, live, local_set, flags)
3977 struct propagate_block_info *pbi;
3980 pbi = init_propagate_block_info (bb, live, local_set, flags);
3982 if (flags & PROP_REG_INFO)
3986 /* Process the regs live at the end of the block.
3987 Mark them as not local to any one basic block. */
3988 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3989 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3992 /* Scan the block an insn at a time from end to beginning. */
3994 for (insn = bb->end;; insn = prev)
3996 /* If this is a call to `setjmp' et al, warn if any
3997 non-volatile datum is live. */
3998 if ((flags & PROP_REG_INFO)
3999 && GET_CODE (insn) == NOTE
4000 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
4001 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
/* propagate_one_insn returns the previous insn to process (it may
   have inserted insns before INSN for auto-inc).  */
4003 prev = propagate_one_insn (pbi, insn);
4005 if (insn == bb->head)
4009 free_propagate_block_info (pbi);
4012 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
4013 (SET expressions whose destinations are registers dead after the insn).
4014 NEEDED is the regset that says which regs are alive after the insn.
4016 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
4018 If X is the entire body of an insn, NOTES contains the reg notes
4019 pertaining to the insn. */
/* Return nonzero if X (an insn body or part of one) consists only of
   dead stores -- SETs whose destinations are not live after the insn.
   CALL_OK nonzero means a contained CALL does not make the insn needed.
   NOTES are the insn's reg notes (used for REG_INC after reload).
   NOTE(review): elided listing -- some original lines are missing.  */
4022 insn_dead_p (pbi, x, call_ok, notes)
4023 struct propagate_block_info *pbi;
4026 rtx notes ATTRIBUTE_UNUSED;
4028 enum rtx_code code = GET_CODE (x);
4031 /* If flow is invoked after reload, we must take existing AUTO_INC
4032 expressions into account. */
4033 if (reload_completed)
4035 for (; notes; notes = XEXP (notes, 1))
4037 if (REG_NOTE_KIND (notes) == REG_INC)
4039 int regno = REGNO (XEXP (notes, 0));
4041 /* Don't delete insns to set global regs. */
4042 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4043 || REGNO_REG_SET_P (pbi->reg_live, regno))
4050 /* If setting something that's a reg or part of one,
4051 see if that register's altered value will be live. */
4055 rtx r = SET_DEST (x);
/* A set of CC0 is dead exactly when no later insn in this scan
   has used CC0.  */
4058 if (GET_CODE (r) == CC0)
4059 return ! pbi->cc0_live;
4062 /* A SET that is a subroutine call cannot be dead. */
4063 if (GET_CODE (SET_SRC (x)) == CALL)
4069 /* Don't eliminate loads from volatile memory or volatile asms. */
4070 else if (volatile_refs_p (SET_SRC (x)))
4073 if (GET_CODE (r) == MEM)
4077 if (MEM_VOLATILE_P (r))
4080 /* Walk the set of memory locations we are currently tracking
4081 and see if one is an identical match to this memory location.
4082 If so, this memory write is dead (remember, we're walking
4083 backwards from the end of the block to the start). */
4084 temp = pbi->mem_set_list;
4087 rtx mem = XEXP (temp, 0);
4089 if (rtx_equal_p (mem, r))
4092 /* Check if memory reference matches an auto increment. Only
4093 post increment/decrement or modify are valid. */
4094 if (GET_MODE (mem) == GET_MODE (r)
4095 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
4096 || GET_CODE (XEXP (mem, 0)) == POST_INC
4097 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
4098 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
4099 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
4102 temp = XEXP (temp, 1);
/* Strip wrappers to get at the underlying REG, if any.  */
4107 while (GET_CODE (r) == SUBREG
4108 || GET_CODE (r) == STRICT_LOW_PART
4109 || GET_CODE (r) == ZERO_EXTRACT)
4112 if (GET_CODE (r) == REG)
4114 int regno = REGNO (r);
/* The destination is live after the insn, so the set is needed.  */
4117 if (REGNO_REG_SET_P (pbi->reg_live, regno))
4120 /* If this is a hard register, verify that subsequent
4121 words are not needed. */
4122 if (regno < FIRST_PSEUDO_REGISTER)
4124 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
4127 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
4131 /* Don't delete insns to set global regs. */
4132 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4135 /* Make sure insns to set the stack pointer aren't deleted. */
4136 if (regno == STACK_POINTER_REGNUM)
4139 /* Make sure insns to set the frame pointer aren't deleted. */
4140 if (regno == FRAME_POINTER_REGNUM
4141 && (! reload_completed || frame_pointer_needed))
4143 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4144 if (regno == HARD_FRAME_POINTER_REGNUM
4145 && (! reload_completed || frame_pointer_needed))
4149 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4150 /* Make sure insns to set arg pointer are never deleted
4151 (if the arg pointer isn't fixed, there will be a USE
4152 for it, so we can treat it normally). */
4153 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4157 #ifdef PIC_OFFSET_TABLE_REGNUM
4158 /* Before reload, do not allow sets of the pic register
4159 to be deleted. Reload can insert references to
4160 constant pool memory anywhere in the function, making
4161 the PIC register live where it wasn't before. */
4162 if (regno == PIC_OFFSET_TABLE_REGNUM && fixed_regs[regno]
4163 && ! reload_completed)
4167 /* Otherwise, the set is dead. */
4173 /* If performing several activities, insn is dead if each activity
4174 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4175 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4177 else if (code == PARALLEL)
4179 int i = XVECLEN (x, 0);
4181 for (i--; i >= 0; i--)
4182 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4183 && GET_CODE (XVECEXP (x, 0, i)) != USE
4184 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4190 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4191 is not necessarily true for hard registers. */
4192 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4193 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4194 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4197 /* We do not check other CLOBBER or USE here. An insn consisting of just
4198 a CLOBBER or just a USE should not be deleted. */
4202 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4203 return 1 if the entire library call is dead.
4204 This is true if INSN copies a register (hard or pseudo)
4205 and if the hard return reg of the call insn is dead.
4206 (The caller should have tested the destination of the SET inside
4207 INSN already for death.)
4209 If this insn doesn't just copy a register, then we don't
4210 have an ordinary libcall. In that case, cse could not have
4211 managed to substitute the source for the dest later on,
4212 so we can assume the libcall is dead.
4214 PBI is the block info giving pseudoregs live before this insn.
4215 NOTE is the REG_RETVAL note of the insn. */
/* Given that INSN (the last insn of a libcall sequence, with REG_RETVAL
   note NOTE) is itself dead, return nonzero if the whole library call
   is dead -- i.e. INSN copies a register and the call's hard return
   register is dead as well.  */
4218 libcall_dead_p (pbi, note, insn)
4219 struct propagate_block_info *pbi;
4223 rtx x = single_set (insn);
4227 register rtx r = SET_SRC (x);
/* Only a plain register copy can end an ordinary libcall.  */
4228 if (GET_CODE (r) == REG)
4230 rtx call = XEXP (note, 0);
4234 /* Find the call insn. */
4235 while (call != insn && GET_CODE (call) != CALL_INSN)
4236 call = NEXT_INSN (call);
4238 /* If there is none, do nothing special,
4239 since ordinary death handling can understand these insns. */
4243 /* See if the hard reg holding the value is dead.
4244 If this is a PARALLEL, find the call within it. */
4245 call_pat = PATTERN (call);
4246 if (GET_CODE (call_pat) == PARALLEL)
4248 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4249 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4250 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4253 /* This may be a library call that is returning a value
4254 via invisible pointer. Do nothing special, since
4255 ordinary death handling can understand these insns. */
4259 call_pat = XVECEXP (call_pat, 0, i);
/* CALL_OK == 1: the CALL inside does not make the insn needed.  */
4262 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4268 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4269 live at function entry. Don't count global register variables, variables
4270 in registers that can be used for function arg passing, or variables in
4271 fixed hard registers. */
/* Return nonzero if REGNO is live at function entry (used before set).
   Global registers, fixed registers, and argument-passing hard
   registers are excluded.  */
4274 regno_uninitialized (regno)
4277 if (n_basic_blocks == 0
4278 || (regno < FIRST_PSEUDO_REGISTER
4279 && (global_regs[regno]
4280 || fixed_regs[regno]
4281 || FUNCTION_ARG_REGNO_P (regno))))
/* Live at the start of the first basic block == live at entry.  */
4284 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4287 /* 1 if register REGNO was alive at a place where `setjmp' was called
4288 and was set more than once or is an argument.
4289 Such regs may be clobbered by `longjmp'. */
/* Return 1 if REGNO was live at a `setjmp' call and was set more than
   once or is live at function entry -- such registers may be clobbered
   by `longjmp'.  */
4292 regno_clobbered_at_setjmp (regno)
4295 if (n_basic_blocks == 0)
4298 return ((REG_N_SETS (regno) > 1
4299 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4300 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4303 /* INSN references memory, possibly using autoincrement addressing modes.
4304 Find any entries on the mem_set_list that need to be invalidated due
4305 to an address change. */
/* INSN has REG_INC notes (auto-increment addressing).  Remove from
   pbi->mem_set_list every tracked memory location whose address
   mentions an incremented register, since the address has changed.  */
4308 invalidate_mems_from_autoinc (pbi, insn)
4309 struct propagate_block_info *pbi;
/* NOTE(review): this initializer is redundant -- the for loop below
   re-initializes `note' from REG_NOTES (insn).  */
4312 rtx note = REG_NOTES (insn);
4313 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4315 if (REG_NOTE_KIND (note) == REG_INC)
4317 rtx temp = pbi->mem_set_list;
4318 rtx prev = NULL_RTX;
4323 next = XEXP (temp, 1);
4324 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4326 /* Splice temp out of list. */
4328 XEXP (prev, 1) = next;
4330 pbi->mem_set_list = next;
4331 free_EXPR_LIST_node (temp);
4341 /* Process the registers that are set within X, recording each in PBI;
4342 such registers are dead prior to this insn (the scan runs backwards).
4344 If INSN is nonzero, it is the insn being processed.
4346 FLAGS is the set of operations to perform. */
/* Walk X (an insn pattern) and dispatch each SET/CLOBBER destination to
   mark_set_1, threading through any COND_EXEC condition.  REG_INC notes
   on INSN are treated as additional sets.  */
4349 mark_set_regs (pbi, x, insn)
4350 struct propagate_block_info *pbi;
4353 rtx cond = NULL_RTX;
/* Auto-increments recorded in REG_INC notes count as sets of the
   incremented register, conditional if the pattern is COND_EXEC.  */
4358 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4360 if (REG_NOTE_KIND (link) == REG_INC)
4361 mark_set_1 (pbi, SET, XEXP (link, 0),
4362 (GET_CODE (x) == COND_EXEC
4363 ? COND_EXEC_TEST (x) : NULL_RTX),
4367 switch (code = GET_CODE (x))
4371 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
/* COND_EXEC: remember the guard, then reprocess the guarded body.  */
4375 cond = COND_EXEC_TEST (x);
4376 x = COND_EXEC_CODE (x);
4382 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4384 rtx sub = XVECEXP (x, 0, i);
4385 switch (code = GET_CODE (sub))
4388 if (cond != NULL_RTX)
4391 cond = COND_EXEC_TEST (sub);
4392 sub = COND_EXEC_CODE (sub);
4393 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4399 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4414 /* Process a single SET rtx, X. */
/* Process a single SET (or CLOBBER) of REG in INSN, under optional
   condition COND.  Updates liveness, the tracked memory-set list,
   register statistics, LOG_LINKS, and REG_UNUSED notes as selected by
   FLAGS.  NOTE(review): elided listing -- original lines are missing
   between the numbered statements.  */
4417 mark_set_1 (pbi, code, reg, cond, insn, flags)
4418 struct propagate_block_info *pbi;
4420 rtx reg, cond, insn;
4423 int regno_first = -1, regno_last = -1;
4427 /* Some targets place small structures in registers for
4428 return values of functions. We have to detect this
4429 case specially here to get correct flow information. */
4430 if (GET_CODE (reg) == PARALLEL
4431 && GET_MODE (reg) == BLKmode)
4433 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4434 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4438 /* Modifying just one hardware register of a multi-reg value or just a
4439 byte field of a register does not mean the value from before this insn
4440 is now dead. Of course, if it was dead after it's unused now. */
4442 switch (GET_CODE (reg))
4446 case STRICT_LOW_PART:
4447 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4449 reg = XEXP (reg, 0);
4450 while (GET_CODE (reg) == SUBREG
4451 || GET_CODE (reg) == ZERO_EXTRACT
4452 || GET_CODE (reg) == SIGN_EXTRACT
4453 || GET_CODE (reg) == STRICT_LOW_PART);
4454 if (GET_CODE (reg) == MEM)
/* A partial set leaves the register live if it was live before.  */
4456 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4460 regno_last = regno_first = REGNO (reg);
4461 if (regno_first < FIRST_PSEUDO_REGISTER)
4462 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4466 if (GET_CODE (SUBREG_REG (reg)) == REG)
4468 enum machine_mode outer_mode = GET_MODE (reg);
4469 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4471 /* Identify the range of registers affected. This is moderately
4472 tricky for hard registers. See alter_subreg. */
4474 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4475 if (regno_first < FIRST_PSEUDO_REGISTER)
4477 #ifdef ALTER_HARD_SUBREG
4478 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4479 inner_mode, regno_first);
4481 regno_first += SUBREG_WORD (reg);
4483 regno_last = (regno_first
4484 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4486 /* Since we've just adjusted the register number ranges, make
4487 sure REG matches. Otherwise some_was_live will be clear
4488 when it shouldn't have been, and we'll create incorrect
4489 REG_UNUSED notes. */
4490 reg = gen_rtx_REG (outer_mode, regno_first);
4494 /* If the number of words in the subreg is less than the number
4495 of words in the full register, we have a well-defined partial
4496 set. Otherwise the high bits are undefined.
4498 This is only really applicable to pseudos, since we just took
4499 care of multi-word hard registers. */
4500 if (((GET_MODE_SIZE (outer_mode)
4501 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4502 < ((GET_MODE_SIZE (inner_mode)
4503 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4504 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4506 reg = SUBREG_REG (reg);
4510 reg = SUBREG_REG (reg);
4517 /* If this set is a MEM, then it kills any aliased writes.
4518 If this set is a REG, then it kills any MEMs which use the reg. */
4519 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4521 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4523 rtx temp = pbi->mem_set_list;
4524 rtx prev = NULL_RTX;
4529 next = XEXP (temp, 1);
4530 if ((GET_CODE (reg) == MEM
4531 && output_dependence (XEXP (temp, 0), reg))
4532 || (GET_CODE (reg) == REG
4533 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4535 /* Splice this entry out of the list. */
4537 XEXP (prev, 1) = next;
4539 pbi->mem_set_list = next;
4540 free_EXPR_LIST_node (temp);
4548 /* If the memory reference had embedded side effects (autoincrement
4549 address modes), then we may need to kill some entries on the
4551 if (insn && GET_CODE (reg) == MEM)
4552 invalidate_mems_from_autoinc (pbi, insn);
4554 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4555 /* ??? With more effort we could track conditional memory life. */
4557 /* We do not know the size of a BLKmode store, so we do not track
4558 them for redundant store elimination. */
4559 && GET_MODE (reg) != BLKmode
4560 /* There are no REG_INC notes for SP, so we can't assume we'll see
4561 everything that invalidates it. To be safe, don't eliminate any
4562 stores through SP; none of them should be redundant anyway. */
4563 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4564 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
/* Registers critical to frame/arg addressing are never considered
   set-and-dead here.  */
4567 if (GET_CODE (reg) == REG
4568 && ! (regno_first == FRAME_POINTER_REGNUM
4569 && (! reload_completed || frame_pointer_needed))
4570 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4571 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4572 && (! reload_completed || frame_pointer_needed))
4574 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4575 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4579 int some_was_live = 0, some_was_dead = 0;
4581 for (i = regno_first; i <= regno_last; ++i)
4583 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4585 SET_REGNO_REG_SET (pbi->local_set, i);
4586 if (code != CLOBBER)
4587 SET_REGNO_REG_SET (pbi->new_set, i);
4589 some_was_live |= needed_regno;
4590 some_was_dead |= ! needed_regno;
4593 #ifdef HAVE_conditional_execution
4594 /* Consider conditional death in deciding that the register needs
4596 if (some_was_live && ! not_dead
4597 /* The stack pointer is never dead. Well, not strictly true,
4598 but it's very difficult to tell from here. Hopefully
4599 combine_stack_adjustments will fix up the most egregious
4601 && regno_first != STACK_POINTER_REGNUM)
4603 for (i = regno_first; i <= regno_last; ++i)
4604 if (! mark_regno_cond_dead (pbi, i, cond))
4609 /* Additional data to record if this is the final pass. */
4610 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4611 | PROP_DEATH_NOTES | PROP_AUTOINC))
4614 register int blocknum = pbi->bb->index;
4617 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4619 y = pbi->reg_next_use[regno_first];
4621 /* The next use is no longer next, since a store intervenes. */
4622 for (i = regno_first; i <= regno_last; ++i)
4623 pbi->reg_next_use[i] = 0;
4626 if (flags & PROP_REG_INFO)
4628 for (i = regno_first; i <= regno_last; ++i)
4630 /* Count (weighted) references, stores, etc. This counts a
4631 register twice if it is modified, but that is correct. */
4632 REG_N_SETS (i) += 1;
4633 REG_N_REFS (i) += (optimize_size ? 1
4634 : pbi->bb->loop_depth + 1);
4636 /* The insns where a reg is live are normally counted
4637 elsewhere, but we want the count to include the insn
4638 where the reg is set, and the normal counting mechanism
4639 would not count it. */
4640 REG_LIVE_LENGTH (i) += 1;
4643 /* If this is a hard reg, record this function uses the reg. */
4644 if (regno_first < FIRST_PSEUDO_REGISTER)
4646 for (i = regno_first; i <= regno_last; i++)
4647 regs_ever_live[i] = 1;
4651 /* Keep track of which basic blocks each reg appears in. */
4652 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4653 REG_BASIC_BLOCK (regno_first) = blocknum;
4654 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4655 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4659 if (! some_was_dead)
4661 if (flags & PROP_LOG_LINKS)
4663 /* Make a logical link from the next following insn
4664 that uses this register, back to this insn.
4665 The following insns have already been processed.
4667 We don't build a LOG_LINK for hard registers containing
4668 in ASM_OPERANDs. If these registers get replaced,
4669 we might wind up changing the semantics of the insn,
4670 even if reload can make what appear to be valid
4671 assignments later. */
4672 if (y && (BLOCK_NUM (y) == blocknum)
4673 && (regno_first >= FIRST_PSEUDO_REGISTER
4674 || asm_noperands (PATTERN (y)) < 0))
4675 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4680 else if (! some_was_live)
4682 if (flags & PROP_REG_INFO)
4683 REG_N_DEATHS (regno_first) += 1;
4685 if (flags & PROP_DEATH_NOTES)
4687 /* Note that dead stores have already been deleted
4688 when possible. If we get here, we have found a
4689 dead store that cannot be eliminated (because the
4690 same insn does something useful). Indicate this
4691 by marking the reg being set as dying here. */
4693 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4698 if (flags & PROP_DEATH_NOTES)
4700 /* This is a case where we have a multi-word hard register
4701 and some, but not all, of the words of the register are
4702 needed in subsequent insns. Write REG_UNUSED notes
4703 for those parts that were not needed. This case should
4706 for (i = regno_first; i <= regno_last; ++i)
4707 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4709 = alloc_EXPR_LIST (REG_UNUSED,
4710 gen_rtx_REG (reg_raw_mode[i], i),
4716 /* Mark the register as being dead. */
4719 /* The stack pointer is never dead. Well, not strictly true,
4720 but it's very difficult to tell from here. Hopefully
4721 combine_stack_adjustments will fix up the most egregious
4723 && regno_first != STACK_POINTER_REGNUM)
4725 for (i = regno_first; i <= regno_last; ++i)
4726 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
/* Frame/arg-pointer sets: only invalidate next-use information.  */
4729 else if (GET_CODE (reg) == REG)
4731 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4732 pbi->reg_next_use[regno_first] = 0;
4735 /* If this is the last pass and this is a SCRATCH, show it will be dying
4736 here and count it. */
4737 else if (GET_CODE (reg) == SCRATCH)
4739 if (flags & PROP_DEATH_NOTES)
4741 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4745 #ifdef HAVE_conditional_execution
4746 /* Mark REGNO conditionally dead.
4747 Return true if the register is now unconditionally dead. */
4746 /* Mark REGNO conditionally dead.
4747 Return true if the register is now unconditionally dead. */
4750 mark_regno_cond_dead (pbi, regno, cond)
4751 struct propagate_block_info *pbi;
4755 /* If this is a store to a predicate register, the value of the
4756 predicate is changing, we don't know that the predicate as seen
4757 before is the same as that seen after. Flush all dependent
4758 conditions from reg_cond_dead. This will make all such
4759 conditionally live registers unconditionally live. */
4760 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4761 flush_reg_cond_reg (pbi, regno);
4763 /* If this is an unconditional store, remove any conditional
4764 life that may have existed. */
4765 if (cond == NULL_RTX)
4766 splay_tree_remove (pbi->reg_cond_dead, regno);
4769 splay_tree_node node;
4770 struct reg_cond_life_info *rcli;
4773 /* Otherwise this is a conditional set. Record that fact.
4774 It may have been conditionally used, or there may be a
4775 subsequent set with a complementary condition. */
4777 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4780 /* The register was unconditionally live previously.
4781 Record the current condition as the condition under
4782 which it is dead. */
4783 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
4784 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4785 splay_tree_insert (pbi->reg_cond_dead, regno,
4786 (splay_tree_value) rcli);
/* Remember that the condition's register guards others' lives.  */
4788 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
4790 /* Not unconditionally dead. */
4795 /* The register was conditionally live previously.
4796 Add the new condition to the old. */
4797 rcli = (struct reg_cond_life_info *) node->value;
4798 ncond = rcli->condition;
4799 ncond = ior_reg_cond (ncond, cond);
4801 /* If the register is now unconditionally dead,
4802 remove the entry in the splay_tree. */
4803 if (ncond == const1_rtx)
4804 splay_tree_remove (pbi->reg_cond_dead, regno);
4807 rcli->condition = ncond;
4809 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
4811 /* Not unconditionally dead. */
4820 /* Called from splay_tree_delete for pbi->reg_cond_life. */
/* Splay-tree deletion callback: free a reg_cond_life_info VALUE,
   including its condition EXPR_LIST.  NOTE(review): the free of the
   struct itself is presumably on an elided line -- confirm.  */
4823 free_reg_cond_life_info (value)
4824 splay_tree_value value;
4826 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4827 free_EXPR_LIST_list (&rcli->condition);
4831 /* Helper function for flush_reg_cond_reg. */
/* splay_tree_foreach callback for flush_reg_cond_reg.  DATA is an int
   pair: [0] the register number being flushed, [1] the key of the last
   node already handled (traversal resume point).  Removes condition
   terms mentioning the register; signals node removal when the
   condition becomes empty.  */
4834 flush_reg_cond_reg_1 (node, data)
4835 splay_tree_node node;
4838 struct reg_cond_life_info *rcli;
4839 int *xdata = (int *) data;
4840 unsigned int regno = xdata[0];
4843 /* Don't need to search if last flushed value was farther on in
4844 the in-order traversal. */
4845 if (xdata[1] >= (int) node->key)
4848 /* Splice out portions of the expression that refer to regno. */
4849 rcli = (struct reg_cond_life_info *) node->value;
4850 c = *(prev = &rcli->condition);
4853 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4855 rtx next = XEXP (c, 1);
4856 free_EXPR_LIST_node (c);
4860 c = *(prev = &XEXP (c, 1));
4863 /* If the entire condition is now NULL, signal the node to be removed. */
4864 if (! rcli->condition)
4866 xdata[1] = node->key;
4873 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
/* Remove every (sub)condition that mentions REGNO from
   pbi->reg_cond_dead, then clear REGNO from reg_cond_reg.  Each -1
   return from the foreach signals one node to delete before resuming
   the traversal.  */
4876 flush_reg_cond_reg (pbi, regno)
4877 struct propagate_block_info *pbi;
4884 while (splay_tree_foreach (pbi->reg_cond_dead,
4885 flush_reg_cond_reg_1, pair) == -1)
4886 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4888 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4891 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4892 We actually use EXPR_LIST to chain the sub-expressions together
4893 instead of IOR because it's easier to manipulate and we have
4894 the lists.c functions to reuse nodes.
4896 Return a new rtl expression as appropriate. */
/* OR the condition X into the EXPR_LIST chain OLD of conditions.
   Returns OLD unchanged, const1_rtx if the result is always true, or a
   new chain with X prepended.  */
4899 ior_reg_cond (old, x)
4902 enum rtx_code x_code;
4906 /* We expect these conditions to be of the form (eq reg 0). */
4907 x_code = GET_CODE (x);
4908 if (GET_RTX_CLASS (x_code) != '<'
4909 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4910 || XEXP (x, 1) != const0_rtx)
4913 /* Search the expression for an existing sub-expression of X_REG. */
4914 for (c = old; c; c = XEXP (c, 1))
4916 rtx y = XEXP (c, 0);
4917 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4919 /* If we find X already present in OLD, we need do nothing. */
4920 if (GET_CODE (y) == x_code)
4923 /* If we find X being a complement of a condition in OLD,
4924 then the entire condition is true. */
4925 if (GET_CODE (y) == reverse_condition (x_code))
4930 /* Otherwise just add to the chain. */
4931 return alloc_EXPR_LIST (0, x, old);
/* NOTE(review): fragment -- the function header (presumably
   not_reg_cond, negating the condition X) is on elided lines above.
   Builds a one-element EXPR_LIST holding the reversed comparison.  */
4938 enum rtx_code x_code;
4941 /* We expect these conditions to be of the form (eq reg 0). */
4942 x_code = GET_CODE (x);
4943 if (GET_RTX_CLASS (x_code) != '<'
4944 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4945 || XEXP (x, 1) != const0_rtx)
4948 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4949 VOIDmode, x_reg, const0_rtx),
/* Compute OLD AND NOT X over EXPR_LIST condition chains.  Removing a
   matching term can yield const0_rtx (always-false); a complementary
   term means OLD is unchanged.  Otherwise the reversed condition is
   prepended.  */
4954 nand_reg_cond (old, x)
4957 enum rtx_code x_code;
4961 /* We expect these conditions to be of the form (eq reg 0). */
4962 x_code = GET_CODE (x);
4963 if (GET_RTX_CLASS (x_code) != '<'
4964 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4965 || XEXP (x, 1) != const0_rtx)
4968 /* Search the expression for an existing sub-expression of X_REG. */
4970 for (c = *(prev = &old); c; c = *(prev = &XEXP (c, 1)))
4972 rtx y = XEXP (c, 0);
4973 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4975 /* If we find X already present in OLD, then we need to
4977 if (GET_CODE (y) == x_code)
4979 *prev = XEXP (c, 1);
4980 free_EXPR_LIST_node (c);
4981 return old ? old : const0_rtx;
4984 /* If we find X being a complement of a condition in OLD,
4985 then we need do nothing. */
4986 if (GET_CODE (y) == reverse_condition (x_code))
4991 /* Otherwise, by implication, the register in question is now live for
4992 the inverse of the condition X. */
4993 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4994 VOIDmode, x_reg, const0_rtx),
4997 #endif /* HAVE_conditional_execution */
5001 /* Try to substitute the auto-inc expression INC as the address inside
5002 MEM which occurs in INSN. Currently, the address of MEM is an expression
5003 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
5004 that has a single set whose source is a PLUS of INCR_REG and something
/* Try to substitute the auto-inc expression INC as the address inside MEM,
   which occurs in INSN.  INCR is the later insn that adds a constant to
   INCR_REG; on success INCR is turned into a deleted note (or a plain copy)
   and INSN gains a REG_INC note.  See the block comment above (original
   lines 5001-5004) for the full precondition.
   NOTE(review): interior lines of this definition are elided in this
   excerpt; the leading numbers are original-file line numbers.  */
5008 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
5009 struct propagate_block_info *pbi;
5010 rtx inc, insn, mem, incr, incr_reg;
5012 int regno = REGNO (incr_reg);
5013 rtx set = single_set (incr);
5014 rtx q = SET_DEST (set);
5015 rtx y = SET_SRC (set);
/* OPNUM selects which operand of the PLUS in Y is INCR_REG.  */
5016 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
5018 /* Make sure this reg appears only once in this insn.  */
5019 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
5022 if (dead_or_set_p (incr, incr_reg)
5023 /* Mustn't autoinc an eliminable register.  */
5024 && (regno >= FIRST_PSEUDO_REGISTER
5025 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
5027 /* This is the simple case.  Try to make the auto-inc.  If
5028 we can't, we are done.  Otherwise, we will do any
5029 needed updates below.  */
5030 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
5033 else if (GET_CODE (q) == REG
5034 /* PREV_INSN used here to check the semi-open interval
5036 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
5037 /* We must also check for sets of q as q may be
5038 a call clobbered hard register and there may
5039 be a call between PREV_INSN (insn) and incr.  */
5040 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
5042 /* We have *p followed sometime later by q = p+size.
5043 Both p and q must be live afterward,
5044 and q is not used between INSN and its assignment.
5045 Change it to q = p, ...*q..., q = q+size.
5046 Then fall into the usual case.  */
5050 emit_move_insn (q, incr_reg);
5051 insns = get_insns ();
/* Keep the basic-block map consistent for the newly emitted copies.  */
5054 if (basic_block_for_insn)
5055 for (temp = insns; temp; temp = NEXT_INSN (temp))
5056 set_block_for_insn (temp, pbi->bb);
5058 /* If we can't make the auto-inc, or can't make the
5059 replacement into Y, exit.  There's no point in making
5060 the change below if we can't do the auto-inc and doing
5061 so is not correct in the pre-inc case.  */
5064 validate_change (insn, &XEXP (mem, 0), inc, 1);
5065 validate_change (incr, &XEXP (y, opnum), q, 1);
5066 if (! apply_change_group ())
5069 /* We now know we'll be doing this change, so emit the
5070 new insn(s) and do the updates.  */
5071 emit_insns_before (insns, insn);
5073 if (pbi->bb->head == insn)
5074 pbi->bb->head = insns;
5076 /* INCR will become a NOTE and INSN won't contain a
5077 use of INCR_REG.  If a use of INCR_REG was just placed in
5078 the insn before INSN, make that the next use.
5079 Otherwise, invalidate it.  */
5080 if (GET_CODE (PREV_INSN (insn)) == INSN
5081 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
5082 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
5083 pbi->reg_next_use[regno] = PREV_INSN (insn);
5085 pbi->reg_next_use[regno] = 0;
5090 /* REGNO is now used in INCR which is below INSN, but
5091 it previously wasn't live here.  If we don't mark
5092 it as live, we'll put a REG_DEAD note for it
5093 on this insn, which is incorrect.  */
5094 SET_REGNO_REG_SET (pbi->reg_live, regno);
5096 /* If there are any calls between INSN and INCR, show
5097 that REGNO now crosses them.  */
5098 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
5099 if (GET_CODE (temp) == CALL_INSN)
5100 REG_N_CALLS_CROSSED (regno)++;
5105 /* If we haven't returned, it means we were able to make the
5106 auto-inc, so update the status.  First, record that this insn
5107 has an implicit side effect.  */
5109 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
5111 /* Modify the old increment-insn to simply copy
5112 the already-incremented value of our register.  */
5113 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
5116 /* If that makes it a no-op (copying the register into itself) delete
5117 it so it won't appear to be a "use" and a "set" of this
5119 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
5121 /* If the original source was dead, it's dead now.  */
5124 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
5126 remove_note (incr, note);
/* Don't kill liveness of INCR_REG itself; it is still used by INSN.  */
5127 if (XEXP (note, 0) != incr_reg)
5128 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
/* Delete INCR by turning it into a NOTE_INSN_DELETED note.  */
5131 PUT_CODE (incr, NOTE);
5132 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
5133 NOTE_SOURCE_FILE (incr) = 0;
5136 if (regno >= FIRST_PSEUDO_REGISTER)
5138 /* Count an extra reference to the reg.  When a reg is
5139 incremented, spilling it is worse, so we want to make
5140 that less likely.  */
5141 REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);
5143 /* Count the increment as a setting of the register,
5144 even though it isn't a SET in rtl.  */
5145 REG_N_SETS (regno)++;
5149 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
/* X is a MEM found in INSN.  Look at X's address: if it is a register (or
   register plus constant offset) whose next use is a matching increment or
   decrement in the same basic block, try to convert the MEM into one of the
   auto-inc/dec/modify addressing forms via attempt_auto_inc.
   NOTE(review): interior lines of this definition are elided in this
   excerpt; the leading numbers are original-file line numbers.  */
5153 find_auto_inc (pbi, x, insn)
5154 struct propagate_block_info *pbi;
5158 rtx addr = XEXP (x, 0);
5159 HOST_WIDE_INT offset = 0;
5160 rtx set, y, incr, inc_val;
5162 int size = GET_MODE_SIZE (GET_MODE (x));
/* Adding a side effect to a jump insn is not handled here (see the
   comment in try_pre_increment about reload of jump insns).  */
5164 if (GET_CODE (insn) == JUMP_INSN)
5167 /* Here we detect use of an index register which might be good for
5168 postincrement, postdecrement, preincrement, or predecrement.  */
5170 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5171 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5173 if (GET_CODE (addr) != REG)
5176 regno = REGNO (addr);
5178 /* Is the next use an increment that might make auto-increment?  */
5179 incr = pbi->reg_next_use[regno];
5180 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5182 set = single_set (incr);
5183 if (set == 0 || GET_CODE (set) != SET)
5187 if (GET_CODE (y) != PLUS)
/* Find which operand of the PLUS is the increment amount.  */
5190 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5191 inc_val = XEXP (y, 1);
5192 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5193 inc_val = XEXP (y, 0);
/* Constant increment: match the offset/size pattern each auto-inc
   addressing mode requires.  */
5197 if (GET_CODE (inc_val) == CONST_INT)
5199 if (HAVE_POST_INCREMENT
5200 && (INTVAL (inc_val) == size && offset == 0))
5201 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5203 else if (HAVE_POST_DECREMENT
5204 && (INTVAL (inc_val) == -size && offset == 0))
5205 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5207 else if (HAVE_PRE_INCREMENT
5208 && (INTVAL (inc_val) == size && offset == size))
5209 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5211 else if (HAVE_PRE_DECREMENT
5212 && (INTVAL (inc_val) == -size && offset == -size))
5213 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5215 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5216 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5217 gen_rtx_PLUS (Pmode,
5220 insn, x, incr, addr);
/* Register increment: only POST_MODIFY_REG applies, and the increment
   register must not change between INSN and INCR.  */
5222 else if (GET_CODE (inc_val) == REG
5223 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5227 if (HAVE_POST_MODIFY_REG && offset == 0)
5228 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5229 gen_rtx_PLUS (Pmode,
5232 insn, x, incr, addr);
5236 #endif /* AUTO_INC_DEC */
/* Record a use of register REG in INSN: update next-use info, per-register
   statistics, regs_ever_live, REG_DEAD notes, the live set, and (under
   HAVE_conditional_execution) the conditional-death splay tree when the use
   is under condition COND.
   NOTE(review): interior lines of this definition are elided in this
   excerpt; the leading numbers are original-file line numbers.  */
5239 mark_used_reg (pbi, reg, cond, insn)
5240 struct propagate_block_info *pbi;
5242 rtx cond ATTRIBUTE_UNUSED;
5245 int regno = REGNO (reg);
5246 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno)
5247 int some_was_dead = ! some_was_live;
5251 /* A hard reg in a wide mode may really be multiple registers.
5252 If so, mark all of them just like the first.  */
5253 if (regno < FIRST_PSEUDO_REGISTER)
5255 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5258 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5259 some_was_live |= needed_regno;
5260 some_was_dead |= ! needed_regno;
5264 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5266 /* Record where each reg is used, so when the reg is set we know
5267 the next insn that uses it.  */
5268 pbi->reg_next_use[regno] = insn;
5271 if (pbi->flags & PROP_REG_INFO)
5273 if (regno < FIRST_PSEUDO_REGISTER)
5275 /* If this is a register we are going to try to eliminate,
5276 don't mark it live here.  If we are successful in
5277 eliminating it, it need not be live unless it is used for
5278 pseudos, in which case it will have been set live when it
5279 was allocated to the pseudos.  If the register will not
5280 be eliminated, reload will set it live at that point.
5282 Otherwise, record that this function uses this register.  */
5283 /* ??? The PPC backend tries to "eliminate" on the pic
5284 register to itself.  This should be fixed.  In the mean
5285 time, hack around it.  */
5287 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5288 && (regno == FRAME_POINTER_REGNUM
5289 || regno == ARG_POINTER_REGNUM)))
5291 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5293 regs_ever_live[regno + --n] = 1;
5299 /* Keep track of which basic block each reg appears in.  */
5301 register int blocknum = pbi->bb->index;
5302 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5303 REG_BASIC_BLOCK (regno) = blocknum;
5304 else if (REG_BASIC_BLOCK (regno) != blocknum)
5305 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5307 /* Count (weighted) number of uses of each reg.  */
5308 REG_N_REFS (regno) += (optimize_size ? 1
5309 : pbi->bb->loop_depth + 1);
5313 /* Find out if any of the register was set this insn.  */
5314 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5315 if (regno < FIRST_PSEUDO_REGISTER)
5317 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5319 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5322 /* Record and count the insns in which a reg dies.  If it is used in
5323 this insn and was dead below the insn then it dies in this insn.
5324 If it was set in this insn, we do not make a REG_DEAD note;
5325 likewise if we already made such a note.  */
5326 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5330 /* Check for the case where the register dying partially
5331 overlaps the register set by this insn.  */
5332 if (regno < FIRST_PSEUDO_REGISTER
5333 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5335 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5337 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5340 /* If none of the words in X is needed, make a REG_DEAD note.
5341 Otherwise, we must make partial REG_DEAD notes.  */
5342 if (! some_was_live)
5344 if ((pbi->flags & PROP_DEATH_NOTES)
5345 && ! find_regno_note (insn, REG_DEAD, regno))
5347 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5349 if (pbi->flags & PROP_REG_INFO)
5350 REG_N_DEATHS (regno)++;
5354 /* Don't make a REG_DEAD note for a part of a register
5355 that is set in the insn.  */
5357 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5358 for (; n >= regno; n--)
5359 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5360 && ! dead_or_set_regno_p (insn, n))
5362 = alloc_EXPR_LIST (REG_DEAD,
5363 gen_rtx_REG (reg_raw_mode[n], n),
/* Mark the register (and all its hard-reg words) as live.  */
5368 SET_REGNO_REG_SET (pbi->reg_live, regno);
5369 if (regno < FIRST_PSEUDO_REGISTER)
5371 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5373 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5376 #ifdef HAVE_conditional_execution
5377 /* If this is a conditional use, record that fact.  If it is later
5378 conditionally set, we'll know to kill the register.  */
5379 if (cond != NULL_RTX)
5381 splay_tree_node node;
5382 struct reg_cond_life_info *rcli;
5387 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5390 /* The register was unconditionally live previously.
5391 No need to do anything.  */
5395 /* The register was conditionally live previously.
5396 Subtract the new life cond from the old death cond.  */
5397 rcli = (struct reg_cond_life_info *) node->value;
5398 ncond = rcli->condition;
5399 ncond = nand_reg_cond (ncond, cond);
5401 /* If the register is now unconditionally live, remove the
5402 entry in the splay_tree.  */
5403 if (ncond == const0_rtx)
5405 rcli->condition = NULL_RTX;
5406 splay_tree_remove (pbi->reg_cond_dead, regno);
5410 rcli->condition = ncond;
/* Remember that the condition's test register is in use.  */
5411 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5417 /* The register was not previously live at all.  Record
5418 the condition under which it is still dead.  */
5419 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5420 rcli->condition = not_reg_cond (cond);
5421 splay_tree_insert (pbi->reg_cond_dead, regno,
5422 (splay_tree_value) rcli);
5424 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5427 else if (some_was_live)
5429 splay_tree_node node;
5430 struct reg_cond_life_info *rcli;
5432 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5435 /* The register was conditionally live previously, but is now
5436 unconditionally so.  Remove it from the conditionally dead
5437 list, so that a conditional set won't cause us to think
5439 rcli = (struct reg_cond_life_info *) node->value;
5440 rcli->condition = NULL_RTX;
5441 splay_tree_remove (pbi->reg_cond_dead, regno);
5448 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
5449 This is done assuming the registers needed from X are those that
5450 have 1-bits in PBI->REG_LIVE.
5452 INSN is the containing instruction. If INSN is dead, this function
/* Recursively scan expression X (from INSN) and mark every register it
   uses via mark_used_reg, handling MEM addresses, auto-inc discovery,
   SET destinations, ASM_OPERANDS, and COND_EXEC conditions along the way.
   COND is the active conditional-execution condition, or NULL_RTX.
   NOTE(review): interior lines of this definition are elided in this
   excerpt; the leading numbers are original-file line numbers.  */
5456 mark_used_regs (pbi, x, cond, insn)
5457 struct propagate_block_info *pbi;
5460 register RTX_CODE code;
5462 int flags = pbi->flags;
5465 code = GET_CODE (x);
5485 /* If we are clobbering a MEM, mark any registers inside the address
5487 if (GET_CODE (XEXP (x, 0)) == MEM)
5488 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5492 /* Don't bother watching stores to mems if this is not the
5493 final pass.  We'll not be deleting dead stores this round.  */
5494 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5496 /* Invalidate the data for the last MEM stored, but only if MEM is
5497 something that can be stored into.  */
5498 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5499 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5500 /* Needn't clear the memory set list.  */
5504 rtx temp = pbi->mem_set_list;
5505 rtx prev = NULL_RTX;
5510 next = XEXP (temp, 1);
5511 if (anti_dependence (XEXP (temp, 0), x))
5513 /* Splice temp out of the list.  */
5515 XEXP (prev, 1) = next;
5517 pbi->mem_set_list = next;
5518 free_EXPR_LIST_node (temp);
5526 /* If the memory reference had embedded side effects (autoincrement
5527 address modes.  Then we may need to kill some entries on the
5530 invalidate_mems_from_autoinc (pbi, insn);
5534 if (flags & PROP_AUTOINC)
5535 find_auto_inc (pbi, x, insn);
5540 #ifdef CLASS_CANNOT_CHANGE_MODE
/* Note pseudos that appear in a SUBREG whose mode change is not allowed
   for their register class, so reload can avoid such classes.  */
5541 if (GET_CODE (SUBREG_REG (x)) == REG
5542 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5543 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5544 GET_MODE (SUBREG_REG (x))))
5545 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5548 /* While we're here, optimize this case.  */
5550 if (GET_CODE (x) != REG)
5555 /* See a register other than being set => mark it as needed.  */
5556 mark_used_reg (pbi, x, cond, insn);
5561 register rtx testreg = SET_DEST (x);
5564 /* If storing into MEM, don't show it as being used.  But do
5565 show the address as being used.  */
5566 if (GET_CODE (testreg) == MEM)
5569 if (flags & PROP_AUTOINC)
5570 find_auto_inc (pbi, testreg, insn);
5572 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5573 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5577 /* Storing in STRICT_LOW_PART is like storing in a reg
5578 in that this SET might be dead, so ignore it in TESTREG.
5579 but in some other ways it is like using the reg.
5581 Storing in a SUBREG or a bit field is like storing the entire
5582 register in that if the register's value is not used
5583 then this SET is not needed.  */
5584 while (GET_CODE (testreg) == STRICT_LOW_PART
5585 || GET_CODE (testreg) == ZERO_EXTRACT
5586 || GET_CODE (testreg) == SIGN_EXTRACT
5587 || GET_CODE (testreg) == SUBREG)
5589 #ifdef CLASS_CANNOT_CHANGE_MODE
5590 if (GET_CODE (testreg) == SUBREG
5591 && GET_CODE (SUBREG_REG (testreg)) == REG
5592 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5593 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5594 GET_MODE (testreg)))
5595 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5598 /* Modifying a single register in an alternate mode
5599 does not use any of the old value.  But these other
5600 ways of storing in a register do use the old value.  */
5601 if (GET_CODE (testreg) == SUBREG
5602 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
/* Peel off the wrapper and examine what it encloses.  */
5607 testreg = XEXP (testreg, 0);
5610 /* If this is a store into a register, recursively scan the
5611 value being stored.  */
5613 if ((GET_CODE (testreg) == PARALLEL
5614 && GET_MODE (testreg) == BLKmode)
5615 || (GET_CODE (testreg) == REG
5616 && (regno = REGNO (testreg),
5617 ! (regno == FRAME_POINTER_REGNUM
5618 && (! reload_completed || frame_pointer_needed)))
5619 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5620 && ! (regno == HARD_FRAME_POINTER_REGNUM
5621 && (! reload_completed || frame_pointer_needed))
5623 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5624 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5629 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5630 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5637 case UNSPEC_VOLATILE:
5641 /* Traditional and volatile asm instructions must be considered to use
5642 and clobber all hard registers, all pseudo-registers and all of
5643 memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.
5645 Consider for instance a volatile asm that changes the fpu rounding
5646 mode.  An insn should not be moved across this even if it only uses
5647 pseudo-regs because it might give an incorrectly rounded result.
5649 ?!? Unfortunately, marking all hard registers as live causes massive
5650 problems for the register allocator and marking all pseudos as live
5651 creates mountains of uninitialized variable warnings.
5653 So for now, just clear the memory set list and mark any regs
5654 we can find in ASM_OPERANDS as used.  */
5655 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5656 free_EXPR_LIST_list (&pbi->mem_set_list);
5658 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5659 We can not just fall through here since then we would be confused
5660 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
5661 traditional asms unlike their normal usage.  */
5662 if (code == ASM_OPERANDS)
5666 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5667 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5673 if (cond != NULL_RTX)
/* The COND_EXEC test itself is an unconditional use.  */
5676 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5678 cond = COND_EXEC_TEST (x);
5679 x = COND_EXEC_CODE (x);
5683 /* We _do_not_ want to scan operands of phi nodes.  Operands of
5684 a phi function are evaluated only when control reaches this
5685 block along a particular edge.  Therefore, regs that appear
5686 as arguments to phi should not be added to the global live at
5694 /* Recursively scan the operands of this expression.  */
5697 register const char *fmt = GET_RTX_FORMAT (code);
5700 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5704 /* Tail recursive case: save a function call level.  */
5710 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5712 else if (fmt[i] == 'E')
5715 for (j = 0; j < XVECLEN (x, i); j++)
5716 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
/* INSN is an add/sub of a constant to a register (its single_set has a
   PLUS or MINUS source).  If the register's next use in the same block can
   absorb the increment as a pre-inc/pre-dec address via try_pre_increment,
   delete INSN and update the register statistics.
   NOTE(review): interior lines of this definition are elided in this
   excerpt; the leading numbers are original-file line numbers.  */
5725 try_pre_increment_1 (pbi, insn)
5726 struct propagate_block_info *pbi;
5729 /* Find the next use of this reg.  If in same basic block,
5730 make it do pre-increment or pre-decrement if appropriate.  */
5731 rtx x = single_set (insn);
/* AMOUNT is the signed increment: positive for PLUS, negated for MINUS.  */
5732 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5733 * INTVAL (XEXP (SET_SRC (x), 1)));
5734 int regno = REGNO (SET_DEST (x));
5735 rtx y = pbi->reg_next_use[regno];
5737 && SET_DEST (x) != stack_pointer_rtx
5738 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5739 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5740 mode would be better.  */
5741 && ! dead_or_set_p (y, SET_DEST (x))
5742 && try_pre_increment (y, SET_DEST (x), amount))
5744 /* We have found a suitable auto-increment
5745 and already changed insn Y to do it.
5746 So flush this increment-instruction.  */
5747 PUT_CODE (insn, NOTE);
5748 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5749 NOTE_SOURCE_FILE (insn) = 0;
5750 /* Count a reference to this reg for the increment
5751 insn we are deleting.  When a reg is incremented,
5752 spilling it is worse, so we want to make that
5754 if (regno >= FIRST_PSEUDO_REGISTER)
5756 REG_N_REFS (regno) += (optimize_size ? 1
5757 : pbi->bb->loop_depth + 1);
5758 REG_N_SETS (regno)++;
5765 /* Try to change INSN so that it does pre-increment or pre-decrement
5766 addressing on register REG in order to add AMOUNT to REG.
5767 AMOUNT is negative for pre-decrement.
5768 Returns 1 if the change could be made.
5769 This checks all about the validity of the result of modifying INSN. */
/* Try to change INSN so that it does pre-increment or pre-decrement
   addressing on register REG in order to add AMOUNT to REG.
   AMOUNT is negative for pre-decrement.  Returns 1 if the change could
   be made (validate_change accepted the new addressing mode).
   NOTE(review): interior lines of this definition are elided in this
   excerpt; the leading numbers are original-file line numbers.  */
5772 try_pre_increment (insn, reg, amount)
5774 HOST_WIDE_INT amount;
5778 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5779 For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
5781 /* Nonzero if we can try to make a post-increment or post-decrement.
5782 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5783 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5784 supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
5787 /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
5790 /* From the sign of increment, see which possibilities are conceivable
5791 on this target machine.  */
5792 if (HAVE_PRE_INCREMENT && amount > 0)
5794 if (HAVE_POST_INCREMENT && amount > 0)
5797 if (HAVE_PRE_DECREMENT && amount < 0)
5799 if (HAVE_POST_DECREMENT && amount < 0)
5802 if (! (pre_ok || post_ok))
5805 /* It is not safe to add a side effect to a jump insn
5806 because if the incremented register is spilled and must be reloaded
5807 there would be no way to store the incremented value back in memory.  */
5809 if (GET_CODE (insn) == JUMP_INSN)
/* First look for a plain (mem (reg)) use: that matches post-inc/dec.  */
5814 use = find_use_as_address (PATTERN (insn), reg, 0);
5815 if (post_ok && (use == 0 || use == (rtx) 1))
/* Otherwise look for (mem (plus reg -amount)): that matches pre-inc/dec.  */
5817 use = find_use_as_address (PATTERN (insn), reg, -amount);
5821 if (use == 0 || use == (rtx) 1)
/* The access size must match the increment amount exactly.  */
5824 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5827 /* See if this combination of instruction and addressing mode exists.  */
5828 if (! validate_change (insn, &XEXP (use, 0),
5829 gen_rtx_fmt_e (amount > 0
5830 ? (do_post ? POST_INC : PRE_INC)
5831 : (do_post ? POST_DEC : PRE_DEC),
5835 /* Record that this insn now has an implicit side effect on X.  */
5836 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5840 #endif /* AUTO_INC_DEC */
5842 /* Find the place in the rtx X where REG is used as a memory address.
5843 Return the MEM rtx that so uses it.
5844 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5845 (plus REG (const_int PLUSCONST)).
5847 If such an address does not appear, return 0.
5848 If REG appears more than once, or is used other than in such an address,
/* Find the place in rtx X where REG is used as a memory address and return
   that MEM.  With nonzero PLUSCONST, search instead for an address of the
   form (plus REG (const_int PLUSCONST)).  Returns 0 if no such use appears,
   or the sentinel (rtx) 1 if REG is used more than once or in any other way.
   NOTE(review): interior lines of this definition are elided in this
   excerpt; the leading numbers are original-file line numbers.  */
5852 find_use_as_address (x, reg, plusconst)
5855 HOST_WIDE_INT plusconst;
5857 enum rtx_code code = GET_CODE (x);
5858 const char *fmt = GET_RTX_FORMAT (code);
5860 register rtx value = 0;
/* Direct hit: (mem (reg)) with no required offset.  */
5863 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
/* Offset hit: (mem (plus reg (const_int plusconst))).  */
5866 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5867 && XEXP (XEXP (x, 0), 0) == reg
5868 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5869 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5872 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5874 /* If REG occurs inside a MEM used in a bit-field reference,
5875 that is unacceptable.  */
5876 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5877 return (rtx) (HOST_WIDE_INT) 1;
5881 return (rtx) (HOST_WIDE_INT) 1;
/* Recurse over the operands; more than one qualifying use (or any other
   kind of use of REG) collapses the answer to the (rtx) 1 sentinel.  */
5883 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5887 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5891 return (rtx) (HOST_WIDE_INT) 1;
5893 else if (fmt[i] == 'E')
5896 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5898 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5902 return (rtx) (HOST_WIDE_INT) 1;
5910 /* Write information about registers and basic blocks into FILE.
5911 This is part of making a debugging dump. */
/* Print register set R to stream OUTF, one " %d" per member, with the
   hard-register name in brackets for fixed registers; " (nil)" if R is
   null.  NOTE(review): interior lines of this definition are elided in
   this excerpt; the leading numbers are original-file line numbers.  */
5914 dump_regset (r, outf)
5921 fputs (" (nil)", outf);
5925 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5927 fprintf (outf, " %d", i);
5928 if (i < FIRST_PSEUDO_REGISTER)
5929 fprintf (outf, " [%s]",
5938 dump_regset (r, stderr);
5939 putc ('\n', stderr);
/* Write per-pseudo-register statistics and per-basic-block CFG information
   (head/end insn uids, loop depth, count, edges, live regsets) to FILE,
   as part of a debugging dump.
   NOTE(review): interior lines of this definition are elided in this
   excerpt; the leading numbers are original-file line numbers.  */
5943 dump_flow_info (file)
5947 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5949 fprintf (file, "%d registers.\n", max_regno);
5950 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5953 enum reg_class class, altclass;
5954 fprintf (file, "\nRegister %d used %d times across %d insns",
5955 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5956 if (REG_BASIC_BLOCK (i) >= 0)
5957 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5959 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5960 (REG_N_SETS (i) == 1) ? "" : "s");
5961 if (REG_USERVAR_P (regno_reg_rtx[i]))
5962 fprintf (file, "; user var");
5963 if (REG_N_DEATHS (i) != 1)
5964 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5965 if (REG_N_CALLS_CROSSED (i) == 1)
5966 fprintf (file, "; crosses 1 call");
5967 else if (REG_N_CALLS_CROSSED (i))
5968 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5969 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5970 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
/* Report preferred/alternate register classes unless both defaults.  */
5971 class = reg_preferred_class (i);
5972 altclass = reg_alternate_class (i);
5973 if (class != GENERAL_REGS || altclass != ALL_REGS)
5975 if (altclass == ALL_REGS || class == ALL_REGS)
5976 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5977 else if (altclass == NO_REGS)
5978 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5980 fprintf (file, "; pref %s, else %s",
5981 reg_class_names[(int) class],
5982 reg_class_names[(int) altclass]);
5984 if (REGNO_POINTER_FLAG (i))
5985 fprintf (file, "; pointer");
5986 fprintf (file, ".\n");
5989 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5990 for (i = 0; i < n_basic_blocks; i++)
5992 register basic_block bb = BASIC_BLOCK (i);
5995 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
5996 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
5998 fprintf (file, "Predecessors: ");
5999 for (e = bb->pred; e; e = e->pred_next)
6000 dump_edge_info (file, e, 0);
6002 fprintf (file, "\nSuccessors: ");
6003 for (e = bb->succ; e; e = e->succ_next)
6004 dump_edge_info (file, e, 1);
6006 fprintf (file, "\nRegisters live at start:");
6007 dump_regset (bb->global_live_at_start, file);
6009 fprintf (file, "\nRegisters live at end:");
6010 dump_regset (bb->global_live_at_end, file);
6021 dump_flow_info (stderr);
/* Print edge E to FILE.  DO_SUCC nonzero means E is being printed as a
   successor edge (show the destination block); zero means predecessor
   (show the source block).  Also prints the edge count and symbolic flag
   names.  NOTE(review): interior lines of this definition are elided in
   this excerpt; the leading numbers are original-file line numbers.  */
6025 dump_edge_info (file, e, do_succ)
6030 basic_block side = (do_succ ? e->dest : e->src);
6032 if (side == ENTRY_BLOCK_PTR)
6033 fputs (" ENTRY", file);
6034 else if (side == EXIT_BLOCK_PTR)
6035 fputs (" EXIT", file);
6037 fprintf (file, " %d", side->index);
6040 fprintf (file, " count:%d", e->count);
6044 static const char * const bitnames[] = {
6045 "fallthru", "crit", "ab", "abcall", "eh", "fake"
/* Walk the flag bits low-to-high, naming each set bit; bits beyond the
   name table are printed numerically.  */
6048 int i, flags = e->flags;
6052 for (i = 0; flags; i++)
6053 if (flags & (1 << i))
6059 if (i < (int) ARRAY_SIZE (bitnames))
6060 fputs (bitnames[i], file);
6062 fprintf (file, "%d", i);
6069 /* Print out one basic block with live information at start and end. */
6080 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
6081 bb->index, bb->loop_depth, bb->count);
6082 if (bb->eh_beg != -1 || bb->eh_end != -1)
6083 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
6086 fputs (";; Predecessors: ", outf);
6087 for (e = bb->pred; e; e = e->pred_next)
6088 dump_edge_info (outf, e, 0);
6091 fputs (";; Registers live at start:", outf);
6092 dump_regset (bb->global_live_at_start, outf);
6095 for (insn = bb->head, last = NEXT_INSN (bb->end);
6097 insn = NEXT_INSN (insn))
6098 print_rtl_single (outf, insn);
6100 fputs (";; Registers live at end:", outf);
6101 dump_regset (bb->global_live_at_end, outf);
6104 fputs (";; Successors: ", outf);
6105 for (e = bb->succ; e; e = e->succ_next)
6106 dump_edge_info (outf, e, 1);
6114 dump_bb (bb, stderr);
6121 dump_bb (BASIC_BLOCK (n), stderr);
6124 /* Like print_rtl, but also print out live information for the start of each
/* Like print_rtl, but annotate the output: print live-register info at the
   start and end of each basic block, and flag insns that belong to no block
   or to multiple blocks.  RTX_FIRST is the head of the insn chain; a null
   chain prints "(nil)".
   NOTE(review): interior lines of this definition are elided in this
   excerpt; the leading numbers are original-file line numbers.  */
6128 print_rtl_with_bb (outf, rtx_first)
6132 register rtx tmp_rtx;
6135 fprintf (outf, "(nil)\n");
6139 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
6140 int max_uid = get_max_uid ();
/* Per-uid maps: block starting/ending at that insn, and membership state.  */
6141 basic_block *start = (basic_block *)
6142 xcalloc (max_uid, sizeof (basic_block));
6143 basic_block *end = (basic_block *)
6144 xcalloc (max_uid, sizeof (basic_block));
6145 enum bb_state *in_bb_p = (enum bb_state *)
6146 xcalloc (max_uid, sizeof (enum bb_state));
6148 for (i = n_basic_blocks - 1; i >= 0; i--)
6150 basic_block bb = BASIC_BLOCK (i);
6153 start[INSN_UID (bb->head)] = bb;
6154 end[INSN_UID (bb->end)] = bb;
6155 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6157 enum bb_state state = IN_MULTIPLE_BB;
6158 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
6160 in_bb_p[INSN_UID (x)] = state;
6167 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
6172 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
6174 fprintf (outf, ";; Start of basic block %d, registers live:",
6176 dump_regset (bb->global_live_at_start, outf);
6180 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
6181 && GET_CODE (tmp_rtx) != NOTE
6182 && GET_CODE (tmp_rtx) != BARRIER)
6183 fprintf (outf, ";; Insn is not within a basic block\n");
6184 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
6185 fprintf (outf, ";; Insn is in multiple basic blocks\n");
6187 did_output = print_rtl_single (outf, tmp_rtx);
6189 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6191 fprintf (outf, ";; End of basic block %d, registers live:\n",
6193 dump_regset (bb->global_live_at_end, outf);
6206 if (current_function_epilogue_delay_list != 0)
6208 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6209 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6210 tmp_rtx = XEXP (tmp_rtx, 1))
6211 print_rtl_single (outf, XEXP (tmp_rtx, 0));
6215 /* Compute dominator relationships using new flow graph structures. */
6218 compute_flow_dominators (dominators, post_dominators)
6219 sbitmap *dominators;
6220 sbitmap *post_dominators;
6223 sbitmap *temp_bitmap;
6225 basic_block *worklist, *workend, *qin, *qout;
6228 /* Allocate a worklist array/queue. Entries are only added to the
6229 list if they were not already on the list. So the size is
6230 bounded by the number of basic blocks. */
6231 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
6232 workend = &worklist[n_basic_blocks];
6234 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6235 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
6239 /* The optimistic setting of dominators requires us to put every
6240 block on the work list initially. */
6241 qin = qout = worklist;
6242 for (bb = 0; bb < n_basic_blocks; bb++)
6244 *qin++ = BASIC_BLOCK (bb);
6245 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6247 qlen = n_basic_blocks;
6250 /* We want a maximal solution, so initially assume everything dominates
6252 sbitmap_vector_ones (dominators, n_basic_blocks);
6254 /* Mark successors of the entry block so we can identify them below. */
6255 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6256 e->dest->aux = ENTRY_BLOCK_PTR;
6258 /* Iterate until the worklist is empty. */
6261 /* Take the first entry off the worklist. */
6262 basic_block b = *qout++;
6263 if (qout >= workend)
6269 /* Compute the intersection of the dominators of all the
6272 If one of the predecessor blocks is the ENTRY block, then the
6273 intersection of the dominators of the predecessor blocks is
6274 defined as the null set. We can identify such blocks by the
6275 special value in the AUX field in the block structure. */
6276 if (b->aux == ENTRY_BLOCK_PTR)
6278 /* Do not clear the aux field for blocks which are
6279 successors of the ENTRY block. That way we never add
6280 them to the worklist again.
6282 The intersect of dominators of the preds of this block is
6283 defined as the null set. */
6284 sbitmap_zero (temp_bitmap[bb]);
6288 /* Clear the aux field of this block so it can be added to
6289 the worklist again if necessary. */
6291 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
6294 /* Make sure each block always dominates itself. */
6295 SET_BIT (temp_bitmap[bb], bb);
6297 /* If the out state of this block changed, then we need to
6298 add the successors of this block to the worklist if they
6299 are not already on the worklist. */
6300 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
6302 for (e = b->succ; e; e = e->succ_next)
6304 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
6318 if (post_dominators)
6320 /* The optimistic setting of dominators requires us to put every
6321 block on the work list initially. */
6322 qin = qout = worklist;
6323 for (bb = 0; bb < n_basic_blocks; bb++)
6325 *qin++ = BASIC_BLOCK (bb);
6326 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6328 qlen = n_basic_blocks;
6331 /* We want a maximal solution, so initially assume everything post
6332 dominates everything else. */
6333 sbitmap_vector_ones (post_dominators, n_basic_blocks);
6335 /* Mark predecessors of the exit block so we can identify them below. */
6336 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6337 e->src->aux = EXIT_BLOCK_PTR;
6339 /* Iterate until the worklist is empty. */
6342 /* Take the first entry off the worklist. */
6343 basic_block b = *qout++;
6344 if (qout >= workend)
6350 /* Compute the intersection of the post dominators of all the
6353 If one of the successor blocks is the EXIT block, then the
6354 intersection of the dominators of the successor blocks is
6355 defined as the null set. We can identify such blocks by the
6356 special value in the AUX field in the block structure. */
6357 if (b->aux == EXIT_BLOCK_PTR)
6359 /* Do not clear the aux field for blocks which are
6360 predecessors of the EXIT block. That way we never
6361 add them to the worklist again.
6363 The intersect of dominators of the succs of this block is
6364 defined as the null set. */
6365 sbitmap_zero (temp_bitmap[bb]);
6369 /* Clear the aux field of this block so it can be added to
6370 the worklist again if necessary. */
6372 sbitmap_intersection_of_succs (temp_bitmap[bb],
6373 post_dominators, bb);
6376 /* Make sure each block always post dominates itself. */
6377 SET_BIT (temp_bitmap[bb], bb);
6379 /* If the out state of this block changed, then we need to
6380 add the successors of this block to the worklist if they
6381 are not already on the worklist. */
6382 if (sbitmap_a_and_b (post_dominators[bb],
6383 post_dominators[bb],
6386 for (e = b->pred; e; e = e->pred_next)
6388 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6406 /* Given DOMINATORS, compute the immediate dominators into IDOM. If a
6407 block dominates only itself, its entry remains as INVALID_BLOCK.
6408 
6409 Algorithm: for each block B, start from dom(B) - {B} and subtract the
6410 dominator sets of every remaining member; exactly one bit survives,
6411 and that is B's immediate dominator. NOTE(review): quadratic in the
6412 number of blocks per the visible loops — presumably acceptable here. */
6410 compute_immediate_dominators (idom, dominators)
6412 sbitmap *dominators;
6417 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6419 /* Begin with tmp(n) = dom(n) - { n }. */
6420 for (b = n_basic_blocks; --b >= 0;)
6422 sbitmap_copy (tmp[b], dominators[b]);
6423 RESET_BIT (tmp[b], b);
6426 /* Subtract out all of our dominator's dominators. */
6427 for (b = n_basic_blocks; --b >= 0;)
6429 sbitmap tmp_b = tmp[b];
6432 for (s = n_basic_blocks; --s >= 0;)
6433 if (TEST_BIT (tmp_b, s))
6434 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6437 /* Find the one bit set in the bitmap and put it in the output array. */
6438 for (b = n_basic_blocks; --b >= 0;)
6441 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6444 sbitmap_vector_free (tmp);
6447 /* Given POSTDOMINATORS, compute the immediate postdominators into
6448 IDOM. If a block is only dominated by itself, its entry remains as
6449 
6450 The reduction from a full (post)dominator relation to the immediate
6451 one is identical on the forward and reverse graphs, so this simply
6452 delegates to compute_immediate_dominators. */
6452 compute_immediate_postdominators (idom, postdominators)
6454 sbitmap *postdominators;
6456 compute_immediate_dominators (idom, postdominators);
6459 /* Recompute register set/reference counts immediately prior to register
6460 allocation.
6461 
6462 This avoids problems with set/reference counts changing to/from values
6463 which have special meanings to the register allocators.
6465 Additionally, the reference counts are the primary component used by the
6466 register allocators to prioritize pseudos for allocation to hard regs.
6467 More accurate reference counts generally lead to better register allocation.
6469 F is the first insn to be scanned.
6471 LOOP_STEP denotes how much loop_depth should be incremented per
6472 loop nesting level in order to increase the ref count more for
6473 references in a loop.
6475 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6476 possibly other information which is used by the register allocators.
6477 
6478 Note both parameters are ATTRIBUTE_UNUSED: the work is done entirely
6479 by re-running local life analysis with PROP_REG_INFO below. */
6479 recompute_reg_usage (f, loop_step)
6480 rtx f ATTRIBUTE_UNUSED;
6481 int loop_step ATTRIBUTE_UNUSED;
6483 allocate_reg_life_data ();
6484 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6487 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6488 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6489 of the number of registers that died.
6490 
6491 KILL nonzero means the matching notes are unlinked and freed; zero
6492 means they are only counted. Hard registers are counted once per
6493 hard register they span (HARD_REGNO_NREGS below). */
6492 count_or_remove_death_notes (blocks, kill)
6498 for (i = n_basic_blocks - 1; i >= 0; --i)
6503 if (blocks && ! TEST_BIT (blocks, i))
6506 bb = BASIC_BLOCK (i);
6508 for (insn = bb->head;; insn = NEXT_INSN (insn))
6509 /* NOTE(review): "®_NOTES" below looks like a mis-encoded
6510 "&REG_NOTES" — confirm against the upstream source. */
6512 rtx *pprev = ®_NOTES (insn);
6517 switch (REG_NOTE_KIND (link))
6520 if (GET_CODE (XEXP (link, 0)) == REG)
6522 rtx reg = XEXP (link, 0);
6525 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6528 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6535 /* Unlink and free the note, keeping PPREV valid. */
6536 rtx next = XEXP (link, 1);
6537 free_EXPR_LIST_node (link);
6538 *pprev = link = next;
6544 pprev = &XEXP (link, 1);
6550 /* Stop after processing the block's last insn. */
6551 if (insn == bb->end)
6560 /* Update insns block within BB.
6561 
6562 Walks every insn from bb->head to bb->end and records BB as its
6563 owning block via set_block_for_insn. A no-op when the
6564 basic_block_for_insn map has not been allocated. */
6563 update_bb_for_insn (bb)
6568 if (! basic_block_for_insn)
6571 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6573 set_block_for_insn (insn, bb);
6575 if (insn == bb->end)
6581 /* Record INSN's block as BB.
6582 
6583 Grows the basic_block_for_insn varray on demand so it can be indexed
6584 by any insn UID. */
6584 set_block_for_insn (insn, bb)
6588 size_t uid = INSN_UID (insn);
6589 if (uid >= basic_block_for_insn->num_elements)
6593 /* Add one-eighth the size so we don't keep calling xrealloc. */
6594 new_size = uid + (uid + 7) / 8;
6596 VARRAY_GROW (basic_block_for_insn, new_size);
6598 VARRAY_BB (basic_block_for_insn, uid) = bb;
6601 /* Record INSN's block number as BB. Thin wrapper mapping a block
6601 index to its basic_block structure. */
6602 /* ??? This has got to go. */
6605 set_block_num (insn, bb)
6609 set_block_for_insn (insn, BASIC_BLOCK (bb));
6612 /* Verify the CFG consistency. This function check some CFG invariants and
6613 aborts when something is wrong. Hope that this function will help to
6614 convert many optimization passes to preserve CFG consistency.
6616 Currently it does following checks:
6618 - test head/end pointers
6619 - overlapping of basic blocks
6620 - edge list correctness
6621 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6622 - tails of basic blocks (ensure that boundary is necessary)
6623 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6624 and NOTE_INSN_BASIC_BLOCK
6625 - check that all insns are in the basic blocks
6626 (except the switch handling code, barriers and notes)
6627 - check that all returns are followed by barriers
6629 In future it can be extended check a lot of other stuff as well
6630 (reachability of basic blocks, life information, etc. etc.). */
6635 const int max_uid = get_max_uid ();
6636 const rtx rtx_first = get_insns ();
6637 rtx last_head = get_last_insn ();
6638 basic_block *bb_info;
6640 int i, last_bb_num_seen, num_bb_notes, err = 0;
6641 /* BB_INFO maps each insn UID to its containing block; used both to
6642 detect overlap and later to check for stray insns. */
6642 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6643 /* Pass 1: walk each block backwards through the insn chain to prove
6644 its end and head are really in the stream, in order. */
6644 for (i = n_basic_blocks - 1; i >= 0; i--)
6646 basic_block bb = BASIC_BLOCK (i);
6647 rtx head = bb->head;
6650 /* Verify the end of the basic block is in the INSN chain. */
6651 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
6656 error ("End insn %d for block %d not found in the insn stream.",
6657 INSN_UID (end), bb->index);
6661 /* Work backwards from the end to the head of the basic block
6662 to verify the head is in the RTL chain. */
6663 for (; x != NULL_RTX; x = PREV_INSN (x))
6665 /* While walking over the insn chain, verify insns appear
6666 in only one basic block and initialize the BB_INFO array
6667 used by other passes. */
6668 if (bb_info[INSN_UID (x)] != NULL)
6670 error ("Insn %d is in multiple basic blocks (%d and %d)",
6671 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6674 bb_info[INSN_UID (x)] = bb;
6681 error ("Head insn %d for block %d not found in the insn stream.",
6682 INSN_UID (head), bb->index);
6689 /* Now check the basic blocks (boundaries etc.) */
6690 for (i = n_basic_blocks - 1; i >= 0; i--)
6692 basic_block bb = BASIC_BLOCK (i);
6693 /* Check correctness of edge lists: every succ edge must appear on
6694 its destination's pred list, and vice versa. */
6702 "verify_flow_info: Basic block %d succ edge is corrupted\n",
6704 fprintf (stderr, "Predecessor: ");
6705 dump_edge_info (stderr, e, 0);
6706 fprintf (stderr, "\nSuccessor: ");
6707 dump_edge_info (stderr, e, 1);
6711 if (e->dest != EXIT_BLOCK_PTR)
6713 edge e2 = e->dest->pred;
6714 while (e2 && e2 != e)
6718 error ("Basic block %i edge lists are corrupted", bb->index);
6730 error ("Basic block %d pred edge is corrupted", bb->index);
6731 fputs ("Predecessor: ", stderr);
6732 dump_edge_info (stderr, e, 0);
6733 fputs ("\nSuccessor: ", stderr);
6734 dump_edge_info (stderr, e, 1);
6735 fputc ('\n', stderr);
6738 if (e->src != ENTRY_BLOCK_PTR)
6740 edge e2 = e->src->succ;
6741 while (e2 && e2 != e)
6745 error ("Basic block %i edge lists are corrupted", bb->index);
6752 /* OK pointers are correct. Now check the header of basic
6753 block. It ought to contain optional CODE_LABEL followed
6754 by NOTE_BASIC_BLOCK. */
6756 if (GET_CODE (x) == CODE_LABEL)
6760 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6766 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
6768 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6775 /* Do checks for empty blocks here */
6781 /* A basic-block note in the middle of a block means the block
6782 boundaries are wrong. */
6782 if (NOTE_INSN_BASIC_BLOCK_P (x))
6784 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6785 INSN_UID (x), bb->index);
6791 /* Control-flow insns may only terminate a block, never sit
6792 inside one. */
6792 if (GET_CODE (x) == JUMP_INSN
6793 || GET_CODE (x) == CODE_LABEL
6794 || GET_CODE (x) == BARRIER)
6796 error ("In basic block %d:", bb->index);
6797 fatal_insn ("Flow control insn inside a basic block", x);
6804 /* Pass 3: scan the whole insn stream once more, checking note
6805 numbering and insns that belong to no block. */
6805 last_bb_num_seen = -1;
6810 if (NOTE_INSN_BASIC_BLOCK_P (x))
6812 basic_block bb = NOTE_BASIC_BLOCK (x);
6814 if (bb->index != last_bb_num_seen + 1)
6815 fatal ("Basic blocks not numbered consecutively");
6816 last_bb_num_seen = bb->index;
6819 if (!bb_info[INSN_UID (x)])
6821 switch (GET_CODE (x))
6828 /* An addr_vec is placed outside any block. */
6830 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6831 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6832 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC)
6837 /* But in any case, non-deletable labels can appear anywhere. */
6841 fatal_insn ("Insn outside basic block", x);
6845 /* An unconditional return must be immediately followed by a
6846 barrier, or the stream is inconsistent. */
6846 && GET_CODE (x) == JUMP_INSN
6847 && returnjump_p (x) && ! condjump_p (x)
6848 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER)
6849 fatal_insn ("Return not followed by barrier", x);
6854 if (num_bb_notes != n_basic_blocks)
6855 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6856 num_bb_notes, n_basic_blocks);
6865 /* Functions to access an edge list with a vector representation.
6866 Enough data is kept such that given an index number, the
6867 pred and succ that edge represents can be determined, or
6868 given a pred and a succ, its index number can be returned.
6869 This allows algorithms which consume a lot of memory to
6870 represent the normally full matrix of edge (pred,succ) with a
6871 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6872 wasted space in the client code due to sparse flow graphs. */
6874 /* This functions initializes the edge list. Basically the entire
6875 flowgraph is processed, and all edges are assigned a number,
6876 and the data structure is filled in.
6877 
6878 Returns a freshly-allocated edge_list; the caller owns it and must
6879 release it with free_edge_list. */
6881 struct edge_list *elist;
6887 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6891 /* Determine the number of edges in the flow graph by counting successor
6892 edges on each basic block. */
6893 for (x = 0; x < n_basic_blocks; x++)
6895 basic_block bb = BASIC_BLOCK (x);
6897 for (e = bb->succ; e; e = e->succ_next)
6900 /* Don't forget successors of the entry block. */
6901 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6904 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6905 elist->num_blocks = block_count;
6906 elist->num_edges = num_edges;
6907 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6910 /* Second pass: fill the vector in the same traversal order used to
6911 count, so indices are stable. */
6911 /* Follow successors of the entry block, and register these edges. */
6912 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6914 elist->index_to_edge[num_edges] = e;
6918 for (x = 0; x < n_basic_blocks; x++)
6920 basic_block bb = BASIC_BLOCK (x);
6922 /* Follow all successors of blocks, and register these edges. */
6923 for (e = bb->succ; e; e = e->succ_next)
6925 elist->index_to_edge[num_edges] = e;
6932 /* This function free's memory associated with an edge list.
6933 Releases the index vector; the edges themselves belong to the CFG
6934 and are not freed here. */
6935 free_edge_list (elist)
6936 struct edge_list *elist;
6940 free (elist->index_to_edge);
6945 /* This function provides debug output showing an edge list.
6946 F is the stream to print to; ELIST the list built by
6947 create_edge_list. Entry/exit blocks are printed symbolically. */
6948 print_edge_list (f, elist)
6950 struct edge_list *elist;
6953 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6954 elist->num_blocks - 2, elist->num_edges);
6956 for (x = 0; x < elist->num_edges; x++)
6958 fprintf (f, " %-4d - edge(", x);
6959 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6960 fprintf (f, "entry,");
6962 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6964 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6965 fprintf (f, "exit)\n");
6967 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6971 /* This function provides an internal consistency check of an edge list,
6972 verifying that all edges are present, and that there are no
6973 spurious entries.
6974 
6975 Diagnostics are printed to stream F; nothing is fixed. */
6976 verify_edge_list (f, elist)
6978 struct edge_list *elist;
6980 int x, pred, succ, index;
6982 /* Forward direction: every CFG edge must have an index, and the
6983 index must map back to the same pred/succ pair. */
6983 for (x = 0; x < n_basic_blocks; x++)
6985 basic_block bb = BASIC_BLOCK (x);
6987 for (e = bb->succ; e; e = e->succ_next)
6989 pred = e->src->index;
6990 succ = e->dest->index;
6991 index = EDGE_INDEX (elist, e->src, e->dest);
6992 if (index == EDGE_INDEX_NO_EDGE)
6994 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6997 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6998 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6999 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7000 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7001 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7002 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7004 /* Same check for the entry block's successor edges. */
7005 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7007 pred = e->src->index;
7008 succ = e->dest->index;
7009 index = EDGE_INDEX (elist, e->src, e->dest);
7010 if (index == EDGE_INDEX_NO_EDGE)
7012 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7015 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7016 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7017 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7018 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7019 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7020 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7022 /* We've verified that all the edges are in the list, now let's make sure
7023 there are no spurious edges in the list. This check is O(blocks^2)
7024 over all (pred,succ) pairs, comparing list membership against the
7025 actual succ/pred chains in both directions. */
7025 for (pred = 0; pred < n_basic_blocks; pred++)
7026 for (succ = 0; succ < n_basic_blocks; succ++)
7028 basic_block p = BASIC_BLOCK (pred);
7029 basic_block s = BASIC_BLOCK (succ);
7033 for (e = p->succ; e; e = e->succ_next)
7039 for (e = s->pred; e; e = e->pred_next)
7045 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7046 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7047 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
7049 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7050 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7051 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
7052 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7053 BASIC_BLOCK (succ)));
7054 /* Pairs involving the entry block as predecessor. */
7055 for (succ = 0; succ < n_basic_blocks; succ++)
7057 basic_block p = ENTRY_BLOCK_PTR;
7058 basic_block s = BASIC_BLOCK (succ);
7062 for (e = p->succ; e; e = e->succ_next)
7068 for (e = s->pred; e; e = e->pred_next)
7074 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7075 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7076 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
7078 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7079 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7080 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
7081 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
7082 BASIC_BLOCK (succ)));
7083 /* Pairs involving the exit block as successor. */
7084 for (pred = 0; pred < n_basic_blocks; pred++)
7086 basic_block p = BASIC_BLOCK (pred);
7087 basic_block s = EXIT_BLOCK_PTR;
7091 for (e = p->succ; e; e = e->succ_next)
7097 for (e = s->pred; e; e = e->pred_next)
7103 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7104 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7105 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
7107 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7108 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7109 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
7110 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7115 /* This routine will determine what, if any, edge there is between
7116 a specified predecessor and successor.
7117 
7118 Linear scan of the edge list; returns the edge's index, or
7119 EDGE_INDEX_NO_EDGE when PRED->SUCC is not in EDGE_LIST. */
7119 find_edge_index (edge_list, pred, succ)
7120 struct edge_list *edge_list;
7121 basic_block pred, succ;
7124 for (x = 0; x < NUM_EDGES (edge_list); x++)
7126 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
7127 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
7130 return (EDGE_INDEX_NO_EDGE);
7133 /* This function will remove an edge from the flow graph.
7134 
7135 Unlinks E from both its source's successor chain and its
7136 destination's predecessor chain, tracking the previous list node
7137 so singly-linked lists can be patched. */
7139 edge last_pred = NULL;
7140 edge last_succ = NULL;
7142 basic_block src, dest;
7144 /* Find E's predecessor on the src->succ chain. */
7145 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
7151 last_succ->succ_next = e->succ_next;
7153 src->succ = e->succ_next;
7154 /* And likewise on the dest->pred chain. */
7155 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
7161 last_pred->pred_next = e->pred_next;
7163 dest->pred = e->pred_next;
7169 /* This routine will remove any fake successor edges for a basic block.
7170 When the edge is removed, it is also removed from whatever predecessor
7171 list it is in. */
7174 remove_fake_successors (bb)
7178 for (e = bb->succ; e;)
7181 /* Advance before removal so the iteration survives unlinking. */
7182 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
7187 /* This routine will remove all fake edges from the flow graph. If
7188 we remove all fake successors, it will automatically remove all
7189 fake predecessors. */
7192 remove_fake_edges ()
7196 for (x = 0; x < n_basic_blocks; x++)
7197 remove_fake_successors (BASIC_BLOCK (x));
7199 /* We've handled all successors except the entry block's. */
7200 remove_fake_successors (ENTRY_BLOCK_PTR);
7203 /* This function will add a fake edge between any block which has no
7204 successors, and the exit block. Some data flow equations require these
7205 edges to exist. Undo with remove_fake_edges. */
7208 add_noreturn_fake_exit_edges ()
7212 for (x = 0; x < n_basic_blocks; x++)
7213 if (BASIC_BLOCK (x)->succ == NULL)
7214 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
7217 /* This function adds a fake edge between any infinite loops to the
7218 exit block. Some optimizations require a path from each node to
7219 the exit.
7221 See also Morgan, Figure 3.10, pp. 82-83.
7223 The current implementation is ugly, not attempting to minimize the
7224 number of inserted fake edges. To reduce the number of fake edges
7225 to insert, add fake edges from _innermost_ loops containing only
7226 nodes not reachable from the exit block. */
7229 connect_infinite_loops_to_exit ()
7231 basic_block unvisited_block;
7233 /* Perform depth-first search in the reverse graph to find nodes
7234 reachable from the exit block. */
7235 struct depth_first_search_dsS dfs_ds;
7237 flow_dfs_compute_reverse_init (&dfs_ds);
7238 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7240 /* Repeatedly add fake edges, updating the unreachable nodes. Each
7241 new edge makes one more region exit-reachable, so this terminates. */
7243 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7244 if (!unvisited_block)
7246 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7247 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7250 flow_dfs_compute_reverse_finish (&dfs_ds);
7255 /* Redirect an edge's successor from one block to another.
7256 Unlinks E from the old destination's pred list, then pushes it
7257 onto NEW_SUCC's pred list. */
7258 redirect_edge_succ (e, new_succ)
7260 basic_block new_succ;
7264 /* Disconnect the edge from the old successor block. */
7265 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
7267 *pe = (*pe)->pred_next;
7269 /* Reconnect the edge to the new successor block. */
7270 e->pred_next = new_succ->pred;
7275 /* Redirect an edge's predecessor from one block to another.
7276 Mirror of redirect_edge_succ, operating on the succ chains. */
7278 redirect_edge_pred (e, new_pred)
7280 basic_block new_pred;
7284 /* Disconnect the edge from the old predecessor block. */
7285 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
7287 *pe = (*pe)->succ_next;
7289 /* Reconnect the edge to the new predecessor block. */
7290 e->succ_next = new_pred->succ;
7295 /* Dump the list of basic blocks in the bitmap NODES to stream FILE,
7296 prefixed with label STR, in the form "STR { n1 n2 ... }". */
7298 flow_nodes_print (str, nodes, file)
7300 const sbitmap nodes;
7308 fprintf (file, "%s { ", str);
7309 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7310 fputs ("}\n", file);
7314 /* Dump the list of edges in the array EDGE_LIST (NUM_EDGES entries)
7315 to stream FILE as "STR { src->dest ... }". */
7317 flow_edge_list_print (str, edge_list, num_edges, file)
7319 const edge *edge_list;
7328 fprintf (file, "%s { ", str);
7329 for (i = 0; i < num_edges; i++)
7330 fprintf (file, "%d->%d ", edge_list[i]->src->index,
7331 edge_list[i]->dest->index);
7332 fputs ("}\n", file);
7336 /* Dump loop related CFG information: per-block successor lists with
7337 dominator sets, then the DFS and reverse-completion orders if they
7338 were computed. No-op unless loops were found and dominators exist. */
7339 flow_loops_cfg_dump (loops, file)
7340 const struct loops *loops;
7345 if (! loops->num || ! file || ! loops->cfg.dom)
7348 for (i = 0; i < n_basic_blocks; i++)
7352 fprintf (file, ";; %d succs { ", i);
7353 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7354 fprintf (file, "%d ", succ->dest->index);
7355 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7358 /* Dump the DFS node order. */
7359 if (loops->cfg.dfs_order)
7361 fputs (";; DFS order: ", file);
7362 for (i = 0; i < n_basic_blocks; i++)
7363 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7366 /* Dump the reverse completion node order. */
7367 if (loops->cfg.rc_order)
7369 fputs (";; RC order: ", file);
7370 for (i = 0; i < n_basic_blocks; i++)
7371 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7376 /* Return non-zero if the nodes of LOOP are a subset of OUTER,
7377 i.e. LOOP is nested inside OUTER. */
7379 flow_loop_nested_p (outer, loop)
7383 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7387 /* Dump the loop information specified by LOOP to the stream FILE
7388 using auxiliary dump callback function LOOP_DUMP_AUX if non null.
7389 Does nothing for a null or headerless loop. */
7390 flow_loop_dump (loop, file, loop_dump_aux, verbose)
7391 const struct loop *loop;
7393 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7396 if (! loop || ! loop->header)
7399 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
7400 loop->num, INSN_UID (loop->first->head),
7401 INSN_UID (loop->last->end),
7402 loop->shared ? " shared" : "",
7403 loop->invalid ? " invalid" : "");
7404 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
7405 loop->header->index, loop->latch->index,
7406 loop->pre_header ? loop->pre_header->index : -1,
7407 loop->first->index, loop->last->index);
7408 fprintf (file, ";; depth %d, level %d, outer %ld\n",
7409 loop->depth, loop->level,
7410 (long) (loop->outer ? loop->outer->num : -1));
7411 /* Optional edge/node sets are only printed when present. */
7412 if (loop->pre_header_edges)
7413 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
7414 loop->num_pre_header_edges, file);
7415 flow_edge_list_print (";; entry edges", loop->entry_edges,
7416 loop->num_entries, file);
7417 fprintf (file, ";; %d", loop->num_nodes);
7418 flow_nodes_print (" nodes", loop->nodes, file);
7419 flow_edge_list_print (";; exit edges", loop->exit_edges,
7420 loop->num_exits, file);
7421 if (loop->exits_doms)
7422 flow_nodes_print (";; exit doms", loop->exits_doms, file);
7423 /* Give the client a chance to append its own per-loop details. */
7424 loop_dump_aux (loop, file, verbose);
7428 /* Dump the loop information specified by LOOPS to the stream FILE,
7429 using auxiliary dump callback function LOOP_DUMP_AUX if non null.
7430 Also reports pairs of loops that share a header, classifying them
7431 as disjoint or nested via flow_loop_nested_p. */
7431 flow_loops_dump (loops, file, loop_dump_aux, verbose)
7432 const struct loops *loops;
7434 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7440 num_loops = loops->num;
7441 if (! num_loops || ! file)
7444 fprintf (file, ";; %d loops found, %d levels\n",
7445 num_loops, loops->levels);
7447 for (i = 0; i < num_loops; i++)
7449 struct loop *loop = &loops->array[i];
7451 flow_loop_dump (loop, file, loop_dump_aux, verbose);
7456 /* Compare against every earlier loop for a shared header. */
7457 for (j = 0; j < i; j++)
7459 struct loop *oloop = &loops->array[j];
7461 if (loop->header == oloop->header)
7466 smaller = loop->num_nodes < oloop->num_nodes;
7468 /* If the union of LOOP and OLOOP is different than
7469 the larger of LOOP and OLOOP then LOOP and OLOOP
7470 must be disjoint. */
7471 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7472 smaller ? oloop : loop);
7474 ";; loop header %d shared by loops %d, %d %s\n",
7475 loop->header->index, i, j,
7476 disjoint ? "disjoint" : "nested");
7483 flow_loops_cfg_dump (loops, file);
7487 /* Free all the memory allocated for LOOPS: per-loop edge arrays and
7488 node bitmaps, the loop array itself, and the cached CFG data
7489 (dominators, DFS order, shared-header bitmap). */
7490 flow_loops_free (loops)
7491 struct loops *loops;
7500 /* Free the loop descriptors. */
7501 for (i = 0; i < loops->num; i++)
7503 struct loop *loop = &loops->array[i];
7505 if (loop->pre_header_edges)
7506 free (loop->pre_header_edges);
7508 sbitmap_free (loop->nodes);
7509 if (loop->entry_edges)
7510 free (loop->entry_edges);
7511 if (loop->exit_edges)
7512 free (loop->exit_edges);
7513 if (loop->exits_doms)
7514 sbitmap_free (loop->exits_doms);
7516 free (loops->array);
7517 loops->array = NULL;
7519 /* Release the auxiliary CFG information. */
7520 sbitmap_vector_free (loops->cfg.dom);
7521 if (loops->cfg.dfs_order)
7522 free (loops->cfg.dfs_order);
7524 if (loops->shared_headers)
7525 sbitmap_free (loops->shared_headers);
7530 /* Find the entry edges into the loop with header HEADER and nodes
7531 NODES and store in ENTRY_EDGES array. Return the number of entry
7532 edges from the loop.
7533 
7534 Two passes over HEADER's pred list: first count, then allocate
7535 and fill. *ENTRY_EDGES is NULL when there are no entries. */
7535 flow_loop_entry_edges_find (header, nodes, entry_edges)
7537 const sbitmap nodes;
7543 *entry_edges = NULL;
7546 for (e = header->pred; e; e = e->pred_next)
7548 basic_block src = e->src;
7550 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7556 /* NOTE(review): sizeof (edge *) — the elements are of type "edge"
7557 (itself a pointer), so this is the right size in practice but
7558 "sizeof (edge)" would be the accurate spelling; confirm upstream. */
7557 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
7560 for (e = header->pred; e; e = e->pred_next)
7562 basic_block src = e->src;
7564 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7565 (*entry_edges)[num_entries++] = e;
7572 /* Find the exit edges from the loop using the bitmap of loop nodes
7573 NODES and store in EXIT_EDGES array. Return the number of
7574 exit edges from the loop.
7575 
7576 Same count-then-fill pattern as flow_loop_entry_edges_find, but
7577 scanning the successors of every loop node. */
7577 flow_loop_exit_edges_find (nodes, exit_edges)
7578 const sbitmap nodes;
7587 /* Check all nodes within the loop to see if there are any
7588 successors not in the loop. Note that a node may have multiple
7589 exiting edges ????? A node can have one jumping edge and one fallthru
7590 edge so only one of these can exit the loop. */
7592 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7593 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7595 basic_block dest = e->dest;
7597 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7604 /* NOTE(review): sizeof (edge *) vs sizeof (edge) — same remark as in
7605 flow_loop_entry_edges_find; sizes coincide since edge is a pointer. */
7605 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
7607 /* Store all exiting edges into an array. */
7609 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7610 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7612 basic_block dest = e->dest;
7614 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7615 (*exit_edges)[num_exits++] = e;
7623 /* Find the nodes contained within the loop with header HEADER and
7624 latch LATCH and store in NODES. Return the number of nodes within
7625 the loop.
7626 
7627 Standard natural-loop membership: walk predecessors backwards from
7628 the latch with an explicit stack, stopping at the header. Each
7629 block's loop_depth is incremented as it joins the loop. */
7628 flow_loop_nodes_find (header, latch, nodes)
7637 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7640 /* Start with only the loop header in the set of loop nodes. */
7641 sbitmap_zero (nodes);
7642 SET_BIT (nodes, header->index);
7644 header->loop_depth++;
7646 /* Push the loop latch on to the stack. */
7647 if (! TEST_BIT (nodes, latch->index))
7649 SET_BIT (nodes, latch->index);
7650 latch->loop_depth++;
7652 stack[sp++] = latch;
7660 /* Propagate membership to each unvisited predecessor. */
7661 for (e = node->pred; e; e = e->pred_next)
7663 basic_block ancestor = e->src;
7665 /* If each ancestor not marked as part of loop, add to set of
7666 loop nodes and push on to stack. */
7667 if (ancestor != ENTRY_BLOCK_PTR
7668 && ! TEST_BIT (nodes, ancestor->index))
7670 SET_BIT (nodes, ancestor->index);
7671 ancestor->loop_depth++;
7673 stack[sp++] = ancestor;
7681 /* Compute the depth first search order and store in the array
7682 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
7683 RC_ORDER is non-zero, return the reverse completion number for each
7684 node. Returns the number of nodes visited. A depth first search
7685 tries to get as far away from the starting point as quickly as
7686 possible.
7687 
7688 Iterative DFS over successor edges with an explicit edge stack;
7689 RC_ORDER is filled back-to-front as nodes finish. */
7689 flow_depth_first_order_compute (dfs_order, rc_order)
7696 int rcnum = n_basic_blocks - 1;
7699 /* Allocate stack for back-tracking up CFG. */
7700 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7703 /* Allocate bitmap to track nodes that have been visited. */
7704 visited = sbitmap_alloc (n_basic_blocks);
7706 /* None of the nodes in the CFG have been visited yet. */
7707 sbitmap_zero (visited);
7709 /* Push the first edge on to the stack. */
7710 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7718 /* Look at the edge on the top of the stack. */
7723 /* Check if the edge destination has been visited yet. */
7724 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7726 /* Mark that we have visited the destination. */
7727 SET_BIT (visited, dest->index);
7730 dfs_order[dfsnum++] = dest->index;
7734 /* Since the DEST node has been visited for the first
7735 time, check its successors. */
7736 stack[sp++] = dest->succ;
7740 /* There are no successors for the DEST node so assign
7741 its reverse completion number. */
7743 rc_order[rcnum--] = dest->index;
7748 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7750 /* There are no more successors for the SRC node
7751 so assign its reverse completion number. */
7753 rc_order[rcnum--] = src->index;
7757 stack[sp - 1] = e->succ_next;
7764 sbitmap_free (visited);
7766 /* The number of nodes visited should not be greater than
7767 n_basic_blocks. */
7768 if (dfsnum > n_basic_blocks)
7771 /* There are some nodes left in the CFG that are unreachable. */
7772 if (dfsnum < n_basic_blocks)
7777 /* Compute the depth first search order on the _reverse_ graph and
7778 store in the array DFS_ORDER, marking the nodes visited in VISITED.
7779 Returns the number of nodes visited.
7781 The computation is split into three pieces:
7783 flow_dfs_compute_reverse_init () creates the necessary data
7786 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7787 structures. The block will start the search.
7789 flow_dfs_compute_reverse_execute () continues (or starts) the
7790 search using the block on the top of the stack, stopping when the
7793 flow_dfs_compute_reverse_finish () destroys the necessary data
7796 Thus, the user will probably call ..._init(), call ..._add_bb() to
7797 add a beginning basic block to the stack, call ..._execute(),
7798 possibly add another bb to the stack and again call ..._execute(),
7799 ..., and finally call _finish(). */
7801 /* Initialize the data structures used for depth-first search on the
7802 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
7803 added to the basic block stack. DATA is the current depth-first
7804 search context. If INITIALIZE_STACK is non-zero, there is an
7805 element on the stack.
7806 
7807 The "- (INVALID_BLOCK + 1)" offset makes room for the entry/exit
7808 pseudo-block indices in the visited bitmap. */
7808 flow_dfs_compute_reverse_init (data)
7809 depth_first_search_ds data;
7811 /* Allocate stack for back-tracking up CFG. */
7813 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
7814 * sizeof (basic_block));
7817 /* Allocate bitmap to track nodes that have been visited. */
7818 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
7820 /* None of the nodes in the CFG have been visited yet. */
7821 sbitmap_zero (data->visited_blocks);
7826 /* Add the specified basic block to the top of the dfs data
7827 structures. When the search continues, it will start at the
7831 flow_dfs_compute_reverse_add_bb (data, bb)
7832 depth_first_search_ds data;
/* Push BB onto the search stack.  BB is not marked visited here;
   marking happens when flow_dfs_compute_reverse_execute pops it.  */
7835 data->stack[data->sp++] = bb;
7839 /* Continue the depth-first search through the reverse graph starting
7840 with the block at the stack's top and ending when the stack is
7841 empty. Visited nodes are marked. Returns an unvisited basic
7842 block, or NULL if there is none available. */
7845 flow_dfs_compute_reverse_execute (data)
7846 depth_first_search_ds data;
/* Pop blocks until the stack is empty; each newly-seen block is
   marked visited and all its predecessors are pushed (predecessor
   edges are the successors of the reverse graph).  */
7852 while (data->sp > 0)
7854 bb = data->stack[--data->sp];
7856 /* Mark that we have visited this node. */
/* bb->index is offset by -(INVALID_BLOCK + 1), matching the bitmap
   sizing in flow_dfs_compute_reverse_init.  */
7857 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
7859 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
7861 /* Perform depth-first search on adjacent vertices. */
7862 for (e = bb->pred; e; e = e->pred_next)
7863 flow_dfs_compute_reverse_add_bb (data, e->src);
7867 /* Determine if there are unvisited basic blocks. */
/* Scan the bitmap from the highest index down; the first clear bit
   names a block the reverse search never reached.  */
7868 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
7869 if (!TEST_BIT (data->visited_blocks, i))
7870 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
7874 /* Destroy the data structures needed for depth-first search on the
/* Releases the visited bitmap allocated by
   flow_dfs_compute_reverse_init (the stack is freed in lines not
   shown in this listing).  */
7878 flow_dfs_compute_reverse_finish (data)
7879 depth_first_search_ds data;
7882 sbitmap_free (data->visited_blocks);
7887 /* Find the root node of the loop pre-header extended basic block and
7888 the edges along the trace from the root node to the loop header. */
7891 flow_loop_pre_header_scan (loop)
7897 loop->num_pre_header_edges = 0;
/* Only loops with exactly one entry edge have a pre-header trace
   worth recording.  */
7899 if (loop->num_entries != 1)
7902 ebb = loop->entry_edges[0]->src;
7904 if (ebb != ENTRY_BLOCK_PTR)
7908 /* Count number of edges along trace from loop header to
7909 root of pre-header extended basic block. Usually this is
7910 only one or two edges. */
/* Walk up single-predecessor blocks, stopping at the function entry
   or at the first block with more than one predecessor.  */
7912 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
7914 ebb = ebb->pred->src;
7918 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
7919 loop->num_pre_header_edges = num;
7921 /* Store edges in order that they are followed. The source
7922 of the first edge is the root node of the pre-header extended
7923 basic block and the destination of the last edge is
7925 for (e = loop->entry_edges[0]; num; e = e->src->pred)
7927 loop->pre_header_edges[--num] = e;
7933 /* Return the block for the pre-header of the loop with header
7934 HEADER where DOM specifies the dominator information. Return NULL if
7935 there is no pre-header. */
7938 flow_loop_pre_header_find (header, dom)
7942 basic_block pre_header;
7945 /* If block p is a predecessor of the header and is the only block
7946 that the header does not dominate, then it is the pre-header. */
7948 for (e = header->pred; e; e = e->pred_next)
7950 basic_block node = e->src;
/* A predecessor not dominated by the header enters the loop from
   outside; a unique such block is the candidate pre-header.  */
7952 if (node != ENTRY_BLOCK_PTR
7953 && ! TEST_BIT (dom[node->index], header->index))
7955 if (pre_header == NULL)
7959 /* There are multiple edges into the header from outside
7960 the loop so there is no pre-header block. */
7969 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7970 previously added. The insertion algorithm assumes that the loops
7971 are added in the order found by a depth first search of the CFG. */
7974 flow_loop_tree_node_add (prevloop, loop)
7975 struct loop *prevloop;
/* Case 1: LOOP nests inside PREVLOOP — make it PREVLOOP's first
   inner child.  */
7979 if (flow_loop_nested_p (prevloop, loop))
7981 prevloop->inner = loop;
7982 loop->outer = prevloop;
/* Case 2: climb PREVLOOP's enclosing loops looking for one that
   contains LOOP; insert LOOP as that ancestor's next sibling.  */
7986 while (prevloop->outer)
7988 if (flow_loop_nested_p (prevloop->outer, loop))
7990 prevloop->next = loop;
7991 loop->outer = prevloop->outer;
7994 prevloop = prevloop->outer;
/* Case 3: no enclosing loop contains LOOP — it becomes a sibling
   at the outermost level.  */
7997 prevloop->next = loop;
8001 /* Build the loop hierarchy tree for LOOPS. */
8004 flow_loops_tree_build (loops)
8005 struct loops *loops;
8010 num_loops = loops->num;
8014 /* Root the loop hierarchy tree with the first loop found.
8015 Since we used a depth first search this should be the
8017 loops->tree = &loops->array[0];
8018 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
8020 /* Add the remaining loops to the tree. */
/* Insertion order matters: flow_loop_tree_node_add assumes loops
   arrive in depth-first discovery order, anchored at the loop
   added just before.  */
8021 for (i = 1; i < num_loops; i++)
8022 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
8025 /* Helper function to compute loop nesting depth and enclosed loop level
8026 for the natural loop specified by LOOP at the loop depth DEPTH.
8027 Returns the loop level. */
8030 flow_loop_level_compute (loop, depth)
8040 /* Traverse loop tree assigning depth and computing level as the
8041 maximum level of all the inner loops of this loop. The loop
8042 level is equivalent to the height of the loop in the loop tree
8043 and corresponds to the number of enclosed loop levels (including
8045 for (inner = loop->inner; inner; inner = inner->next)
/* Recurse on each inner loop; its level plus one is a candidate
   for this loop's level (the max-taking lines are not shown in
   this listing).  */
8049 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
8054 loop->level = level;
8055 loop->depth = depth;
8059 /* Compute the loop nesting depth and enclosed loop level for the loop
8060 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
8064 flow_loops_level_compute (loops)
8065 struct loops *loops;
8071 /* Traverse all the outer level loops. */
/* Each tree root starts at depth 1; the returned maximum over all
   roots is accumulated in lines not shown in this listing.  */
8072 for (loop = loops->tree; loop; loop = loop->next)
8074 level = flow_loop_level_compute (loop, 1);
8082 /* Find all the natural loops in the function and save in LOOPS structure
8083 and recalculate loop_depth information in basic block structures.
8084 FLAGS controls which loop information is collected.
8085 Return the number of natural loops found. */
8088 flow_loops_find (loops, flags)
8089 struct loops *loops;
8101 /* This function cannot be repeatedly called with different
8102 flags to build up the loop information. The loop tree
8103 must always be built if this function is called. */
8104 if (! (flags & LOOP_TREE))
8107 memset (loops, 0, sizeof (*loops));
8109 /* Taking care of this degenerate case makes the rest of
8110 this code simpler. */
8111 if (n_basic_blocks == 0)
8117 /* Compute the dominators. */
8118 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
8119 compute_flow_dominators (dom, NULL);
8121 /* Count the number of loop edges (back edges). This should be the
8122 same as the number of natural loops. */
/* First pass: only count back edges (and reset loop_depth) so the
   loop array can be allocated with the right size below.  */
8125 for (b = 0; b < n_basic_blocks; b++)
8129 header = BASIC_BLOCK (b);
8130 header->loop_depth = 0;
8132 for (e = header->pred; e; e = e->pred_next)
8134 basic_block latch = e->src;
8136 /* Look for back edges where a predecessor is dominated
8137 by this block. A natural loop has a single entry
8138 node (header) that dominates all the nodes in the
8139 loop. It also has a single back edge to the header
8140 from a latch node. Note that multiple natural loops
8141 may share the same header. */
8142 if (b != header->index)
8145 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
8152 /* Compute depth first search order of the CFG so that outer
8153 natural loops will be found before inner natural loops. */
8154 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8155 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8156 flow_depth_first_order_compute (dfs_order, rc_order);
8158 /* Allocate loop structures. */
8160 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
8162 headers = sbitmap_alloc (n_basic_blocks);
8163 sbitmap_zero (headers);
8165 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
8166 sbitmap_zero (loops->shared_headers);
8168 /* Find and record information about all the natural loops
/* Second pass: re-discover the back edges, this time in reverse
   completion order, and fill in one struct loop per back edge.  */
8171 for (b = 0; b < n_basic_blocks; b++)
8175 /* Search the nodes of the CFG in reverse completion order
8176 so that we can find outer loops first. */
8177 header = BASIC_BLOCK (rc_order[b]);
8179 /* Look for all the possible latch blocks for this header. */
8180 for (e = header->pred; e; e = e->pred_next)
8182 basic_block latch = e->src;
8184 /* Look for back edges where a predecessor is dominated
8185 by this block. A natural loop has a single entry
8186 node (header) that dominates all the nodes in the
8187 loop. It also has a single back edge to the header
8188 from a latch node. Note that multiple natural loops
8189 may share the same header. */
8190 if (latch != ENTRY_BLOCK_PTR
8191 && TEST_BIT (dom[latch->index], header->index))
8195 loop = loops->array + num_loops;
8197 loop->header = header;
8198 loop->latch = latch;
8199 loop->num = num_loops;
/* Third pass: per-loop analysis, driven by FLAGS.  */
8206 for (i = 0; i < num_loops; i++)
8208 struct loop *loop = &loops->array[i];
8211 /* Keep track of blocks that are loop headers so
8212 that we can tell which loops should be merged. */
8213 if (TEST_BIT (headers, loop->header->index))
8214 SET_BIT (loops->shared_headers, loop->header->index);
8215 SET_BIT (headers, loop->header->index);
8217 /* Find nodes contained within the loop. */
8218 loop->nodes = sbitmap_alloc (n_basic_blocks);
8220 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
8222 /* Compute first and last blocks within the loop.
8223 These are often the same as the loop header and
8224 loop latch respectively, but this is not always
8227 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
8229 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
8231 if (flags & LOOP_EDGES)
8233 /* Find edges which enter the loop header.
8234 Note that the entry edges should only
8235 enter the header of a natural loop. */
8237 = flow_loop_entry_edges_find (loop->header,
8239 &loop->entry_edges);
8241 /* Find edges which exit the loop. */
8243 = flow_loop_exit_edges_find (loop->nodes,
8246 /* Determine which loop nodes dominate all the exits
/* Start from all loop nodes and intersect with the dominators of
   each exit edge's source; what survives dominates every exit.  */
8248 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
8249 sbitmap_copy (loop->exits_doms, loop->nodes);
8250 for (j = 0; j < loop->num_exits; j++)
8251 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
8252 dom[loop->exit_edges[j]->src->index]);
8254 /* The header of a natural loop must dominate
8256 if (! TEST_BIT (loop->exits_doms, loop->header->index))
8260 if (flags & LOOP_PRE_HEADER)
8262 /* Look to see if the loop has a pre-header node. */
8264 = flow_loop_pre_header_find (loop->header, dom);
8266 flow_loop_pre_header_scan (loop);
8270 /* Natural loops with shared headers may either be disjoint or
8271 nested. Disjoint loops with shared headers cannot be inner
8272 loops and should be merged. For now just mark loops that share
8274 for (i = 0; i < num_loops; i++)
8275 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
8276 loops->array[i].shared = 1;
8278 sbitmap_free (headers);
8281 loops->num = num_loops;
8283 /* Save CFG derived information to avoid recomputing it. */
/* Ownership note: dom, dfs_order and rc_order are handed to LOOPS
   and must be freed by its owner (see flow_loops_free usage in
   flow_loops_update).  */
8284 loops->cfg.dom = dom;
8285 loops->cfg.dfs_order = dfs_order;
8286 loops->cfg.rc_order = rc_order;
8288 /* Build the loop hierarchy tree. */
8289 flow_loops_tree_build (loops);
8291 /* Assign the loop nesting depth and enclosed loop level for each
8293 loops->levels = flow_loops_level_compute (loops);
8299 /* Update the information regarding the loops in the CFG
8300 specified by LOOPS. */
8302 flow_loops_update (loops, flags)
8303 struct loops *loops;
8306 /* One day we may want to update the current loop data. For now
8307 throw away the old stuff and rebuild what we need. */
/* Free-then-rebuild: no incremental update is attempted.  */
8309 flow_loops_free (loops);
8311 return flow_loops_find (loops, flags);
8315 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
8318 flow_loop_outside_edge_p (loop, e)
8319 const struct loop *loop;
8322 if (e->dest != loop->header)
/* E comes from outside when its source is the function entry block
   or any block not recorded in the loop's node bitmap.  */
8324 return (e->src == ENTRY_BLOCK_PTR)
8325 || ! TEST_BIT (loop->nodes, e->src->index);
8328 /* Clear LOG_LINKS fields of insns in a chain.
8329 Also clear the global_live_at_{start,end} fields of the basic block
8333 clear_log_links (insns)
8339 for (i = insns; i; i = NEXT_INSN (i))
/* Null out the liveness regsets on every block, including the
   entry/exit pseudo-blocks below.  NOTE(review): the pointers are
   cleared without an explicit free here — presumably the regsets
   live on an obstack released elsewhere; confirm.  */
8343 for (b = 0; b < n_basic_blocks; b++)
8345 basic_block bb = BASIC_BLOCK (b);
8347 bb->global_live_at_start = NULL;
8348 bb->global_live_at_end = NULL;
8351 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
8352 EXIT_BLOCK_PTR->global_live_at_start = NULL;
8355 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
8356 correspond to the hard registers, if any, set in that map. This
8357 could be done far more efficiently by having all sorts of special-cases
8358 with moving single words, but probably isn't worth the trouble. */
8361 reg_set_to_hard_reg_set (to, from)
8367 EXECUTE_IF_SET_IN_BITMAP
/* Regnos below FIRST_PSEUDO_REGISTER are hard registers; only those
   are copied into *TO.  */
8370 if (i >= FIRST_PSEUDO_REGISTER)
8372 SET_HARD_REG_BIT (*to, i);
8376 /* Called once at initialization time. */
8381 static int initialized;
8385 gcc_obstack_init (&flow_obstack);
8386 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
8391 obstack_free (&flow_obstack, flow_firstobj);
8392 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);